Dataset schema (each record below is one source file):

| column | dtype | range / classes |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 2–616 |
| content_id | string | length 40 |
| detected_licenses | list | length 0–69 |
| license_type | string | 2 classes |
| repo_name | string | length 5–118 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | length 4–63 |
| visit_date | timestamp[us] | |
| revision_date | timestamp[us] | |
| committer_date | timestamp[us] | |
| github_id | int64 | 2.91k–686M, nullable |
| star_events_count | int64 | 0–209k |
| fork_events_count | int64 | 0–110k |
| gha_license_id | string | 23 classes |
| gha_event_created_at | timestamp[us] | |
| gha_created_at | timestamp[us] | |
| gha_language | string | 213 classes |
| src_encoding | string | 30 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 2–10.3M |
| extension | string | 246 classes |
| content | string | length 2–10.3M |
| authors | list | length 1 |
| author_id | string | length 0–212 |
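The schema above is enough to filter the dump without materialising it. As a hedged sketch (the dataset identifier below is a placeholder, not taken from this document), the Hugging Face `datasets` library can stream records and keep only permissively licensed Python files:

```python
# Hedged sketch: stream records with this schema and keep permissive Python files.
# "your/dataset-name" is a placeholder -- substitute the real dataset identifier.
from datasets import load_dataset

rows = load_dataset("your/dataset-name", split="train", streaming=True)
for row in rows:
    if row["license_type"] == "permissive" and row["extension"] == "py":
        print(row["repo_name"], row["path"], row["length_bytes"])
        break  # stop after the first matching file
```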
eacbce1209788203c69e54775e0146e947dbb0b2
|
88b73a0ed9367f3a56db47e6d16089ae57c21744
|
/myblogsite/blog/feeds.py
|
7047ed4cf428d04b05beef9b7e4d8315f73d7046
|
[] |
no_license
|
Sholamide/django-blog-app
|
4635093f65eaae11634466452981d0218de0a046
|
7ce9b6faf6178da5f304aeee34bfb2b54aa4b998
|
refs/heads/master
| 2023-03-01T11:22:26.296854
| 2021-02-15T15:41:14
| 2021-02-15T15:41:14
| 334,110,921
| 0
| 0
| null | 2021-02-10T16:06:19
| 2021-01-29T10:27:12
|
Python
|
UTF-8
|
Python
| false
| false
| 522
|
py
|
from django.contrib.syndication.views import Feed
from django.template.defaultfilters import truncatewords
from django.urls import reverse_lazy

from .models import Post


class LatestPostsFeed(Feed):
    title = 'My blog'
    link = reverse_lazy('blog:post_list')
    description = 'New posts of my blog.'

    def items(self):
        return Post.published.all()[:5]

    def item_title(self, item):
        return item.title

    def item_description(self, item):
        return truncatewords(item.body, 30)
|
[
"soluade101@gmail.com"
] |
soluade101@gmail.com
|
fbca74d02065c0ad6ae13ca3c9a6556c2d1050f2
|
f86f4f39e1e63bf2103faaad3b738074aa5ad214
|
/prml/nn/__init__.py
|
cf746ac7bb0d1e83a3c9e6bf760e6a86f5386383
|
[] |
no_license
|
zgcgreat/PRML-1
|
a918ec670fd756086cde860e408f81f980bb9a8d
|
77056922f23176065b056d5ca136a43971831969
|
refs/heads/master
| 2021-07-03T09:58:06.843226
| 2017-09-24T04:26:27
| 2017-09-24T04:26:27
| 105,968,426
| 0
| 1
| null | 2017-10-06T04:50:26
| 2017-10-06T04:50:26
| null |
UTF-8
|
Python
| false
| false
| 563
|
py
|
from prml.nn.function import (
convolve2d,
dropout,
log_softmax,
max_pooling2d,
relu,
sigmoid_cross_entropy,
sigmoid,
softmax_cross_entropy,
softmax,
softplus,
tanh,
weight_decay
)
from prml.nn import optimizer
from prml.nn.network import Network
__all__ = [
"convolve2d",
"dropout",
"log_softmax",
"max_pooling2d",
"relu",
"sigmoid_cross_entropy",
"sigmoid",
"softmax_cross_entropy",
"softmax",
"softplus",
"tanh",
"weight_decay",
"optimizer",
"Network"
]
|
[
"r0735nj5058@icloud.com"
] |
r0735nj5058@icloud.com
|
4687eb2c354a559fd6bbc8b620b8fc1f267e95c6
|
74c0aa4829fc57538ff3477eee8ecf2cf77ef9e1
|
/serverSetup.py
|
3225e65ba9974eb2da44496b1cce9e2c61c59048
|
[] |
no_license
|
diobat/KillYourFriends
|
42e2f9783a6b158b7872d6aa343b9b8bb309f36c
|
3b816ac642d1d979648902b23de72acbf2615b9f
|
refs/heads/master
| 2020-03-27T22:41:20.013675
| 2018-12-02T11:45:03
| 2018-12-02T11:45:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 952
|
py
|
import socketserver as ss


class MyTCPHandler(ss.BaseRequestHandler):
    """
    The RequestHandler class for our server.

    It is instantiated once per connection to the server, and must
    override the handle() method to implement communication to the
    client.
    """

    def handle(self):
        # self.request is the TCP socket connected to the client
        self.data = self.request.recv(1024).strip()
        print("%s wrote:" % self.client_address[0])
        print(self.data)
        # send the same data straight back (the stdlib example upper-cases it first)
        self.request.send(self.data)
        # self.shutdown()
        self.request.close()


if __name__ == "__main__":
    HOST, PORT = "192.168.0.106", 9999

    # Create the server, binding to HOST on port PORT
    server = ss.TCPServer((HOST, PORT), MyTCPHandler)

    # Activate the server; this will keep running until you
    # interrupt the program with Ctrl-C
    server.serve_forever()
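For a quick manual check of the handler above, a minimal client sketch (host and port copied from the script; adjust them to wherever the server actually binds) could look like this:

```python
# Minimal client sketch for exercising the echo server above.
import socket

HOST, PORT = "192.168.0.106", 9999  # must match the values used by serverSetup.py

with socket.create_connection((HOST, PORT)) as sock:
    sock.sendall(b"hello")
    print(sock.recv(1024))  # the handler sends the same bytes straight back
```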
|
[
"diogobatista@ua.pt"
] |
diogobatista@ua.pt
|
22b6f77efdeb0185dd0e87192dbd47ca2af677f2
|
2d4b531b9b04ceae6c4e0312463836ae4a7654cd
|
/scripts/parse_clips.py
|
53e87037c61c5bf93b054b174b47849445a9e548
|
[] |
no_license
|
intheory/web-app
|
e843ea066f7acc1be3377849e16cfe54df9b3acb
|
7680e3db1c1a1cbbbb7e1a9b8290ce4bf9cc49bb
|
refs/heads/master
| 2021-01-24T06:36:08.703649
| 2012-11-09T19:26:32
| 2012-11-09T19:26:32
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,214
|
py
|
'''
Created on 19 Sep 2012
@author: george
This module parses a txt file containing info about the hazard perception clips.
'''
import os

from mongoengine import connect

env = "ITENV" in os.environ and os.environ["ITENV"] or "dev"
if env == "prod":
    parentdir = os.path.dirname(os.path.dirname(os.path.abspath("/www/virtualenv/intheory/src/app/model/content.py")))
    connect("intheory_dev")
else:
    parentdir = os.path.dirname(os.path.dirname(os.path.abspath("/home/george/intheoryenv/intheory/src/app/model/content.py")))
    connect("intheory_dev")
os.sys.path.insert(0, parentdir)

from model.content import HazardPerceptionClip, HazardPoint

HazardPerceptionClip.drop_collection()
try:
    file_path = os.path.expanduser("~/" + os.path.join("intheorydata", "content"))
    f = open(file_path + "/clips.txt", "r")
    while 1:
        base_dir = f.readline().strip()
        if len(base_dir) == 0:
            break  # EOF
        clip = HazardPerceptionClip()
        clip.base_dir = base_dir
        clip.clip_name = f.readline().strip()
        clip.solution_clip_name = f.readline().strip()
        hp = HazardPoint()
        hp.start = int(f.readline().strip())
        hp.end = int(f.readline().strip())
        clip.hazards.append(hp)
        clip.save()
    f.close()
except Exception as e:
    print(e)
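The read loop implies a fixed five-line record layout for clips.txt (base directory, clip file, solution clip file, hazard start, hazard end). A hypothetical file, with paths and timings invented purely for illustration, would look like:

```text
clips/clip_01
clip01.mp4
clip01_solution.mp4
4500
8200
clips/clip_02
clip02.mp4
clip02_solution.mp4
3100
6900
```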
|
[
"basketballcy@gmail.com"
] |
basketballcy@gmail.com
|
55a4acdebc8d83005e9cc93dfef6fdaace57ff60
|
f6990693f6f8ac5c59cf7f2589f91ca53412ce74
|
/jenkins/urls.py
|
280f0ad9c3d934b892745aac3f98da2746890a41
|
[] |
no_license
|
daba0007/python-jenkins
|
3677ba2f176fe6d51fd408898c6342f2127ae689
|
fc5e93af96d77cde05b1559a45a75dee70ef8da8
|
refs/heads/master
| 2020-09-12T13:57:52.558410
| 2019-11-19T06:19:58
| 2019-11-19T06:19:58
| 222,446,458
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 597
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^getversion', views.getVersion, name='getversion'),
url(r'^getjoblist', views.getJobList, name='getjobList'),
url(r'^getconfig', views.getConfig, name='getconfig'),
url(r'^getjobstatus', views.getJobStatus, name='getjobStatus'),
url(r'^getbuildconsole', views.getBuildConsole, name='getbuildconsole'),
url(r'^getdownstream', views.getDownstream, name='getdownstream'),
url(r'^getupstream', views.getUpstream, name='getupstream')
]
|
[
"yld060631@163.com"
] |
yld060631@163.com
|
adb9a20f66c9656c23a420ee47e037f7a80d95f9
|
1fad3905860f078d076cac8ebd09fe1f619b77bd
|
/week01/assignment2/assignment2/spiders/maoyan.py
|
7fa4182967aa19db9011605207d862cb2aa0fa73
|
[] |
no_license
|
Farinosa/Python001-class01
|
7f832dfef1df7cf5a11b0909c7fb787789b94a99
|
0496e470246046cfa1d70aaeafbf3074a430d143
|
refs/heads/master
| 2022-12-07T06:04:16.303644
| 2020-09-05T17:25:36
| 2020-09-05T17:25:36
| 273,857,131
| 0
| 0
| null | 2020-06-21T07:36:06
| 2020-06-21T07:36:05
| null |
UTF-8
|
Python
| false
| false
| 1,264
|
py
|
# -*- coding: utf-8 -*-
import scrapy
from scrapy.selector import Selector

from assignment2.items import Assignment2Item


class MaoyanSpider(scrapy.Spider):
    name = 'maoyan'
    allowed_domains = ['maoyan.com']
    start_urls = ['https://maoyan.com/films?showType=3']

    def parse(self, response):
        pipline_items = []
        # select each movie
        movies = Selector(response=response).xpath('//div[@class="movie-item film-channel"]')
        for movie in movies[:10]:
            movie_infos = movie.xpath('.//div[contains(@class,"movie-hover-title")]')
            movie_title_selector = movie_infos[0].xpath('./@title')
            movie_title = movie_title_selector.extract_first()
            movie_type_selector = movie_infos[1].xpath('./text()')
            movie_type = movie_type_selector.extract()[1].strip()
            release_date_selector = movie_infos[3].xpath('./text()')
            release_date = release_date_selector.extract()[1].strip()
            # init new item for each movie
            item = Assignment2Item()
            item['movie_title'] = movie_title
            item['movie_type'] = movie_type
            item['release_date'] = release_date
            pipline_items.append(item)
        return pipline_items
|
[
"you@example.com575813104@qq.com"
] |
you@example.com575813104@qq.com
|
f6b460e2c17819c23b9ebca29e14420995355f2c
|
7a187cf8e86a0b72b864a189747891cf1cd5c0d5
|
/Final Project/report_email.py
|
eb5420ac58ad82a2afeb06ba3604745f0f9c4b97
|
[] |
no_license
|
Kirkkm/Google-Projects
|
743ca31bdc79b465e282be28147ae194245fda1e
|
268a68a80a23809d696fe966ea40274fec710c05
|
refs/heads/master
| 2022-11-15T08:18:36.482279
| 2020-06-30T19:21:29
| 2020-06-30T19:21:29
| 265,541,067
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,522
|
py
|
#!/usr/bin/env python3
import os
from datetime import date
import requests
from . import run, reports, emails
'''
Create another script named report_email.py to process supplier fruit description data from the supplier-data/descriptions directory.
Use the following command to create report_email.py.
Import all the necessary libraries (os, datetime and reports) that will be used to process the text data from the
supplier-data/descriptions directory into the format below:
name: Apple
weight: 500 lbs
[blank line]
name: Avocado
weight: 200 lbs
[blank line]
...
Once you have completed this, call the main method which will process the data and call the generate_report method from the reports module
'''
def main():
    # location where the generated PDF is going to live
    report_attachment = "/tmp/processed.pdf"

    # setting up the report's title
    today = date.today()
    report_title = "Processed Update on " + str(today) + "\n\n"

    # this list will store all of the pdf parts in a specified order to be built later
    report_body = []

    # calls a method in the run.py script to read the fruit data from the .txt files and turn it into JSON format
    # if this method does not work then need to look at creating a REST GET request
    fruit_data = run.read_data()

    # iterates through the two-dimensional dictionary to add each fruit's name and weight
    for key, value in fruit_data.items():
        report_body_name = value['name'] + "\n"
        report_body_weight = value['weight'] + "\n\n"
        # list.append only takes one argument, so add the two parts separately
        report_body.append(report_body_name)
        report_body.append(report_body_weight)

    # method's parameters: generate_report(attachment, title, paragraph)
    # method to generate the pdf doc
    reports.generate_report(report_attachment, report_title, report_body)

    # method's parameters: generate_email(email_from, email_to, subject_line, body, attachment)
    # method to generate the email that will be sent out
    email_from = "automation@example.com"
    email_to = "username@example.com"
    email_subject = "Upload Completed - Online Fruit Store"
    email_body = "All fruits are uploaded to our website successfully. A detailed list is attached to this email"
    email = emails.generate_email(email_from, email_to, email_subject, email_body, report_attachment)

    # method to send the generated email
    emails.send_email(email)


if __name__ == "__main__":
    main()
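main() leans on run.read_data() (defined in a project-local module not shown here), so the exact return shape is an assumption; the loop only needs a dict of dicts whose values carry pre-formatted 'name' and 'weight' strings. A hedged illustration, with keys and values invented for the example:

```python
# Hedged illustration of the structure main() expects from run.read_data().
fruit_data = {
    "apple": {"name": "name: Apple", "weight": "weight: 500 lbs"},
    "avocado": {"name": "name: Avocado", "weight": "weight: 200 lbs"},
}

report_body = []
for value in fruit_data.values():
    report_body.append(value["name"] + "\n")
    report_body.append(value["weight"] + "\n\n")

print("".join(report_body))  # name/weight blocks separated by blank lines, as in the docstring
```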
|
[
"matthewkk7@gmail.com"
] |
matthewkk7@gmail.com
|
c1a520252df4ebe42fe9f87657a7ec6d33bf15ad
|
15f66dc29c176891fe213f54469f8aa8e9b11172
|
/Emulation/autonetkit/plugins/graph_product.py
|
a3b571edc02c7c6a37d9a2d28cf3fce85fc36fce
|
[] |
no_license
|
wilko77/STRIP
|
24a707909d66d19f84ec8760798da45d50070e4d
|
6dc3867da249b4d7ea89286740f3eab5a3b8ee17
|
refs/heads/master
| 2020-05-20T04:00:57.051119
| 2013-10-01T06:24:57
| 2013-10-01T06:24:57
| 11,779,552
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,566
|
py
|
import networkx as nx
import autonetkit.ank_utils as ank_utils
import autonetkit.ank as ank
import os
import pprint
import autonetkit.log as log
import autonetkit.load.graphml
def expand(G_in):
""" Expands out graph products. G is the source "backbone" graph. H_x is the "PoP template" graphs
"""
graph_unwrapped = ank_utils.unwrap_graph(G_in)
G = graph_unwrapped.copy()
ank.set_node_default(G_in, G_in)
template_names = set(node.pop_template for node in G_in)
template_names.discard("None")
template_names.discard(None)
if not len(template_names):
log.debug("No PoP templates set")
return # no templates set
# Load these templates
templates = {}
for template in template_names:
template_filename = os.path.join("pop_templates", "%s.graphml" % template)
try:
pop_graph = autonetkit.load.graphml.load_graphml(template_filename) #TODO: pass in properties eg edge type = physical
except Exception as e:
log.warning("Unable to load pop template %s: %s" % (template, e))
return
pop_graph = pop_graph.to_undirected() # Undirected for now TODO: document this
templates[template] = pop_graph
# construct new graph
G_out = nx.Graph() #TODO: what about bidirectional graphs?
G_out.add_nodes_from(expand_nodes(G, templates))
G_out.add_edges_from(intra_pop_links(G, templates))
G_out.add_edges_from(inter_pop_links(G, templates))
for s, t in G_out.edges():
G_out[s][t]['type'] = 'physical' # ensure copied across
# Update properties based on co-ordinates
for node in G_out:
u, v = node
template = G.node[u]['pop_template']
u_properties = dict(G.node[u])
v_properties = dict(templates[template].node[v]) # create copy to append with
x = float(u_properties.get('x')) + float(v_properties.get('x'))
y = float(u_properties.get('y')) + float(v_properties.get('y'))
asn = u_properties['asn']
u_properties.update(v_properties)
u_properties['x'] = x
u_properties['y'] = y
u_properties['label'] = "%s_%s" % (v, u)
u_properties['id'] = "%s_%s" % (v, u)
u_properties['pop'] = u
u_properties['asn'] = asn # restore, don't inherit from pop
del u_properties['pop_template']
G_out.node[node] = u_properties
nx.relabel_nodes(G_out, dict( ((u, v), "%s_%s" % (v, u)) for (u, v) in G_out), copy = False)
#TODO: set edge_ids
for s, t in G_out.edges():
G_out[s][t]['edge_id'] = "%s_%s" % (s, t)
G_in._replace_graph(G_out)
return
#TODO: use "interpop" instead of "rooted"
def expand_nodes(G, templates):
# TODO: work out how to retain node attributes
return [ (u,v) for u in G for v in templates[G.node[u]['pop_template']] ]
def intra_pop_links(G, templates):
return [ ((u,v1), (u,v2)) for u in G for (v1, v2) in templates[G.node[u]['pop_template']].edges() ]
def inter_pop_links(G, templates, default_operator='cartesian'):
#TODO:: list any edges without operator marked on them
# for brevity, Hx refers to templatex
edges = []
cartesian_operators = set(["cartesian", "strong"])
tensor_operators = set(["tensor", "strong"])
for (u1, u2) in G.edges():
try:
operator = G[u1][u2]['operator']
except KeyError:
operator = default_operator
if operator == "None": # from Graphml
operator = default_operator
H1 = templates[G.node[u1]['pop_template']]
H2 = templates[G.node[u2]['pop_template']]
# Node lists - if 'root' set then only use root nodes
N1 = [n for n, d in H1.nodes(data=True) if 'interpop' in d and d['interpop']]
if not len(N1):
N1 = [n for n in H1] # no nodes marked interpop
N2 = [n for n, d in H2.nodes(data=True) if 'interpop' in d and d['interpop']]
if not len(N2):
N2 = [n for n in H2] # no nodes marked interpop
log.debug("Adding edges for (%s,%s) with operator %s" % (u1, u2, operator))
log.debug("H nodes for u1 %s: %s" % ( G.node[u1]['pop_template'], ", ".join(str(N1))))
log.debug("H nodes for u2 %s: %s" % ( G.node[u2]['pop_template'], ", ".join(str(N2))))
# 'root' not set
#TODO: fold rooted back into special case of cartesian - just do the same for now
if operator == 'rooted':
product_edges = [((u1, v1), (u2, v2)) for v1 in N1 for v2 in N2
if H1.node[v1].get("interpop") == H2.node[v2].get("interpop") == True ]
log.debug("Rooted product edges for (%s,%s): %s" % (u1, u2, product_edges))
edges += product_edges
if operator == 'lexical':
product_edges = [((u1, v1), (u2, v2)) for v1 in N1 for v2 in N2]
log.debug("Lexical product edges for (%s,%s): %s" % (u1, u2, product_edges))
edges += product_edges
if operator in cartesian_operators:
product_edges = [((u1, v1), (u2, v2)) for v1 in N1 for v2 in N2 if v1 == v2]
log.debug("Cartesian product edges for (%s,%s): %s" % (u1, u2, product_edges))
edges += product_edges
if operator in tensor_operators:
product_edges = [((u1, v1), (u2, v2)) for v1 in N1 for v2 in N2
if H1.has_edge(v1, v2) or H2.has_edge(v1,v2)]
log.debug("Tensor product edges for (%s,%s): %s" % (u1, u2, product_edges))
edges += product_edges
return edges
#TODO: What about edge ids?
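The operator logic in inter_pop_links is easiest to see on a toy example. The sketch below uses plain networkx (city and node names invented, independent of autonetkit) and reproduces just the cartesian rule, where a backbone edge (u1, u2) yields ((u1, v), (u2, v)) for each template node v shared by both PoPs:

```python
# Toy illustration of the cartesian inter-PoP rule, independent of autonetkit.
import networkx as nx

backbone = nx.Graph()
backbone.add_edge("syd", "mel")              # one backbone edge

template = nx.Graph()
template.add_nodes_from(["core1", "core2"])  # template nodes treated as inter-PoP

edges = [((u1, v), (u2, v))
         for (u1, u2) in backbone.edges()
         for v in template.nodes()]
print(edges)
# [(('syd', 'core1'), ('mel', 'core1')), (('syd', 'core2'), ('mel', 'core2'))]
```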
|
[
"wilko.henecka@adelaide.edu.au"
] |
wilko.henecka@adelaide.edu.au
|
ca8251ca6103bd7ac332fc537961d3390d96ef1a
|
5ec4098d7d11ceb9b37feeb340e7815683864b19
|
/Timetable/server/backend/core/core_utils.py
|
4e0d591224d29f3339737eb8e04b130dcc25f6ba
|
[] |
no_license
|
sunyangkobe/Timetable
|
dfed21f717c0a93838c4c8f6bf86e9d743068122
|
79aac0ce48b9514cc1fa2b885f1c18cce87d85c8
|
refs/heads/master
| 2021-01-02T22:37:53.836555
| 2013-12-13T19:04:04
| 2013-12-13T19:04:04
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 375
|
py
|
__author__ = 'mimighostipad'

from django.core.cache import cache

AVATAR_PRE = "avatar_"
DEFAULT_AVATAR = "avatar_default"


def get_user_avatar_key(user):
    return AVATAR_PRE + str(user.id)


def get_user_avatar(user):
    avatar_key = get_user_avatar_key(user)
    if cache.has_key(avatar_key):
        return cache.get(avatar_key)
    return cache.get(DEFAULT_AVATAR)
|
[
"yksun@cs.cmu.edu"
] |
yksun@cs.cmu.edu
|
a0a9f050439356ae3ab9c7b58a7ae193821ced2e
|
e3ca1ff203bd7a6d73dec1461999217ad2f34e85
|
/sana/api/.svn/text-base/widgets.py.svn-base
|
e9b8076203ff07926ba307893d05fdd1b6cbcf49
|
[
"BSD-3-Clause"
] |
permissive
|
satvikdhandhania/vit-11
|
1f8b2796f14e56c9a36d5c39d852c1ff344c42d7
|
e599f2b82a9194658c67bbd5c7e45f3b50d016da
|
refs/heads/master
| 2021-01-25T04:02:15.875370
| 2014-04-24T10:46:18
| 2014-04-24T10:46:18
| 19,178,144
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,837
|
'''Provides fields for api forms
@author: Sana Dev Team
Created on Jun 10, 2011
'''
from django.forms.widgets import Input, MultiWidget, TextInput, PasswordInput, mark_safe, force_unicode, flatatt
class JSONInput(Input):
input_type = 'hidden'
is_hidden = True
class BinaryWidget(MultiWidget):
''' Provides multi binary upload meta data with a dispatch '''
def __init__(self, attrs=None):
widgets = (TextInput(), TextInput(), TextInput(), TextInput() )
super(BinaryWidget, self).__init__(widgets, attrs=attrs)
def decompress(self, value):
'''
Returns the 'uid', 'client, 'size', 'content_type'
'''
if value:
return [value.uid, value.client, value.answer]
return [None, None, None ]
class ObservationWidget(MultiWidget):
''' Provides multi binary upload meta data with a dispatch '''
def __init__(self, attrs=None):
widget = (TextInput(), TextInput(), TextInput() )
super(ObservationWidget, self).__init__(widget, attrs=attrs)
def decompress(self, value):
'''
Returns the 'uid', 'client', 'dispatch', and 'binaries' attributes as a list
'''
if value:
return [value.concept, value.question, value.answer]
return [None, None, None ]
class EncounterWidget(MultiWidget):
''' Provides multi binary upload meta data with a dispatch '''
def __init__(self, attrs=None):
widget = (TextInput(), TextInput(), TextInput() )
super(EncounterWidget, self).__init__(widget, attrs=attrs)
def decompress(self, value):
'''
Returns the 'title', 'author', 'observations'
'''
if value:
return [value.title, value.author, value.observations]
return [None, None, None ]
class DispatchWidget(MultiWidget):
''' Represents the data sent with an api request from a client '''
def __init__(self, attrs=None):
widgets = (TextInput(attrs=attrs),
TextInput(attrs=attrs),
TextInput(attrs=attrs))
super(DispatchWidget, self).__init__(widgets, attrs)
def decompress(self, value):
'''
Returns the 'uid', 'client', and 'dispatch' attributes as a list
'''
if value:
return [value.uid, value.client, value.dispatch]
return [None, None, None, None]
def render(self, name, value, attrs=None):
if value is None:
value = []
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
if value != '':
# Only add the 'value' attribute if a value is non-empty.
final_attrs['value'] = force_unicode(self._format_value(value))
return mark_safe(u'<input%s />' % flatatt(final_attrs))
class LoginWidget(MultiWidget):
''' Username and password widget '''
def __init__(self, attrs=None):
widgets = (TextInput(attrs=attrs), PasswordInput(attrs=attrs))
super(LoginWidget, self).__init__(widgets, attrs)
def decompress(self, value):
"""
Returns a list of decompressed values for the given compressed value.
The given value can be assumed to be valid, but not necessarily
non-empty.
"""
if value:
return [value.username, value.password]
return [None, None]
def render(self, name, value, attrs=None):
if value is None:
value = []
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
if value != '':
# Only add the 'value' attribute if a value is non-empty.
final_attrs['value'] = force_unicode(self._format_value(value))
return mark_safe(u'<input%s />' % flatatt(final_attrs))
|
[
"akshaydixi@gmail.com"
] |
akshaydixi@gmail.com
|
|
7910786e5c47a2f6784773af358697d67ebb761f
|
b30ec73f10f204c3fb7059c311283c43238275eb
|
/ss411/users/tests/test_models.py
|
ad365f91ba55cda427544104f90e4239c010c55b
|
[
"MIT"
] |
permissive
|
mbronstein/ss411
|
f8a5829db01eff90399da9acdd59264bc2f39db9
|
e901423e1fd16d4c414039083c6c7bc99b7e35ad
|
refs/heads/master
| 2023-03-24T08:47:44.344457
| 2021-03-03T22:13:34
| 2021-03-03T22:13:34
| 338,348,101
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 198
|
py
|
import pytest
from ss411.users.models import User
pytestmark = pytest.mark.django_db
def test_user_get_absolute_url(user: User):
assert user.get_absolute_url() == f"/users/{user.username}/"
|
[
"mark@bronsteinlaw.com"
] |
mark@bronsteinlaw.com
|
f3fee73504086589689015b432b61e3df713ce4c
|
cd72dbaf5ba1e250df76982e97830306cdb934de
|
/assignment1/assignment3.py
|
14b24b10e1c5d8f88b0600e8915a7c5ace93a0c8
|
[] |
no_license
|
Vinith69/collegeInternship
|
2dd8516bd73c76ac36de5f6ef30618b9d351ffa6
|
6f8beabd1b21d1d0632c5d7962c7d824e1fb89dc
|
refs/heads/master
| 2023-08-16T11:58:29.756082
| 2021-10-14T03:20:11
| 2021-10-14T03:20:11
| 416,967,982
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 238
|
py
|
n = int(input("Enter the size of array\n"))
if 1 < n < 11:
arr = map(int, input().split())
arr = list(set(list(arr)))
ar = len(arr)
arr = sorted(arr)
print(arr[ar-2])
else:
print("n size must be between 2 and 10")
|
[
"ramarock09@gmail.com"
] |
ramarock09@gmail.com
|
076a8187acddfb020c167468b814387208f56679
|
3a5c94dce25e38fc083619a2a0bf8a657b9afb8c
|
/vol_108/p10812.py
|
4c216f0874b42850ec067aa54142d74c4d6452df
|
[] |
no_license
|
tkoz0/problems-online-judge
|
a322e296fb14fdf4ca616352ead28895465879e9
|
3372c6fe75556a9dd3e6eb8b06ae65058290ab31
|
refs/heads/master
| 2022-07-23T01:20:57.380524
| 2022-07-10T07:15:56
| 2022-07-10T07:15:56
| 157,949,283
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 273
|
py
|
n = int(input())
for z in range(n):
    s, d = map(int, input().split())
    if (s + d) % 2 != 0:
        print('impossible')
        continue
    small = s // 2
    large = small + s % 2
    small -= d // 2
    large += d // 2
    if small < 0:
        print('impossible')
        continue
    print(large, small)
|
[
"you@example.com"
] |
you@example.com
|
a8ca534232ac3e410b79c33d06e4dd2514b31e5a
|
f9d564f1aa83eca45872dab7fbaa26dd48210d08
|
/huaweicloud-sdk-roma/huaweicloudsdkroma/v2/model/update_special_throttling_configuration_v2_response.py
|
aa4225211245441353468dcae8e63dd285571101
|
[
"Apache-2.0"
] |
permissive
|
huaweicloud/huaweicloud-sdk-python-v3
|
cde6d849ce5b1de05ac5ebfd6153f27803837d84
|
f69344c1dadb79067746ddf9bfde4bddc18d5ecf
|
refs/heads/master
| 2023-09-01T19:29:43.013318
| 2023-08-31T08:28:59
| 2023-08-31T08:28:59
| 262,207,814
| 103
| 44
|
NOASSERTION
| 2023-06-22T14:50:48
| 2020-05-08T02:28:43
|
Python
|
UTF-8
|
Python
| false
| false
| 11,116
|
py
|
# coding: utf-8
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class UpdateSpecialThrottlingConfigurationV2Response(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'id': 'str',
'call_limits': 'int',
'apply_time': 'datetime',
'app_name': 'str',
'app_id': 'str',
'object_id': 'str',
'object_type': 'str',
'object_name': 'str',
'throttle_id': 'str'
}
attribute_map = {
'id': 'id',
'call_limits': 'call_limits',
'apply_time': 'apply_time',
'app_name': 'app_name',
'app_id': 'app_id',
'object_id': 'object_id',
'object_type': 'object_type',
'object_name': 'object_name',
'throttle_id': 'throttle_id'
}
def __init__(self, id=None, call_limits=None, apply_time=None, app_name=None, app_id=None, object_id=None, object_type=None, object_name=None, throttle_id=None):
"""UpdateSpecialThrottlingConfigurationV2Response
The model defined in huaweicloud sdk
:param id: ID of the special configuration
:type id: str
:param call_limits: Maximum number of times the special object may call the API within the throttling period
:type call_limits: int
:param apply_time: Time when the configuration was set
:type apply_time: datetime
:param app_name: Name of the APP the configuration applies to
:type app_name: str
:param app_id: ID of the APP the configuration applies to
:type app_id: str
:param object_id: Identifier of the special object
:type object_id: str
:param object_type: Type of the special object: APP or USER
:type object_type: str
:param object_name: [Name of the APP or tenant the configuration applies to](tag:hws;hws_hk;hcs;fcs;g42;)[ID of the APP or tenant the configuration applies to](tag:Site)
:type object_name: str
:param throttle_id: ID of the throttling policy
:type throttle_id: str
"""
super(UpdateSpecialThrottlingConfigurationV2Response, self).__init__()
self._id = None
self._call_limits = None
self._apply_time = None
self._app_name = None
self._app_id = None
self._object_id = None
self._object_type = None
self._object_name = None
self._throttle_id = None
self.discriminator = None
if id is not None:
self.id = id
if call_limits is not None:
self.call_limits = call_limits
if apply_time is not None:
self.apply_time = apply_time
if app_name is not None:
self.app_name = app_name
if app_id is not None:
self.app_id = app_id
if object_id is not None:
self.object_id = object_id
if object_type is not None:
self.object_type = object_type
if object_name is not None:
self.object_name = object_name
if throttle_id is not None:
self.throttle_id = throttle_id
@property
def id(self):
"""Gets the id of this UpdateSpecialThrottlingConfigurationV2Response.
ID of the special configuration
:return: The id of this UpdateSpecialThrottlingConfigurationV2Response.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this UpdateSpecialThrottlingConfigurationV2Response.
ID of the special configuration
:param id: The id of this UpdateSpecialThrottlingConfigurationV2Response.
:type id: str
"""
self._id = id
@property
def call_limits(self):
"""Gets the call_limits of this UpdateSpecialThrottlingConfigurationV2Response.
Maximum number of times the special object may call the API within the throttling period
:return: The call_limits of this UpdateSpecialThrottlingConfigurationV2Response.
:rtype: int
"""
return self._call_limits
@call_limits.setter
def call_limits(self, call_limits):
"""Sets the call_limits of this UpdateSpecialThrottlingConfigurationV2Response.
Maximum number of times the special object may call the API within the throttling period
:param call_limits: The call_limits of this UpdateSpecialThrottlingConfigurationV2Response.
:type call_limits: int
"""
self._call_limits = call_limits
@property
def apply_time(self):
"""Gets the apply_time of this UpdateSpecialThrottlingConfigurationV2Response.
Time when the configuration was set
:return: The apply_time of this UpdateSpecialThrottlingConfigurationV2Response.
:rtype: datetime
"""
return self._apply_time
@apply_time.setter
def apply_time(self, apply_time):
"""Sets the apply_time of this UpdateSpecialThrottlingConfigurationV2Response.
Time when the configuration was set
:param apply_time: The apply_time of this UpdateSpecialThrottlingConfigurationV2Response.
:type apply_time: datetime
"""
self._apply_time = apply_time
@property
def app_name(self):
"""Gets the app_name of this UpdateSpecialThrottlingConfigurationV2Response.
Name of the APP the configuration applies to
:return: The app_name of this UpdateSpecialThrottlingConfigurationV2Response.
:rtype: str
"""
return self._app_name
@app_name.setter
def app_name(self, app_name):
"""Sets the app_name of this UpdateSpecialThrottlingConfigurationV2Response.
Name of the APP the configuration applies to
:param app_name: The app_name of this UpdateSpecialThrottlingConfigurationV2Response.
:type app_name: str
"""
self._app_name = app_name
@property
def app_id(self):
"""Gets the app_id of this UpdateSpecialThrottlingConfigurationV2Response.
ID of the APP the configuration applies to
:return: The app_id of this UpdateSpecialThrottlingConfigurationV2Response.
:rtype: str
"""
return self._app_id
@app_id.setter
def app_id(self, app_id):
"""Sets the app_id of this UpdateSpecialThrottlingConfigurationV2Response.
ID of the APP the configuration applies to
:param app_id: The app_id of this UpdateSpecialThrottlingConfigurationV2Response.
:type app_id: str
"""
self._app_id = app_id
@property
def object_id(self):
"""Gets the object_id of this UpdateSpecialThrottlingConfigurationV2Response.
Identifier of the special object
:return: The object_id of this UpdateSpecialThrottlingConfigurationV2Response.
:rtype: str
"""
return self._object_id
@object_id.setter
def object_id(self, object_id):
"""Sets the object_id of this UpdateSpecialThrottlingConfigurationV2Response.
Identifier of the special object
:param object_id: The object_id of this UpdateSpecialThrottlingConfigurationV2Response.
:type object_id: str
"""
self._object_id = object_id
@property
def object_type(self):
"""Gets the object_type of this UpdateSpecialThrottlingConfigurationV2Response.
Type of the special object: APP or USER
:return: The object_type of this UpdateSpecialThrottlingConfigurationV2Response.
:rtype: str
"""
return self._object_type
@object_type.setter
def object_type(self, object_type):
"""Sets the object_type of this UpdateSpecialThrottlingConfigurationV2Response.
Type of the special object: APP or USER
:param object_type: The object_type of this UpdateSpecialThrottlingConfigurationV2Response.
:type object_type: str
"""
self._object_type = object_type
@property
def object_name(self):
"""Gets the object_name of this UpdateSpecialThrottlingConfigurationV2Response.
[Name of the APP or tenant the configuration applies to](tag:hws;hws_hk;hcs;fcs;g42;)[ID of the APP or tenant the configuration applies to](tag:Site)
:return: The object_name of this UpdateSpecialThrottlingConfigurationV2Response.
:rtype: str
"""
return self._object_name
@object_name.setter
def object_name(self, object_name):
"""Sets the object_name of this UpdateSpecialThrottlingConfigurationV2Response.
[Name of the APP or tenant the configuration applies to](tag:hws;hws_hk;hcs;fcs;g42;)[ID of the APP or tenant the configuration applies to](tag:Site)
:param object_name: The object_name of this UpdateSpecialThrottlingConfigurationV2Response.
:type object_name: str
"""
self._object_name = object_name
@property
def throttle_id(self):
"""Gets the throttle_id of this UpdateSpecialThrottlingConfigurationV2Response.
ID of the throttling policy
:return: The throttle_id of this UpdateSpecialThrottlingConfigurationV2Response.
:rtype: str
"""
return self._throttle_id
@throttle_id.setter
def throttle_id(self, throttle_id):
"""Sets the throttle_id of this UpdateSpecialThrottlingConfigurationV2Response.
ID of the throttling policy
:param throttle_id: The throttle_id of this UpdateSpecialThrottlingConfigurationV2Response.
:type throttle_id: str
"""
self._throttle_id = throttle_id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, UpdateSpecialThrottlingConfigurationV2Response):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
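In normal use this object comes back from the ROMA client rather than being built by hand, but constructing one directly (the field values below are invented) is a quick way to see what to_dict() and to_str() produce:

```python
# Hedged usage sketch; the IDs and limit here are example values only.
resp = UpdateSpecialThrottlingConfigurationV2Response(
    id="3437448ad06f4e0c91a224183116e965",
    call_limits=200,
    object_type="APP",
    throttle_id="0325b671-2d50-4614-9868-22102262695d",
)
print(resp.to_dict())  # unset attributes simply come through as None
print(resp.to_str())   # JSON rendering via sanitize_for_serialization
```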
|
[
"hwcloudsdk@huawei.com"
] |
hwcloudsdk@huawei.com
|
c6c5efb226333f27660bf83ad7d0642ec32736b3
|
69233f51c2beb492f273940756e19f41ece7f793
|
/nemo_text_processing/inverse_text_normalization/en/taggers/tokenize_and_classify.py
|
731e677d8ec9173657a53146d25485f205f82c08
|
[
"Apache-2.0"
] |
permissive
|
skgusrb12/NeMo
|
1e42f9a2799805ac0cc3b4fd14b1f199c26aed7d
|
dfb5bf5b741fdf3a2cad68f373cd42c74c600681
|
refs/heads/main
| 2023-06-28T17:09:39.975386
| 2021-08-02T17:49:54
| 2021-08-02T17:49:54
| 392,194,106
| 1
| 0
|
Apache-2.0
| 2021-08-03T04:55:53
| 2021-08-03T04:55:53
| null |
UTF-8
|
Python
| false
| false
| 4,261
|
py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
# Copyright 2015 and onwards Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nemo_text_processing.inverse_text_normalization.en.taggers.cardinal import CardinalFst
from nemo_text_processing.inverse_text_normalization.en.taggers.date import DateFst
from nemo_text_processing.inverse_text_normalization.en.taggers.decimal import DecimalFst
from nemo_text_processing.inverse_text_normalization.en.taggers.electronic import ElectronicFst
from nemo_text_processing.inverse_text_normalization.en.taggers.measure import MeasureFst
from nemo_text_processing.inverse_text_normalization.en.taggers.money import MoneyFst
from nemo_text_processing.inverse_text_normalization.en.taggers.ordinal import OrdinalFst
from nemo_text_processing.inverse_text_normalization.en.taggers.punctuation import PunctuationFst
from nemo_text_processing.inverse_text_normalization.en.taggers.telephone import TelephoneFst
from nemo_text_processing.inverse_text_normalization.en.taggers.time import TimeFst
from nemo_text_processing.inverse_text_normalization.en.taggers.whitelist import WhiteListFst
from nemo_text_processing.inverse_text_normalization.en.taggers.word import WordFst
from nemo_text_processing.text_normalization.en.graph_utils import GraphFst, delete_extra_space, delete_space
try:
import pynini
from pynini.lib import pynutil
PYNINI_AVAILABLE = True
except (ModuleNotFoundError, ImportError):
PYNINI_AVAILABLE = False
class ClassifyFst(GraphFst):
"""
Final class that composes all other classification grammars. This class can process an entire sentence, provided it is lower-cased.
For deployment, this grammar will be compiled and exported to an OpenFst Finite State Archive (FAR) file.
More details on deployment can be found in NeMo/tools/text_processing_deployment.
"""
def __init__(self):
super().__init__(name="tokenize_and_classify", kind="classify")
cardinal = CardinalFst()
cardinal_graph = cardinal.fst
ordinal = OrdinalFst(cardinal)
ordinal_graph = ordinal.fst
decimal = DecimalFst(cardinal)
decimal_graph = decimal.fst
measure_graph = MeasureFst(cardinal=cardinal, decimal=decimal).fst
date_graph = DateFst(ordinal=ordinal).fst
word_graph = WordFst().fst
time_graph = TimeFst().fst
money_graph = MoneyFst(cardinal=cardinal, decimal=decimal).fst
whitelist_graph = WhiteListFst().fst
punct_graph = PunctuationFst().fst
electronic_graph = ElectronicFst().fst
telephone_graph = TelephoneFst().fst
classify = (
pynutil.add_weight(whitelist_graph, 1.01)
| pynutil.add_weight(time_graph, 1.1)
| pynutil.add_weight(date_graph, 1.09)
| pynutil.add_weight(decimal_graph, 1.1)
| pynutil.add_weight(measure_graph, 1.1)
| pynutil.add_weight(cardinal_graph, 1.1)
| pynutil.add_weight(ordinal_graph, 1.1)
| pynutil.add_weight(money_graph, 1.1)
| pynutil.add_weight(telephone_graph, 1.1)
| pynutil.add_weight(electronic_graph, 1.1)
| pynutil.add_weight(word_graph, 100)
)
punct = pynutil.insert("tokens { ") + pynutil.add_weight(punct_graph, weight=1.1) + pynutil.insert(" }")
token = pynutil.insert("tokens { ") + classify + pynutil.insert(" }")
token_plus_punct = (
pynini.closure(punct + pynutil.insert(" ")) + token + pynini.closure(pynutil.insert(" ") + punct)
)
graph = token_plus_punct + pynini.closure(delete_extra_space + token_plus_punct)
graph = delete_space + graph + delete_space
self.fst = graph.optimize()
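A hedged sketch of exercising the composed grammar directly with pynini follows; NeMo normally wraps this in its InverseNormalizer, and building ClassifyFst assumes the grammar data files shipped with nemo_text_processing are available. The input sentence and printed tags are illustrative, not taken from this file:

```python
# Sketch only: tag a lower-cased sentence with the classifier built above.
import pynini

classifier = ClassifyFst()
text = pynini.escape("on may third twenty twenty one")
lattice = text @ classifier.fst                                   # compose input with the tagger
tagged = pynini.shortestpath(lattice, nshortest=1, unique=True).string()
print(tagged)  # something like: tokens { date { ... } } (exact fields depend on the grammars)
```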
|
[
"noreply@github.com"
] |
skgusrb12.noreply@github.com
|
2d757dfd5eb6b67f27916ee25bab7c274cc3e173
|
4cf5298b6670ed306033e10e7417a66dbc89953c
|
/jtheakst.py
|
f5b3f5dd9e86f3745cb744aed02b3d5bd8533a86
|
[] |
no_license
|
chiefspace/python_thinkful
|
c72331bfa10a26d85d098151785c5d3ab6d6f11e
|
c181d33f9447b6d21df6d340e13ee481884eb2bf
|
refs/heads/master
| 2020-12-24T06:57:21.475202
| 2016-06-30T13:50:33
| 2016-06-30T13:50:33
| 61,953,550
| 0
| 1
| null | 2016-06-30T13:48:18
| 2016-06-25T17:14:28
|
Python
|
UTF-8
|
Python
| false
| false
| 344
|
py
|
phone_book = {
    "Sarah Hughes": "01234 567890",
    "Tim Taylor": "02345 678901",
    "Sam Smith": "03456 789012"
}

for key, val in phone_book.items():
    print("{} = {}".format(key, val))

lookUp = "Jamie Theakston"
try:
    phone_book[lookUp]
except KeyError:
    print("Apparently, {} has an unlisted phone number".format(lookUp))
|
[
"ben@chiefspace.com"
] |
ben@chiefspace.com
|
3f889a3d9bfda4ea7f2372059485132b8d91da03
|
1a0bc226dfa6adfe4782c4dde3363ed4e8192540
|
/external_scripts/rank_metrics.py
|
e6620c41858c216f45054cad621b9e4f90937807
|
[] |
no_license
|
alexandervansomeren/msc_thesis
|
5decc0f4be9d47ff4f3146da9a078e8b16636692
|
49e7a853ebb678648607817d095e5ff280c4f79b
|
refs/heads/master
| 2021-09-16T00:04:04.308141
| 2018-06-13T11:18:53
| 2018-06-13T11:18:53
| 105,005,948
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,205
|
py
|
"""Information Retrieval metrics
Useful Resources:
http://www.cs.utexas.edu/~mooney/ir-course/slides/Evaluation.ppt
http://www.nii.ac.jp/TechReports/05-014E.pdf
http://www.stanford.edu/class/cs276/handouts/EvaluationNew-handout-6-per.pdf
http://hal.archives-ouvertes.fr/docs/00/72/67/60/PDF/07-busa-fekete.pdf
Learning to Rank for Information Retrieval (Tie-Yan Liu)
"""
import numpy as np
def mean_reciprocal_rank(rs):
"""Score is reciprocal of the rank of the first relevant item
First element is 'rank 1'. Relevance is binary (nonzero is relevant).
Example from http://en.wikipedia.org/wiki/Mean_reciprocal_rank
>>> rs = [[0, 0, 1], [0, 1, 0], [1, 0, 0]]
>>> mean_reciprocal_rank(rs)
0.61111111111111105
>>> rs = np.array([[0, 0, 0], [0, 1, 0], [1, 0, 0]])
>>> mean_reciprocal_rank(rs)
0.5
>>> rs = [[0, 0, 0, 1], [1, 0, 0], [1, 0, 0]]
>>> mean_reciprocal_rank(rs)
0.75
Args:
rs: Iterator of relevance scores (list or numpy) in rank order
(first element is the first item)
Returns:
Mean reciprocal rank
"""
rs = (np.asarray(r).nonzero()[0] for r in rs)
return np.mean([1. / (r[0] + 1) if r.size else 0. for r in rs])
def r_precision(r):
"""Score is precision after all relevant documents have been retrieved
Relevance is binary (nonzero is relevant).
>>> r = [0, 0, 1]
>>> r_precision(r)
0.33333333333333331
>>> r = [0, 1, 0]
>>> r_precision(r)
0.5
>>> r = [1, 0, 0]
>>> r_precision(r)
1.0
Args:
r: Relevance scores (list or numpy) in rank order
(first element is the first item)
Returns:
R Precision
"""
r = np.asarray(r) != 0
z = r.nonzero()[0]
if not z.size:
return 0.
return np.mean(r[:z[-1] + 1])
def precision_at_k(r, k):
"""Score is precision @ k
Relevance is binary (nonzero is relevant).
>>> r = [0, 0, 1]
>>> precision_at_k(r, 1)
0.0
>>> precision_at_k(r, 2)
0.0
>>> precision_at_k(r, 3)
0.33333333333333331
>>> precision_at_k(r, 4)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
ValueError: Relevance score length < k
Args:
r: Relevance scores (list or numpy) in rank order
(first element is the first item)
Returns:
Precision @ k
Raises:
ValueError: len(r) must be >= k
"""
assert k >= 1
r = np.asarray(r)[:k] != 0
if r.size != k:
raise ValueError('Relevance score length < k')
return np.mean(r)
def average_precision(r):
"""Score is average precision (area under PR curve)
Relevance is binary (nonzero is relevant).
>>> r = [1, 1, 0, 1, 0, 1, 0, 0, 0, 1]
>>> delta_r = 1. / sum(r)
>>> sum([sum(r[:x + 1]) / (x + 1.) * delta_r for x, y in enumerate(r) if y])
0.7833333333333333
>>> average_precision(r)
0.78333333333333333
Args:
r: Relevance scores (list or numpy) in rank order
(first element is the first item)
Returns:
Average precision
"""
r = np.asarray(r) != 0
out = [precision_at_k(r, k + 1) for k in range(r.size) if r[k]]
if not out:
return 0.
return np.mean(out)
def mean_average_precision(rs):
"""Score is mean average precision
Relevance is binary (nonzero is relevant).
>>> rs = [[1, 1, 0, 1, 0, 1, 0, 0, 0, 1]]
>>> mean_average_precision(rs)
0.78333333333333333
>>> rs = [[1, 1, 0, 1, 0, 1, 0, 0, 0, 1], [0]]
>>> mean_average_precision(rs)
0.39166666666666666
Args:
rs: Iterator of relevance scores (list or numpy) in rank order
(first element is the first item)
Returns:
Mean average precision
"""
return np.mean([average_precision(r) for r in rs])
def dcg_at_k(r, k, method=0):
"""Score is discounted cumulative gain (dcg)
Relevance is positive real values. Can use binary
as the previous methods.
Example from
http://www.stanford.edu/class/cs276/handouts/EvaluationNew-handout-6-per.pdf
>>> r = [3, 2, 3, 0, 0, 1, 2, 2, 3, 0]
>>> dcg_at_k(r, 1)
3.0
>>> dcg_at_k(r, 1, method=1)
3.0
>>> dcg_at_k(r, 2)
5.0
>>> dcg_at_k(r, 2, method=1)
4.2618595071429155
>>> dcg_at_k(r, 10)
9.6051177391888114
>>> dcg_at_k(r, 11)
9.6051177391888114
Args:
r: Relevance scores (list or numpy) in rank order
(first element is the first item)
k: Number of results to consider
method: If 0 then weights are [1.0, 1.0, 0.6309, 0.5, 0.4307, ...]
If 1 then weights are [1.0, 0.6309, 0.5, 0.4307, ...]
Returns:
Discounted cumulative gain
"""
r = np.asfarray(r)[:k]
if r.size:
if method == 0:
return r[0] + np.sum(r[1:] / np.log2(np.arange(2, r.size + 1)))
elif method == 1:
return np.sum(r / np.log2(np.arange(2, r.size + 2)))
else:
raise ValueError('method must be 0 or 1.')
return 0.
def ndcg_at_k(r, k, method=0):
"""Score is normalized discounted cumulative gain (ndcg)
Relevance is positive real values. Can use binary
as the previous methods.
Example from
http://www.stanford.edu/class/cs276/handouts/EvaluationNew-handout-6-per.pdf
>>> r = [3, 2, 3, 0, 0, 1, 2, 2, 3, 0]
>>> ndcg_at_k(r, 1)
1.0
>>> r = [2, 1, 2, 0]
>>> ndcg_at_k(r, 4)
0.9203032077642922
>>> ndcg_at_k(r, 4, method=1)
0.96519546960144276
>>> ndcg_at_k([0], 1)
0.0
>>> ndcg_at_k([1], 2)
1.0
Args:
r: Relevance scores (list or numpy) in rank order
(first element is the first item)
k: Number of results to consider
method: If 0 then weights are [1.0, 1.0, 0.6309, 0.5, 0.4307, ...]
If 1 then weights are [1.0, 0.6309, 0.5, 0.4307, ...]
Returns:
Normalized discounted cumulative gain
"""
dcg_max = dcg_at_k(sorted(r, reverse=True), k, method)
if not dcg_max:
return 0.
return dcg_at_k(r, k, method) / dcg_max
if __name__ == "__main__":
import doctest
doctest.testmod()
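Beyond the doctests, a quick interactive check on a single ranked result list (the relevance values below are chosen arbitrarily) shows how the binary-relevance metrics relate:

```python
# Arbitrary example ranking: relevant items at ranks 2, 3 and 5.
relevance = [0, 1, 1, 0, 1]

print(precision_at_k(relevance, 3))  # 0.666... -> two of the top three are relevant
print(r_precision(relevance))        # precision at the rank of the last relevant item
print(average_precision(relevance))  # ~0.589, mean precision over the relevant ranks
print(ndcg_at_k(relevance, 5))       # < 1.0 because the ideal order is [1, 1, 1, 0, 0]
```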
|
[
"alexander.vansomeren@gmail.com"
] |
alexander.vansomeren@gmail.com
|
982461cebcec680105d00ab0e89423333960302d
|
f415db347e2f3c92143439e382c28df61442de97
|
/beginner level/between even.py
|
0735b1cb6c338352404ef2e68d5ceccd534780a1
|
[] |
no_license
|
Maniramez/python-codes
|
dadd8d2272ae3c616892800ee551f4733a2b0ca4
|
6d866913b147cc0cd146f2da859e498a83acec2e
|
refs/heads/master
| 2021-09-13T12:56:40.769191
| 2018-04-30T04:48:04
| 2018-04-30T04:48:04
| 125,207,665
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 114
|
py
|
a = int(input("enter the value"))
b = int(input("enter the value"))
for i in range(a, b + 1):
    if i % 2 == 0:
        print(i)
|
[
"noreply@github.com"
] |
Maniramez.noreply@github.com
|
760e5020cf69a86dafd45f9e8860eef44dcca520
|
3d1da50b7eff3bd38e2e398a63ed39a5d429cce9
|
/python-ai/python高级/06闭包装饰器_/15_类中call方法的使用.py
|
5325baaf4f635cddc88925ddf9da401e659f7557
|
[] |
no_license
|
wusanpi825117918/study
|
b10b3a646a66ff4f95daa98f8498d3ba78644bd2
|
ffdd358b95ed7b79a9cc11d46db9112a8664666d
|
refs/heads/master
| 2023-02-07T07:04:06.216477
| 2023-01-28T06:13:56
| 2023-01-28T06:13:56
| 224,645,256
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 151
|
py
|
# Define a class that implements the __call__ method
class Check(object):
    def __call__(self, *args, **kwargs):
        print("I am the __call__ method")


c = Check()
c()
|
[
"noreply@github.com"
] |
wusanpi825117918.noreply@github.com
|
f49ded29cbccc54c00c519ff5fd9283004c5cf81
|
c477588f2d0e924ee2a6d56b0784668a186ca340
|
/gtypes/gmessage_pass.py
|
cea41290fee4bacb11d4076cd3153dfedd041668
|
[] |
no_license
|
becharrens/MCProjectionTool
|
de09678d6ac8f7e3a610a541e8077b67e93ea21b
|
e1e64cf3b43f4c6ab6158d8724c4a94e2af21d6e
|
refs/heads/master
| 2023-03-05T17:57:35.424828
| 2020-09-29T13:27:14
| 2020-09-29T13:27:14
| 299,571,751
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,652
|
py
|
from typing import Set, Dict
import gtypes
from gtypes.gaction import GAction
from gtypes.gtype import GType
from ltypes.ltype import LType
from ltypes.lmessage_pass import LMessagePass
class GMessagePass(GType):
def __init__(self, action: GAction, cont: GType) -> None:
super().__init__()
self.action = action
self.cont = cont
def project(self, roles: Set[str]) -> Dict[str, LType]:
projections = self.cont.project(roles)
for role in roles:
local_action = self.action.project(role)
if local_action is not None:
projections[role] = LMessagePass(local_action, projections[role])
return projections
def first_actions(self, tvars: Set[str]) -> Set[GAction]:
return {self.action}
def set_rec_gtype(self, tvar: str, gtype: GType) -> None:
self.cont.set_rec_gtype(tvar, gtype)
def hash(self, tvars: Set[str]) -> int:
return (
self.action.__hash__() * gtypes.PRIME + self.cont.hash(tvars)
) % gtypes.HASH_SIZE
def to_string(self, indent: str) -> str:
return f"{indent}{self.action};\n{self.cont.to_string(indent)}"
def normalise(self):
self.cont: GType = self.cont.normalise()
return self
def has_rec_var(self, tvar: str) -> bool:
return self.cont.has_rec_var(tvar)
def __str__(self) -> str:
return self.to_string("")
def __eq__(self, o: object) -> bool:
if not isinstance(o, GMessagePass):
return False
return self.__hash__() == o.__hash__()
def __hash__(self) -> int:
return self.hash(set())
|
[
"bes.seb.98@gmail.com"
] |
bes.seb.98@gmail.com
|
6a6a056b69d8c1b0e499fe7b3da15e21acf1f8d9
|
e2045ea965ce2024c80d4d697384b1b5fddc27c8
|
/chess/chess.py
|
80a86801b51eea0e3231b08ed52222399ebd2c0a
|
[] |
no_license
|
AI-Factor-y/Games
|
c1e307b96017ae45a65b90cc200b40c206192387
|
7b7fcb92373ba2d25f228f340125f0643d36fb7e
|
refs/heads/main
| 2023-05-01T03:01:15.231519
| 2021-05-18T14:31:02
| 2021-05-18T14:31:02
| 368,551,074
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 55,327
|
py
|
import pygame
from copy import deepcopy
from network import Network
pygame.init()
menu=pygame.image.load("entrance.jpg")
globe=pygame.image.load("globe.png")
disp_width=900
disp_height=750
STAT_FONT = pygame.font.SysFont("comicsans",50)
stat_font = pygame.font.SysFont("comicsans",70)
stat_font2=pygame.font.SysFont("comicsans",100)
width_bet_sq=88
#global colors
white=(240,240,240)
black=(0,0,0)
blue=(0,0,255)
brown=(213,125,58)
red=(255,0,0)
gameDisplay=pygame.display.set_mode((disp_width,disp_height))
tile_black=pygame.image.load("background.jpg")
#pieces of white
b_bishop=pygame.image.load("blackBishop.png")
b_king=pygame.image.load("blackKing.png")
b_queen=pygame.image.load("blackQueen.png")
b_pawn=pygame.image.load("blackPawn.png")
b_rook=pygame.image.load("blackRook.png")
b_knight=pygame.image.load("blackKnight.png")
#pieces of black
w_bishop=pygame.image.load("whiteBishop.png")
w_king=pygame.image.load("whiteKing.png")
w_queen=pygame.image.load("whiteQueen.png")
w_pawn=pygame.image.load("whitePawn.png")
w_rook=pygame.image.load("whiteRook.png")
w_knight=pygame.image.load("whiteKnight.png")
back=pygame.image.load("back.jpeg")
frame=pygame.image.load("frame.png")
tile_white=pygame.image.load("tile.jpeg")
class board:
def __init__(self):
self.board_x=185
self.board_y=23
self.board_width=704
self.board_height=704
self.color=white
def draw_board(self):
back2=pygame.transform.scale(back,(900,750))
gameDisplay.blit(back2,(0,0,900,750))
frame2=pygame.transform.scale(frame,(self.board_height+110,self.board_width+70))
gameDisplay.blit(frame2,(self.board_x-40,self.board_y-40,self.board_height+110,self.board_width+70))
# pygame.draw.rect(gameDisplay,white,(self.board_x,self.board_y,self.board_height,self.board_width))
self.draw_checks()
def draw_checks(self):
color_alternate=1
# the board is 704 pixels wide, so 704/8 = 88 pixels per square
check_width=88
check_height=88
start_check_x=self.board_x
start_check_y=self.board_y
for vert in range(8):
for hori in range(8):
if color_alternate==1:
check_color=white
else:
check_color=black
inc_x=check_width*hori
inc_y=check_height*vert
if check_color==white:
pygame.draw.rect(gameDisplay,check_color,(start_check_x+inc_x,start_check_y+inc_y,check_width,check_height))
elif check_color==black:
tile_black2=pygame.transform.scale(tile_black,(88,88))
gameDisplay.blit(tile_black2,(start_check_x+inc_x,start_check_y+inc_y,check_width,check_height))
color_alternate*=-1
color_alternate*=-1
chess_board=[[-2,-3,-4,-5,-6,-4,-3,-2],
[-1,-1,-1,-1,-1,-1,-1,-1],
[0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0],
[1,1,1,1,1,1,1,1],
[2,3,4,5,6,4,3,2]]
def conv_to_arr(pos):
return ((pos[0]-185)//88,(pos[1]-23)//88)
def conv_to_pixel(pos):
return ((185+pos[0]*88),(23+pos[1]*88))
def draw_rect(x,y,w,h,t,col):
col=col
pygame.draw.line(gameDisplay,col,(x,y),(x+w,y),t)
pygame.draw.line(gameDisplay,col,(x+w,y),(x+w,y+h),t)
pygame.draw.line(gameDisplay,col,(x+w,y+h),(x,y+h),t)
pygame.draw.line(gameDisplay,col,(x,y+h),(x,y),t)
def select_peice():
global chess_board,current_pos,turns
pos=pygame.mouse.get_pos()
arr_pos=conv_to_arr(pos)
pos=conv_to_pixel(arr_pos)
# print(arr_pos)
if arr_pos[0]>=0 and arr_pos[0]<8 and arr_pos[1]>=0 and arr_pos[1]<8:
if turns==1:
if pygame.mouse.get_pressed()[0] and (chess_board[arr_pos[1]][arr_pos[0]] >0):
draw_rect(pos[0],pos[1],width_bet_sq,width_bet_sq,4,(246,169,27))
current_pos=pos
if turns==-1:
if pygame.mouse.get_pressed()[0] and (chess_board[arr_pos[1]][arr_pos[0]] <0):
draw_rect(pos[0],pos[1],width_bet_sq,width_bet_sq,4,(246,169,27))
current_pos=pos
def mark_current_pos():
global current_pos
draw_rect(current_pos[0],current_pos[1],width_bet_sq,width_bet_sq,4,(0,0,255))
def draw_peices():
global chess_board
# internally the array is flipped by row and column; remember to re-flip it while drawing
for i in range(8):
for j in range(8):
check_pos=chess_board[i][j]
if check_pos==1:
pixel_pos=conv_to_pixel((j,i))
w_pawn2=pygame.transform.scale(w_pawn,(88,88))
gameDisplay.blit(w_pawn2,(pixel_pos[0],pixel_pos[1],88,88))
# pygame.draw.circle(gameDisplay,piece_color,(pixel_pos[0]+44,pixel_pos[1]+44),10)
if check_pos==2:
pixel_pos=conv_to_pixel((j,i))
w_rook2=pygame.transform.scale(w_rook,(88,88))
gameDisplay.blit(w_rook2,(pixel_pos[0],pixel_pos[1],88,88))
# pygame.draw.circle(gameDisplay,piece_color,(pixel_pos[0]+44,pixel_pos[1]+44),10)
if check_pos==3:
pixel_pos=conv_to_pixel((j,i))
w_knight2=pygame.transform.scale(w_knight,(88,88))
gameDisplay.blit(w_knight2,(pixel_pos[0],pixel_pos[1],88,88))
# pygame.draw.circle(gameDisplay,piece_color,(pixel_pos[0]+44,pixel_pos[1]+44),10)
if check_pos==4:
pixel_pos=conv_to_pixel((j,i))
w_bishop2=pygame.transform.scale(w_bishop,(88,88))
gameDisplay.blit(w_bishop2,(pixel_pos[0],pixel_pos[1],88,88))
# pygame.draw.circle(gameDisplay,piece_color,(pixel_pos[0]+44,pixel_pos[1]+44),10)
if check_pos==5:
pixel_pos=conv_to_pixel((j,i))
w_queen2=pygame.transform.scale(w_queen,(88,88))
gameDisplay.blit(w_queen2,(pixel_pos[0],pixel_pos[1],88,88))
# pygame.draw.circle(gameDisplay,piece_color,(pixel_pos[0]+44,pixel_pos[1]+44),10)
if check_pos==6:
pixel_pos=conv_to_pixel((j,i))
w_king2=pygame.transform.scale(w_king,(88,88))
gameDisplay.blit(w_king2,(pixel_pos[0],pixel_pos[1],88,88))
# pygame.draw.circle(gameDisplay,piece_color,(pixel_pos[0]+44,pixel_pos[1]+44),10)
if check_pos==-1:
pixel_pos=conv_to_pixel((j,i))
b_pawn2=pygame.transform.scale(b_pawn,(88,88))
gameDisplay.blit(b_pawn2,(pixel_pos[0],pixel_pos[1],88,88))
# pygame.draw.circle(gameDisplay,piece_color,(pixel_pos[0]+44,pixel_pos[1]+44),10)
if check_pos==-2:
pixel_pos=conv_to_pixel((j,i))
b_rook2=pygame.transform.scale(b_rook,(88,88))
gameDisplay.blit(b_rook2,(pixel_pos[0],pixel_pos[1],88,88))
# pygame.draw.circle(gameDisplay,piece_color,(pixel_pos[0]+44,pixel_pos[1]+44),10)
if check_pos==-3:
pixel_pos=conv_to_pixel((j,i))
b_knight2=pygame.transform.scale(b_knight,(88,88))
gameDisplay.blit(b_knight2,(pixel_pos[0],pixel_pos[1],88,88))
# pygame.draw.circle(gameDisplay,piece_color,(pixel_pos[0]+44,pixel_pos[1]+44),10)
if check_pos==-4:
pixel_pos=conv_to_pixel((j,i))
b_bishop2=pygame.transform.scale(b_bishop,(88,88))
gameDisplay.blit(b_bishop2,(pixel_pos[0],pixel_pos[1],88,88))
# pygame.draw.circle(gameDisplay,piece_color,(pixel_pos[0]+44,pixel_pos[1]+44),10)
if check_pos==-5:
pixel_pos=conv_to_pixel((j,i))
b_queen2=pygame.transform.scale(b_queen,(88,88))
gameDisplay.blit(b_queen2,(pixel_pos[0],pixel_pos[1],88,88))
# pygame.draw.circle(gameDisplay,piece_color,(pixel_pos[0]+44,pixel_pos[1]+44),10)
if check_pos==-6:
pixel_pos=conv_to_pixel((j,i))
b_king2=pygame.transform.scale(b_king,(88,88))
gameDisplay.blit(b_king2,(pixel_pos[0],pixel_pos[1],88,88))
#showing moves
def pawn(use_curr_pos,p):
global current_pos,chess_board
if use_curr_pos==True:
pos=conv_to_arr(current_pos)
else:
pos=p
# print(pos)
avble_slots=[]
kill_slots=[]
try:
if chess_board[pos[1]][pos[0]]>0:
# print(pos)
if pos[1]==6:
if chess_board[pos[1]-1][pos[0]]==0:
avble_slots.append((pos[1]-1,pos[0]))
if chess_board[pos[1]-2][pos[0]]==0:
avble_slots.append((pos[1]-2,pos[0]))
if pos[0]-1>=0:
if chess_board[pos[1]-1][pos[0]-1]<0:
kill_slots.append((pos[1]-1,pos[0]-1))
if pos[0]+1<=7:
if chess_board[pos[1]-1][pos[0]+1]<0:
kill_slots.append((pos[1]-1,pos[0]+1))
if chess_board[pos[1]-1][pos[0]]==0:
# print((pos[1]-1,pos[0]))
avble_slots.append((pos[1]-1,pos[0]))
if chess_board[pos[1]][pos[0]]<0:
if pos[1]==1:
if chess_board[pos[1]+1][pos[0]]==0:
avble_slots.append((pos[1]+1,pos[0]))
if chess_board[pos[1]+2][pos[0]]==0:
avble_slots.append((pos[1]+2,pos[0]))
if pos[0]+1<=7:
if chess_board[pos[1]+1][pos[0]+1]>0:
kill_slots.append((pos[1]+1,pos[0]+1))
if pos[0]-1>=0:
if chess_board[pos[1]+1][pos[0]-1]>0:
kill_slots.append((pos[1]+1,pos[0]-1))
if chess_board[pos[1]+1][pos[0]]==0:
avble_slots.append((pos[1]+1,pos[0]))
except:
pass
return avble_slots,kill_slots
def rook(use_curr_pos,p):
global current_pos,chess_board
if use_curr_pos==True:
pos=conv_to_arr(current_pos)
else:
pos=p
# print(pos)
avble_slots=[]
kill_slots=[]
if chess_board[pos[1]][pos[0]]>0:
xl=1
while (pos[0]-xl)>-1:
# print(pos[0]-xl)
if chess_board[pos[1]][pos[0]-xl]<0:
kill_slots.append((pos[1],pos[0]-xl))
break
if chess_board[pos[1]][pos[0]-xl]==0:
avble_slots.append((pos[1],pos[0]-xl))
else:
break
xl+=1
xl=1
while pos[0]+xl<8:
# print(pos[0]+xl)
if chess_board[pos[1]][pos[0]+xl]<0:
kill_slots.append((pos[1],pos[0]+xl))
break
if chess_board[pos[1]][pos[0]+xl]==0:
avble_slots.append((pos[1],pos[0]+xl))
else:
break
xl+=1
yl=1
while pos[1]-yl>-1:
# print(pos[0]-yl)
if chess_board[pos[1]-yl][pos[0]]<0:
kill_slots.append((pos[1]-yl,pos[0]))
break
if chess_board[pos[1]-yl][pos[0]]==0:
avble_slots.append((pos[1]-yl,pos[0]))
else:
break
yl+=1
yl=1
while pos[1]+yl<8:
# print(pos[0]+yl)
if chess_board[pos[1]+yl][pos[0]]<0:
kill_slots.append((pos[1]+yl,pos[0]))
break
if chess_board[pos[1]+yl][pos[0]]==0:
avble_slots.append((pos[1]+yl,pos[0]))
else:
break
yl+=1
else:
xl=1
while (pos[0]-xl)>-1:
# print(pos[0]-xl)
if chess_board[pos[1]][pos[0]-xl]>0:
kill_slots.append((pos[1],pos[0]-xl))
break
if chess_board[pos[1]][pos[0]-xl]==0:
avble_slots.append((pos[1],pos[0]-xl))
else:
break
xl+=1
xl=1
while pos[0]+xl<8:
# print(pos[0]+xl)
if chess_board[pos[1]][pos[0]+xl]>0:
kill_slots.append((pos[1],pos[0]+xl))
break
if chess_board[pos[1]][pos[0]+xl]==0:
avble_slots.append((pos[1],pos[0]+xl))
else:
break
xl+=1
yl=1
while pos[1]-yl>-1:
# print(pos[0]-yl)
if chess_board[pos[1]-yl][pos[0]]>0:
kill_slots.append((pos[1]-yl,pos[0]))
break
if chess_board[pos[1]-yl][pos[0]]==0:
avble_slots.append((pos[1]-yl,pos[0]))
else:
break
yl+=1
yl=1
while pos[1]+yl<8:
# print(pos[0]+yl)
if chess_board[pos[1]+yl][pos[0]]>0:
kill_slots.append((pos[1]+yl,pos[0]))
break
if chess_board[pos[1]+yl][pos[0]]==0:
avble_slots.append((pos[1]+yl,pos[0]))
else:
break
yl+=1
return avble_slots,kill_slots
def knight(use_curr_pos,p):
global current_pos,chess_board
if use_curr_pos==True:
pos=conv_to_arr(current_pos)
else:
pos=p
# print(pos)
avble_slots=[]
kill_slots=[]
#case 1 2 hori right 1 vert , up
if chess_board[pos[1]][pos[0]]>0:
if pos[1]+1<=7 and pos[0]+2<=7:
if chess_board[pos[1]+1][pos[0]+2]==0:
avble_slots.append((pos[1]+1,pos[0]+2))
if chess_board[pos[1]+1][pos[0]+2]<0:
kill_slots.append((pos[1]+1,pos[0]+2))
#case 2 2hori right 1 vert down
if pos[1]-1>=0 and pos[0]+2<=7:
if chess_board[pos[1]-1][pos[0]+2]==0:
avble_slots.append((pos[1]-1,pos[0]+2))
if chess_board[pos[1]-1][pos[0]+2]<0:
kill_slots.append((pos[1]-1,pos[0]+2))
# case 3 2 hori left 1 vert up
if pos[1]+1<=7 and pos[0]-2>=0:
if chess_board[pos[1]+1][pos[0]-2]==0:
avble_slots.append((pos[1]+1,pos[0]-2))
if chess_board[pos[1]+1][pos[0]-2]<0:
kill_slots.append((pos[1]+1,pos[0]-2))
#case 4 2 hori left 1 vert down
if pos[1]-1>=0 and pos[0]-2>=0:
if chess_board[pos[1]-1][pos[0]-2]==0:
avble_slots.append((pos[1]-1,pos[0]-2))
if chess_board[pos[1]-1][pos[0]-2]<0:
kill_slots.append((pos[1]-1,pos[0]-2))
#case 5 2 vert up 1 hori right
if pos[1]+2<=7 and pos[0]+1<=7:
if chess_board[pos[1]+2][pos[0]+1]==0:
avble_slots.append((pos[1]+2,pos[0]+1))
if chess_board[pos[1]+2][pos[0]+1]<0:
kill_slots.append((pos[1]+2,pos[0]+1))
#case 6 2 vert up 1 hori left
if pos[1]+2<=7 and pos[0]-1>=0:
if chess_board[pos[1]+2][pos[0]-1]==0:
avble_slots.append((pos[1]+2,pos[0]-1))
if chess_board[pos[1]+2][pos[0]-1]<0:
kill_slots.append((pos[1]+2,pos[0]-1))
#case 7 2 vert down 1 hori right
if pos[1]-2>=0 and pos[0]+1<=7:
if chess_board[pos[1]-2][pos[0]+1]==0:
avble_slots.append((pos[1]-2,pos[0]+1))
if chess_board[pos[1]-2][pos[0]+1]<0:
kill_slots.append((pos[1]-2,pos[0]+1))
#case 8 2 vert down 1 hori left
if pos[1]-2>=0 and pos[0]-1>=0:
if chess_board[pos[1]-2][pos[0]-1]==0:
avble_slots.append((pos[1]-2,pos[0]-1))
if chess_board[pos[1]-2][pos[0]-1]<0:
kill_slots.append((pos[1]-2,pos[0]-1))
else:
if pos[1]+1<=7 and pos[0]+2<=7:
if chess_board[pos[1]+1][pos[0]+2]==0:
avble_slots.append((pos[1]+1,pos[0]+2))
if chess_board[pos[1]+1][pos[0]+2]>0:
kill_slots.append((pos[1]+1,pos[0]+2))
#case 2 2hori right 1 vert down
if pos[1]-1>=0 and pos[0]+2<=7:
if chess_board[pos[1]-1][pos[0]+2]==0:
avble_slots.append((pos[1]-1,pos[0]+2))
if chess_board[pos[1]-1][pos[0]+2]>0:
kill_slots.append((pos[1]-1,pos[0]+2))
# case 3 2 hori left 1 vert up
if pos[1]+1<=7 and pos[0]-2>=0:
if chess_board[pos[1]+1][pos[0]-2]==0:
avble_slots.append((pos[1]+1,pos[0]-2))
if chess_board[pos[1]+1][pos[0]-2]>0:
kill_slots.append((pos[1]+1,pos[0]-2))
#case 4 2 hori left 1 vert down
if pos[1]-1>=0 and pos[0]-2>=0:
if chess_board[pos[1]-1][pos[0]-2]==0:
avble_slots.append((pos[1]-1,pos[0]-2))
if chess_board[pos[1]-1][pos[0]-2]>0:
kill_slots.append((pos[1]-1,pos[0]-2))
#case 5 2 vert up 1 hori right
if pos[1]+2<=7 and pos[0]+1<=7:
if chess_board[pos[1]+2][pos[0]+1]==0:
avble_slots.append((pos[1]+2,pos[0]+1))
if chess_board[pos[1]+2][pos[0]+1]>0:
kill_slots.append((pos[1]+2,pos[0]+1))
#case 6 2 vert up 1 hori left
if pos[1]+2<=7 and pos[0]-1>=0:
if chess_board[pos[1]+2][pos[0]-1]==0:
avble_slots.append((pos[1]+2,pos[0]-1))
if chess_board[pos[1]+2][pos[0]-1]>0:
kill_slots.append((pos[1]+2,pos[0]-1))
#case 7 2 vert down 1 hori right
if pos[1]-2>=0 and pos[0]+1<=7:
if chess_board[pos[1]-2][pos[0]+1]==0:
avble_slots.append((pos[1]-2,pos[0]+1))
if chess_board[pos[1]-2][pos[0]+1]>0:
kill_slots.append((pos[1]-2,pos[0]+1))
#case 8 2 vert down 1 hori left
if pos[1]-2>=0 and pos[0]-1>=0:
if chess_board[pos[1]-2][pos[0]-1]==0:
avble_slots.append((pos[1]-2,pos[0]-1))
if chess_board[pos[1]-2][pos[0]-1]>0:
kill_slots.append((pos[1]-2,pos[0]-1))
return avble_slots,kill_slots
def bishop(use_curr_pos,p):
global current_pos,chess_board
if use_curr_pos==True:
pos=conv_to_arr(current_pos)
else:
pos=p
# print(pos)
avble_slots=[]
kill_slots=[]
if chess_board[pos[1]][pos[0]]>0:
xl=1
while (pos[0]-xl)>-1 and (pos[1]-xl)>-1:
# print(pos[0]-xl)
if chess_board[pos[1]-xl][pos[0]-xl]<0:
kill_slots.append((pos[1]-xl,pos[0]-xl))
break
if chess_board[pos[1]-xl][pos[0]-xl]==0:
avble_slots.append((pos[1]-xl,pos[0]-xl))
else:
break
xl+=1
xl=1
while (pos[0]+xl<8) and (pos[1]+xl<8):
# print(pos[0]+xl)
if chess_board[pos[1]+xl][pos[0]+xl]<0:
kill_slots.append((pos[1]+xl,pos[0]+xl))
break
if chess_board[pos[1]+xl][pos[0]+xl]==0:
avble_slots.append((pos[1]+xl,pos[0]+xl))
else:
break
xl+=1
xl=1
yl=1
while (pos[1]-yl>-1) and (pos[0]+yl<8):
# print(pos[0]-yl)
if chess_board[pos[1]-yl][pos[0]+yl]<0:
kill_slots.append((pos[1]-yl,pos[0]+yl))
break
if chess_board[pos[1]-yl][pos[0]+yl]==0:
avble_slots.append((pos[1]-yl,pos[0]+yl))
else:
break
yl+=1
yl=1
while (pos[1]+yl<8) and (pos[0]-yl>-1):
# print(pos[0]+yl)
if chess_board[pos[1]+yl][pos[0]-yl]<0:
kill_slots.append((pos[1]+yl,pos[0]-yl))
break
if chess_board[pos[1]+yl][pos[0]-yl]==0:
avble_slots.append((pos[1]+yl,pos[0]-yl))
else:
break
yl+=1
else:
xl=1
while (pos[0]-xl)>-1 and (pos[1]-xl)>-1:
# print(pos[0]-xl)
if chess_board[pos[1]-xl][pos[0]-xl]>0:
kill_slots.append((pos[1]-xl,pos[0]-xl))
break
if chess_board[pos[1]-xl][pos[0]-xl]==0:
avble_slots.append((pos[1]-xl,pos[0]-xl))
else:
break
xl+=1
xl=1
while (pos[0]+xl<8) and (pos[1]+xl<8):
# print(pos[0]+xl)
if chess_board[pos[1]+xl][pos[0]+xl]>0:
kill_slots.append((pos[1]+xl,pos[0]+xl))
break
if chess_board[pos[1]+xl][pos[0]+xl]==0:
avble_slots.append((pos[1]+xl,pos[0]+xl))
else:
break
xl+=1
xl=1
yl=1
while (pos[1]-yl>-1) and (pos[0]+yl<8):
# print(pos[0]-yl)
if chess_board[pos[1]-yl][pos[0]+yl]>0:
kill_slots.append((pos[1]-yl,pos[0]+yl))
break
if chess_board[pos[1]-yl][pos[0]+yl]==0:
avble_slots.append((pos[1]-yl,pos[0]+yl))
else:
break
yl+=1
yl=1
while (pos[1]+yl<8) and (pos[0]-yl>-1):
# print(pos[0]+yl)
if chess_board[pos[1]+yl][pos[0]-yl]>0:
kill_slots.append((pos[1]+yl,pos[0]-yl))
break
if chess_board[pos[1]+yl][pos[0]-yl]==0:
avble_slots.append((pos[1]+yl,pos[0]-yl))
else:
break
yl+=1
return avble_slots,kill_slots
def queen(use_curr_pos,p):
global current_pos,chess_board
if use_curr_pos==True:
pos=conv_to_arr(current_pos)
else:
pos=p
# print(pos)
avble_slots=[]
kill_slots=[]
if chess_board[pos[1]][pos[0]]>0:
xl=1
while (pos[0]-xl)>-1:
# print(pos[0]-xl)
if chess_board[pos[1]][pos[0]-xl]<0:
kill_slots.append((pos[1],pos[0]-xl))
break
if chess_board[pos[1]][pos[0]-xl]==0:
avble_slots.append((pos[1],pos[0]-xl))
else:
break
xl+=1
xl=1
while pos[0]+xl<8:
# print(pos[0]+xl)
if chess_board[pos[1]][pos[0]+xl]<0:
kill_slots.append((pos[1],pos[0]+xl))
break
if chess_board[pos[1]][pos[0]+xl]==0:
avble_slots.append((pos[1],pos[0]+xl))
else:
break
xl+=1
yl=1
while pos[1]-yl>-1:
# print(pos[0]-yl)
if chess_board[pos[1]-yl][pos[0]]<0:
kill_slots.append((pos[1]-yl,pos[0]))
break
if chess_board[pos[1]-yl][pos[0]]==0:
avble_slots.append((pos[1]-yl,pos[0]))
else:
break
yl+=1
yl=1
while pos[1]+yl<8:
# print(pos[0]+yl)
if chess_board[pos[1]+yl][pos[0]]<0:
kill_slots.append((pos[1]+yl,pos[0]))
break
if chess_board[pos[1]+yl][pos[0]]==0:
avble_slots.append((pos[1]+yl,pos[0]))
else:
break
yl+=1
else:
xl=1
while (pos[0]-xl)>-1:
# print(pos[0]-xl)
if chess_board[pos[1]][pos[0]-xl]>0:
kill_slots.append((pos[1],pos[0]-xl))
break
if chess_board[pos[1]][pos[0]-xl]==0:
avble_slots.append((pos[1],pos[0]-xl))
else:
break
xl+=1
xl=1
while pos[0]+xl<8:
# print(pos[0]+xl)
if chess_board[pos[1]][pos[0]+xl]>0:
kill_slots.append((pos[1],pos[0]+xl))
break
if chess_board[pos[1]][pos[0]+xl]==0:
avble_slots.append((pos[1],pos[0]+xl))
else:
break
xl+=1
yl=1
while pos[1]-yl>-1:
# print(pos[0]-yl)
if chess_board[pos[1]-yl][pos[0]]>0:
kill_slots.append((pos[1]-yl,pos[0]))
break
if chess_board[pos[1]-yl][pos[0]]==0:
avble_slots.append((pos[1]-yl,pos[0]))
else:
break
yl+=1
yl=1
while pos[1]+yl<8:
# print(pos[0]+yl)
if chess_board[pos[1]+yl][pos[0]]>0:
kill_slots.append((pos[1]+yl,pos[0]))
break
if chess_board[pos[1]+yl][pos[0]]==0:
avble_slots.append((pos[1]+yl,pos[0]))
else:
break
yl+=1
if chess_board[pos[1]][pos[0]]>0:
xl=1
while (pos[0]-xl)>-1 and (pos[1]-xl)>-1:
# print(pos[0]-xl)
if chess_board[pos[1]-xl][pos[0]-xl]<0:
kill_slots.append((pos[1]-xl,pos[0]-xl))
break
if chess_board[pos[1]-xl][pos[0]-xl]==0:
avble_slots.append((pos[1]-xl,pos[0]-xl))
else:
break
xl+=1
xl=1
while (pos[0]+xl<8) and (pos[1]+xl<8):
# print(pos[0]+xl)
if chess_board[pos[1]+xl][pos[0]+xl]<0:
kill_slots.append((pos[1]+xl,pos[0]+xl))
break
if chess_board[pos[1]+xl][pos[0]+xl]==0:
avble_slots.append((pos[1]+xl,pos[0]+xl))
else:
break
xl+=1
yl=1
while (pos[1]-yl>-1) and (pos[0]+yl<8):
# print(pos[0]-yl)
if chess_board[pos[1]-yl][pos[0]+yl]<0:
kill_slots.append((pos[1]-yl,pos[0]+yl))
break
if chess_board[pos[1]-yl][pos[0]+yl]==0:
avble_slots.append((pos[1]-yl,pos[0]+yl))
else:
break
yl+=1
yl=1
while (pos[1]+yl<8) and (pos[0]-yl>-1):
# print(pos[0]+yl)
if chess_board[pos[1]+yl][pos[0]-yl]<0:
kill_slots.append((pos[1]+yl,pos[0]-yl))
break
if chess_board[pos[1]+yl][pos[0]-yl]==0:
avble_slots.append((pos[1]+yl,pos[0]-yl))
else:
break
yl+=1
else:
xl=1
while (pos[0]-xl)>-1 and (pos[1]-xl)>-1:
# print(pos[0]-xl)
if chess_board[pos[1]-xl][pos[0]-xl]>0:
kill_slots.append((pos[1]-xl,pos[0]-xl))
break
if chess_board[pos[1]-xl][pos[0]-xl]==0:
avble_slots.append((pos[1]-xl,pos[0]-xl))
else:
break
xl+=1
xl=1
while (pos[0]+xl<8) and (pos[1]+xl<8):
# print(pos[0]+xl)
if chess_board[pos[1]+xl][pos[0]+xl]>0:
kill_slots.append((pos[1]+xl,pos[0]+xl))
break
if chess_board[pos[1]+xl][pos[0]+xl]==0:
avble_slots.append((pos[1]+xl,pos[0]+xl))
else:
break
xl+=1
yl=1
while (pos[1]-yl>-1) and (pos[0]+yl<8):
# print(pos[0]-yl)
if chess_board[pos[1]-yl][pos[0]+yl]>0:
kill_slots.append((pos[1]-yl,pos[0]+yl))
break
if chess_board[pos[1]-yl][pos[0]+yl]==0:
avble_slots.append((pos[1]-yl,pos[0]+yl))
else:
break
yl+=1
yl=1
while (pos[1]+yl<8) and (pos[0]-yl>-1):
# print(pos[0]+yl)
if chess_board[pos[1]+yl][pos[0]-yl]>0:
kill_slots.append((pos[1]+yl,pos[0]-yl))
break
if chess_board[pos[1]+yl][pos[0]-yl]==0:
avble_slots.append((pos[1]+yl,pos[0]-yl))
else:
break
yl+=1
return avble_slots,kill_slots
def king(use_curr_pos,p):
global current_pos,chess_board
if use_curr_pos==True:
pos=conv_to_arr(current_pos)
else:
pos=p
# print(pos)
avble_slots=[]
kill_slots=[]
if chess_board[pos[1]][pos[0]]>0:
if pos[1]-1>=0:
if chess_board[pos[1]-1][pos[0]]<0:
kill_slots.append((pos[1]-1,pos[0]))
if chess_board[pos[1]-1][pos[0]]==0:
avble_slots.append((pos[1]-1,pos[0]))
if pos[1]+1<8:
if chess_board[pos[1]+1][pos[0]]<0:
kill_slots.append((pos[1]+1,pos[0]))
if chess_board[pos[1]+1][pos[0]]==0:
avble_slots.append((pos[1]+1,pos[0]))
if pos[0]-1>=0:
if chess_board[pos[1]][pos[0]-1]<0:
kill_slots.append((pos[1],pos[0]-1))
if chess_board[pos[1]][pos[0]-1]==0:
avble_slots.append((pos[1],pos[0]-1))
if pos[0]+1<8:
if chess_board[pos[1]][pos[0]+1]<0:
kill_slots.append((pos[1],pos[0]+1))
if chess_board[pos[1]][pos[0]+1]==0:
avble_slots.append((pos[1],pos[0]+1))
if pos[1]-1>=0 and pos[0]-1>=0:
if chess_board[pos[1]-1][pos[0]-1]<0:
kill_slots.append((pos[1]-1,pos[0]-1))
if chess_board[pos[1]-1][pos[0]-1]==0:
avble_slots.append((pos[1]-1,pos[0]-1))
if pos[1]-1>=0 and pos[0]+1<8:
if chess_board[pos[1]-1][pos[0]+1]<0:
kill_slots.append((pos[1]-1,pos[0]+1))
if chess_board[pos[1]-1][pos[0]+1]==0:
avble_slots.append((pos[1]-1,pos[0]+1))
        if pos[1]+1<8 and pos[0]-1>=0:
if chess_board[pos[1]+1][pos[0]-1]<0:
kill_slots.append((pos[1]+1,pos[0]-1))
if chess_board[pos[1]+1][pos[0]-1]==0:
avble_slots.append((pos[1]+1,pos[0]-1))
        if pos[1]+1<8 and pos[0]+1<8:
if chess_board[pos[1]+1][pos[0]+1]<0:
kill_slots.append((pos[1]+1,pos[0]+1))
if chess_board[pos[1]+1][pos[0]+1]==0:
avble_slots.append((pos[1]+1,pos[0]+1))
else:
if pos[1]-1>=0:
if chess_board[pos[1]-1][pos[0]]>0:
kill_slots.append((pos[1]-1,pos[0]))
if chess_board[pos[1]-1][pos[0]]==0:
avble_slots.append((pos[1]-1,pos[0]))
if pos[1]+1<8:
if chess_board[pos[1]+1][pos[0]]>0:
kill_slots.append((pos[1]+1,pos[0]))
if chess_board[pos[1]+1][pos[0]]==0:
avble_slots.append((pos[1]+1,pos[0]))
if pos[0]-1>=0:
if chess_board[pos[1]][pos[0]-1]>0:
kill_slots.append((pos[1],pos[0]-1))
if chess_board[pos[1]][pos[0]-1]==0:
avble_slots.append((pos[1],pos[0]-1))
if pos[0]+1<8:
if chess_board[pos[1]][pos[0]+1]>0:
kill_slots.append((pos[1],pos[0]+1))
if chess_board[pos[1]][pos[0]+1]==0:
avble_slots.append((pos[1],pos[0]+1))
if pos[1]-1>=0 and pos[0]-1>=0:
if chess_board[pos[1]-1][pos[0]-1]>0:
kill_slots.append((pos[1]-1,pos[0]-1))
if chess_board[pos[1]-1][pos[0]-1]==0:
avble_slots.append((pos[1]-1,pos[0]-1))
if pos[1]-1>=0 and pos[0]+1<8:
if chess_board[pos[1]-1][pos[0]+1]>0:
kill_slots.append((pos[1]-1,pos[0]+1))
if chess_board[pos[1]-1][pos[0]+1]==0:
avble_slots.append((pos[1]-1,pos[0]+1))
if pos[1]+1<8 and pos[0]-1>=0:
if chess_board[pos[1]+1][pos[0]-1]>0:
kill_slots.append((pos[1]+1,pos[0]-1))
if chess_board[pos[1]+1][pos[0]-1]==0:
avble_slots.append((pos[1]+1,pos[0]-1))
if pos[1]+1<8 and pos[0]+1<8:
if chess_board[pos[1]+1][pos[0]+1]>0:
kill_slots.append((pos[1]+1,pos[0]+1))
if chess_board[pos[1]+1][pos[0]+1]==0:
avble_slots.append((pos[1]+1,pos[0]+1))
return avble_slots,kill_slots
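# Check detection: determine_check_1 / determine_check_2 locate the white (6) or
# black (-6) king, trace rook, bishop and knight patterns outward from the king's
# square to collect the enemy pieces that can "see" it, then ask each of those
# pieces for its own capture squares; the king is in check if its square appears
# in any of those capture lists.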
def determine_check_1():
global current_pos,chess_board
# print(pos)
for i in range(8):
for j in range(8):
if chess_board[i][j]==6:
pos=(j,i)
avble_slots=[]
kill_slots=[]
if chess_board[pos[1]][pos[0]]>0:
xl=1
while (pos[0]-xl)>-1:
# print(pos[0]-xl)
if chess_board[pos[1]][pos[0]-xl]<0:
kill_slots.append((pos[1],pos[0]-xl))
break
if chess_board[pos[1]][pos[0]-xl]==0:
avble_slots.append((pos[1],pos[0]-xl))
else:
break
xl+=1
xl=1
while pos[0]+xl<8:
# print(pos[0]+xl)
if chess_board[pos[1]][pos[0]+xl]<0:
kill_slots.append((pos[1],pos[0]+xl))
break
if chess_board[pos[1]][pos[0]+xl]==0:
avble_slots.append((pos[1],pos[0]+xl))
else:
break
xl+=1
yl=1
while pos[1]-yl>-1:
# print(pos[0]-yl)
if chess_board[pos[1]-yl][pos[0]]<0:
kill_slots.append((pos[1]-yl,pos[0]))
break
if chess_board[pos[1]-yl][pos[0]]==0:
avble_slots.append((pos[1]-yl,pos[0]))
else:
break
yl+=1
yl=1
while pos[1]+yl<8:
# print(pos[0]+yl)
if chess_board[pos[1]+yl][pos[0]]<0:
kill_slots.append((pos[1]+yl,pos[0]))
break
if chess_board[pos[1]+yl][pos[0]]==0:
avble_slots.append((pos[1]+yl,pos[0]))
else:
break
yl+=1
if chess_board[pos[1]][pos[0]]>0:
xl=1
while (pos[0]-xl)>-1 and (pos[1]-xl)>-1:
# print(pos[0]-xl)
if chess_board[pos[1]-xl][pos[0]-xl]<0:
kill_slots.append((pos[1]-xl,pos[0]-xl))
break
if chess_board[pos[1]-xl][pos[0]-xl]==0:
avble_slots.append((pos[1]-xl,pos[0]-xl))
else:
break
xl+=1
xl=1
while (pos[0]+xl<8) and (pos[1]+xl<8):
# print(pos[0]+xl)
if chess_board[pos[1]+xl][pos[0]+xl]<0:
kill_slots.append((pos[1]+xl,pos[0]+xl))
break
if chess_board[pos[1]+xl][pos[0]+xl]==0:
avble_slots.append((pos[1]+xl,pos[0]+xl))
else:
break
xl+=1
yl=1
while (pos[1]-yl>-1) and (pos[0]+yl<8):
# print(pos[0]-yl)
if chess_board[pos[1]-yl][pos[0]+yl]<0:
kill_slots.append((pos[1]-yl,pos[0]+yl))
break
if chess_board[pos[1]-yl][pos[0]+yl]==0:
avble_slots.append((pos[1]-yl,pos[0]+yl))
else:
break
yl+=1
yl=1
while (pos[1]+yl<8) and (pos[0]-yl>-1):
# print(pos[0]+yl)
# print("hoooo")
if chess_board[pos[1]+yl][pos[0]-yl]<0:
kill_slots.append((pos[1]+yl,pos[0]-yl))
break
if chess_board[pos[1]+yl][pos[0]-yl]==0:
avble_slots.append((pos[1]+yl,pos[0]-yl))
else:
break
yl+=1
#case 1 2 hori right 1 vert , up
if chess_board[pos[1]][pos[0]]>0:
if pos[1]+1<=7 and pos[0]+2<=7:
if chess_board[pos[1]+1][pos[0]+2]==0:
avble_slots.append((pos[1]+1,pos[0]+2))
if chess_board[pos[1]+1][pos[0]+2]<0:
kill_slots.append((pos[1]+1,pos[0]+2))
#case 2 2hori right 1 vert down
if pos[1]-1>=0 and pos[0]+2<=7:
if chess_board[pos[1]-1][pos[0]+2]==0:
avble_slots.append((pos[1]-1,pos[0]+2))
if chess_board[pos[1]-1][pos[0]+2]<0:
kill_slots.append((pos[1]-1,pos[0]+2))
# case 3 2 hori left 1 vert up
if pos[1]+1<=7 and pos[0]-2>=0:
if chess_board[pos[1]+1][pos[0]-2]==0:
avble_slots.append((pos[1]+1,pos[0]-2))
if chess_board[pos[1]+1][pos[0]-2]<0:
kill_slots.append((pos[1]+1,pos[0]-2))
#case 4 2 hori left 1 vert down
if pos[1]-1>=0 and pos[0]-2>=0:
if chess_board[pos[1]-1][pos[0]-2]==0:
avble_slots.append((pos[1]-1,pos[0]-2))
if chess_board[pos[1]-1][pos[0]-2]<0:
kill_slots.append((pos[1]-1,pos[0]-2))
#case 5 2 vert up 1 hori right
if pos[1]+2<=7 and pos[0]+1<=7:
if chess_board[pos[1]+2][pos[0]+1]==0:
avble_slots.append((pos[1]+2,pos[0]+1))
if chess_board[pos[1]+2][pos[0]+1]<0:
kill_slots.append((pos[1]+2,pos[0]+1))
#case 6 2 vert up 1 hori left
if pos[1]+2<=7 and pos[0]-1>=0:
if chess_board[pos[1]+2][pos[0]-1]==0:
avble_slots.append((pos[1]+2,pos[0]-1))
if chess_board[pos[1]+2][pos[0]-1]<0:
kill_slots.append((pos[1]+2,pos[0]-1))
#case 7 2 vert down 1 hori right
if pos[1]-2>=0 and pos[0]+1<=7:
if chess_board[pos[1]-2][pos[0]+1]==0:
avble_slots.append((pos[1]-2,pos[0]+1))
if chess_board[pos[1]-2][pos[0]+1]<0:
kill_slots.append((pos[1]-2,pos[0]+1))
#case 8 2 vert down 1 hori left
if pos[1]-2>=0 and pos[0]-1>=0:
if chess_board[pos[1]-2][pos[0]-1]==0:
avble_slots.append((pos[1]-2,pos[0]-1))
if chess_board[pos[1]-2][pos[0]-1]<0:
kill_slots.append((pos[1]-2,pos[0]-1))
check=False
check_available_slot=[]
check_kill_slot=[]
# print(kill_slots)
for pieces in kill_slots:
if chess_board[pieces[0]][pieces[1]]==-1:
check_kill_slot+=pawn(False,(pieces[1],pieces[0]))[1]
if chess_board[pieces[0]][pieces[1]]==-2:
check_kill_slot+=rook(False,(pieces[1],pieces[0]))[1]
if chess_board[pieces[0]][pieces[1]]==-3:
check_kill_slot+=knight(False,(pieces[1],pieces[0]))[1]
if chess_board[pieces[0]][pieces[1]]==-4:
check_kill_slot+=bishop(False,(pieces[1],pieces[0]))[1]
if chess_board[pieces[0]][pieces[1]]==-5:
# print("hooo)")
check_kill_slot+=queen(False,(pieces[1],pieces[0]))[1]
if chess_board[pieces[0]][pieces[1]]==-6:
check_kill_slot+=king(False,(pieces[1],pieces[0]))[1]
# print(check_kill_slot)
# print(pos)
for kill in check_kill_slot:
if kill==(pos[1],pos[0]):
check=True
return check
def determine_check_2():
global current_pos,chess_board
# print(pos)
for i in range(8):
for j in range(8):
if chess_board[i][j]==-6:
pos=(j,i)
avble_slots=[]
kill_slots=[]
xl=1
while (pos[0]-xl)>-1:
# print(pos[0]-xl)
if chess_board[pos[1]][pos[0]-xl]>0:
kill_slots.append((pos[1],pos[0]-xl))
break
if chess_board[pos[1]][pos[0]-xl]==0:
avble_slots.append((pos[1],pos[0]-xl))
else:
break
xl+=1
xl=1
while pos[0]+xl<8:
# print(pos[0]+xl)
if chess_board[pos[1]][pos[0]+xl]>0:
kill_slots.append((pos[1],pos[0]+xl))
break
if chess_board[pos[1]][pos[0]+xl]==0:
avble_slots.append((pos[1],pos[0]+xl))
else:
break
xl+=1
yl=1
while pos[1]-yl>-1:
# print(pos[0]-yl)
if chess_board[pos[1]-yl][pos[0]]>0:
kill_slots.append((pos[1]-yl,pos[0]))
break
if chess_board[pos[1]-yl][pos[0]]==0:
avble_slots.append((pos[1]-yl,pos[0]))
else:
break
yl+=1
yl=1
while pos[1]+yl<8:
# print(pos[0]+yl)
if chess_board[pos[1]+yl][pos[0]]>0:
# print("bbbbbbbbbbbbbad")
kill_slots.append((pos[1]+yl,pos[0]))
break
if chess_board[pos[1]+yl][pos[0]]==0:
avble_slots.append((pos[1]+yl,pos[0]))
else:
break
yl+=1
xl=1
while (pos[0]-xl)>-1 and (pos[1]-xl)>-1:
# print(pos[0]-xl)
if chess_board[pos[1]-xl][pos[0]-xl]>0:
kill_slots.append((pos[1]-xl,pos[0]-xl))
break
if chess_board[pos[1]-xl][pos[0]-xl]==0:
avble_slots.append((pos[1]-xl,pos[0]-xl))
else:
break
xl+=1
xl=1
while (pos[0]+xl<8) and (pos[1]+xl<8):
# print(pos[0]+xl)
if chess_board[pos[1]+xl][pos[0]+xl]>0:
kill_slots.append((pos[1]+xl,pos[0]+xl))
break
if chess_board[pos[1]+xl][pos[0]+xl]==0:
avble_slots.append((pos[1]+xl,pos[0]+xl))
else:
break
xl+=1
yl=1
while (pos[1]-yl>-1) and (pos[0]+yl<8):
# print(pos[0]-yl)
if chess_board[pos[1]-yl][pos[0]+yl]>0:
kill_slots.append((pos[1]-yl,pos[0]+yl))
break
if chess_board[pos[1]-yl][pos[0]+yl]==0:
avble_slots.append((pos[1]-yl,pos[0]+yl))
else:
break
yl+=1
yl=1
while (pos[1]+yl<8) and (pos[0]-yl>-1):
# print(pos[0]+yl)
if chess_board[pos[1]+yl][pos[0]-yl]>0:
kill_slots.append((pos[1]+yl,pos[0]-yl))
break
if chess_board[pos[1]+yl][pos[0]-yl]==0:
avble_slots.append((pos[1]+yl,pos[0]-yl))
else:
break
yl+=1
if pos[1]+1<=7 and pos[0]+2<=7:
if chess_board[pos[1]+1][pos[0]+2]==0:
avble_slots.append((pos[1]+1,pos[0]+2))
if chess_board[pos[1]+1][pos[0]+2]>0:
kill_slots.append((pos[1]+1,pos[0]+2))
#case 2 2hori right 1 vert down
if pos[1]-1>=0 and pos[0]+2<=7:
if chess_board[pos[1]-1][pos[0]+2]==0:
avble_slots.append((pos[1]-1,pos[0]+2))
if chess_board[pos[1]-1][pos[0]+2]>0:
kill_slots.append((pos[1]-1,pos[0]+2))
# case 3 2 hori left 1 vert up
if pos[1]+1<=7 and pos[0]-2>=0:
if chess_board[pos[1]+1][pos[0]-2]==0:
avble_slots.append((pos[1]+1,pos[0]-2))
if chess_board[pos[1]+1][pos[0]-2]>0:
kill_slots.append((pos[1]+1,pos[0]-2))
#case 4 2 hori left 1 vert down
if pos[1]-1>=0 and pos[0]-2>=0:
if chess_board[pos[1]-1][pos[0]-2]==0:
avble_slots.append((pos[1]-1,pos[0]-2))
if chess_board[pos[1]-1][pos[0]-2]>0:
kill_slots.append((pos[1]-1,pos[0]-2))
#case 5 2 vert up 1 hori right
if pos[1]+2<=7 and pos[0]+1<=7:
if chess_board[pos[1]+2][pos[0]+1]==0:
avble_slots.append((pos[1]+2,pos[0]+1))
if chess_board[pos[1]+2][pos[0]+1]>0:
kill_slots.append((pos[1]+2,pos[0]+1))
#case 6 2 vert up 1 hori left
if pos[1]+2<=7 and pos[0]-1>=0:
if chess_board[pos[1]+2][pos[0]-1]==0:
avble_slots.append((pos[1]+2,pos[0]-1))
if chess_board[pos[1]+2][pos[0]-1]>0:
kill_slots.append((pos[1]+2,pos[0]-1))
#case 7 2 vert down 1 hori right
if pos[1]-2>=0 and pos[0]+1<=7:
if chess_board[pos[1]-2][pos[0]+1]==0:
avble_slots.append((pos[1]-2,pos[0]+1))
if chess_board[pos[1]-2][pos[0]+1]>0:
kill_slots.append((pos[1]-2,pos[0]+1))
#case 8 2 vert down 1 hori left
if pos[1]-2>=0 and pos[0]-1>=0:
if chess_board[pos[1]-2][pos[0]-1]==0:
avble_slots.append((pos[1]-2,pos[0]-1))
if chess_board[pos[1]-2][pos[0]-1]>0:
kill_slots.append((pos[1]-2,pos[0]-1))
check=False
check_available_slot=[]
check_kill_slot=[]
# print(kill_slots)
for pieces in kill_slots:
if chess_board[pieces[0]][pieces[1]]==1:
check_kill_slot+=pawn(False,(pieces[1],pieces[0]))[1]
if chess_board[pieces[0]][pieces[1]]==2:
check_kill_slot+=rook(False,(pieces[1],pieces[0]))[1]
if chess_board[pieces[0]][pieces[1]]==3:
check_kill_slot+=knight(False,(pieces[1],pieces[0]))[1]
if chess_board[pieces[0]][pieces[1]]==4:
check_kill_slot+=bishop(False,(pieces[1],pieces[0]))[1]
if chess_board[pieces[0]][pieces[1]]==5:
check_kill_slot+=queen(False,(pieces[1],pieces[0]))[1]
if chess_board[pieces[0]][pieces[1]]==6:
check_kill_slot+=king(False,(pieces[1],pieces[0]))[1]
# print(check_kill_slot)
for kill in check_kill_slot:
if kill==(pos[1],pos[0]):
check=True
return check
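# Checkmate test: on a deep copy of the board, try every generated move and
# capture for the side that is in check and re-run the corresponding
# determine_check_* routine; if no move leaves the king safe (and still on the
# board), the position is reported as checkmate.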
def check_for_checkmates_2():
global chess_board
checking_pieces=[]
checkmate=True
for i in range(8):
for j in range(8):
if chess_board[i][j]<0:
checking_pieces.append((j,i))
temp_arr=deepcopy(chess_board)
# print(turns)
for pos in checking_pieces:
if chess_board[pos[1]][pos[0]]==-1:
available_slot,kill_slot=pawn(False,pos)
if chess_board[pos[1]][pos[0]]==-2:
available_slot,kill_slot=rook(False,pos)
if chess_board[pos[1]][pos[0]]==-3:
available_slot,kill_slot=knight(False,pos)
if chess_board[pos[1]][pos[0]]==-4:
available_slot,kill_slot=bishop(False,pos)
if chess_board[pos[1]][pos[0]]==-5:
available_slot,kill_slot=queen(False,pos)
if chess_board[pos[1]][pos[0]]==-6:
available_slot,kill_slot=king(False,pos)
for slot in available_slot:
king_found=False
chess_board=deepcopy(temp_arr)
move_iterator(pos,slot)
for i in range(8):
for j in range(8):
if chess_board[i][j]==-6:
king_found=True
if king_found==False:
chess_board=deepcopy(temp_arr)
# for i in range(8):
# print(chess_board[i])
if determine_check_2()==False:
checkmate=False
break
if determine_check_2()==False:
break
for slot in kill_slot:
king_found=False
chess_board=deepcopy(temp_arr)
move_iterator(pos,slot)
# for i in range(8):
# print(chess_board[i])
# print(slot)
for i in range(8):
for j in range(8):
if chess_board[i][j]==-6:
king_found=True
if king_found==False:
chess_board=deepcopy(temp_arr)
if determine_check_2()==False:
checkmate=False
break
if determine_check_2()==False:
break
chess_board=temp_arr
return checkmate
def check_for_checkmates_1():
global chess_board
checking_pieces=[]
checkmate=True
for i in range(8):
for j in range(8):
if chess_board[i][j]>0:
checking_pieces.append((j,i))
# print(chess_board)
temp_arr=deepcopy(chess_board)
# print(turns)
for pos in checking_pieces:
if chess_board[pos[1]][pos[0]]==1:
available_slot,kill_slot=pawn(False,pos)
if chess_board[pos[1]][pos[0]]==2:
available_slot,kill_slot=rook(False,pos)
if chess_board[pos[1]][pos[0]]==3:
available_slot,kill_slot=knight(False,pos)
if chess_board[pos[1]][pos[0]]==4:
available_slot,kill_slot=bishop(False,pos)
if chess_board[pos[1]][pos[0]]==5:
available_slot,kill_slot=queen(False,pos)
if chess_board[pos[1]][pos[0]]==6:
available_slot,kill_slot=king(False,pos)
for slot in available_slot:
king_found=False
move_iterator(pos,slot)
for i in range(8):
for j in range(8):
if chess_board[i][j]==6:
king_found=True
if king_found==False:
chess_board=deepcopy(temp_arr)
if determine_check_1()==False:
checkmate=False
break
chess_board=temp_arr
for slot in kill_slot:
king_found=False
move_iterator(pos,slot)
# for i in range(8):
# print(chess_board[i])
# print(slot)
for i in range(8):
for j in range(8):
if chess_board[i][j]==6:
king_found=True
if king_found==False:
chess_board=deepcopy(temp_arr)
if determine_check_1()==False:
checkmate=False
break
chess_board=temp_arr
chess_board=temp_arr
return checkmate
def move_iterator(pos,slot):
global chess_board
#worked fine at 0 1 trying 1 0 to fix bugs
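    # Index note: `slot` comes from the move generators as (row, col), while
    # `pos` is passed in as (col, row), hence the swapped subscripts below.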
chess_board[slot[0]][slot[1]]=chess_board[pos[1]][pos[0]]
chess_board[pos[1]][pos[0]]=0
def move_peice(avble_slots,kill_slots):
global current_pos,chess_board,killed_pieces,turns
global posit,cur_pos
pos=pygame.mouse.get_pos()
pos=conv_to_arr(pos)
cur_pos_conv=conv_to_arr(current_pos)
selected_slot=(-1,-1)
# print("================")
# print(pos)
# print(cur_pos_conv)
# print(avble_slots)
# print("+++++++++++++++++++")
# for i in range(8):
# print(chess_board[i])
if pygame.mouse.get_pressed()[0]:
for slot in avble_slots:
if (pos[1],pos[0])==slot:
selected_slot=pos
posit=pos
cur_pos=cur_pos_conv
chess_board[pos[1]][pos[0]]=chess_board[cur_pos_conv[1]][cur_pos_conv[0]]
chess_board[cur_pos_conv[1]][cur_pos_conv[0]]=0
turns*=-1
break
for slot in kill_slots:
if (pos[1],pos[0])==slot:
selected_slot=pos
posit=pos
cur_pos=cur_pos_conv
killed_pieces.append(chess_board[pos[1]][pos[0]])
chess_board[pos[1]][pos[0]]=chess_board[cur_pos_conv[1]][cur_pos_conv[0]]
chess_board[cur_pos_conv[1]][cur_pos_conv[0]]=0
turns*=-1
break
return selected_slot
def show_possible_moves():
global current_pos,chess_board,turns,check2,check1,checked_pos,update_check_pos
pos=conv_to_arr(current_pos)
available_slot=[]
kill_slot=[]
# 1 stands for white turn and -1 stands for blacks turn
if chess_board[pos[1]][pos[0]]==1*turns:
available_slot,kill_slot=pawn(True,(0,0))
if chess_board[pos[1]][pos[0]]==2*turns:
available_slot,kill_slot=rook(True,(0,0))
if chess_board[pos[1]][pos[0]]==3*turns:
available_slot,kill_slot=knight(True,(0,0))
if chess_board[pos[1]][pos[0]]==4*turns:
available_slot,kill_slot=bishop(True,(0,0))
if chess_board[pos[1]][pos[0]]==5*turns:
available_slot,kill_slot=queen(True,(0,0))
if chess_board[pos[1]][pos[0]]==6*turns:
available_slot,kill_slot=king(True,(0,0))
for slot in available_slot:
pixel_pos=conv_to_pixel((slot[1],slot[0]))
draw_rect(pixel_pos[0],pixel_pos[1],88,88,5,(0,255,0))
# pygame.draw.circle(gameDisplay,(239,216,69),(pixel_pos[0]+44,pixel_pos[1]+44),10)
for slot in kill_slot:
pixel_pos=conv_to_pixel((slot[1],slot[0]))
draw_rect(pixel_pos[0],pixel_pos[1],88,88,5,(255,0,0))
# pygame.draw.circle(gameDisplay,(0,255,0),(pixel_pos[0]+44,pixel_pos[1]+44),20)
#checking conditions
temp_chess_board=deepcopy(chess_board)
# temp_chess_board=deepcopy(chess_board)
# if check1==True or check2==True:
# chess_board=temp_chess_board
selected_pos=move_peice(available_slot,kill_slot)
# print(turns)
# if check2==True or check1==True:
# selected_pos=checked_pos
if determine_check_1():
# pass
print("check 1")
text = STAT_FONT.render("CHECK",3, (255,0,0))
gameDisplay.blit(text, (5,600))
test=deepcopy(chess_board)
result1=check_for_checkmates_1()
if result1==True:
print("black wins CHECKMATE")
pygame.draw.rect(gameDisplay,(255,255,255),(230,350,620,60))
text2 = stat_font.render("black wins CHECKMATE",3, (0,0,0))
gameDisplay.blit(text2, (250,350))
# exit()
chess_board=test
if turns==-1:
chess_board=temp_chess_board
turns*=-1
if determine_check_2():
# pass
print("check 2")
text = STAT_FONT.render("CHECK",3, (255,0,0))
gameDisplay.blit(text, (5,100))
test=deepcopy(chess_board)
result2=check_for_checkmates_2()
if result2==True:
print("player 1 wins CHECKMATE")
pygame.draw.rect(gameDisplay,(255,255,255),(230,350,620,60))
text2 = stat_font.render("white wins CHECKMATE",3, (0,0,0))
gameDisplay.blit(text2, (250,350))
# exit()
chess_board=test
if turns==1:
chess_board=temp_chess_board
turns*=-1
def checkpos(pos,x,y,w,h):
if pos[0]>=x and pos[0]<=x+w:
if pos[1]>=y and pos[1]<=y+h:
if pygame.mouse.get_pressed()[0]:
return True
else:
return False
def P_Moves(board,color):
possiblemoves=[]
checking_pieces=[]
for i in range(8):
for j in range(8):
if color==-1:
if board[i][j]<0:
checking_pieces.append((j,i))
if color==1:
if board[i][j]>0:
checking_pieces.append((j,i))
for pos in checking_pieces:
if board[pos[1]][pos[0]]==1*color:
available_slot,kill_slot=pawn(False,pos)
if board[pos[1]][pos[0]]==2*color:
available_slot,kill_slot=rook(False,pos)
if board[pos[1]][pos[0]]==3*color:
available_slot,kill_slot=knight(False,pos)
if board[pos[1]][pos[0]]==4*color:
available_slot,kill_slot=bishop(False,pos)
if board[pos[1]][pos[0]]==5*color:
available_slot,kill_slot=queen(False,pos)
if board[pos[1]][pos[0]]==6*color:
available_slot,kill_slot=king(False,pos)
possiblemoves.append(((pos[1],pos[0]),available_slot+kill_slot))
return possiblemoves
def move_maker(board,mover):
move=mover[1]
idd=mover[0]
board[move[0]][move[1]]=board[idd[0]][idd[1]]
board[idd[0]][idd[1]]=0
return board
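# Computer opponent: minimaxR runs a depth-limited minimax search with
# alpha-beta pruning for black (the negative pieces), using P_Moves to list
# candidate moves, move_maker to apply them to a copied board, and
# evaluation/getpiece_val to score material (pawn 10, knight/bishop 30,
# rook 50, queen 90, king 900, summed from black's point of view).
# The main loop uses it as:
#     move = minimaxR(3, chess_board, True)
#     chess_board = move_maker(chess_board, move)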
def minimaxR(depth,board,isMaximizing):
possibleMoves=P_Moves(board,-1)
bestmove=-9999
bestFinalMove=((1,3),(3,3))
for element in possibleMoves:
idd =element[0]
moves=element[1]
# print("*"*50)
# print(moves)
# print("*"*50)
for move in moves:
b_board=deepcopy(board)
t_board=move_maker(b_board,(idd,move))
# for i in range(8):
# print(t_board[i])
value=max(bestmove,minimax(depth-1,t_board,-10000,10000,not isMaximizing))
if value>bestmove:
# print("best move :",bestmove)
bestmove=value
bestFinalMove=(idd,move)
return bestFinalMove
def minimax(depth,board,alpha,beta,isMaximizing):
if depth==0:
return -evaluation(board)
if isMaximizing:
breaking=False
possibleMoves=P_Moves(board,-1)
bestmove=-9999
for element in possibleMoves:
idd=element[0]
moves=element[1]
for move in moves:
t_board=move_maker(board,(idd,move))
bestmove=max(bestmove,minimax(depth-1,t_board,alpha,beta,not isMaximizing))
alpha=max(alpha,bestmove)
if beta<=alpha:
breaking=True
return bestmove
if breaking:
break
return bestmove
else:
possibleMoves=P_Moves(board,1)
bestmove=9999
breaking=False
for element in possibleMoves:
idd=element[0]
moves=element[1]
for move in moves:
t_board=move_maker(board,(idd,move))
bestmove=min(bestmove,minimax(depth-1,t_board,alpha,beta,not isMaximizing))
beta=min(beta,bestmove)
if(beta<=alpha):
breaking=True
return bestmove
if breaking:
break
return bestmove
def evaluation(board):
evaluate=0
for i in range(8):
for j in range(8):
if board[i][j]<0:
evaluate+=getpiece_val(abs(board[i][j]))
elif board[i][j]>0:
evaluate-=getpiece_val(abs(board[i][j]))
# print(evaluate)
return evaluate
def getpiece_val(piece):
value=0
if piece==1:
value=10 #10
elif piece==2:
value=50 #50
elif piece==3:
value=30 # 30
elif piece==4:
value=30 #30
elif piece==5:
value=90#90
elif piece==6:
value=900#900
return value
def hover(pos,x,y,w,h):
if pos[0]>=x and pos[0]<=x+w:
if pos[1]>=y and pos[1]<=y+h:
return True
else:
return False
def front_logo():
t=0
for t in range(150):
menu1=pygame.transform.scale(menu,(900,750))
gameDisplay.blit(menu1,(0,0,900,750))
text = stat_font2.render("Chess 360",3, (255,255,255))
globe1=pygame.transform.scale(globe,(200,200))
gameDisplay.blit(globe1,(430,340,200,200))
gameDisplay.blit(text, (200,425))
pygame.display.update()
for event in pygame.event.get():
if event.type==pygame.QUIT:
# run=False
exit()
def main_menu():
global choice
enter=True
col1=(255,255,255)
col2=(255,255,255)
col3=(255,255,255)
while enter:
menu1=pygame.transform.scale(menu,(900,750))
gameDisplay.blit(menu1,(0,0,900,750))
text = stat_font.render("Chess 360",3, (255,255,255))
gameDisplay.blit(text, (350,125))
for event in pygame.event.get():
if event.type==pygame.QUIT:
run=False
# exit()
if event.type==pygame.KEYDOWN:
if event.key==pygame.K_b:
enter=False
p=pygame.mouse.get_pos()
box1=pygame.transform.scale(frame,(740,105))
gameDisplay.blit(box1,(130,282,740,105))
pygame.draw.rect(gameDisplay,col1,(200,300,600,75))
text = STAT_FONT.render("pvp offline",3, (0,0,0))
gameDisplay.blit(text, (360,325))
# box2=pygame.transform.scale(frame,(740,105))
gameDisplay.blit(box1,(130,410,740,105))
pygame.draw.rect(gameDisplay,col2,(200,425,600,75))
text = STAT_FONT.render("pvp online",3, (0,0,0))
gameDisplay.blit(text, (360,450))
gameDisplay.blit(box1,(130,534,740,105))
pygame.draw.rect(gameDisplay,col3,(200,550,600,75))
text = STAT_FONT.render("play against computer",3, (0,0,0))
gameDisplay.blit(text, (360,575))
if hover(p,200,300,600,75):
col1=(230,230,230)
else:
col1=(255,255,255)
if hover(p,200,425,600,75):
col2=(230,230,230)
else:
col2=(255,255,255)
if hover(p,200,550,600,75):
col3=(230,230,230)
else:
col3=(255,255,255)
if checkpos(p,200,300,600,75):
choice=1
enter=False
if checkpos(p,200,425,600,75):
choice=2
enter=False
if checkpos(p,200,550,600,75):
choice=3
enter=False
pygame.display.update()
if __name__=="__main__":
#185,23
# 704-42,23
choice=-1
pygame.display.set_caption("chess 360")
checked_pos=0
update_check_pos=False
gameloop=True
front_logo()
while gameloop:
main_menu()
n=Network()
current_pos=(185,639)
check1=False
check2=False
posit=(0,0)
cur_pos=(0,0)
killed_pieces=[]
turns=1
run=True
b=board()
while run:
if choice==1:
gameDisplay.fill(brown)
for event in pygame.event.get():
if event.type==pygame.QUIT:
run=False
# exit()
b.draw_board()
text3 = STAT_FONT.render("TURN",3, (255,255,255))
gameDisplay.blit(text3, (10,350))
if turns==1:
text4 = STAT_FONT.render("white's",3, (255,255,255))
gameDisplay.blit(text4, (12,300))
else:
text4 = STAT_FONT.render("black's",3, (255,255,255))
gameDisplay.blit(text4, (12,300))
select_peice()
mark_current_pos()
draw_peices()
show_possible_moves()
pygame.display.update()
if choice==3:
gameDisplay.fill(brown)
b.draw_board()
text3 = STAT_FONT.render("TURN",3, (255,255,255))
gameDisplay.blit(text3, (10,350))
if turns==1:
text4 = STAT_FONT.render("white's",3, (255,255,255))
gameDisplay.blit(text4, (12,300))
else:
text4 = STAT_FONT.render("black's",3, (255,255,255))
gameDisplay.blit(text4, (12,300))
for event in pygame.event.get():
if event.type==pygame.QUIT:
run=False
# exit()
if turns==-1 :
# chess_board[2][3]=-1
chess_board_copy=deepcopy(chess_board)
move=minimaxR(3,chess_board,True)
# print(move)
chess_board=move_maker(chess_board,move)
# for i in range(8):
# print(chess_board[i])
if determine_check_1():
# pass
print("check 1")
text = STAT_FONT.render("CHECK",3, (255,0,0))
gameDisplay.blit(text, (5,600))
chess_board=chess_board_copy
test=deepcopy(chess_board)
result1=check_for_checkmates_1()
if result1==True:
print("player 2 wins CHECKMATE")
print("black wins CHECKMATE")
pygame.draw.rect(gameDisplay,(255,255,255),(230,350,620,60))
text2 = stat_font.render("black wins CHECKMATE",3, (0,0,0))
gameDisplay.blit(text2, (250,350))
# exit()
chess_board=test
if turns==-1:
chess_board=test
turns*=-1
if determine_check_2():
# pass
print("check 2")
chess_board=chess_board_copy
test=deepcopy(chess_board)
result2=check_for_checkmates_2()
if result2==True:
print("player 1 wins CHECKMATE")
print("player 1 wins CHECKMATE")
pygame.draw.rect(gameDisplay,(255,255,255),(230,350,620,60))
text2 = stat_font.render("white wins CHECKMATE",3, (0,0,0))
gameDisplay.blit(text2, (250,350))
# exit()
chess_board=test
if turns==1:
chess_board=test
turns*=-1
if turns==-1:
turns=1
b.draw_board()
select_peice()
mark_current_pos()
if turns==1:
show_possible_moves()
draw_peices()
pygame.display.update()
if choice==2:
try:
gameDisplay.fill(brown)
# num+=1
for event in pygame.event.get():
if event.type==pygame.QUIT:
run=False
# exit()
b.draw_board()
# if turns==1:
select_peice()
mark_current_pos()
# chess_board_recvd=n.send(chess_board)
# for i in range(8):
# print(chess_board_recvd)
draw_peices()
# pos_arr=n.send([(-1,-1),(-1,-1)])
if turns==1:
show_possible_moves()
pos_arr=n.send([posit,cur_pos])
if turns==-1:
pos_arr=n.send([(-1,-1),(-1,-1)])
print(pos_arr)
pos=pos_arr[0]
cur_pos_conv=pos_arr[1]
chess_board[pos[1]][pos[0]]=chess_board[cur_pos_conv[1]][cur_pos_conv[0]]
chess_board[cur_pos_conv[1]][cur_pos_conv[0]]=0
turns*=-1
chess_board[0][0]=-2
pygame.display.update()
except:
text2 = stat_font.render("waiting for other player..",3, (255,0,0))
gameDisplay.blit(text2, (250,350))
pygame.display.update()
for event in pygame.event.get():
if event.type==pygame.KEYDOWN:
if event.key==pygame.K_e:
exit()
for event in pygame.event.get():
if event.type==pygame.KEYDOWN:
if event.key==pygame.K_e:
exit()
|
[
"noreply@github.com"
] |
AI-Factor-y.noreply@github.com
|
3622120dddd6b2f0521775b7eeaec59ce961df1b
|
efd3aa99a1669ef547a1625fd62e881d2281acfb
|
/Energy.py
|
31bdd135104066868f19753397f775b54ef3661a
|
[] |
no_license
|
MaxLu0428/TensorNetworkPartitionFn
|
9b2e803d530027557c2724d22fb09e3a99d9c8f2
|
531955c6dac60a3a24e43de5cacb189c51425b2e
|
refs/heads/master
| 2023-03-11T23:37:53.064128
| 2021-02-25T07:17:23
| 2021-02-25T07:17:23
| 305,578,756
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,308
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 26 13:38:42 2020
@author: chingyuhuang
"""
import numpy as np
from math import pi
from scipy import linalg
import time, itertools
import matplotlib.pyplot as plt
import HOTRG_two
import pandas as pd
from pathlib import Path
def SAVE_dATA( Cdata,RG_step,Dcut,name):
dataframe = pd.DataFrame( Cdata )
dataframe.index=['i={}'.format(num_steps) for num_steps in range((2**RG_step)*2) ]
dataframe.columns = [ a1 for a1 in range( (2**RG_step)*2) ]
dataframe = dataframe.stack()
dataframe = dataframe.unstack(0)
dataframe.index.name="Dcut="+str(Dcut)
dataframe.to_csv(name )
#================================================#
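# Parameter sweep set up below: trgstep HOTRG coarse-graining steps on the
# square-lattice Ising tensors with bond dimension dcut, over temperatures
# T = 1.0 .. 3.0 in `step` increments; for each T the normalized Cij values
# are collected and written to one CSV file per temperature.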
trgstep = 2
N = 2
dcut = 16
FILE = 'Ising_square_'+ 'RG'+str(trgstep)
step= 50
deltT = (2.0/step)
T_list = [ 1.0 +deltT*i for i in range(0,step+1) ]
dir_path = Path(FILE)
if not dir_path.exists():
dir_path.mkdir()
for T in T_list:
Cij_list = dict();
for i in range( (2**trgstep)*2):
Cj_list = np.zeros(2**trgstep*2)
# for j in range((2**trgstep)*2):
for j in range(i,(2**trgstep)*2):
DT, IDT = HOTRG_two.Ising_square(T)
To = DT; Ti = IDT;
si = i ; sj = j; Pi = Ti ; To0 = To
if i==0 and j==0:
Ti = To
Pi = To
for ii in range(trgstep):
Lsi,si = divmod(si,2)
Lsj,sj = divmod(sj,2)
#
## update along y-direction
To1,UU,UUT,N1 = HOTRG_two.updat_pure2( To0,To0,'y',dcut)
To1 /= N1
if Lsi==0 and Lsj==0:
T0,T1,T2,T3 = HOTRG_two.DetT (si,sj,To0,Ti,Pi)
else:
T0,T1,T2,T3 = HOTRG_two.DetT (si,sj,To0,Ti,To0)
iTL,_,_,_ = HOTRG_two.updat_pure2( T0, T3,'y',dcut)
iTL = iTL / N1
iTR,_,_,_ = HOTRG_two.updat_pure2( T1, T2,'y',dcut)
iTR = iTR / N1
if Lsi==0 and Lsj==0:
P0 =To0; P1 = To0; P2 = To0; P3= To0
else:
P0,P1,P2,P3 = HOTRG_two.DetP (si,sj,To0,Pi)
iPL,_,_,_ = HOTRG_two.updat_pure2( P0, P3,'y',dcut)
iPL = iPL / N1
iPR,_,_,_ = HOTRG_two.updat_pure2( P1, P2,'y',dcut)
iPR = iPR / N1
si = Lsi; sj = Lsj;
## update along x-direction
To2,UU,UUT,N1 = HOTRG_two.updat_pure2( To1,To1,'x',dcut)
To2 /= N1
Tij,_,_,_ = HOTRG_two.updat_pure2( iTL, iTR,'x',dcut)
Tij = Tij / N1
Pij,_,_,_ = HOTRG_two.updat_pure2( iPL, iPR,'x',dcut)
Pij = Pij / N1
To0 =To2
Ti = Tij
Pi = Pij
T0,T1,T2,T3 = HOTRG_two.DetTLIST (si,sj,To0,Ti,Pi)
if i==0 and j==0:
T0 =Ti; T1=To0; T2=To0; T3=To0
Norm = HOTRG_two.merge_four( [To0,To0,To0,To0] )
Cij = HOTRG_two.merge_four( [T0, T1, T2, T3])/Norm
Cj_list[j] = Cij
Cij_list[i] = Cj_list;
SAVE_dATA( Cij_list, trgstep,dcut,FILE+'/Cij_T'+str(format( T, '.3f'))+'_D'+str(dcut)+'.csv' )
|
[
"maxlu0428@gmail.com"
] |
maxlu0428@gmail.com
|
264641448c559a31731c347e5aa9462dc5ccb1f9
|
14f413371efe349646b737167c26c66ddb2fa898
|
/answer/views.py
|
88b40d6647fa0eb73d190827d5154963d5b9c07e
|
[] |
no_license
|
pabissonnier/project8__purbeurre
|
bace2341a76dc1281d25dce59d9e23f8fec4e252
|
f59f57d78b04075ac7fa9c104cd88fb2278e8820
|
refs/heads/master
| 2022-12-15T19:10:22.821407
| 2019-12-05T18:15:52
| 2019-12-05T18:15:52
| 186,840,027
| 0
| 1
| null | 2022-12-08T01:47:21
| 2019-05-15T14:09:45
|
CSS
|
UTF-8
|
Python
| false
| false
| 6,596
|
py
|
# -*- coding: utf-8 -*-
from django.shortcuts import render, get_object_or_404, redirect
from django.core.exceptions import MultipleObjectsReturned
from django.core.paginator import Paginator
from .models import Product
def index(request):
return render(request, 'answer/index.html')
def search(request):
query = request.GET.get('query')
global message
if not query:
products_all_list = Product.objects.all().order_by('name')
paginator = Paginator(products_all_list, 9)
page = request.GET.get('page')
products_all = paginator.get_page(page)
title = "Résultats pour la recherche : '%s'" % query
context = {
'products': products_all,
'title': title,
'query': query,
}
return render(request, 'answer/search.html', context)
else:
products_list = Product.objects.filter(name__icontains=query).order_by('name')
paginator = Paginator(products_list, 9)
page = request.GET.get('page')
products = paginator.get_page(page)
if not products_list.exists():
message = "Aucun produit trouvé"
title = "Résultats pour la recherche : '%s'" % query
context = {
'products': products,
'title': title,
'query': query
}
return render(request, 'answer/search.html', context)
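# app(): main substitution view. It first tries an exact (iexact) name match for
# the submitted product; if nothing matches it falls back to a similarity or
# icontains search and shows a list to pick from; if several products match
# (MultipleObjectsReturned) it shows a disambiguation list; once a single product
# is resolved it proposes better-nutriscore products from the same category.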
def app(request):
query = request.GET.get('app-query')
products_datas = Product()
if not query:
products_list = Product.objects.all().order_by('name')
paginator = Paginator(products_list, 9)
page = request.GET.get('page')
products_all = paginator.get_page(page)
title = "Aucun produit n'a été renseigné, choisissez dans la liste de produits ou recherchez un produit"
context = {
'title': title,
'products': products_all,
'query': query,
}
return render(request, 'answer/list.html', context)
else:
try:
products = Product.objects.filter(name__iexact=query)
if not products.exists():
space = ' '
if space in query:
products_ratio_list = Product.find_similar_name(products_datas, query)
products = Product.objects.filter(name__in=products_ratio_list).order_by('name')
paginator = Paginator(products, 9)
page = request.GET.get('page')
products_all = paginator.get_page(page)
title = "Aucun produit pour : '%s', choisissez un produit dans la liste ci-dessous" % query
context = {
'title': title,
'products': products_all,
'query': query,
}
return render(request, 'answer/list.html', context)
else:
products = Product.objects.filter(name__icontains=query).order_by('name')
paginator = Paginator(products, 9)
page = request.GET.get('page')
products_all = paginator.get_page(page)
title = "Plusieurs produits contiennent : '%s', choisissez un produit dans la liste ci-dessous" % query
context = {
'title': title,
'products': products_all,
'query': query,
}
return render(request, 'answer/list.html', context)
product_name, product_picture, product_nutriscore, product_category, product_link, product_id = \
Product.product_chosen(products_datas, query)
better_nutriscore = Product.get_better_nutriscore(products_datas, product_nutriscore)
best_ratio_list = Product.get_same_names(products_datas, product_name, product_category)
better_products = Product.extract_products_for_replace(products_datas, better_nutriscore, product_category,
best_ratio_list, product_link)
title = "Voici de meilleurs produits pour remplacer : '%s'" % query
if not better_products:
title = "Désolé, nous n'avons pas de meilleurs produits pour remplacer : '%s'" % query
context = {
'title': title,
'better_products': better_products,
'query': query,
}
return render(request, 'answer/results.html', context)
except MultipleObjectsReturned:
products = Product.multiple_product_name(products_datas, query)
title = "Plusieurs produits pour : '%s', choisissez un produit dans la liste ci-dessous" % query
context = {
'title': title,
'products': products,
'query': query,
}
return render(request, 'answer/simlist.html', context)
def app_sim(request):
query = request.GET.get('app-query-sim')
products_datas = Product()
product = Product.objects.get(id=query)
better_nutriscore = Product.get_better_nutriscore(products_datas, product.nutriscore)
best_ratio_list = Product.get_same_names(products_datas, product.name, product.category)
better_products = Product.extract_products_for_replace(products_datas, better_nutriscore, product.category,
best_ratio_list, product.link)
title = "Voici de meilleurs produits pour remplacer : '%s'" % product.name
if not better_products:
title = "Désolé, nous n'avons pas de meilleurs produits pour remplacer : '%s'" % product.name
context = {
'title': title,
'better_products': better_products,
'query': query,
}
return render(request, 'answer/results.html', context)
def detail(request, product_id):
""" Display details for the product clicked"""
products_datas = Product()
product = get_object_or_404(Product, pk=product_id)
is_bio = Product.bio_or_not(products_datas, product)
context = {
'name': product.name,
'picture': product.picture,
'nutriscore': product.nutriscore,
'ingredients': product.ingredients,
'shops': product.shops,
'link': product.link,
'labels' : product.labels,
'product': product,
'is_bio': is_bio
}
return render(request, 'answer/detail.html', context)
|
[
"pabissonnier@gmail.com"
] |
pabissonnier@gmail.com
|
b7a956e9d522c03754567cfb5a37b9907762ee17
|
fce517b931838629ecdf1f61de763eaa208409ec
|
/category/models.py
|
9972266c3276ed893d76489f4a88563f6495284c
|
[] |
no_license
|
bird50/rs_cat
|
19abec386815d3b0b0511f17260049a63f5dbd02
|
5adb4d49f3a6a9a4870305d3717183980c860c1c
|
refs/heads/master
| 2022-11-14T16:42:28.853472
| 2020-07-07T23:54:33
| 2020-07-07T23:54:33
| 277,946,338
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,081
|
py
|
from django.db import models
from django.utils.translation import gettext_lazy as _  # required for the _() labels used below
'''
[datacat]
- name (name of the dataset)
- link (link to the data source)
- agency (agency that owns the data)
- agency_provided_data (agency that provided the data)
- keyword (subject or keywords used to look the data up)
- description (details of the data)
- Time_period (time period covered by the data)
- source (description of where the data comes from)
- type_of_data (format in which the data is stored)
- date_recived (date the data was received)
- reference_doc (supporting reference documents)
- related_subject (related subjects)
- access_policy (rules for accessing the data)
- data_licensing (rules for using the data)
- tags (tags)
'''
class Datacat(models.Model):
class Type_of_data(models.TextChoices):
CSV = 'CSV', _('CSV')
TEXT = 'TXT', _('Text file')
EXCEL = 'XLS', _('Excel file,xlsx,xls,xl*')
SHP = 'SHP', _('ESRI Shape file')
ETC = 'ETC', _('Other formats')
UNKNOWN = 'UNK', _('Unknown format')
## Data licences
## ref https://www.cessda.eu/Training/Training-Resources/Library/Data-Management-Expert-Guide/6.-Archive-Publish/Publishing-with-CESSDA-archives/Licensing-your-data
class Access_policy(models.TextChoices):
        CC0 = 'CC0', _('copy and redistribute (Y), attribution required (N), commercial use (Y), modifications allowed (Y), relicensing allowed (Y)')
        CC_BY = 'CC_BY', _('copy and redistribute (Y), attribution required (Y), commercial use (Y), modifications allowed (Y), relicensing allowed (Y)')
name = models.CharField(max_length=255)
link = models.URLField(blank=True)
    # Agency is assumed to be a model defined elsewhere in this app (the string reference defers lookup).
    agency = models.ForeignKey('Agency', on_delete=models.SET_NULL, related_name='owned_datacats', blank=True, null=True)
    agency_provided_data = models.ForeignKey('Agency', on_delete=models.SET_NULL, related_name='provided_datacats', blank=True, null=True)
keyword = models.CharField(max_length=255, blank=True)
description = models.TextField(blank=True)
Time_period = models.TextField(blank=True)
source = models.TextField(blank=True)
type_of_data = models.CharField(
max_length=3,
choices=Type_of_data.choices,
default=Type_of_data.UNKNOWN
)
date_recived = models.DateTimeField()
reference_doc = models.TextField()
related_subject = models.TextField()
access_policy = models.TextField()
created = models.DateTimeField(auto_now_add=True)
# Create your models here.
|
[
"siratis.w@gmail.com"
] |
siratis.w@gmail.com
|
c88a0f750789bbd73b0cdbec3214fd73b3684bf5
|
07f814b6b165825bab75f01b30113547f9dfc8ca
|
/src/074. Digit factorial chains/074.py
|
7a76248279038db2be6bed273644bca548f01ed8
|
[
"MIT"
] |
permissive
|
yuhao600/project-euler
|
d1800ceba53c0bc66947e0b373ba42d3e3c372b2
|
201fc68aa9cca63b751036bb61623c12939dcac4
|
refs/heads/master
| 2021-01-18T23:17:33.956922
| 2020-12-22T08:19:26
| 2020-12-22T08:19:26
| 27,674,349
| 15
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 395
|
py
|
factorial = [1]
for d in range(1, 10):
factorial.append(factorial[-1] * d)
def digit_fact(n):
return sum([factorial[int(d)] for d in str(n)])
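# Walk the digit-factorial chain for every starting number below one million:
# keep applying digit_fact until a value repeats, and count the starting numbers
# whose chain contains exactly sixty non-repeating terms.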
counter = 0
for n in range(10 ** 6):
m = n
sequence = set()
while True:
sequence.add(m)
m = digit_fact(m)
if m in sequence:
break
if len(sequence) == 60:
counter += 1
print(counter)
|
[
"yuhao10@baidu.com"
] |
yuhao10@baidu.com
|
73c3b2c1b683f77bbfe7516ef0d32e2e5c86737d
|
24f97e9bd7eddca0ddcc174bedb012257c483921
|
/module/gr-round/python/deci.py
|
b537bfcc3411b538b3c1905022674eefc84b3dc9
|
[] |
no_license
|
13717630148/graduation-project
|
8afb90b8aa7fc1544cc144522105869456e36372
|
743cda2b956e71e8d0323baf0b3aba9a347105be
|
refs/heads/master
| 2020-08-09T13:32:54.563870
| 2018-04-17T07:34:48
| 2018-04-17T07:34:48
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,429
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2018 <+YOU OR YOUR COMPANY+>.
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import numpy
from gnuradio import gr
class deci(gr.decim_block):
"""
    Decimating block: passes through one sample out of every `deci` input samples.
"""
def __init__(self, deci=2000):
gr.decim_block.__init__(self,
name="deci",
in_sig=[numpy.float32],
out_sig=[numpy.float32], decim=deci)
self.decim = deci
def work(self, input_items, output_items):
in0 = input_items[0]
out = output_items[0]
# <+signal processing here+>
print(len(in0))
        for i in range(0, len(in0) // self.decim):  # integer division so range() gets an int on Python 3
            out[i] = in0[i * self.decim]  # keep every self.decim-th input sample
#out[:] = in0
return len(output_items[0])
|
[
"464271301@qq.com"
] |
464271301@qq.com
|
c263f0cb8caa0efb7c52441a920dbdc751510535
|
f525e83095eff72a2452495aae4d800ffe5a553b
|
/main.py
|
72cc15b9b9fcfbbd84208dce47b6cbe2b51a68b1
|
[
"Apache-2.0"
] |
permissive
|
maysrp/daolao
|
942b518eb86433f956b00b2f0962ffc0bec17ccc
|
42c9d5acb85e39f878f9f4a97cb2a7afeb2421e2
|
refs/heads/main
| 2023-02-17T10:17:18.949634
| 2021-01-14T14:32:52
| 2021-01-14T14:32:52
| 329,247,918
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 590
|
py
|
from cnfont import chinese,pixel,image,anime1,anime2,anime3,flx,anime0
import max7219
from machine import Pin, SPI
import time
spi = SPI(1, baudrate=4000000, polarity=1, phase=0, sck=Pin(4), mosi=Pin(2))
ss = Pin(5, Pin.OUT)
display = max7219.Matrix8x8(spi, ss, 16)
l1="大佬让我过去吧"
l2="祝您身体健康"
l3="财源广进"
# flx(l1,display)
# flx(l2)
# flx(l3)
# time.sleep(2)
anime0("爱一直在",display)
# anime1("下次一定",display)
# anime2("一键三连",display)
# anime3("拒绝白嫖",display)
image("b.json",display)
|
[
"noreply@github.com"
] |
maysrp.noreply@github.com
|
97572804913cc220df3cf3925bd1a06df91f8c6e
|
385554a930b259412b3e843d5936a12b2bb35bd5
|
/quest2.3.py
|
67061739c87f5818d6b039722865e1d2b376ad39
|
[] |
no_license
|
prajwalacharya016/CrackingCodingInterviewSolutions
|
ea60d2c125434ffc657143d0c22e64c81ecc9c1f
|
ca6e1a35220b83e799d8e81a7e96e3276dad19ab
|
refs/heads/master
| 2020-12-30T16:41:35.029676
| 2017-05-17T20:10:52
| 2017-05-17T20:10:52
| 91,015,518
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 512
|
py
|
from linkedlist import LinkedList, Node
def deleteNode(l2,data):
head=l2.first
n=None
while head:
if head.data == data:
n = head
break
else:
head=head.get_next()
if n is None or n.get_next() is None:
return False
nextd = n.get_next()
n.data = nextd.get_data()
n.set_next(nextd.get_next())
return True
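# deleteNode removes the node holding `data` without tracking the previous node:
# it copies the next node's data into the found node and bypasses that next node.
# As written it returns False (and deletes nothing) when the value is absent or
# sits in the tail node, since there is no next node to copy from.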
l2 = LinkedList()
l2.insert(1)
l2.insert(2)
l2.insert(3)
l2.insert(5)
l2.insert(7)
deleteNode(l2,7)
l2.printlist()
|
[
"callmeprajwal@gmail.com"
] |
callmeprajwal@gmail.com
|
02a898bb283bd23b227d789c20e524113a8a8126
|
beba988ee3440ee0549b23f8dfc8c11d1452b3c0
|
/login/views.py
|
9f2939d4f0fbabb2ae28e783dc5479d023b58eaf
|
[] |
no_license
|
pdf2e/Tektiles
|
c4c91f910466acf9e539d32e0e6ba62c2a71df7c
|
a25dd385757ce8f463c590863a5363dd8fb0fbae
|
refs/heads/master
| 2020-12-11T02:03:08.003218
| 2014-04-14T18:04:26
| 2014-04-14T18:04:26
| 17,325,359
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,133
|
py
|
from django.template import RequestContext
from django.shortcuts import render_to_response
from login.forms import PersonForm
def add_user(request):
# Get the context from the request.
context = RequestContext(request)
# A HTTP POST?
if request.method == 'POST':
form = PersonForm(request.POST)
# Have we been provided with a valid form?
if form.is_valid():
# the user to the database
form.save(commit=True)
# Now call the index() view.
# The user will be shown the homepage.
return registration(request)
else:
# The supplied form contained errors - just print them to the terminal.
            print(form.errors)  # log validation errors to the console
else:
# If the request was not a POST, display the form to enter details.
form = PersonForm()
# Bad form (or form details), no form supplied...
# Render the form with error messages (if any).
return render_to_response('login/../templates/registration.html', {'form': form}, context)
def registration(request):
return render_to_response('registration.html')
|
[
"angelotodaro92@gmail.com"
] |
angelotodaro92@gmail.com
|
b2586349d2cf431fa0748ebd0657102088fb88cf
|
1d96db84225301d972f07cad95c2a13f4fbafa84
|
/python/my_PyFeyn/feynman_diagram_lfv_charm/gen_diagrams.py
|
c622629912d615fbabe66878c68815279854cad8
|
[] |
no_license
|
mattbellis/matts-work-environment
|
9eb9b25040dd8fb4a444819b01a80c2d5342b150
|
41988f3c310f497223445f16e2537e8d1a3f71bc
|
refs/heads/master
| 2023-08-23T09:02:37.193619
| 2023-08-09T05:36:32
| 2023-08-09T05:36:32
| 32,194,439
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 156
|
py
|
#!/usr/bin/env python
from diagrams import *
tag = "_blk_bkg"
for i in range(0,4):
name = "fd_lfv_c_quark_%s_%d" % (tag,i)
fd_lfv_c_quark(name,i)
|
[
"matthew.bellis@gmail.com"
] |
matthew.bellis@gmail.com
|
3d2911a8f45cd81863e800a596cc087613d79b6b
|
039d8f2beb0991e2f1a4b3580b33250ae04440d1
|
/RPi/rpi_led.py
|
88c8eddb546b780f76b32c626ee19a3bb8edf57f
|
[] |
no_license
|
a1ali/smartstreetlight
|
ff91eef204e16e582d3bac4ce4fe0923b062a1f9
|
e6a8c5b737f82d45bf3cd92ec38c6283bfa7c0d7
|
refs/heads/master
| 2023-02-20T05:11:27.711806
| 2021-01-26T22:09:22
| 2021-01-26T22:09:22
| 293,435,170
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,810
|
py
|
import RPi.GPIO as GPIO
import time
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient
import json
import pigpio
led = 18
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
#GPIO.setup(PIR, GPIO.IN)
GPIO.setup(led, GPIO.OUT)
#led = GPIO.PWM(LED, 500)
#led.start(1)
pi = pigpio.pi()
pi.set_mode(led, pigpio.OUTPUT)
pi.set_PWM_frequency(led, 8000)
pi.set_PWM_dutycycle(led, 1)
def helloworld(self, params, packet):
print(json.loads(packet.payload))
mypayload = json.loads(packet.payload)
if mypayload['rpi_motion'] == 'true':
raise_brightness()
elif mypayload['rpi_motion'] == 'false':
lower_brightness()
myMQTTClient = AWSIoTMQTTClient("rpi2") # random client ID, can be anything
# For TLS mutual authentication
myMQTTClient.configureEndpoint("a2fj01nuikd9c7-ats.iot.us-east-2.amazonaws.com", 8883) #Provide your AWS IoT Core endpoint (Example: "abcdef12345-ats.iot.us-east-1.amazonaws.com")
myMQTTClient.configureCredentials("root-ca.pem.txt", "private.pem.key", "certificate.pem.crt") #Set path for Root CA and provisioning claim credentials
myMQTTClient.configureOfflinePublishQueueing(-1)
myMQTTClient.configureDrainingFrequency(2)
myMQTTClient.configureConnectDisconnectTimeout(10)
myMQTTClient.configureMQTTOperationTimeout(5)
print('Initiating Iot Core Topic')
myMQTTClient.connect()
print('connecting')
myMQTTClient.subscribe('home/motion', 1, helloworld)
print('subscribing')
def raise_brightness():
for dc in range(0, 256, 1):
pi.set_PWM_dutycycle(led, dc)
time.sleep(0.01)
#time.sleep(10)
def lower_brightness():
for dc in range(255, 0, -1):
pi.set_PWM_dutycycle(led, dc)
time.sleep(0.01)
try:
while True:
time.sleep(100)
except KeyboardInterrupt:
GPIO.cleanup()
|
[
"noreply@github.com"
] |
a1ali.noreply@github.com
|
e6e44ed46fd2a04ce3ee66aa3b01fd54f6275b00
|
f525b7beb6d15d6e0ede93d4efdbe8918074f19c
|
/venv/game_settings.py
|
0e759cbe0da464b1c4afc0ff221d96bc6975a7c9
|
[] |
no_license
|
MarcLiander/CPSC-386-02-Pong
|
5543a1ede459fd037d834bbbafd2c40df0919181
|
640d101d4c1bd49d841e0c0db972af8d2b7f6079
|
refs/heads/master
| 2020-04-13T04:33:56.856359
| 2018-12-29T07:15:52
| 2018-12-29T07:15:52
| 162,965,510
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 350
|
py
|
class Settings():
def __init__(self):
self.screen_width = 1200
self.screen_height = 800
self.bg_color = (50, 50, 50)
self.item_color = (230, 230, 230)
self.paddle_short = 20
self.paddle_long = 150
self.ball_speedup = 1.05
self.ball_size = 16
self.ball_touch_edge = False
|
[
"digizaku@gmail.com"
] |
digizaku@gmail.com
|
96db2632d69d94830bd67115dadcaeb6e2e707c7
|
9cd8801c3ed2206bf731986a628f33a3577e10dd
|
/assignment1/cs231n/classifiers/neural_net.py
|
640b81e9abfb3f355b9c90edfc8f725b011063b7
|
[] |
no_license
|
mfouda/CS231n
|
d82d47756070d721e342a28a67fc2c3cb4f45c8b
|
37e3252d69a84be7d94c452dce4f787f00c5dec9
|
refs/heads/master
| 2021-01-19T13:15:16.573273
| 2017-02-16T06:13:30
| 2017-02-16T06:13:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,287
|
py
|
import numpy as np
import matplotlib.pyplot as plt
class TwoLayerNet(object):
"""
A two-layer fully-connected neural network. The net has an input dimension of
N, a hidden layer dimension of H, and performs classification over C classes.
We train the network with a softmax loss function and L2 regularization on the
weight matrices. The network uses a ReLU nonlinearity after the first fully
connected layer.
In other words, the network has the following architecture:
input - fully connected layer - ReLU - fully connected layer - softmax
The outputs of the second fully-connected layer are the scores for each class.
"""
def __init__(self, input_size, hidden_size, output_size, std=1e-4):
"""
Initialize the model. Weights are initialized to small random values and
biases are initialized to zero. Weights and biases are stored in the
variable self.params, which is a dictionary with the following keys:
W1: First layer weights; has shape (D, H)
b1: First layer biases; has shape (H,)
W2: Second layer weights; has shape (H, C)
b2: Second layer biases; has shape (C,)
Inputs:
- input_size: The dimension D of the input data.
- hidden_size: The number of neurons H in the hidden layer.
- output_size: The number of classes C.
"""
self.params = {}
self.params['W1'] = std * np.random.randn(input_size, hidden_size)
self.params['b1'] = np.zeros(hidden_size)
self.params['W2'] = std * np.random.randn(hidden_size, output_size)
self.params['b2'] = np.zeros(output_size)
def loss(self, X, y=None, reg=0.0):
"""
Compute the loss and gradients for a two layer fully connected neural
network.
Inputs:
- X: Input data of shape (N, D). Each X[i] is a training sample.
- y: Vector of training labels. y[i] is the label for X[i], and each y[i] is
an integer in the range 0 <= y[i] < C. This parameter is optional; if it
is not passed then we only return scores, and if it is passed then we
instead return the loss and gradients.
- reg: Regularization strength.
Returns:
If y is None, return a matrix scores of shape (N, C) where scores[i, c] is
the score for class c on input X[i].
If y is not None, instead return a tuple of:
- loss: Loss (data loss and regularization loss) for this batch of training
samples.
- grads: Dictionary mapping parameter names to gradients of those parameters
with respect to the loss function; has the same keys as self.params.
"""
# Unpack variables from the params dictionary
W1, b1 = self.params['W1'], self.params['b1']
W2, b2 = self.params['W2'], self.params['b2']
N, D = X.shape
H = W1.shape[1]
# Compute the forward pass
scores = None
#############################################################################
# TODO: Perform the forward pass, computing the class scores for the input. #
# Store the result in the scores variable, which should be an array of #
# shape (N, C). #
#############################################################################
z1 = X.dot(W1) + b1
a1 = np.maximum(np.zeros((N, H)), z1)
z2 = a1.dot(W2) + b2
scores = z2
#############################################################################
# END OF YOUR CODE #
#############################################################################
# If the targets are not given then jump out, we're done
if y is None:
return scores
# Compute the loss
loss = None
#############################################################################
# TODO: Finish the forward pass, and compute the loss. This should include #
# both the data loss and L2 regularization for W1 and W2. Store the result #
# in the variable loss, which should be a scalar. Use the Softmax #
# classifier loss. So that your results match ours, multiply the #
# regularization loss by 0.5 #
#############################################################################
row_max = np.max(z2, axis=1).reshape(N, 1)
z2 = z2 - row_max
correct_scores = -z2[range(N), y]
row_sum = np.log(np.sum(np.exp(z2), axis=1))
loss = np.sum(correct_scores + row_sum)
loss /= N
loss = loss + 0.5 * reg * (np.sum(W1 * W1) + np.sum(W2 * W2))
#############################################################################
# END OF YOUR CODE #
#############################################################################
# Backward pass: compute gradients
grads = {}
#############################################################################
# TODO: Compute the backward pass, computing the derivatives of the weights #
# and biases. Store the results in the grads dictionary. For example, #
# grads['W1'] should store the gradient on W1, and be a matrix of same size #
#############################################################################
exp_z2 = np.exp(z2)
prob = exp_z2 / np.sum(exp_z2, axis=1, keepdims=True)
d2 = prob
d2[range(N), y] -= 1
d2 /= N
d1 = np.dot(d2, W2.T)
d1[z1 <= 0] = 0
dW2 = np.dot(a1.T, d2) + reg*W2
dW1 = np.dot(X.T, d1) + reg*W1
db2 = np.sum(d2, axis=0, keepdims=True)
db1 = np.sum(d1, axis=0, keepdims=True)
grads['W1'] = dW1
grads['W2'] = dW2
grads['b1'] = db1
grads['b2'] = db2
#############################################################################
# END OF YOUR CODE #
#############################################################################
return loss, grads
def train(self, X, y, X_val, y_val,
learning_rate=1e-3, learning_rate_decay=0.95,
reg=1e-5, num_iters=100,
batch_size=200, verbose=False):
"""
Train this neural network using stochastic gradient descent.
Inputs:
- X: A numpy array of shape (N, D) giving training data.
- y: A numpy array f shape (N,) giving training labels; y[i] = c means that
X[i] has label c, where 0 <= c < C.
- X_val: A numpy array of shape (N_val, D) giving validation data.
- y_val: A numpy array of shape (N_val,) giving validation labels.
- learning_rate: Scalar giving learning rate for optimization.
- learning_rate_decay: Scalar giving factor used to decay the learning rate
after each epoch.
- reg: Scalar giving regularization strength.
- num_iters: Number of steps to take when optimizing.
- batch_size: Number of training examples to use per step.
- verbose: boolean; if true print progress during optimization.
"""
num_train = X.shape[0]
iterations_per_epoch = max(num_train / batch_size, 1)
# Use SGD to optimize the parameters in self.model
loss_history = []
train_acc_history = []
val_acc_history = []
for it in xrange(num_iters):
r_idxs = np.random.choice(num_train, batch_size, replace=True)
#########################################################################
# TODO: Create a random minibatch of training data and labels, storing #
# them in X_batch and y_batch respectively. #
#########################################################################
X_batch = X[r_idxs, :]
y_batch = y[r_idxs]
#########################################################################
# END OF YOUR CODE #
#########################################################################
# Compute loss and gradients using the current minibatch
loss, grads = self.loss(X_batch, y=y_batch, reg=reg)
loss_history.append(loss)
#########################################################################
# TODO: Use the gradients in the grads dictionary to update the #
# parameters of the network (stored in the dictionary self.params) #
# using stochastic gradient descent. You'll need to use the gradients #
# stored in the grads dictionary defined above. #
#########################################################################
self.params['W1'] = self.params['W1'] - learning_rate * grads['W1']
self.params['W2'] = self.params['W2'] - learning_rate * grads['W2']
self.params['b1'] = self.params['b1'] - learning_rate * grads['b1']
self.params['b2'] = self.params['b2'] - learning_rate * grads['b2']
#########################################################################
# END OF YOUR CODE #
#########################################################################
if verbose and it % 100 == 0:
print 'iteration %d / %d: loss %f' % (it, num_iters, loss)
# Every epoch, check train and val accuracy and decay learning rate.
if it % iterations_per_epoch == 0:
# Check accuracy
train_acc = (self.predict(X_batch) == y_batch).mean()
val_acc = (self.predict(X_val) == y_val).mean()
train_acc_history.append(train_acc)
val_acc_history.append(val_acc)
# Decay learning rate
learning_rate *= learning_rate_decay
return {
'loss_history': loss_history,
'train_acc_history': train_acc_history,
'val_acc_history': val_acc_history,
}
def predict(self, X):
"""
Use the trained weights of this two-layer network to predict labels for
data points. For each data point we predict scores for each of the C
classes, and assign each data point to the class with the highest score.
Inputs:
- X: A numpy array of shape (N, D) giving N D-dimensional data points to
classify.
Returns:
- y_pred: A numpy array of shape (N,) giving predicted labels for each of
the elements of X. For all i, y_pred[i] = c means that X[i] is predicted
to have class c, where 0 <= c < C.
"""
num_test = X.shape[0]
y_pred = None
H = self.params['W1'].shape[1]
W1, W2, b1, b2 = self.params['W1'], self.params['W2'], self.params['b1'], self.params['b2']
###########################################################################
# TODO: Implement this function; it should be VERY simple! #
###########################################################################
z1 = X.dot(W1) + b1
a1 = np.maximum(np.zeros((num_test, H)), z1)
z2 = a1.dot(W2) + b2
row_max = np.max(z2, axis=1).reshape(num_test, 1)
z2 = z2 - row_max
exp_z2 = np.exp(z2)
prob = exp_z2 / np.sum(exp_z2, axis=1, keepdims=True)
y_pred = np.argmax(prob, axis=1)
###########################################################################
# END OF YOUR CODE #
###########################################################################
return y_pred
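# Hedged smoke test for TwoLayerNet: builds a tiny network, runs one
# loss/gradient evaluation, and lists the gradient keys. The toy sizes and
# data below are illustrative assumptions.
if __name__ == "__main__":
    net = TwoLayerNet(input_size=4, hidden_size=10, output_size=3, std=1e-1)
    toy_X = np.random.randn(5, 4)              # 5 toy samples, 4 features each
    toy_y = np.array([0, 1, 2, 2, 1])          # integer labels in [0, C)
    toy_loss, toy_grads = net.loss(toy_X, y=toy_y, reg=0.05)
    print(toy_loss)                            # scalar: softmax data loss + L2 term
    print(sorted(toy_grads.keys()))            # ['W1', 'W2', 'b1', 'b2']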
|
[
"gspat27@gmail.com"
] |
gspat27@gmail.com
|
65ad5ba93a0830380d7d18ea1f7bc82e2d821bf9
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2526/60593/257150.py
|
58ca4c9267f800e30bf8438b260eda4e11fca89f
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011
| 2020-07-28T16:21:24
| 2020-07-28T16:21:24
| 259,576,640
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 295
|
py
|
sa=input().replace('[','')
sb=input().replace('[','')
sb=sb.replace(']','')
b=list(sb.split(','))
sa=sa.replace(']','')
a=list(sa.split(','))
ans=[]
for i in a:
if(i!='null' and i!=''):
ans.append(i)
for i in b:
if(i!='null'and i!=''):
ans.append(i)
ans.sort()
print(ans)
|
[
"1069583789@qq.com"
] |
1069583789@qq.com
|
8c79106855988b0ea3d04868c80e1f5dfc736257
|
29225f283c3c0cccf87ecc839ff0d369864e8ae3
|
/Quiz/p4.py
|
94d308c1ab1397ce7afeb6149d47aaadafdb6ada
|
[] |
no_license
|
Darmaiad/mit-602-computational-thinking
|
275059cb6aaab3dd31e9ca6c6de593fe936f9bbd
|
591a01583f1057d09d61de1f76f1d2040ada0978
|
refs/heads/master
| 2020-09-09T02:07:25.547970
| 2019-12-22T09:54:11
| 2019-12-22T09:54:11
| 221,312,480
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 609
|
py
|
def max_contig_sum(L):
""" L, a list of integers, at least one positive
Returns the maximum sum of a contiguous subsequence in L
"""
maxSum = -999999999
currentSum = 0
for l in L:
currentSum += l
if (maxSum < currentSum):
maxSum = currentSum
if currentSum < 0:
currentSum = 0
return maxSum
# in the list [3, 4, -1, 5, -4], the maximum sum is 3+4-1+5 = 11
print(max_contig_sum([3, 4, -1, 5, -4]))
# in the list [3, 4, -8, 15, -1, 2], the maximum sum is 15-1+2 = 16
print(max_contig_sum([3, 4, -8, 15, -1, 2]))
|
[
"geo.filippakis@gmail.com"
] |
geo.filippakis@gmail.com
|
6063fa9c544db5cf55f7ae1fa2e3d530b67afdd6
|
7e842bd81a28869ebbcc989f60df7b64f83be531
|
/Easy/Chevaux_de_course.py
|
6cb45ddaad5fbd643ca7d9688b49ea86ba572b2b
|
[
"MIT"
] |
permissive
|
Alumet/Codingame
|
06a39b5b8a5139bc38fc9db03dca65a66e54d447
|
5cb2f779bb7107864283744f7bbac260f4641af6
|
refs/heads/master
| 2021-01-13T00:50:22.607406
| 2017-11-28T19:02:09
| 2017-11-28T19:02:09
| 55,340,918
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 284
|
py
|
'''
Author Alumet 2015
https://github.com/Alumet/Codingame
'''
n = int(input())
power=[]
for i in range(n):
pi = int(input())
power.append(pi)
power.sort()
best=abs(power[1]-power[0])
for i in range(n-1):
best = min( best, abs(power[i]-power[i+1]))
print (best)
|
[
"Alumet@users.noreply.github.com"
] |
Alumet@users.noreply.github.com
|
f1cd1bef9505f22695630dcfe93ee310ffded7c1
|
fc3c1a6f9b4fb171b191f931a2bc26106058b897
|
/python/p149.py
|
617382ae2a10ff315286f42cd4810cab13a0706b
|
[
"LicenseRef-scancode-warranty-disclaimer"
] |
no_license
|
MvdB/Project-Euler-solutions
|
ff425f63f1b9fbb34de38037f8fbdc25c39b8f04
|
57b6d6ac7a6562ec3d76b83f74faf0d5d9e990cc
|
refs/heads/master
| 2021-01-16T19:48:24.907038
| 2016-02-26T19:53:56
| 2016-02-26T19:53:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,750
|
py
|
#
# Solution to Project Euler problem 149
# by Project Nayuki
#
# https://www.nayuki.io/page/project-euler-solutions
# https://github.com/nayuki/Project-Euler-solutions
#
def compute():
SIZE = 2000
# Generate the pseudorandom sequence according to the lagged Fibonacci generator
randseq = []
for i in range(SIZE**2):
k = i + 1
if k <= 55:
randseq.append((100003 - 200003*k + 300007*k*k*k) % 1000000 - 500000)
else:
randseq.append((randseq[-24] + randseq[-55]) % 1000000 - 500000)
	# Reshape the sequence into a 2D array
grid = [randseq[i * SIZE : (i + 1) * SIZE] for i in range(SIZE)]
# For the sequence of numbers in the grid at positions (x, y), (x+dx, y+dy), (x+2*dx, y+2*dy), ... until the
# last in-bounds indices, this function returns the maximum sum among all possible substrings of this sequence.
def get_max_substring_sum(x, y, dx, dy):
result = 0
current = 0
while 0 <= x < SIZE and 0 <= y < SIZE:
current = max(current + grid[y][x], 0) # Reset the running sum if it goes negative
result = max(current, result) # Keep track of the best seen running sum
x += dx
y += dy
return result
# Scan along all line directions and positions
maximum = 0
for i in range(SIZE):
maximum = max(maximum,
get_max_substring_sum(0, i, +1, 0), # Horizontal from left edge
get_max_substring_sum(i, 0, 0, +1), # Vertical from top edge
get_max_substring_sum(0, i, +1, +1), # Diagonal from left edge
get_max_substring_sum(i, 0, +1, +1), # Diagonal from top edge
get_max_substring_sum(i, 0, -1, +1), # Anti-diagonal from top edge
get_max_substring_sum(SIZE - 1, i, -1, +1)) # Anti-diagonal from right edge
return str(maximum)
if __name__ == "__main__":
print(compute())
|
[
"nayuki@eigenstate.org"
] |
nayuki@eigenstate.org
|
be15189d73c5ffd5055c724d0a430d4b2da75912
|
5a8dd4591bfe79df8d7d7ade144ca40f4f88d1ab
|
/alive-speed/alive/scheduler/urls.py
|
cccde518f66003b21e0909486b838b56d2d1642a
|
[] |
no_license
|
liudaihua/test_study
|
822e7891d024eaf60fdd6763d624353dfe2ef6a4
|
932fd634c02779a8171d64764b3f3611df831473
|
refs/heads/master
| 2020-06-19T02:53:10.671279
| 2019-07-12T08:26:12
| 2019-07-12T08:26:12
| 196,529,250
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 345
|
py
|
from django.urls import path
from . import views
urlpatterns = [
path('blocking', views.blocking, name='blocking'),
path('background', views.background, name='background'),
path('async_with_block', views.async_with_block, name='async_with_block'),
path('async_no_block', views.async_no_block, name='async_no_block'),
]
|
[
"noreply@github.com"
] |
liudaihua.noreply@github.com
|
296182028cf9ce3ce55c1ef80a55e7111f7d8a8f
|
8e24e8bba2dd476f9fe612226d24891ef81429b7
|
/geeksforgeeks/algorithm/hard_algo/6_11.py
|
7d522d31b34bcdff5ab37adea8e334f1a2a51be6
|
[] |
no_license
|
qmnguyenw/python_py4e
|
fb56c6dc91c49149031a11ca52c9037dc80d5dcf
|
84f37412bd43a3b357a17df9ff8811eba16bba6e
|
refs/heads/master
| 2023-06-01T07:58:13.996965
| 2021-06-15T08:39:26
| 2021-06-15T08:39:26
| 349,059,725
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,993
|
py
|
Find the longest subsequence of an array having LCM at most K
Given an array **arr[]** of **N** elements and a positive integer **K**, the
task is to find the longest sub-sequence in the array having LCM (Least Common
Multiple) at most **K**. Print the LCM and the length of the sub-sequence,
followed by the indexes (starting from 0) of the elements of the obtained
sub-sequence. Print **-1** if it is not possible to do so.
**Examples:**
> **Input:** arr[] = {2, 3, 4, 5}, K = 14
> **Output:**
> LCM = 12, Length = 3
> Indexes = 0 1 2
>
> **Input:** arr[] = {12, 33, 14, 52}, K = 4
> **Output:** -1
**Approach:** Find all the unique elements of the array and their respective
frequencies. The highest LCM we can accept is **K**. For every number **X**
with **1 ≤ X ≤ K**, take all the unique numbers from the array that **X** is a
multiple of and add their frequencies to **numCount** of **X**. The answer is
the value of **X** with the highest **numCount**; let that be the LCM. To
obtain the indexes of the numbers of the sub-sequence, traverse the array from
the beginning and print the index **i** whenever **LCM % arr[i] = 0**.
Below is the implementation of the above approach:
## C++
// C++ implementation of the approach
#include <bits/stdc++.h>
using namespace std;
// Function to find the longest subsequence
// having LCM less than or equal to K
void findSubsequence(int* arr, int n, int k)
{
// Map to store unique elements
// and their frequencies
map<int, int> M;
// Update the frequencies
for (int i = 0; i < n; ++i)
++M[arr[i]];
// Array to store the count of numbers whom
// 1 <= X <= K is a multiple of
int* numCount = new int[k + 1];
for (int i = 0; i <= k; ++i)
numCount[i] = 0;
// Check every unique element
for (auto p : M) {
if (p.first <= k) {
// Find all its multiples <= K
for (int i = 1;; ++i) {
if (p.first * i > k)
break;
// Store its frequency
numCount[p.first * i] += p.second;
}
}
else
break;
}
int lcm = 0, length = 0;
// Obtain the number having maximum count
for (int i = 1; i <= k; ++i) {
if (numCount[i] > length) {
length = numCount[i];
lcm = i;
}
}
// Condition to check if answer
// doesn't exist
if (lcm == 0)
cout << -1 << endl;
else {
// Print the answer
cout << "LCM = " << lcm
<< ", Length = " << length << endl;
cout << "Indexes = ";
for (int i = 0; i < n; ++i)
if (lcm % arr[i] == 0)
cout << i << " ";
}
}
// Driver code
int main()
{
int k = 14;
int arr[] = { 2, 3, 4, 5 };
int n = sizeof(arr) / sizeof(arr[0]);
findSubsequence(arr, n, k);
return 0;
}
---
## Java
// Java implementation of the approach
import java.util.*;
class GFG
{
// Function to find the longest subsequence
// having LCM less than or equal to K
static void findSubsequence(int []arr, int n, int k)
{
// Map to store unique elements
// and their frequencies
HashMap<Integer, Integer> M = new HashMap<Integer,Integer>();
// Update the frequencies
for (int i = 0; i < n; ++i)
{
if(M.containsKey(arr[i]))
M.put(arr[i], M.get(arr[i])+1);
else
M.put(arr[i], 1);
}
// Array to store the count of numbers whom
// 1 <= X <= K is a multiple of
int [] numCount = new int[k + 1];
for (int i = 0; i <= k; ++i)
numCount[i] = 0;
Iterator<HashMap.Entry<Integer, Integer>> itr = M.entrySet().iterator();
// Check every unique element
while(itr.hasNext())
{
HashMap.Entry<Integer, Integer> entry = itr.next();
if (entry.getKey() <= k)
{
// Find all its multiples <= K
for (int i = 1;; ++i)
{
if (entry.getKey() * i > k)
break;
// Store its frequency
numCount[entry.getKey() * i] += entry.getValue();
}
}
else
break;
}
int lcm = 0, length = 0;
// Obtain the number having maximum count
for (int i = 1; i <= k; ++i)
{
if (numCount[i] > length)
{
length = numCount[i];
lcm = i;
}
}
// Condition to check if answer
// doesn't exist
if (lcm == 0)
System.out.println(-1);
else
{
// Print the answer
System.out.println("LCM = " + lcm
+ " Length = " + length );
System.out.print( "Indexes = ");
for (int i = 0; i < n; ++i)
if (lcm % arr[i] == 0)
System.out.print(i + " ");
}
}
// Driver code
public static void main (String[] args)
{
int k = 14;
int arr[] = { 2, 3, 4, 5 };
int n = arr.length;
findSubsequence(arr, n, k);
}
}
// This code is contributed by ihritik
---
## Python3
# Python3 implementation of the approach
from collections import defaultdict
# Function to find the longest subsequence
# having LCM less than or equal to K
def findSubsequence(arr, n, k):
# Map to store unique elements
# and their frequencies
M = defaultdict(lambda:0)
# Update the frequencies
for i in range(0, n):
M[arr[i]] += 1
# Array to store the count of numbers
# whom 1 <= X <= K is a multiple of
numCount = [0] * (k + 1)
# Check every unique element
    for p in sorted(M):  # ascending key order, so the early break below is safe (mirrors the C++ ordered map)
if p <= k:
# Find all its multiples <= K
i = 1
while p * i <= k:
# Store its frequency
numCount[p * i] += M[p]
i += 1
else:
break
lcm, length = 0, 0
# Obtain the number having maximum count
for i in range(1, k + 1):
if numCount[i] > length:
length = numCount[i]
lcm = i
# Condition to check if answer doesn't exist
if lcm == 0:
print(-1)
else:
# Print the answer
print("LCM = {0}, Length = {1}".format(lcm, length))
print("Indexes = ", end = "")
for i in range(0, n):
if lcm % arr[i] == 0:
print(i, end = " ")
# Driver code
if __name__ == "__main__":
k = 14
arr = [2, 3, 4, 5]
n = len(arr)
findSubsequence(arr, n, k)
# This code is contributed by Rituraj Jain
---
## C#
// C# implementation of the approach
using System;
using System.Collections.Generic;
class GFG
{
// Function to find the longest subsequence
// having LCM less than or equal to K
static void findSubsequence(int []arr, int n, int k)
{
// Map to store unique elements
// and their frequencies
Dictionary<int, int> M = new Dictionary<int, int>();
// Update the frequencies
for (int i = 0; i < n; ++i)
{
if(M.ContainsKey(arr[i]))
M[arr[i]]++;
else
M[arr[i]] = 1;
}
// Array to store the count of numbers whom
// 1 <= X <= K is a multiple of
int [] numCount = new int[k + 1];
for (int i = 0; i <= k; ++i)
numCount[i] = 0;
Dictionary<int, int>.KeyCollection keyColl = M.Keys;
// Check every unique element
foreach(int key in keyColl)
{
if ( key <= k)
{
// Find all its multiples <= K
for (int i = 1;; ++i)
{
if (key * i > k)
break;
// Store its frequency
numCount[key * i] += M[key];
}
}
else
break;
}
int lcm = 0, length = 0;
// Obtain the number having maximum count
for (int i = 1; i <= k; ++i)
{
if (numCount[i] > length)
{
length = numCount[i];
lcm = i;
}
}
// Condition to check if answer
// doesn't exist
if (lcm == 0)
Console.WriteLine(-1);
else
{
// Print the answer
Console.WriteLine("LCM = " + lcm
+ " Length = " + length );
Console.Write( "Indexes = ");
for (int i = 0; i < n; ++i)
if (lcm % arr[i] == 0)
Console.Write(i + " ");
}
}
// Driver code
public static void Main ()
{
int k = 14;
int []arr = { 2, 3, 4, 5 };
int n = arr.Length;
findSubsequence(arr, n, k);
}
}
// This code is contributed by ihritik
---
## PHP
<?php
// PHP implementation of the approach
// Function to find the longest subsequence
// having LCM less than or equal to K
function findSubsequence($arr, $n, $k)
{
// Map to store unique elements
// and their frequencies
$M = array();
for($i = 0; $i < $n; $i++)
$M[$arr[$i]] = 0 ;
// Update the frequencies
for ($i = 0; $i < $n; ++$i)
++$M[$arr[$i]];
// Array to store the count of numbers
// whom 1 <= X <= K is a multiple of
$numCount = array();
for ($i = 0; $i <= $k; ++$i)
$numCount[$i] = 0;
// Check every unique element
foreach($M as $key => $value)
{
if ($key <= $k)
{
// Find all its multiples <= K
for ($i = 1;; ++$i)
{
if ($key * $i > $k)
break;
// Store its frequency
$numCount[$key * $i] += $value;
}
}
else
break;
}
$lcm = 0; $length = 0;
// Obtain the number having
// maximum count
for ($i = 1; $i <= $k; ++$i)
{
if ($numCount[$i] > $length)
{
$length = $numCount[$i];
$lcm = $i;
}
}
// Condition to check if answer
// doesn't exist
if ($lcm == 0)
        echo -1 . "\n";
else
{
// Print the answer
echo "LCM = ", $lcm,
", Length = ", $length, "\n";
echo "Indexes = ";
for ($i = 0; $i < $n; ++$i)
if ($lcm % $arr[$i] == 0)
echo $i, " ";
}
}
// Driver code
$k = 14;
$arr = array( 2, 3, 4, 5 );
$n = count($arr);
findSubsequence($arr, $n, $k);
// This code is contributed by Ryuga
?>
---
**Output:**
LCM = 12, Length = 3
Indexes = 0 1 2
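As a quick, hedged cross-check of the second example above (arr[] = {12, 33, 14, 52}, K = 4), the Python3 function can be called directly, assuming `findSubsequence` from the Python3 listing is in scope; every element exceeds K, so numCount stays empty and it prints -1:
arr = [12, 33, 14, 52]
k = 4
findSubsequence(arr, len(arr), k) # prints -1, matching the expected output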
|
[
"qmnguyenw@gmail.com"
] |
qmnguyenw@gmail.com
|
b8f94e051f25e62ea1ad122dd583ece74db54721
|
acb8e84e3b9c987fcab341f799f41d5a5ec4d587
|
/langs/7/r--.py
|
8991aaddf2c0f342dcddb879f1d827ed3f50ea3a
|
[] |
no_license
|
G4te-Keep3r/HowdyHackers
|
46bfad63eafe5ac515da363e1c75fa6f4b9bca32
|
fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2
|
refs/heads/master
| 2020-08-01T12:08:10.782018
| 2016-11-13T20:45:50
| 2016-11-13T20:45:50
| 73,624,224
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 486
|
py
|
import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'r--':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1])
|
[
"juliettaylorswift@gmail.com"
] |
juliettaylorswift@gmail.com
|
1510aedb16c9dbb1d1655296c75c68faa736a48d
|
ebfc05a0e3c771aebe4045f9bb945926e3fbeaf5
|
/demo/admin.py
|
2b8e9727a99fb3f2bb962c17361a4533366d09ca
|
[] |
no_license
|
nurgalix/Books
|
d26dc5e2fd272e891fe1b82d95ac2a2cce9263fc
|
ab85de0752249a1fb1ebb1f3307d215c7802a2e4
|
refs/heads/master
| 2020-11-27T11:30:09.840435
| 2019-12-21T12:03:10
| 2019-12-21T12:03:10
| 229,421,459
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 341
|
py
|
from django.contrib import admin
from .models import Book, BookNumber, Character, Author
@admin.register(Book)
class BookAdmin(admin.ModelAdmin):
list_display = ['title', 'price']
list_filter = ['published']
search_fields = ['title']
admin.site.register(BookNumber)
admin.site.register(Character)
admin.site.register(Author)
|
[
"nurali.nurgali@gmail.com"
] |
nurali.nurgali@gmail.com
|
4f1f5c574cdad49cfca516d1a275383a77f66f53
|
7b4d8fb23036711f24dd264a0ffe36418d19316f
|
/training/ride/ride.py
|
278c0c358d98341e098d452d905f5d10d1e3048b
|
[] |
no_license
|
warmar/USACO-Problems
|
51a56d0a4aa7d8788482140ab17fc8a1c3464473
|
2c6b38283397531b27e7ecbd743f6c4de83cbfc0
|
refs/heads/master
| 2021-08-31T21:04:36.492962
| 2017-12-22T22:51:03
| 2017-12-22T22:51:03
| 115,082,081
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 564
|
py
|
"""
ID: warwick2
LANG: PYTHON2
TASK: ride
"""
fin = open ('ride.in', 'r')
fout = open ('ride.out', 'w')
name,group=fin.read().split('\n')[:-1]
letters='abcdefghijklmnopqrstuvwxyz'
dic = {}
for i, letter in zip(range(len(letters)), letters):
dic[letter] = i+1
name_product = 1
for letter in name:
name_product *= dic[letter.lower()]
name_mod = name_product % 47
group_product = 1
for letter in group:
group_product *= dic[letter.lower()]
group_mod = group_product % 47
if name_mod == group_mod:
fout.write('GO\n')
else:
fout.write('STAY\n')
fout.close()
|
[
"lifelessmango@gmail.com"
] |
lifelessmango@gmail.com
|
42de44b094f3783113cee47b78a3c41af6da842c
|
6809e06e21895e3366b37b0dff6c0521a093c909
|
/Labs/Lab1/problem1.py
|
cc3942b447f4311f7e2580dc6b5b20c49480ca60
|
[] |
no_license
|
arcPenguinj/CS5001-Intensive-Foundations-of-CS
|
1820b3d919fd96239d2e04bc0c000ff9e36cadd3
|
b9281f5f959e0268b75baa2c2b1262712da3780f
|
refs/heads/main
| 2023-08-15T04:11:08.968276
| 2021-10-18T02:54:46
| 2021-10-18T02:54:46
| 418,321,674
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 400
|
py
|
def main ():
total_amount = float (input ("what is the total amount of your bill?"))
people = int (input ("how many people will split the bill?"))
    tip = float (input ("what percentage is everyone willing to tip?"))
    amount = (total_amount / people) * (tip / 100) + (total_amount / people)
    print ("each person pays: " + str(amount))
if __name__ == "__main__":
main()
|
[
"noreply@github.com"
] |
arcPenguinj.noreply@github.com
|
22acb203173e23deb4efa6558b9c41228c38119f
|
7da0a001437ed2785da82cb6dbd2c459b7085295
|
/python_textmining/0413118_3.py
|
00d84bf0b38c68035cc4757be277bfcc86d2591d
|
[] |
no_license
|
d86518/Python-Machine-Learning
|
dbd29dab948652f96bbae33f3120a0df67f282c8
|
02ee74e5228bba0cfb4b77204dea5f97b66de9e0
|
refs/heads/master
| 2021-10-10T00:34:47.602813
| 2019-01-05T04:39:45
| 2019-01-05T04:39:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,889
|
py
|
from nltk.tokenize import word_tokenize
import nltk
from collections import Counter
import string
from nltk.stem.snowball import SnowballStemmer
stemmer = SnowballStemmer('english')
from nltk.corpus import stopwords
nltk.download('stopwords')
stopword_list = stopwords.words('english')
from nltk.stem import WordNetLemmatizer
wordnet_lemmatizer = WordNetLemmatizer()
def remove_punc(tokens):
clean_tokens=[]
for tok in tokens:
if tok not in string.punctuation:
if tok!="''" and tok!='``' and tok!= "'s":
clean_tokens.append(tok)
return clean_tokens
def remove_stopwords(tokens):
tokens_clean=[]
for tok in tokens:
if tok not in stopword_list:
tokens_clean.append(tok)
return tokens_clean
#unnecessary
def lowercase(tokens):
tokens_lower = []
for tok in tokens:
tokens_lower.append(tok.lower())
return tokens_lower
def lemmatize(token):
for p in ['v','n','a','r','s']:
l = wordnet_lemmatizer.lemmatize(token,pos=p)
if l!=token:
return l
return token
f=open("corpus.txt",'r')
lines=f.read()
tokens = word_tokenize(lines)
##token original
token = word_tokenize(lines)
tokens = remove_stopwords(remove_punc(tokens))
# count of each word
word_count = Counter(tokens)
#Counting Collocations with dist
window_size = 9
word_pair_counts = Counter()
word_pair_dist_counts = Counter()
tokens = nltk.pos_tag(tokens)
#mutual_information
import math
def mutual_information(w1_w2_prob, w1_prob, w2_prob):
return math.log2(w1_w2_prob / (w1_prob * w2_prob))
## N/N MI
for i in range(len(tokens)-1):
for dist in range(1,window_size):
if i+dist<len(tokens):
if tokens[i][1]=='NN':
w1 = tokens[i][0]
if tokens[i+dist][1]=='NN':
w2 = tokens[i+dist][0]
word_pair_dist_counts[(w1,w2,dist)]+=1
word_pair_counts[(w1,w2)]+=1
print("--------N / N --------")
for (w1,w2),c in word_pair_counts.most_common(40):
w1_prob = Counter(token)[w1]/len(token)
w2_prob = Counter(token)[w2]/len(token)
w1_w2_prob = c
mutual = mutual_information(w1_w2_prob, w1_prob, w2_prob)
print("%s\t%s\t%s" % (w1,w2,mutual))
word_pair_counts = Counter()
word_pair_dist_counts = Counter()
## NNP/N MI
for i in range(len(tokens)-1):
for dist in range(1,window_size):
if i+dist<len(tokens):
if tokens[i][1]=='NNP':
w1 = tokens[i][0]
if tokens[i+dist][1]=='NN':
w2 = tokens[i+dist][0]
word_pair_dist_counts[(w1,w2,dist)]+=1
word_pair_counts[(w1,w2)]+=1
print("--------NNP / N --------")
for (w1,w2),c in word_pair_counts.most_common(40):
w1_prob = Counter(token)[w1]/len(token)
w2_prob = Counter(token)[w2]/len(token)
w1_w2_prob = c
mutual = mutual_information(w1_w2_prob, w1_prob, w2_prob)
print("%s\t%s\t%s" % (w1,w2,mutual))
word_pair_counts = Counter()
word_pair_dist_counts = Counter()
## JJ/N MI
for i in range(len(tokens)-1):
for dist in range(1,window_size):
if i+dist<len(tokens):
if tokens[i][1]=='JJ':
w1 = tokens[i][0]
if tokens[i+dist][1]=='NN':
w2 = tokens[i+dist][0]
word_pair_dist_counts[(w1,w2,dist)]+=1
word_pair_counts[(w1,w2)]+=1
print("-------- J / N --------")
for (w1,w2),c in word_pair_counts.most_common(40):
w1_prob = Counter(token)[w1]/len(token)
w2_prob = Counter(token)[w2]/len(token)
w1_w2_prob = c
mutual = mutual_information(w1_w2_prob, w1_prob, w2_prob)
print("%s\t%s\t%s" % (w1,w2,mutual))
|
[
"noreply@github.com"
] |
d86518.noreply@github.com
|
0532268807bfa568f5591eacc7e1455a6d0c6bdd
|
39d7337092314bd014ccb33062c37db4e329052d
|
/ex17.py
|
d6bc2489fd545cc0fde32e2a4c66c57e5508c461
|
[] |
no_license
|
JingGH/Hello-Python3
|
bf34adde15cc13f171f80e2e66e3ea49b6d990b8
|
9b9b93b1dcfddca19cd452e3abf05236e077aac8
|
refs/heads/master
| 2020-04-27T17:51:13.169607
| 2019-03-14T14:12:25
| 2019-03-14T14:12:25
| 174,533,835
| 0
| 0
| null | 2019-03-08T12:48:41
| 2019-03-08T12:26:52
| null |
UTF-8
|
Python
| false
| false
| 2,004
|
py
|
from sys import argv
from os.path import exists
# tip: the file should be saved with UTF-8 encoding
# echo "This is a test file." > test.txt does not work the same way on Windows
script, from_file, to_file = argv
print(f"Copying from {from_file} to {to_file}")
# we could do these two on one line, how?
# indata = open(from_file).read()
in_file = open(from_file)
print(">>>>in_file = ", repr(in_file))
indata = in_file.read()
# print the number of characters read into indata
print(f"The input file is {len(indata)} bytes long")
# exists() checks whether a file or directory exists
print(f"Does the output file exist? {exists(to_file)}")
print("Ready, hit RETURN to continue, CTRL-C to abort.")
input()
# open copied.txt (to_file) in write mode
# 'w'  opens the file for writing
# 'r'  opens the file for reading
# 'w+' opens the file for reading and writing (truncating it first)
# 'r+' opens the file for reading and writing
# 'a+' opens the file for reading and appending
out_file = open(to_file, 'w')
out_file.write(indata)
print("Alright, all done.")
out_file.close()
in_file.close()
|
[
"zhangjing@njfu.edu.cn"
] |
zhangjing@njfu.edu.cn
|
760a735560b67a2e77ef1be059169f0c3485441b
|
2af4732248c3e8513537bf379a02abf9980ce0ee
|
/test/tools/myrequests.py
|
acb64aaf2741f5373fd5aac8bc87120f2214d532
|
[] |
no_license
|
weizhimeng/biyesheji
|
2fd18346288d339e4106e3e2b57bdadcd80d1e11
|
1126eea12a239d0e4f586f5316a8d5cbb1e399f4
|
refs/heads/master
| 2021-07-11T00:50:45.798562
| 2020-08-14T10:45:25
| 2020-08-14T10:45:25
| 180,568,991
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,968
|
py
|
# -*- coding: utf-8 -*-
import sys
sys.path.insert(0, '.')
import urllib3
from tools.user_agent import UserAgent
import requests
import lxml.html
etree = lxml.html.etree
ua = UserAgent()
Headers = {
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate, br',
'User-Agent': ua.random(),
'Connection': 'keep-alive',
'Upgrade-Insecure-Requests': '1'
}
urllib3.disable_warnings()
class myrequests():
def get(self, url, params=None, **kwargs):
"""Send a GET request
:param url: URL for get
:param params: (optional) Dictionary, list of tuples or bytes to send
:param kwargs: Optional arguments tha ``requests`` takes
:return: Response
"""
kwargs.setdefault('timeout', 15)
kwargs.setdefault('verify', False)
kwargs.setdefault('headers', Headers)
kwargs.setdefault('allow_redirects', True)
with requests.Session() as session:
response = session.get(url, params=params, **kwargs)
response.xpath = etree.HTML(response.text).xpath
return response
def post(self, url, data=None, json=None, **kwargs):
"""Send a POST request
:param url: URL for post
:param data: (optional) Dictionary, list of tuples,
bytes, or file-like to send
:param json: (optional) json data to send
:param kwargs: Optional arguments tha ``requests`` takes
:return: Response
"""
kwargs.setdefault('timeout', 15)
kwargs.setdefault('verify', False)
kwargs.setdefault('headers', Headers)
kwargs.setdefault('allow_redirects', True)
with requests.Session() as session:
response = session.post(url, data=data, json=json, **kwargs)
response.xpath = etree.HTML(response.text).xpath
return response
_requests = myrequests()
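# Hedged usage sketch of the wrapper above; the target URL and the XPath
# expression are illustrative assumptions.
if __name__ == '__main__':
    resp = _requests.get('https://example.com')   # GET with the default headers/timeout/verify settings
    print(resp.status_code)
    print(resp.xpath('//title/text()'))           # lxml XPath callable attached to the response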
|
[
"noreply@github.com"
] |
weizhimeng.noreply@github.com
|
180b782b318217a45b288ec1bf6b7ce903cc0ff7
|
c92b71615106f7e85ca30d5f55d10222b86164f3
|
/old/archive_images.py
|
457f07bc51af48bea7dfa303eaedff065499e81c
|
[
"MIT"
] |
permissive
|
damienallen/growcam
|
dce071c52c21ea905aca9846f1e2bff839bc20e2
|
81565090a50b755fec059be511e2f197f057b7d4
|
refs/heads/master
| 2018-12-20T18:22:06.113935
| 2018-09-22T18:52:24
| 2018-09-22T18:52:24
| 55,100,278
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,236
|
py
|
import os
import logging
from shutil import move
# set up logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# create logging file handler
handler = logging.FileHandler('growcam_archive.log')
handler.setLevel(logging.INFO)
# set logging format
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
# add handler to logger
logger.addHandler(handler)
# set windows dropbox directory and get file list
tmp_dir = 'U:\\Dropbox\\Make\\Growcam\\tmp\\'
archive_dir = 'Y:\\Growcam\\'
logger.debug('Temp directory: %s', tmp_dir)
logger.debug('Archive directory: %s', archive_dir)
image_files = os.listdir(tmp_dir)
logger.debug('%s image(s) found in temp directory.', len(image_files))
if not image_files:
logger.info('No new images found.')
# move files if they exist
else:
logger.info('Moving %s image(s) to archive.', len(image_files))
for i in image_files:
try:
move(tmp_dir + i, archive_dir + i)
            logger.debug('Moved %s successfully.', i)
except:
logger.error('Unexpected error', exc_info=True)
logger.debug('Archiving completed.')
|
[
"contact@dallen.co"
] |
contact@dallen.co
|
af06b7653f12b422e7151eb5b49d2339f48d2cef
|
b7fe2fc9a037a97c2064d99e73d949b8e7741287
|
/week1/lab06_beautifulsoup4.py
|
bbb1206c9cf4b8c1e0d65241172e072dec3ec0d7
|
[] |
no_license
|
andrewintw/learning-python-web-crawler
|
f56f988e6046e65c580325df0e43a6d998ca0e14
|
6ab7b08d6b62349ae65e37b4ff123795ff311556
|
refs/heads/main
| 2023-06-30T11:50:30.429013
| 2021-08-01T15:51:40
| 2021-08-01T15:51:40
| 390,589,066
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,068
|
py
|
import bs4
import cloudscraper
import time
url = 'https://www.mobile01.com/forumtopic.php?c=37&s=56'
scraper = cloudscraper.create_scraper()
found = False
while not found:
response = scraper.get(url)
soup = bs4.BeautifulSoup(response.text, 'lxml')
tr_lines = soup.select('div.l-listTable__tbody div.l-listTable__tr')
if len(tr_lines) == 30:
found = True
else:
print('.')
time.sleep(5)
continue
for row in tr_lines:
print('===============================')
#print(row)
link_a = row.select_one('a')
#print(link_a['href'], link_a.text)
url = link_a['href']
title = link_a.text
publish_set = row.select_one('.l-listTable__td.l-listTable__td--time')
publish_username = publish_set.select_one('a').text
publish_time = publish_set.select_one('.o-fNotes').text
publish_count = row.select_one('.o-fMini').text
print(url, title, publish_username, publish_time, publish_count)
|
[
"andrew.lin@browan.com"
] |
andrew.lin@browan.com
|
8783a72dddde63b8fc84ec012474bba9cd508d65
|
7e3358791ccfa5f2f49fbe550ee0e06ce0dd067f
|
/list-comprehension.py
|
f21a9be9a9ac165927544c30d26ac1181ad453a9
|
[] |
no_license
|
Sasmita-Coder/HackerRank
|
af83c182e5a946d130eeb7d9bbd237510a52e2a4
|
2481baa17a6761210a6d2388eca67dc8185c3bf2
|
refs/heads/main
| 2023-06-02T10:17:07.772988
| 2021-06-21T16:42:20
| 2021-06-21T16:42:20
| 378,996,066
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 250
|
py
|
if __name__ == '__main__':
x = int(raw_input())
y = int(raw_input())
z = int(raw_input())
n = int(raw_input())
ans = [[i,j,k] for i in range(x + 1) for j in range(y + 1) for k in range(z + 1) if i + j + k != n]
print ans
|
[
"noreply@github.com"
] |
Sasmita-Coder.noreply@github.com
|
93b908eaf2d6182a8abaa2dc58a07a235f7a0dc2
|
08352d51ff1904a963b4e7f1ef598ae6c3854634
|
/datasets/samplers.py
|
42161bccb6f8ae04227ba403a7cc7476009da648
|
[] |
no_license
|
HELL-TO-HEAVEN/prcv2019-mvb-renet
|
ec9cd69d8ef02eb354363362fcba023403c01850
|
6911c89980fb0bdeb516b93a502fc201860363c0
|
refs/heads/master
| 2022-02-24T22:53:46.759942
| 2019-11-05T08:31:09
| 2019-11-05T08:31:09
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,066
|
py
|
from __future__ import absolute_import
from torch.utils.data.sampler import Sampler
from collections import defaultdict
import numpy as np
import torch
import random
class RandomIdentitySampler(Sampler):
def __init__(self, data_source, num_instances=4):
self.data_source = data_source
self.num_instances = num_instances
self.index_dic = defaultdict(list)
for index, (_, bagid, _, ) in enumerate(data_source):
self.index_dic[bagid].append(index)
self.bagids = list(self.index_dic.keys())
self.num_identities = len(self.bagids)
def __iter__(self):
indices = torch.randperm(self.num_identities)
ret = []
for i in indices:
bagid = self.bagids[i]
t = self.index_dic[bagid]
replace = False if len(t) >= self.num_instances else True
t = np.random.choice(t, size=self.num_instances, replace=replace)
ret.extend(t)
return iter(ret)
def __len__(self):
return self.num_identities * self.num_instances
|
[
"wuh199512@gmail.com"
] |
wuh199512@gmail.com
|
3767aeadb5576307bff507246af46f7f149d214b
|
458b1133df5b38a017f3a690a624a54f0f43fda7
|
/PaperExperiments/XHExp210/parameters.py
|
0e79814e843c3bfd43606772163f13a544dbf5b7
|
[
"MIT"
] |
permissive
|
stefan-c-kremer/TE_World2
|
9c7eca30ee6200d371183c5ba32b3345a4cc04ee
|
8e1fae218af8a1eabae776deecac62192c22e0ca
|
refs/heads/master
| 2020-12-18T14:31:00.639003
| 2020-02-04T15:55:49
| 2020-02-04T15:55:49
| 235,413,951
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,709
|
py
|
# parameters.py
"""
Exp 210 - {'Initial_genes': '500', 'Host_mutation_rate': '0.03', 'TE_progeny': '0.15, 0, 0.55, 1, 0.30, 2', 'TE_Insertion_Distribution': 'Triangle( pmax=0, pzero=3.0/3.0 )', 'Carrying_capacity': '300', 'TE_excision_rate': '0.1', 'Junk_BP': '14', 'Gene_Insertion_Distribution': 'Triangle( pzero=1.0/3.0, pmax=1 )', 'mutation_effect': '0.01', 'TE_death_rate': '0.005'}
"""
from TEUtil import *;
# note that "#" indicates a comment
# set the following to True if you want messages printed to the screen
# while the program runs - search for these keywords in TESim.py to see
# what each one prints out
output = {
"SPLAT": False,
"SPLAT FITNESS": False,
"INITIALIZATION": False,
"GENERATION": True,
"HOST EXTINCTION": True,
"TE EXTINCTION": True,
"TRIAL NO": True,
"GENE INIT": False,
"TE INIT": False,
};
TE_Insertion_Distribution = Triangle( pmax=0, pzero=3.0/3.0 );
Gene_Insertion_Distribution = Triangle( pzero=1.0/3.0, pmax=1 );
# Triangle( pmax, pzero ) generates values between pmax and pzero with
# a triangular probability distribution, where pmax is the point of highest
# probability, and pzero is the point of lowest probability
# - you can change the orientation of the triangle by reversing the values
# of pmax and pzero
# Flat() generates values between 0 and 1 with uniform probability
Gene_length = 1000; # use 1000?
TE_length = 1000; # use 1000?
TE_death_rate = 0.005;
TE_excision_rate = 0.1; # set this to zero for retro transposons
# for retro transposons this is the probability of the given number of progeny
# for dna transposons this is the probability of the given number of progeny
# ___PLUS___ the original re-inserting
TE_progeny = ProbabilityTable( 0.15, 0, 0.55, 1, 0.30, 2 );
Initial_genes = 500;
Append_gene = True; # True: when the initialization routine tries to place
# a gene inside another gene, it instead appends it
# at the end of the original gene (use this with small
# amounts of Junk_BP).
# False: when the initialization routine tries to place
# a gene inside another gene, try to place it somewhere
# else again (don't use this option with small amounts
# of Junk_BP).
Initial_TEs = 1;
MILLION = 1000000;
Junk_BP = 14 * MILLION;
Host_start_fitness = 1.0;
Host_mutation_rate = 0.03;
Host_mutation = ProbabilityTable( 0.40, lambda fit: 0.0,
0.30, lambda fit: fit - random.random()*0.01,
0.15, lambda fit: fit,
0.15, lambda fit: fit + random.random()*0.01
);
# what happens when a TA hits a gene
Insertion_effect = ProbabilityTable(0.30, lambda fit: 0.0,
0.20, lambda fit: fit - random.random()*0.01,
0.30, lambda fit: fit,
0.20, lambda fit: fit + random.random()*0.01
);
Carrying_capacity = 300;
Host_reproduction_rate = 1; # how many offspring each host has
Host_survival_rate = lambda propfit: min( Carrying_capacity * propfit, 0.95 );
# propfit = proportion of fitness owned by this individual
Maximum_generations = 1500;
Terminate_no_TEs = True; # end simulation if there are no TEs left
# seed = 0;
seed = None; # if seed = None, the random number generator's initial state is
# set "randomly"
save_frequency = 50; # Frequency with which to save the state of the experiment
saved = None; # if saved = None then we start a new simulation from scratch
# if saved = a string, then we open that file and resume a simulation
|
[
"stefan@kremer.ca"
] |
stefan@kremer.ca
|
33c52dc3df5d875d5aad941584e61dbcefb846f6
|
70730512e2643833e546e68761ee6cd3d7b95e1d
|
/01-python基础/code/day15/demo02.py
|
43e4cf71d1e4e010b2fcfc16cee26cf9c9d9203e
|
[] |
no_license
|
Yuchen1995-0315/review
|
7f0b0403aea2da62566642c6797a98a0485811d1
|
502859fe11686cc59d2a6d5cc77193469997fe6a
|
refs/heads/master
| 2020-08-26T23:16:33.193952
| 2019-10-24T00:30:32
| 2019-10-24T00:30:32
| 217,177,822
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,465
|
py
|
"""
异常处理
练习:exercise03
练习:信息管理系统
练习:购物车 shopping__oo.py
"""
def div_apple(apple_count):
"""
分苹果
"""
person_count = int(input("请输入人数:")) # ValueError
result = apple_count / person_count # ZeroDivisionError
print("每人%d个苹果" % result)
# 处理目的:让异常(错误)流程 转换为 正常流程
""" 1. 统一处理所有异常
try:
# 可能出错的代码
div_apple(10)
# except Exception:# 可以拦截所有错误(异常)
except:
print("程序出错啦")
print("后续逻辑")
"""
""" 2. 分门别类的处理各种异常(官方更建议)
try:
# 可能出错的代码
div_apple(10)
except ValueError:
print("输入的不是整数,所以错误啦.")
except ZeroDivisionError:
print("输入的是零,所以错误啦.")
print("后续逻辑")
"""
""" 可以处理错误执行逻辑,也可以处理没出错的执行逻辑
try:
# 可能出错的代码
div_apple(10)
except ValueError:
print("输入的不是整数,所以错误啦.")
except ZeroDivisionError:
print("输入的是零,所以错误啦.")
else:
print("没出错执行的逻辑")
print("后续逻辑")
"""
try:
# 可能出错的代码
div_apple(10)
finally:
# 如果出错了,虽然我解决不了,但有个事我必须做.
print("管你错不错呢,一定做!")
print("后续逻辑")
|
[
"2456830920@qq.com"
] |
2456830920@qq.com
|
e924375359f54ede1522b3ccc187df226dc396db
|
41e28669a7ec08778ebb8b80222bd3643515f03b
|
/src/patternfly/patternfly.py
|
af4e20eb47c486bee4f322a448a6ac2c1e412c63
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
Rintsi/django-patternfly
|
92a94d0fc180c420a62c4643ea5231f3ce07dd29
|
78a443524c08452c58548641d003da3520b19b2f
|
refs/heads/master
| 2023-03-01T22:19:33.306125
| 2021-01-18T22:39:30
| 2021-01-18T22:39:30
| 330,493,588
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,679
|
py
|
from importlib import import_module
from django.conf import settings
PATTERNFLY_DEFAULTS = {
"css_url": {
"href": "https://unpkg.com/@patternfly/patternfly@4.70.2/patternfly.min.css",
"integrity": "sha512-bWjWdYITpRYUxiU5mbATJFGaSyto6l6uH4PM5NBxampYLIcLgZX14nk5h/GE6dchDNsB+VAPyMojw4YCtX9qow==",
"crossorigin": "anonymous",
},
"css_additions_url": {
"href": "https://unpkg.com/@patternfly/patternfly@4.70.2/patternfly-addons.css",
"integrity": "sha512-/ro7O/bI1XeUpeB7asSaO9HPv6WBcYRptbvXfgRSoEZXR7aUiy28I7fPRm6gYrlujT6sHO3tDr+rKPuqswAgpA==",
"crossorigin": "anonymous",
},
"javascript_url": {
"url": "https://cdnjs.cloudflare.com/ajax/libs/patternfly/4.0.0-rc.1/js/patternfly.min.js",
"integrity": "sha512-6EtzFp0bsGbfrLipEVta4ZaVZioYzJPZidyoGUO3EGy0cI7n7CSKhfJJIvDFWl0ma5p6rT4FdGULk3SYpYgmyQ==",
"crossorigin": "anonymous",
},
"theme_url": None,
"jquery_url": {
"url": "https://cdnjs.cloudflare.com/ajax/libs/jquery/3.5.1/jquery.min.js",
"integrity": "sha512-bLT0Qm9VnAYZDflyKcBaQ2gg0hSYNQrJ8RilYldYQ1FxQYoCLtUjuuRuZo+fjqhx/qtq/1itJ0C2ejDxltZVFg==",
"crossorigin": "anonymous",
},
"jquery_slim_url": {
"url": "https://cdnjs.cloudflare.com/ajax/libs/jquery/3.5.1/jquery.slim.min.js",
"integrity": "sha512-/DXTXr6nQodMUiq+IUJYCt2PPOUjrHJ9wFrqpJ3XkgPNOZVfMok7cRw6CSxyCQxXn6ozlESsSh1/sMCTF1rL/g==",
"crossorigin": "anonymous",
},
"javascript_in_head": False,
"include_jquery": False,
"use_i18n": False
}
def get_patternfly_setting(name, default=None):
"""Read a setting."""
# Start with a copy of default settings
PATTERNFLY = PATTERNFLY_DEFAULTS.copy()
# Override with user settings from settings.py
PATTERNFLY.update(getattr(settings, "PATTERNFLY", {}))
# Update use_i18n
PATTERNFLY["use_i18n"] = i18n_enabled()
return PATTERNFLY.get(name, default)
def jquery_url():
"""Return the full url to jQuery library file to use."""
return get_patternfly_setting("jquery_url")
def jquery_slim_url():
"""Return the full url to slim jQuery library file to use."""
return get_patternfly_setting("jquery_slim_url")
def include_jquery():
"""
Return whether to include jquery.
Setting could be False, True|'full', or 'slim'
"""
return get_patternfly_setting("include_jquery")
def javascript_url():
    """Return the full url to the PatternFly JavaScript file."""
    return get_patternfly_setting("javascript_url")
def css_url():
    """Return the full url to the PatternFly CSS file."""
    return get_patternfly_setting("css_url")
def css_additions_url():
    """Return the full url to the PatternFly CSS additions file."""
    return get_patternfly_setting("css_additions_url")
def theme_url():
"""Return the full url to the theme CSS file."""
return get_patternfly_setting("theme_url")
def i18n_enabled():
"""Return the projects i18n setting."""
return getattr(settings, "USE_I18N", False)
def get_renderer(renderers, **kwargs):
layout = kwargs.get("layout", "")
path = renderers.get(layout, renderers["default"])
mod, cls = path.rsplit(".", 1)
return getattr(import_module(mod), cls)
def get_formset_renderer(**kwargs):
renderers = get_patternfly_setting("formset_renderers")
return get_renderer(renderers, **kwargs)
def get_form_renderer(**kwargs):
renderers = get_patternfly_setting("form_renderers")
return get_renderer(renderers, **kwargs)
def get_field_renderer(**kwargs):
renderers = get_patternfly_setting("field_renderers")
return get_renderer(renderers, **kwargs)
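# Hedged illustration of how a project might override these defaults from its
# Django settings.py; the keys mirror PATTERNFLY_DEFAULTS above and the values
# are assumptions, not recommended URLs.
#
# PATTERNFLY = {
#     "include_jquery": True,          # also emit the jQuery <script> tag
#     "javascript_in_head": True,      # place the script tag in <head>
#     "theme_url": {"href": "https://example.com/static/my-theme.css"},
# }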
|
[
"rintsi@gmail.com"
] |
rintsi@gmail.com
|
886fd6cf3fc2b0ef118a75dda6970d19a4dbe368
|
a5ba631dddaf2912c309601f8fbdd3c5b494fe20
|
/src/azure-cli-core/tests/test_application.py
|
2142b0515bc0f65955d650493cf8b880a7437891
|
[
"MIT"
] |
permissive
|
saurabsa/azure-cli-old
|
37471020cd2af9a53e949e739643299f71037565
|
f77477a98c9aa9cb55daf5b0d2f410d1455a9225
|
refs/heads/master
| 2023-01-09T04:00:15.642883
| 2018-04-23T21:40:04
| 2018-04-23T21:40:04
| 130,759,501
| 0
| 0
|
NOASSERTION
| 2022-12-27T14:59:06
| 2018-04-23T21:33:34
|
Python
|
UTF-8
|
Python
| false
| false
| 4,302
|
py
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import unittest
import os
import tempfile
from six import StringIO
from azure.cli.core.application import Application, Configuration, IterateAction
from azure.cli.core.commands import CliCommand
from azure.cli.core._util import CLIError
class TestApplication(unittest.TestCase):
@classmethod
def setUpClass(cls):
pass
@classmethod
def tearDownClass(cls):
pass
def setUp(self):
self.io = StringIO()
def tearDown(self):
self.io.close()
def test_application_register_and_call_handlers(self):
handler_called = [False]
def handler(**kwargs):
kwargs['args'][0] = True
def other_handler(**kwargs): # pylint: disable=unused-variable
self.assertEqual(kwargs['args'], 'secret sauce')
config = Configuration([])
app = Application(config)
app.raise_event('was_handler_called', args=handler_called)
self.assertFalse(handler_called[0],
"Raising event with no handlers registered somehow failed...")
app.register('was_handler_called', handler)
self.assertFalse(handler_called[0])
# Registered handler won't get called if event with different name
# is raised...
app.raise_event('other_handler_called', args=handler_called)
self.assertFalse(handler_called[0], 'Wrong handler called!')
app.raise_event('was_handler_called', args=handler_called)
self.assertTrue(handler_called[0], "Handler didn't get called")
app.raise_event('other_handler_called', args='secret sauce')
def test_list_value_parameter(self):
hellos = []
def handler(args):
hellos.append(args)
command = CliCommand('test command', handler)
command.add_argument('hello', '--hello', nargs='+', action=IterateAction)
command.add_argument('something', '--something')
cmd_table = {'test command': command}
argv = 'az test command --hello world sir --something else'.split()
config = Configuration(argv)
config.get_command_table = lambda: cmd_table
application = Application(config)
application.execute(argv[1:])
self.assertEqual(2, len(hellos))
self.assertEqual(hellos[0]['hello'], 'world')
self.assertEqual(hellos[0]['something'], 'else')
self.assertEqual(hellos[1]['hello'], 'sir')
self.assertEqual(hellos[1]['something'], 'else')
def test_expand_file_prefixed_files(self):
f = tempfile.NamedTemporaryFile(delete=False)
f.close()
f_with_bom = tempfile.NamedTemporaryFile(delete=False)
f_with_bom.close()
with open(f.name, 'w+') as stream:
stream.write('foo')
from codecs import open as codecs_open
with codecs_open(f_with_bom.name, encoding='utf-8-sig', mode='w+') as stream:
stream.write('foo')
cases = [
[['bar=baz'], ['bar=baz']],
[['bar', 'baz'], ['bar', 'baz']],
[['bar=@{}'.format(f.name)], ['bar=foo']],
[['bar=@{}'.format(f_with_bom.name)], ['bar=foo']],
[['bar', '@{}'.format(f.name)], ['bar', 'foo']],
[['bar', f.name], ['bar', f.name]],
[['bar=name@company.com'], ['bar=name@company.com']],
[['bar', 'name@company.com'], ['bar', 'name@company.com']],
[['bar=mymongo=@connectionstring'], ['bar=mymongo=@connectionstring']]
]
for test_case in cases:
try:
args = Application._expand_file_prefixed_files(test_case[0]) # pylint: disable=protected-access
self.assertEqual(args, test_case[1], 'Failed for: {}'.format(test_case[0]))
except CLIError as ex:
self.fail('Unexpected error for {} ({}): {}'.format(test_case[0], args, ex))
os.remove(f.name)
if __name__ == '__main__':
unittest.main()
|
[
"saurabsa@microsoft.com"
] |
saurabsa@microsoft.com
|
a24c64d98cd1aa2290e0a7b928ec7b1c9d502a24
|
03477749a8cb7aea88ab3f75a0a458c4cf647e72
|
/AC-agents/keras/actor.py
|
6a8cd3784b9238b844f617c4274916e9b6586a44
|
[] |
no_license
|
amirloe/DRL_ASS3
|
ebc434e7fcccf140daff1d59d68b218d3a93b23b
|
7a5254a945b11af3e3a6ad28b087efd6dc933e9c
|
refs/heads/master
| 2023-02-16T10:12:18.351105
| 2021-01-14T15:38:01
| 2021-01-14T15:38:01
| 324,605,320
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,740
|
py
|
import tensorflow as tf
from keras.models import Model
from keras.layers import Dense, Concatenate, Input
weights_initializer = tf.initializers.GlorotUniform()
class Actor:
def __init__(self, state_size, action_size, name='actor'):
self.state_size = state_size
self.action_size = action_size
inputs = Input(self.state_size)
# 5-Hidden Layers
X = Dense(256, input_shape=(self.state_size,), activation="relu", kernel_initializer=weights_initializer,
name='h1')(inputs)
X = Dense(160, activation="relu", kernel_initializer=weights_initializer, name='h2')(X)
X = Dense(128, activation="relu", kernel_initializer=weights_initializer, name='h3')(X)
X = Dense(64, activation="relu", kernel_initializer=weights_initializer, name='h4')(X)
X = Dense(64, activation="relu", kernel_initializer=weights_initializer, name='h5')(X)
# Output layer
output = Dense(self.action_size, activation=None, kernel_initializer=weights_initializer, name='output')(X)
self.model = Model(inputs=inputs, outputs=output, name='actor_model')
def predict(self, state):
return self.model(state)
def train(self, state, td_error, action_one_hot, actor_lr):
with tf.GradientTape() as tape:
optimizer = tf.optimizers.Adam(learning_rate=actor_lr)
output = self.predict(state)
neg_log_prob = tf.compat.v1.nn.softmax_cross_entropy_with_logits_v2(logits=output, labels=action_one_hot)
loss = tf.reduce_mean(neg_log_prob * td_error)
grads = tape.gradient(loss, self.model.trainable_variables)
optimizer.apply_gradients(zip(grads, self.model.trainable_variables))
|
[
"amirloe@post.bgu.ac.il"
] |
amirloe@post.bgu.ac.il
|
e0e3e669df1e49aed4df29deef8a61af3e197166
|
3551107a3fdc643d96382b24b03f9b5628218431
|
/chocolate/search/cmaes.py
|
79fb445191346a2439c2b4f626dd4d63eb2911c1
|
[
"BSD-3-Clause"
] |
permissive
|
lccostajr/chocolate
|
3c62a6f9fedd879c7a40d90a02849a37721064c4
|
a4a310b4d96bb4afcdf99ce482c12481d209fbc6
|
refs/heads/master
| 2021-01-01T04:52:58.625918
| 2017-07-14T18:50:30
| 2017-07-14T18:50:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 18,511
|
py
|
from itertools import groupby
from operator import itemgetter
import numpy
from ..base import SearchAlgorithm
class CMAES(SearchAlgorithm):
"""Covariance Matrix Adaptation Evolution Strategy minimization method.
A CMA-ES strategy that combines the :math:`(1 + \\lambda)` paradigm
[Igel2007]_, the mixed integer modification [Hansen2011]_, active
covariance update [Arnold2010]_ and covariance update for constrained
optimization [Arnold2012]_. It generates a single new point per
iteration and adds a random step mutation to dimensions that undergoes a
too small modification. Even if it includes the mixed integer
modification, CMA-ES does not handle well dimensions without variance and
thus it should be used with care on search spaces with conditional
dimensions.
Args:
connection: A database connection object.
space: The search space to explore.
crossvalidation: A cross-validation object that handles experiment
repetition.
clear_db: If set to :data:`True` and a conflict arise between the
provided space and the space in the database, completely clear the
database and set the space to the provided one.
**params: Additional parameters to pass to the strategy as described in
the following table, along with default values.
+----------------+---------------------------+----------------------------+
| Parameter | Default value | Details |
+================+===========================+============================+
| ``d`` | ``1 + ndim / 2`` | Damping for step-size. |
+----------------+---------------------------+----------------------------+
| ``ptarg`` | ``1 / 3`` | Taget success rate. |
+----------------+---------------------------+----------------------------+
| ``cp`` | ``ptarg / (2 + ptarg)`` | Step size learning rate. |
+----------------+---------------------------+----------------------------+
| ``cc`` | ``2 / (ndim + 2)`` | Cumulation time horizon. |
+----------------+---------------------------+----------------------------+
| ``ccovp`` | ``2 / (ndim**2 + 6)`` | Covariance matrix positive |
| | | learning rate. |
+----------------+---------------------------+----------------------------+
| ``ccovn`` | ``0.4 / (ndim**1.6 + 1)`` | Covariance matrix negative |
| | | learning rate. |
+----------------+---------------------------+----------------------------+
| ``beta`` | ``0.1 / (ndim + 2)`` | Covariance matrix |
| | | constraint learning rate. |
+----------------+---------------------------+----------------------------+
| ``pthresh`` | ``0.44`` | Threshold success rate. |
+----------------+---------------------------+----------------------------+
.. [Igel2007] Igel, Hansen, Roth. Covariance matrix adaptation for
multi-objective optimization. 2007
.. [Arnold2010] Arnold and Hansen. Active covariance matrix adaptation for
the (1 + 1)-CMA-ES. 2010.
.. [Hansen2011] Hansen. A CMA-ES for Mixed-Integer Nonlinear Optimization.
       [Research Report] RR-7751, INRIA. 2011
.. [Arnold2012] Arnold and Hansen. A (1 + 1)-CMA-ES for Constrained
Optimisation. 2012
"""
def __init__(self, connection, space, crossvalidation=None, clear_db=False, **params):
super(CMAES, self).__init__(connection, space, crossvalidation, clear_db)
self.random_state = numpy.random.RandomState()
self.params = params
def _next(self, token=None):
"""Retrieve the next point to evaluate based on available data in the
database. Each time :meth:`next` is called, the algorithm will reinitialize
it-self based on the data in the database.
Returns:
A tuple containing a unique token and a fully qualified parameter set.
"""
self._init()
# Check what is available in that database
results = {r["_chocolate_id"]: r for r in self.conn.all_results()}
ancestors, ancestors_ids = self._load_ancestors(results)
bootstrap = self._load_bootstrap(results, ancestors_ids)
# Rank-mu update on individuals created from another algorithm
self._bootstrap(bootstrap)
        # _bootstrap sets the parent if enough candidates are available (>= 4)
# If the parent is still None and ancestors are available
# set the parent to the first evaluated candidate if any
if self.parent is None and len(ancestors) > 0:
self.parent = next((a for a in ancestors if a["loss"] is not None), None)
# Generate the next point
token = token or {}
token.update({"_chocolate_id": self.conn.count_results()})
# If the parent is still None, no information available
if self.parent is None:
# out = numpy.ones(self.dim) / 2.0
out = self.random_state.rand(self.dim)
# Signify the first point to others using loss set to None
# Transform to dict with parameter names
# entry = {str(k): v for k, v in zip(self.space.names(), out)}
entry = self.space(out, transform=False)
# entry["_loss"] = None
entry.update(token)
self.conn.insert_result(entry)
# Add the step to the complementary table
# Transform to dict with parameter names
# entry = {str(k): v for k, v in zip(self.space.names(), out)}
entry = self.space(out, transform=False)
entry.update(_ancestor_id=-1, _invalid=0, _search_algo="cmaes", **token)
self.conn.insert_complementary(entry)
# return the true parameter set
return token, self.space(out)
else:
# Simulate the CMA-ES update for each ancestor.
for key, group in groupby(ancestors[1:], key=itemgetter("ancestor_id")):
                # If the loss for this entry is not yet available, don't include it
group = list(group)
self.lambda_ = len(group)
self._configure() # Adjust constants that depends on lambda
self._update_internals(group)
invalid = 1
while invalid > 0:
# Generate a single candidate at a time
self.lambda_ = 1
self._configure()
# The ancestor id is the last candidate that participated in the
# covariance matrix update
ancestor_id = next(
(a["chocolate_id"] for a in reversed(bootstrap + ancestors) if a["loss"] is not None or a[
"invalid"] > 0),
None)
assert ancestor_id is not None, "Invalid ancestor id"
out, y = self._generate()
# Encode constraint violation
invalid = sum(2 ** (2 * i) for i, xi in enumerate(out) if xi < 0)
invalid += sum(2 ** (2 * i + 1) for i, xi in enumerate(out) if xi >= 1)
# Add the step to the complementary table
# Transform to dict with parameter names
# entry = {str(k): v for k, v in zip(self.space.names(), y)}
entry = self.space(y, transform=False)
entry.update(_ancestor_id=ancestor_id, _invalid=invalid, _search_algo="cmaes", **token)
self.conn.insert_complementary(entry)
# Signify next point to others using loss set to None
# Transform to dict with parameter names
# entry = {str(k): v for k, v in zip(self.space.names(), out)}
entry = self.space(out, transform=False)
# entry["_loss"] = None
entry.update(token)
self.conn.insert_result(entry)
# return the true parameter set
return token, self.space(out)
def _init(self):
self.parent = None
self.sigma = 0.2
self.dim = len(self.space)
self.C = numpy.identity(self.dim)
self.A = numpy.linalg.cholesky(self.C)
self.pc = numpy.zeros(self.dim)
# Covariance matrix adaptation
self.cc = self.params.get("cc", 2.0 / (self.dim + 2.0))
self.ccovp = self.params.get("ccovp", 2.0 / (self.dim ** 2 + 6.0))
self.ccovn = self.params.get("ccovn", 0.4 / (self.dim ** 1.6 + 1.0))
self.beta = self.params.get("beta", 0.1 / (self.dim + 2.0))
self.pthresh = self.params.get("pthresh", 0.44)
        # Active covariance update for unsuccessful candidates
self.ancestors = list()
# Constraint vectors for covariance adaptation
# We work in the unit box [0, 1)
self.constraints = numpy.zeros((self.dim * 2, self.dim))
self.S_int = numpy.zeros(self.dim)
for i, s in enumerate(self.space.steps()):
if s is not None:
self.S_int[i] = s
self.i_I_R = numpy.flatnonzero(2 * self.sigma * numpy.diag(self.C) ** 0.5 < self.S_int)
self.update_count = 0
def _configure(self):
self.d = self.params.get("d", 1.0 + self.dim / (2.0 * self.lambda_))
self.ptarg = self.params.get("ptarg", 1.0 / (5 + numpy.sqrt(self.lambda_) / 2.0))
self.cp = self.params.get("cp", self.ptarg * self.lambda_ / (2 + self.ptarg * self.lambda_))
if self.update_count == 0:
self.psucc = self.ptarg
def _load_ancestors(self, results):
# Get a list of the actual ancestor and the complementary information
# on that ancestor
ancestors = list()
ancestors_ids = set()
for c in sorted(self.conn.all_complementary(), key=itemgetter("_chocolate_id")):
candidate = dict()
candidate["step"] = numpy.array([c[str(k)] for k in self.space.names()])
candidate["chocolate_id"] = c["_chocolate_id"]
candidate["ancestor_id"] = c["_ancestor_id"]
candidate["invalid"] = c["_invalid"]
candidate["loss"] = None
if c["_invalid"] == 0:
candidate["X"] = numpy.array([results[c["_chocolate_id"]][str(k)] for k in self.space.names()])
candidate["loss"] = results[c["_chocolate_id"]]["_loss"]
ancestors.append(candidate)
ancestors_ids.add(candidate["chocolate_id"])
return ancestors, ancestors_ids
def _load_bootstrap(self, results, ancestors_ids):
# Find individuals produced by another algorithm
bootstrap = list()
for _, c in sorted(results.items()):
# Skip those included in ancestors
if c["_chocolate_id"] in ancestors_ids:
continue
candidate = dict()
# The initial distribution is assumed uniform and centred on 0.5^n
candidate["step"] = numpy.array([c[str(k)] - 0.5 for k in self.space.names()])
candidate["X"] = numpy.array([results[c["_chocolate_id"]][str(k)] for k in self.space.names()])
candidate["chocolate_id"] = c["_chocolate_id"]
candidate["ancestor_id"] = -1
# Compute constraint violation
candidate["invalid"] = sum(2 ** (2 * i) for i, xi in enumerate(candidate["X"]) if xi < 0)
candidate["invalid"] += sum(2 ** (2 * i + 1) for i, xi in enumerate(candidate["X"]) if xi >= 1)
candidate["loss"] = None
if candidate["invalid"] == 0:
candidate["loss"] = c["_loss"]
bootstrap.append(candidate)
return bootstrap
def _bootstrap(self, candidates):
# Active covariance update for invalid individuals
self._process_invalids(candidates)
# Remove invalids and not evaluated
candidates = [c for c in candidates if c["invalid"] == 0 and c["loss"] is not None]
# Rank-mu update for covariance matrix
if len(candidates) >= 4:
mu = int(len(candidates) / 2)
# superlinear weights (the usual default)
weights = numpy.log(mu + 0.5) - numpy.log(numpy.arange(1, mu + 1))
weights /= sum(weights)
c1 = 2 / len(candidates[0]) ** 2
cmu = mu / len(candidates[0]) ** 2
candidates.sort(key=itemgetter("loss"))
c_array = numpy.array([c["step"] for c in candidates[:mu]])
cw = numpy.sum(weights * c_array.T, axis=1)
self.pc = (1 - self.cc) * self.pc + numpy.sqrt(1 - (1 - self.cc) ** 2) * numpy.sqrt(mu) * cw
self.C = (1 - c1 - cmu) * self.C + c1 * numpy.outer(self.pc, self.pc) + cmu * numpy.dot(weights * c_array.T,
c_array)
self.parent = candidates[0]
def _update_internals(self, candidates):
assert self.parent is not None, "No parent for CMA-ES internal update"
assert "loss" in self.parent, "Parent has no loss in CMA-ES internal update"
assert self.parent["loss"] is not None, "Invalid loss for CMA-ES parent"
# Active covariance update for invalid individuals
self._process_invalids(candidates)
# Remove invalids and not evaluated
candidates = [s for s in candidates if s["invalid"] == 0 and s["loss"] is not None]
if len(candidates) == 0:
# Empty group, abort
return
# Is the new point better than the parent?
candidates.sort(key=itemgetter("loss"))
lambda_succ = sum(s["loss"] <= self.parent["loss"] for s in candidates)
p_succ = float(lambda_succ) / self.lambda_
self.psucc = (1 - self.cp) * self.psucc + self.cp * p_succ
# On success update the matrices C, A == B*D and evolution path
if candidates[0]["loss"] <= self.parent["loss"]:
self.parent = candidates[0].copy()
if self.psucc < self.pthresh:
self.pc = (1 - self.cc) * self.pc + numpy.sqrt(self.cc * (2 - self.cc)) * candidates[0]["step"]
self.C = (1 - self.ccovp) * self.C + self.ccovp * numpy.outer(self.pc, self.pc)
else:
self.pc = (1 - self.cc) * self.pc
self.C = (1 - self.ccovp) * self.C + self.ccovp * (numpy.outer(self.pc, self.pc)
+ self.cc * (2 - self.cc) * self.C)
self.A = numpy.linalg.cholesky(self.C)
elif len(self.ancestors) >= 5 and candidates[0]["loss"] > sorted(s["loss"] for s in self.ancestors)[-1]:
# Active negative covariance update
z = numpy.dot(numpy.linalg.inv(self.A), candidates[0]["step"])
n_z2 = numpy.linalg.norm(z) ** 2
if 1 - self.ccovn * n_z2 / (1 + self.ccovn) < 0.5:
ccovn = 1 / (2 * numpy.linalg.norm(z) ** 2 - 1)
else:
ccovn = self.ccovn
self.A = numpy.sqrt(1 + ccovn) * self.A + numpy.sqrt(1 + ccovn) / n_z2 * (
numpy.sqrt(1 - ccovn * n_z2 / (1 + ccovn)) - 1) * numpy.dot(self.A, numpy.outer(z, z))
            self.C = numpy.dot(self.A, self.A.T)  # Yup we still have an update of C
# Keep a list of ancestors sorted by order of appearance
self.ancestors.insert(0, candidates[0])
if len(self.ancestors) > 5:
self.ancestors.pop(-1)
# Update the step size
self.sigma = self.sigma * numpy.exp(1.0 / self.d * (self.psucc - self.ptarg) / (1 - self.ptarg))
# Update the dimensions where integer mutation is needed
self.i_I_R = numpy.flatnonzero(2 * self.sigma * numpy.diag(self.C) ** 0.5 < self.S_int)
self.update_count += 1
def _process_invalids(self, candidates):
# Process all invalid individuals
for s in candidates:
if s["invalid"] > 0:
sum_vw = 0
invalid_count = 0
inv_A = numpy.linalg.inv(self.A)
_, invalids = bin(s["invalid"]).split("b")
for j, b in enumerate(reversed(invalids)):
if b == "1":
self.constraints[j, :] = (1 - self.cc) * self.constraints[j, :] + self.cc * s["step"]
w = numpy.dot(inv_A, self.constraints[j, :])
sum_vw += numpy.outer(self.constraints[j, :], w) / numpy.inner(w, w)
invalid_count += 1
# Update A and make changes in C since in next updates we use C
self.A = self.A - (self.beta / invalid_count) * sum_vw
self.C = numpy.dot(self.A, self.A.T)
def _generate(self):
n_I_R = self.i_I_R.shape[0]
R_int = numpy.zeros(self.dim)
        # Mixed integer CMA-ES is developed for (mu/mu, lambda)
# We have a (1 + 1) setting, the integer will be probabilistic.
# The integer mutation is lambda / 2 if all dimensions are integers or
# min(lambda / 2 - 1, lambda / 10 + n_I_R + 1), minus 1 accounts for
# the last new candidate getting its integer mutation from the last best
# solution.
if n_I_R == self.dim:
p = 0.5
else:
p = min(0.5, 0.1 + n_I_R / self.dim)
if n_I_R > 0 and self.random_state.rand() < p:
Rp = numpy.zeros(self.dim)
Rpp = numpy.zeros(self.dim)
# Ri' has exactly one of its components set to one.
# The Ri' are dependent in that the number of mutations for each coordinate
# differs at most by one.
j = self.random_state.choice(self.i_I_R)
Rp[j] = 1
Rpp[j] = self.random_state.geometric(p=0.7 ** (1.0 / n_I_R)) - 1
I_pm1 = (-1) ** self.random_state.randint(0, 2, self.dim)
R_int = I_pm1 * (Rp + Rpp)
y = numpy.dot(self.random_state.standard_normal(self.dim), self.A.T)
arz = self.parent["X"] + self.sigma * y + self.S_int * R_int
return arz, y
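
# Illustrative usage sketch: driving this sampler through chocolate's public
# interface (SQLiteConnection, uniform, next, update). The database URL and
# the quadratic loss below are hypothetical placeholders, not part of the
# library.
if __name__ == "__main__":
    import chocolate as choco

    conn = choco.SQLiteConnection("sqlite:///cmaes_demo.db")
    space = {"x": choco.uniform(-5, 5), "y": choco.uniform(-5, 5)}
    sampler = choco.CMAES(conn, space)

    for _ in range(20):
        token, params = sampler.next()                    # one candidate per call
        loss = (params["x"] - 1.0) ** 2 + (params["y"] + 2.0) ** 2
        sampler.update(token, loss)                       # report loss to the database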
|
[
"f.derainville@gmail.com"
] |
f.derainville@gmail.com
|
eaeeca02077d219d98821fcaf5dbc7bbde70203d
|
01e29cbbb3eba58e255b9bdaa2d17f800a50073e
|
/2 Discrete time signals/DiscreteTimeSignals.py
|
34e35faa9b3ac918cfb703f7fbf80c0cb7a136a8
|
[
"MIT"
] |
permissive
|
Alja9/Plot-Signal-Processing-with-Python
|
ecef89018bf911c3a83e4d3da67c31165dce6e83
|
71021f06a6dd66a1234d6ed8f8b2ccbc5a04fc93
|
refs/heads/master
| 2021-01-02T09:54:29.495502
| 2020-02-13T13:30:29
| 2020-02-13T13:30:29
| 239,566,054
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 577
|
py
|
import numpy as np
from matplotlib import pyplot as plt
n = np.arange(-1,6)
xN = np.array([0,1,2,3,4,5,0])
xN_1 = n+1
xN_11 = [None] * len(n)
x=0
for l in xN:
if xN_1[x] == 0:
xN_11[x] = 1
elif xN_1[x] == 1:
xN_11[x] = 2
elif xN_1[x] == 2:
xN_11[x] = 3
elif xN_1[x] == 3:
xN_11[x] = 4
elif xN_1[x] == 4:
xN_11[x] = 5
else:
xN_11[x] = 0
x+=1
plt.xlabel('n')
plt.ylabel('x[n+1]')
plt.title('Discrete Time Signals')
plt.stem(n,xN_11)
plt.show()
|
[
"sayaalja12@gmail.com"
] |
sayaalja12@gmail.com
|
15a52973aad29181b12f44a258c04283d53544fd
|
d8a766d00c6cdc78ae62204d61c1ff32d4c76914
|
/menu.py
|
d4acc49f049bd8e946649c8ef85102e0e0f9f090
|
[] |
no_license
|
Dosache/RepositorioPython006
|
759ece3f85178b25591f986f729ce6fd5eb30866
|
65b9df982c24c288369095001d3d16cff49b04b7
|
refs/heads/main
| 2022-12-19T07:04:23.304199
| 2020-10-03T02:50:13
| 2020-10-03T02:50:13
| 300,626,009
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,717
|
py
|
import os
#menu
def Numeros(n):
posi = 0
nega = 0
cero = 0
for i in range(1,n+1):
numeritos = int(input("Ingrese un numero: "))
if (numeritos>0):
print("El numero es postivo")
print("")
posi = posi+1
if (numeritos<0):
print("El numero es negativo")
print("")
nega = nega+1
if (numeritos==0):
print("El numero es igual a 0")
print("")
cero= cero+1
print("La cantidad de numeros positivos es de: ", posi)
print("La cantidad de numeros negativos es de: ", nega)
print("La cantidad de numeros iguales a 0 es de: ", cero)
print("")
pause = input("Digite cualquier tecla para continuar: ")
def Personas(n):
totaledad = 0
prom = 0
for i in range (n):
print("")
nom = input("Ingrese nombre: ")
edad = int(input("Ingrese edad: "))
totaledad = totaledad+edad
prom = round((totaledad/n),1)
print("")
print("El promedio de edades de las personas ingresadas es de: ",prom)
pause = input("Digite cualquier tecla para continuar: ")
seguir = True
n = 0
while (seguir):
os.system('cls')
print("Menu: ")
print("")
print("1. Numeros")
print("2. Datos Personales")
print("3. Finalizar")
print("")
op = int(input("Digite opcion 1, 2 o 3: "))
if(op==1):
os.system('cls')
n = int(input("Ingrese una cantidad de numeros: "))
Numeros(n)
if(op==2):
os.system('cls')
n = int(input("Ingrese una cantidad de personas: "))
Personas(n)
if(op==3):
print("Programa finalizado!")
break
|
[
"riquelme.sc9@gmail.com"
] |
riquelme.sc9@gmail.com
|
2469304eb23d9a67d2c373e555ac6e69712e742c
|
96a0b0f1a8300418519164c51a54ce5dc12b5003
|
/node_modules/mongoose/node_modules/mongodb/node_modules/kerberos/build/config.gypi
|
f66b3084e2cfd137457da1f2d887b02b37d2e394
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
benhalverson/angularWorkshop
|
8da075a6c9fd4929d9af18e861b29e3637f0d97d
|
75adefd56563bd2b80ce28d46778aabd422e2bef
|
refs/heads/master
| 2021-01-10T19:56:46.406716
| 2015-02-05T00:05:36
| 2015-02-05T00:05:36
| 30,328,385
| 0
| 1
| null | 2019-04-25T05:56:18
| 2015-02-05T00:06:11
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 3,161
|
gypi
|
# Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"clang": 1,
"host_arch": "x64",
"node_install_npm": "true",
"node_prefix": "/Users/ben/.nvm/v0.10.30",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_openssl": "false",
"node_shared_v8": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_unsafe_optimizations": 0,
"node_use_dtrace": "true",
"node_use_etw": "false",
"node_use_openssl": "true",
"node_use_perfctr": "false",
"python": "/usr/bin/python",
"target_arch": "x64",
"v8_enable_gdbjit": 0,
"v8_no_strict_aliasing": 1,
"v8_use_snapshot": "true",
"want_separate_host_toolset": 0,
"nodedir": "/Users/ben/.node-gyp/0.10.30",
"copy_dev_lib": "true",
"standalone_static_library": 1,
"save_dev": "",
"browser": "",
"viewer": "man",
"rollback": "true",
"usage": "",
"globalignorefile": "/Users/ben/.nvm/v0.10.30/etc/npmignore",
"init_author_url": "",
"shell": "/bin/bash",
"parseable": "",
"shrinkwrap": "true",
"init_license": "ISC",
"cache_max": "Infinity",
"init_author_email": "",
"sign_git_tag": "",
"cert": "",
"git_tag_version": "true",
"local_address": "",
"long": "",
"fetch_retries": "2",
"npat": "",
"registry": "https://registry.npmjs.org/",
"key": "",
"message": "%s",
"versions": "",
"globalconfig": "/Users/ben/.nvm/v0.10.30/etc/npmrc",
"always_auth": "",
"spin": "true",
"cache_lock_retries": "10",
"cafile": "",
"heading": "npm",
"fetch_retry_mintimeout": "10000",
"proprietary_attribs": "true",
"json": "",
"description": "true",
"engine_strict": "",
"https_proxy": "",
"init_module": "/Users/ben/.npm-init.js",
"userconfig": "/Users/ben/.npmrc",
"node_version": "0.10.30",
"user": "501",
"save": "true",
"editor": "vi",
"tag": "latest",
"global": "",
"optional": "true",
"bin_links": "true",
"force": "",
"searchopts": "",
"depth": "Infinity",
"rebuild_bundle": "true",
"searchsort": "name",
"unicode": "true",
"fetch_retry_maxtimeout": "60000",
"ca": "",
"save_prefix": "^",
"strict_ssl": "true",
"dev": "",
"fetch_retry_factor": "10",
"group": "20",
"save_exact": "",
"cache_lock_stale": "60000",
"version": "",
"cache_min": "10",
"cache": "/Users/ben/.npm",
"searchexclude": "",
"color": "true",
"save_optional": "",
"user_agent": "npm/2.1.16 node/v0.10.30 darwin x64",
"ignore_scripts": "",
"cache_lock_wait": "10000",
"production": "",
"save_bundle": "",
"init_version": "1.0.0",
"umask": "18",
"git": "git",
"init_author_name": "",
"scope": "",
"onload_script": "",
"tmp": "/tmp",
"unsafe_perm": "",
"link": "",
"prefix": "/Users/ben/.nvm/v0.10.30"
}
}
|
[
"benhalverson33@gmail.com"
] |
benhalverson33@gmail.com
|
c856e9daf7af421e56d6286c9e05ae9ec6b2e402
|
66893940013495796c9f371e52e287305bcaeb47
|
/olympus-app/olympus/settings.py
|
94bb05fa898cc66e6d1e25746e8784963dbde347
|
[] |
no_license
|
marselester/django-prometheus-via-statsd
|
b17bcd82dbb0ab8db24c4f86867e851a657ff255
|
f625d204c334d72afcac1ed88a482c0976f0bee9
|
refs/heads/master
| 2021-06-23T03:17:44.359342
| 2017-06-23T04:29:03
| 2017-06-23T04:29:03
| 93,476,076
| 1
| 1
| null | 2020-12-02T01:45:24
| 2017-06-06T04:27:11
|
Python
|
UTF-8
|
Python
| false
| false
| 2,382
|
py
|
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = 'swordfish'
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'olympus.apps.hello.apps.HelloConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'olympus.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'olympus.wsgi.application'
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
STATIC_URL = '/static/'
# Third-party libraries
# statsd_exporter daemon listens to UDP at localhost:8125.
# It exports StatsD-style metrics as Prometheus metrics at localhost:9102.
STATSD_HOST = 'localhost'
STATSD_PORT = 8125
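
# Illustrative sketch of emitting a metric that the statsd_exporter above will
# expose to Prometheus. It assumes the `statsd` PyPI client is installed; in a
# real project this would live in a view or middleware rather than in settings.
if __name__ == "__main__":
    from statsd import StatsClient

    client = StatsClient(host=STATSD_HOST, port=STATSD_PORT, prefix="olympus")
    client.incr("hello.requests")       # a counter, scraped via localhost:9102
    client.timing("hello.latency", 42)  # a timer, value in milliseconds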
|
[
"marselester@ya.ru"
] |
marselester@ya.ru
|
0c8895e935685ca264dd4d56859d0679fa753124
|
e164dfb3591162fae383b0519bb19e958f7ffec3
|
/AdventofCode/pythonDay1.py
|
151182a8eed13ad79ced7db21e9cba0b397b2ba8
|
[] |
no_license
|
JordanBradshaw/Coding_Challenges
|
47eedb405213603c0b927e6d3fd4bb3cdc07417f
|
6ec1fb4f4b15e7f109550163dd2022a2385d35bc
|
refs/heads/main
| 2023-02-05T01:50:13.591734
| 2020-12-26T00:43:27
| 2020-12-26T00:43:27
| 320,424,075
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 362
|
py
|
# PART 1
fileOpen = open('Day1/inputDay1.txt', 'r')
listInt = [int(i) for i in fileOpen.readlines()]
twoValuesMult = [x * y for x in listInt for y in listInt if x + y == 2020]
print((set(twoValuesMult)).pop())
# PART 2
threeValuesMult = [
x * y * z for x in listInt for y in listInt for z in listInt if x + y + z == 2020]
print((set(threeValuesMult)).pop())
|
[
"jordanbradshaw27@aol.com"
] |
jordanbradshaw27@aol.com
|
65f216806b2947135f7235854a38bc036cdb3264
|
bb5fd3b4af87c51267bff02dd5b895569956cea8
|
/bin/git-change-date
|
9da45a89073ca66ed2384f535ff7309293d8a3a4
|
[] |
no_license
|
amigrave/toothbrush
|
5151708ee6fc9d61e976d5847c0b5fceffb59448
|
8198d7988ebaa312295844047d51d88c7d859333
|
refs/heads/master
| 2021-09-29T23:00:39.407958
| 2020-06-15T13:23:30
| 2020-06-15T13:23:30
| 104,474,725
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,281
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import argparse
import subprocess
import sys
parser = argparse.ArgumentParser()
parser.add_argument("commit", help="The commit to edit")
args = parser.parse_args()
commit = args.commit
try:
date = subprocess.check_output(['git', 'show', '-s', '--format=%ci', commit])
except subprocess.CalledProcessError:
sys.exit("Could not get date of commit %s" % commit)
args = ['--nocancel', '--inputbox', 'New date', '7', '40', date.strip()]
p = None
for prog in ('whiptail', 'dialog'):
try:
p = subprocess.Popen([prog] + args, stderr=subprocess.PIPE)
break
except Exception:
continue
if p is None:
sys.exit("Could not execute whiptail or dialog")
newdate = p.communicate()[1].strip()
if not newdate or date.strip() == newdate:
print("Nothing to do.")
sys.exit()
gitcmd = ['git', 'filter-branch', '-f', '--env-filter', """
if test $GIT_COMMIT = "{commit}"; then
export GIT_COMMITTER_DATE="{newdate}";
export GIT_AUTHOR_DATE="{newdate}";
fi""".format(**locals()), '%s~1..HEAD' % commit]
# subprocess.call(['git', 'stash', 'save'])
subprocess.call(gitcmd)
# subprocess.call(['git', 'stash', 'pop'])
|
[
"agr@amigrave.com"
] |
agr@amigrave.com
|
|
99f6d5092ae5f13632a74eea5f0df8c0d96a43a3
|
a09aeddddcadd2adc795e49890ad45a118a02dc4
|
/src/data_loader.py
|
9891834132daedac435a5e84eef8c8a44873dc53
|
[] |
no_license
|
Zumbalamambo/Solar-Panels-Detection
|
7607fa774bf7d9b81e1116a105503ac4fe613963
|
5f6df0b743ce7915d1075ebe12c5f790eec35f03
|
refs/heads/master
| 2020-04-12T15:54:11.913732
| 2018-05-30T20:15:21
| 2018-05-30T20:15:21
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,365
|
py
|
import torch.utils.data as data
from PIL import Image
import os
import os.path
#from tifffile import imread
from scipy.misc import imread, imresize
import torchvision.transforms as transforms
import torch
import numpy as np
import cv2
import random
import tifffile as tiff
IMG_EXTENSIONS = [
'.jpg', '.JPG', '.jpeg', '.JPEG',
'.png', '.PNG', '.ppm', '.PPM', '.bmp', '.BMP',
]
def is_image_file(filename):
return any(filename.endswith(extension) for extension in IMG_EXTENSIONS)
def make_dataset(dir, dir_output,text_file):
images = []
print(text_file)
with open(text_file, 'r') as f:
for line in f:
line = line[:-1]
#print(line)
split_lines = line.split(",")
path1 = '{0}{1}'.format(dir, split_lines[0])
path2 = '{0}{1}'.format(dir_output, split_lines[1])
item = (path1,path2)
images.append(item)
random.shuffle(images)
if len(images)>2000:
return images[:2000]
else:
return images[:900]
def make_dataset_from_big_image(dir_subimages, filename):
"""
This function does the following:
1) crops big images into subimages of 256x256,
2) saves subimages in dir_subimages
3) creates the images_csv list with the path of the subimages
Input:
-----
dir_subimages: path to save images
filename: path+filename of the big image
Output:
------
images_csv: list of the subimages paths
"""
image = imread(filename)
img_id = os.path.splitext(os.path.basename(filename))[0]
height, width = image.shape[:2]
n_rows, n_cols = height//256, width//256
images_csv= []
for i in range(n_rows):
for j in range(n_cols):
cropped = image[i*256:(i*256)+256, j*256:(j*256)+256, :]
cropped_id = img_id + '_'+str(i)+'_'+str(j) + '.tif'
# save image
tiff.imsave(os.path.join(dir_subimages, cropped_id), cropped)
# write in csv file image path
images_csv.append(os.path.join(dir_subimages, cropped_id))
if n_cols*256 < width:
cropped = image[i*256:(i*256)+256, width-256:width,:]
cropped_id = img_id + '_'+str(i)+'_'+str(j+1)+'.tif'
# save image
tiff.imsave(os.path.join(dir_subimages, cropped_id), cropped)
# write in csv file image path
images_csv.append(os.path.join(dir_subimages, cropped_id))
if n_rows*256 < height:
for j in range(n_cols):
cropped = image[height-256:height, j*256:(j*256)+256, :]
cropped_id = img_id + '_'+str(i+1)+'_'+str(j)+'.tif'
# save image
tiff.imsave(os.path.join(dir_subimages, cropped_id), cropped)
# write in csv file image path
images_csv.append(os.path.join(dir_subimages, cropped_id))
if n_cols*256 < width:
cropped = image[height-256:height, width-256:width,:]
cropped_id = img_id + '_'+str(i+1)+'_'+str(j+1)+'.tif'
# save image
tiff.imsave(os.path.join(dir_subimages, cropped_id), cropped)
# write in csv file image path
images_csv.append(os.path.join(dir_subimages, cropped_id))
if len(images_csv)%5 > 0:
for i in range(5-(len(images_csv)%5)):
images_csv.append(images_csv[-1])
return images_csv
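
# Worked example of the tiling arithmetic above, for a hypothetical
# 1000 x 1500 mosaic: one extra row/column of edge tiles is emitted whenever
# the image size is not an exact multiple of 256, and the list is then padded
# to a multiple of 5 by repeating the last path.
if __name__ == "__main__":
    height, width = 1000, 1500
    n_rows, n_cols = height // 256, width // 256                  # 3 x 5 full tiles
    n_tiles = (n_rows + (height % 256 > 0)) * (n_cols + (width % 256 > 0))
    print(n_tiles)                                                # 24 tiles of 256 x 256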
def reconstruct_image(dir_subimages, filename):
"""
Input:
-----
dir_subimages: path of the subimages
filename: path+filename of the big original image
Output:
------
reconstructed image
"""
original_image = tiff.imread(filename)
height, width = original_image.shape[:2]
n_rows, n_cols = height//256, width//256
#subimages = [os.path.splitext(os.path.basename(x))[0] for x in glob(os.path.join(dir_subimages, '*.png'))]
img_id = os.path.splitext(os.path.basename(filename))[0]
img_rows = []
for i in range(n_rows):
for j in range(n_cols):
img = tiff.imread(os.path.join(dir_subimages, img_id+'_'+str(i)+'_'+str(j)+'.png'))
if (j == 0):
img_row = img
else:
img_row = np.concatenate([img_row, img], axis = 2)
if n_cols*256 < width:
img = tiff.imread(os.path.join(dir_subimages, img_id+'_'+str(i)+'_'+str(j+1)+'.png'))
img = img[:,:,-width%256:]
img_row = np.concatenate([img_row, img], axis = 2)
img_rows.append(img_row)
if n_rows*256 < height:
for j in range(n_cols):
img = tiff.imread(os.path.join(dir_subimages, img_id+'_'+str(i+1)+'_'+str(j)+'.png'))
img = img[:,-height%256:,:]
if (j == 0):
img_row = img
else:
img_row = np.concatenate([img_row, img], axis = 2)
if n_cols*256 < width:
img = tiff.imread(os.path.join(dir_subimages, img_id+'_'+str(i+1)+'_'+str(j+1)+'.png'))
img = img[:,-height%256:,-width%256:]
img_row = np.concatenate([img_row, img], axis = 2)
img_rows.append(img_row)
reconstruction = np.concatenate(img_rows, axis=1)
return reconstruction
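
# Worked example of the overlap trimming used above: edge tiles overlap their
# neighbours, and the `-width % 256` slice keeps only the last `width % 256`
# columns (likewise for rows). The width below is hypothetical.
if __name__ == "__main__":
    import numpy as np
    width = 1500
    tile_cols = np.arange(256)
    kept = tile_cols[-width % 256:]    # -1500 % 256 == 36, so 220 columns remain
    print(len(kept), width % 256)      # 220 220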
def default_loader(path):
return imread(path)
#return(Image.open(path))
class ImagerLoader(data.Dataset):
def __init__(self, root, root_output,text_file,transform=None, target_transform=None,
loader=default_loader, crop=False, normalize = False, size_cropped = 512):
imgs = make_dataset(root, root_output,text_file)
self.root = root
self.imgs = imgs
self.transform = transform
        self.target_transform = target_transform
self.loader = loader
self.crop = crop
self.normalize = normalize
self.size_cropped = size_cropped
def __getitem__(self, index):
path, path_output = self.imgs[index]
img = self.loader(path)#.astype(int) image has dimension height x width x n_channels
output = self.loader(path_output)#.astype(int)
#img = imresize(img, (512, 512))
#output = imresize(output, (512, 512))
img = img.astype('int16')
output = output.astype('int16')
# if we want to crop the image at the centre
if self.crop:
h,w,channels = img.shape
img = img[(h//2-self.size_cropped//2):(h//2+self.size_cropped//2), (w//2-self.size_cropped//2):(w//2+self.size_cropped//2),:]
h,w = output.shape
output = output[h//2-self.size_cropped//2:h//2+self.size_cropped//2, w//2-self.size_cropped//2:w//2+self.size_cropped//2]
img = np.transpose(img, (2,0,1))
# if we want to normalize the images to [-1,1]
if self.normalize:
img = img.astype(float)
img = (img-128)/128
img = torch.FloatTensor(img)
else:
img = torch.ShortTensor(img)
# if self.transform is not None:
# img = self.transform(img)
# if self.target_transform is not None:
# output = self.target_transform(output)
img = img.float()
img_id = os.path.basename(path).split('.')[0]
return img_id, img, torch.ShortTensor(output).long()
def __len__(self):
return len(self.imgs)
class ImageLoaderPredictionBigImage(data.Dataset):
def __init__(self, dir_subimages, filename, normalize = False, loader=default_loader):
imgs = make_dataset_from_big_image(dir_subimages, filename)
self.imgs = imgs
self.dir_subimages = dir_subimages
self.filename = filename
self.loader = loader
self.normalize = normalize
def __getitem__(self, index):
path = self.imgs[index]
img = self.loader(path)
img = img.astype('int16')
img = np.transpose(img, (2,0,1))
if self.normalize:
img = img.astype(float)
img = (img-128)/128.
img = torch.FloatTensor(img)
img_id = os.path.basename(path).split('.')[0]
return img_id, img
def __len__(self):
return len(self.imgs)
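
# Illustrative usage sketch: wiring ImagerLoader into a standard PyTorch
# DataLoader. The directory and CSV paths below are hypothetical placeholders.
if __name__ == "__main__":
    from torch.utils.data import DataLoader

    dataset = ImagerLoader(root="data/images/", root_output="data/masks/",
                           text_file="data/train_pairs.csv",
                           normalize=True, crop=True, size_cropped=256)
    loader = DataLoader(dataset, batch_size=4, shuffle=True)
    for img_ids, images, masks in loader:
        print(img_ids, images.shape, masks.shape)
        break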
|
[
"marcsv87@gmail.com"
] |
marcsv87@gmail.com
|
a1e578e8f76920f85451a52014e38448708f5fb3
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_227/ch54_2020_03_25_12_43_46_843222.py
|
c6cc1e0e1175dc441e85e676ecb8da1c00a6319b
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 161
|
py
|
def calcula_fibonacci(n):
    # Returns the first n Fibonacci numbers (1, 1, 2, 3, 5, ...).
    F = [0] * n
    if n > 0:
        F[0] = 1
    if n > 1:
        F[1] = 1
    i = 1
    while i + 1 < n:
        F[i + 1] = F[i] + F[i - 1]
        i += 1
    return F
|
[
"you@example.com"
] |
you@example.com
|
404b2029b028e71a7c90ae5fc5060501e472fbdf
|
43fd4a6edc07d8021c83e3a382ec5fda9f4d3e18
|
/tests/test_repo.py
|
0a99533af77704b7094dc8dd5880ea2029037917
|
[
"Apache-2.0"
] |
permissive
|
pombredanne/dvc
|
7aabefb700eed5793024e12e41132e9de3dc63f7
|
402eb2da0122932c0a8cd04cd1a9b88f6a0bc432
|
refs/heads/master
| 2020-05-01T00:21:59.674072
| 2019-03-22T13:50:30
| 2019-03-22T13:50:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,656
|
py
|
from tests.basic_env import TestDvc
from dvc.scm.git import GitTree
from dvc.repo.tree import WorkingTree
class TestCollect(TestDvc):
def setUp(self):
super(TestCollect, self).setUp()
self.dvc.add(self.FOO)
self.dvc.run(
deps=[self.FOO],
outs=[self.BAR],
cmd="python code.py {} {}".format(self.FOO, self.BAR),
)
self.dvc.scm.add([".gitignore", self.FOO + ".dvc", self.BAR + ".dvc"])
self.dvc.scm.commit("foo.dvc and bar.dvc")
self.dvc.scm.checkout("new_branch", True)
self.dvc.run(
deps=[self.BAR],
outs=["buzz"],
cmd="python code.py {} {}".format(self.BAR, "buzz"),
)
self.dvc.scm.add([".gitignore", "buzz.dvc"])
self.dvc.scm.commit("add buzz")
self.dvc.scm.checkout("master")
def _check(self, branch, target, with_deps, expected):
if branch:
self.dvc.tree = GitTree(self.dvc.scm.git, branch)
else:
self.dvc.tree = WorkingTree()
result = self.dvc.collect(target + ".dvc", with_deps=with_deps)
self.assertEqual(
[[j.rel_path for j in i.outs] for i in result], expected
)
return result
def test(self):
self._check("", self.BAR, True, [[self.FOO], [self.BAR]])
self._check("master", self.BAR, True, [[self.FOO], [self.BAR]])
self._check(
"new_branch", "buzz", True, [[self.FOO], [self.BAR], ["buzz"]]
)
result = self._check("new_branch", "buzz", False, [["buzz"]])
self.assertEqual([i.rel_path for i in result[0].deps], ["bar"])
|
[
"andrew@ei-grad.ru"
] |
andrew@ei-grad.ru
|
a1fbbc3fc04cd57ed0d79382528d4efe0b8c50b4
|
a27295e174a341f7f9b792dbf8bd8abb7679c380
|
/backend/articles/article.py
|
fb00612507bc67e25781193fff7c11370564f266
|
[
"MIT"
] |
permissive
|
alilou01/NeuralOPS
|
02c6b798d2f4dae90f3eaa6972b829dd1fda06fa
|
f5ea5768a7e0940cb978a7822dd53c3ab63d47b3
|
refs/heads/master
| 2020-06-16T18:28:43.761584
| 2019-07-06T13:56:54
| 2019-07-06T13:56:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,656
|
py
|
import pickle
import torch
class Article:
def __init__(self, arti):
self.source = arti["source"]
self.author = arti["author"]
self.content = arti["content"]
self.description = arti["description"]
self.title = arti["title"]
self.urlToImage = arti["urlToImage"]
self.tranformed = {}
self.fake=""
self.category=""
self.objectivity=""
self.extract()
def describe(self):
print("SOURCE : "+str(self.source))
print("AUTHOR : "+str(self.source))
print("CONTENT PREVIEW : "+str(self.content[:20]))
def extract(self):
from postagging.transform import transform_to_pos
count_vectorizer = pickle.load(open("../../training/models/countvectorizer-category.pickle", "rb"))
transformer = pickle.load(open("../../training/models/tfidf-category.pickle", "rb"))
model = pickle.load(open("../../training/models/model-category.pickle", "rb"))
#print("Content : "+str(self.content))
try:
counts = count_vectorizer.transform([self.content])
except:
counts = count_vectorizer.transform([self.title])
tfidf = transformer.transform(counts)
self.category = model.predict(tfidf)
try:
self.tranformed = transform_to_pos(self.content)
except:
self.tranformed = transform_to_pos(self.title)
grilli = []
for i in self.tranformed:
grilli.append(self.tranformed[i])
import numpy as np
gril_np = np.asarray(grilli)
gril_torch = torch.tensor(gril_np)
device = torch.device('cpu')
gril_mid =gril_torch.to(device=device, dtype=torch.float32).type(torch.FloatTensor)
model2 = pickle.load(open("../../training/models/model-objectivity.pickle", "rb"))
model2.eval()
with torch.no_grad():
self.objectivity = torch.round(model2(gril_mid))
model3 = pickle.load(open("../../training/models/model-fakenews.pickle", "rb"))
count_vectorizer2 = pickle.load(open("../../training/models/countvectorizer-fake.pickle", "rb"))
transformer2 = pickle.load(open("../../training/models/tfidf-fake.pickle", "rb"))
try:
counts2 = count_vectorizer2.transform([self.content])
except:
counts2 = count_vectorizer2.transform([self.title])
tfidf2 = transformer2.transform(counts2)
f = model3.predict(tfidf2)
if "1" in str(f[0]):
self.fake = "Fake"
else:
self.fake = "Not Fake"
|
[
"raysamram@gmail.com"
] |
raysamram@gmail.com
|
fb4173a546d903ee4250a1fb91774ef47ad3ab53
|
88292515f30df6662d55a992ec1837614287b9bd
|
/Lab4_6/posts/models.py
|
8a15a4a3a54afe94447fcb2ec119b8000612d8fd
|
[] |
no_license
|
JakubPikus/aplikacje-internetowe-22164-185ic
|
becb73cc4bff3fab0eee0f776a0cee84ba2f219e
|
2951739a74e3da7e8055afc096cdfee65d89fef8
|
refs/heads/master
| 2023-02-22T05:46:41.460594
| 2021-01-24T04:16:53
| 2021-01-24T04:16:53
| 315,351,818
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 449
|
py
|
from django.db import models
from django.contrib.auth.models import User
class Post(models.Model):
author = models.ForeignKey(User, on_delete=models.CASCADE)
title = models.CharField(max_length=50)
body = models.TextField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
verbose_name_plural = "Moje posty"
def __str__(self):
return self.title
|
[
"kub943@gmail.com"
] |
kub943@gmail.com
|
7377a597b212b5e80263e4d76d6b5e4f66101ac7
|
6463cc6418eae02bb1436d04dff1dfd4f69b85c6
|
/Chapter 5/favorite_fruit.py
|
ae16d5866f48361e68f8267e30f2e460caf6d2af
|
[] |
no_license
|
danielgaylord/PythonCrashCourse
|
f0d69ec133973fdc08897c4e0533833d58c7aa9a
|
8ce5b5862a90c138036f9772d73db0feda5f982c
|
refs/heads/master
| 2021-01-17T12:57:11.545532
| 2016-06-22T02:15:30
| 2016-06-22T02:15:30
| 59,448,072
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 405
|
py
|
favorite_fruits = ["strawberry", "pear", "pineapple"]
if "banana" in favorite_fruits:
print("You really like bananas!")
if "apple" in favorite_fruits:
print("You really like apples!")
if "grape" in favorite_fruits:
print("You really like grapes!")
if "pear" in favorite_fruits:
print("You really like pears!")
if "strawberry" in favorite_fruits:
print("You really like strawberries!")
|
[
"danielgaylord@gmail.com"
] |
danielgaylord@gmail.com
|
c6a39643969e44e9b0aa6fa2005d17e2633d91f6
|
0f7b1937b16239778529c094fdd08b7b93dfb1cf
|
/retrace/ddrawretrace.py
|
4f2ff944144f53710e46f764b17deba2174b404d
|
[
"MIT"
] |
permissive
|
laanwj/apitrace-kms
|
707cb13cc08827a6e4279329ddf44aed47376abe
|
96b39d84accfca01897490ff975362c0931b220b
|
refs/heads/master
| 2021-01-12T15:51:35.328219
| 2016-11-24T16:12:58
| 2016-11-24T16:27:15
| 71,892,145
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,543
|
py
|
##########################################################################
#
# Copyright 2011 Jose Fonseca
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
"""D3D retracer generator."""
import sys
from dllretrace import DllRetracer as Retracer
from specs.stdapi import API
from specs.d3d import ddraw, HWND
from specs.ddraw import DDCREATE_LPGUID
class D3DRetracer(Retracer):
def retraceApi(self, api):
print '// Swizzling mapping for lock addresses'
print 'static std::map<void *, void *> _maps;'
print
# TODO: Keep a table of windows
print 'static HWND g_hWnd;'
print
Retracer.retraceApi(self, api)
def invokeInterfaceMethod(self, interface, method):
# keep track of the last used device for state dumping
if interface.name in ('IDirect3DDevice7',):
if method.name == 'Release':
print r' if (call.ret->toUInt() == 0) {'
print r' d3d7Dumper.unbindDevice(_this);'
print r' }'
else:
print r' d3d7Dumper.bindDevice(_this);'
        # create windows as necessary
hWndArg = method.getArgByType(HWND)
if hWndArg is not None:
# FIXME: Try to guess the window size (e.g., from IDirectDrawSurface7::Blt)
print r' if (!g_hWnd) {'
print r' g_hWnd = d3dretrace::createWindow(512, 512);'
print r' }'
print r' %s = g_hWnd;' % hWndArg.name
if method.name == 'Lock':
# Reset _DONOTWAIT flags. Otherwise they may fail, and we have no
# way to cope with it (other than retry).
mapFlagsArg = method.getArgByName('dwFlags')
if mapFlagsArg is not None:
                print r'    dwFlags &= ~DDLOCK_DONOTWAIT;'
print r' dwFlags |= DDLOCK_WAIT;'
Retracer.invokeInterfaceMethod(self, interface, method)
if method.name == 'CreateDevice':
print r' if (FAILED(_result)) {'
print r' exit(1);'
print r' }'
# notify frame has been completed
# process events after presents
if interface.name == 'IDirectDrawSurface7' and method.name == 'Blt':
print r' DDSCAPS2 ddsCaps;'
print r' if (SUCCEEDED(_this->GetCaps(&ddsCaps)) &&'
print r' (ddsCaps.dwCaps & DDSCAPS_PRIMARYSURFACE)) {'
print r' retrace::frameComplete(call);'
print r' d3dretrace::processEvents();'
print r' }'
if method.name == 'Lock':
print ' VOID *_pbData = NULL;'
print ' size_t _MappedSize = 0;'
# FIXME: determine the mapping size
#print ' _getMapInfo(_this, %s, _pbData, _MappedSize);' % ', '.join(method.argNames()[:-1])
print ' if (_MappedSize) {'
print ' _maps[_this] = _pbData;'
# TODO: check pitches match
print ' } else {'
print ' return;'
print ' }'
if method.name == 'Unlock':
print ' VOID *_pbData = 0;'
print ' _pbData = _maps[_this];'
print ' if (_pbData) {'
print ' retrace::delRegionByPointer(_pbData);'
print ' _maps[_this] = 0;'
print ' }'
def extractArg(self, function, arg, arg_type, lvalue, rvalue):
# Handle DDCREATE_* flags
if arg.type is DDCREATE_LPGUID:
print ' if (%s.toArray()) {' % rvalue
Retracer.extractArg(self, function, arg, arg_type, lvalue, rvalue)
print ' } else {'
print ' %s = static_cast<%s>(%s.toPointer());' % (lvalue, arg_type, rvalue)
print ' }'
return
Retracer.extractArg(self, function, arg, arg_type, lvalue, rvalue)
def main():
print r'#include <string.h>'
print
print r'#include <iostream>'
print
print r'#include "d3dretrace.hpp"'
print
api = API()
print r'#include "d3dimports.hpp"'
api.addModule(ddraw)
print
print '''static d3dretrace::D3DDumper<IDirect3DDevice7> d3d7Dumper;'''
print
retracer = D3DRetracer()
retracer.table_name = 'd3dretrace::ddraw_callbacks'
retracer.retraceApi(api)
if __name__ == '__main__':
main()
|
[
"jfonseca@vmware.com"
] |
jfonseca@vmware.com
|
de1485dac110b950249be801a4a06ab687b134c4
|
fc00b177802c49cf04dc6a8e430093bc14ae9b53
|
/venv/Lib/site-packages/git/test/test_git.py
|
060a4c3c16583ef845e9d97e6eff59b0007e2f53
|
[] |
permissive
|
artisakov/vigilant-journey
|
9c8264d36da5745374a0d08b0b0288a70f978a11
|
4fed9026071a64489d26422ba7cd1a9b9cb05e16
|
refs/heads/master
| 2022-11-16T03:10:06.418221
| 2020-07-16T07:33:06
| 2020-07-16T07:33:06
| 238,490,887
| 0
| 1
|
MIT
| 2020-03-01T10:12:22
| 2020-02-05T16:03:07
|
HTML
|
UTF-8
|
Python
| false
| false
| 10,926
|
py
|
# -*- coding: utf-8 -*-
# test_git.py
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
import os
import subprocess
import sys
from tempfile import TemporaryFile
from unittest import mock
from git import (
Git,
refresh,
GitCommandError,
GitCommandNotFound,
Repo,
cmd
)
from git.compat import is_darwin
from git.test.lib import (
TestBase,
fixture_path
)
from git.test.lib import with_rw_directory
from git.util import finalize_process
import os.path as osp
from git.compat import is_win
class TestGit(TestBase):
@classmethod
def setUpClass(cls):
super(TestGit, cls).setUpClass()
cls.git = Git(cls.rorepo.working_dir)
def tearDown(self):
import gc
gc.collect()
@mock.patch.object(Git, 'execute')
def test_call_process_calls_execute(self, git):
git.return_value = ''
self.git.version()
self.assertTrue(git.called)
self.assertEqual(git.call_args, ((['git', 'version'],), {}))
def test_call_unpack_args_unicode(self):
args = Git._Git__unpack_args(u'Unicode€™')
mangled_value = 'Unicode\u20ac\u2122'
self.assertEqual(args, [mangled_value])
def test_call_unpack_args(self):
args = Git._Git__unpack_args(['git', 'log', '--', u'Unicode€™'])
mangled_value = 'Unicode\u20ac\u2122'
self.assertEqual(args, ['git', 'log', '--', mangled_value])
def test_it_raises_errors(self):
self.assertRaises(GitCommandError, self.git.this_does_not_exist)
def test_it_transforms_kwargs_into_git_command_arguments(self):
self.assertEqual(["-s"], self.git.transform_kwargs(**{'s': True}))
self.assertEqual(["-s", "5"], self.git.transform_kwargs(**{'s': 5}))
self.assertEqual([], self.git.transform_kwargs(**{'s': None}))
self.assertEqual(["--max-count"], self.git.transform_kwargs(**{'max_count': True}))
self.assertEqual(["--max-count=5"], self.git.transform_kwargs(**{'max_count': 5}))
self.assertEqual(["--max-count=0"], self.git.transform_kwargs(**{'max_count': 0}))
self.assertEqual([], self.git.transform_kwargs(**{'max_count': None}))
# Multiple args are supported by using lists/tuples
self.assertEqual(["-L", "1-3", "-L", "12-18"], self.git.transform_kwargs(**{'L': ('1-3', '12-18')}))
self.assertEqual(["-C", "-C"], self.git.transform_kwargs(**{'C': [True, True, None, False]}))
# order is undefined
res = self.git.transform_kwargs(**{'s': True, 't': True})
self.assertEqual({'-s', '-t'}, set(res))
def test_it_executes_git_to_shell_and_returns_result(self):
self.assertRegex(self.git.execute(["git", "version"]), r'^git version [\d\.]{2}.*$')
def test_it_accepts_stdin(self):
filename = fixture_path("cat_file_blob")
with open(filename, 'r') as fh:
self.assertEqual("70c379b63ffa0795fdbfbc128e5a2818397b7ef8",
self.git.hash_object(istream=fh, stdin=True))
@mock.patch.object(Git, 'execute')
def test_it_ignores_false_kwargs(self, git):
# this_should_not_be_ignored=False implies it *should* be ignored
self.git.version(pass_this_kwarg=False)
self.assertTrue("pass_this_kwarg" not in git.call_args[1])
def test_it_raises_proper_exception_with_output_stream(self):
tmp_file = TemporaryFile()
self.assertRaises(GitCommandError, self.git.checkout, 'non-existent-branch', output_stream=tmp_file)
def test_it_accepts_environment_variables(self):
filename = fixture_path("ls_tree_empty")
with open(filename, 'r') as fh:
tree = self.git.mktree(istream=fh)
env = {
'GIT_AUTHOR_NAME': 'Author Name',
'GIT_AUTHOR_EMAIL': 'author@example.com',
'GIT_AUTHOR_DATE': '1400000000+0000',
'GIT_COMMITTER_NAME': 'Committer Name',
'GIT_COMMITTER_EMAIL': 'committer@example.com',
'GIT_COMMITTER_DATE': '1500000000+0000',
}
commit = self.git.commit_tree(tree, m='message', env=env)
self.assertEqual(commit, '4cfd6b0314682d5a58f80be39850bad1640e9241')
def test_persistent_cat_file_command(self):
# read header only
hexsha = "b2339455342180c7cc1e9bba3e9f181f7baa5167"
g = self.git.cat_file(
batch_check=True, istream=subprocess.PIPE, as_process=True
)
g.stdin.write(b"b2339455342180c7cc1e9bba3e9f181f7baa5167\n")
g.stdin.flush()
obj_info = g.stdout.readline()
# read header + data
g = self.git.cat_file(
batch=True, istream=subprocess.PIPE, as_process=True
)
g.stdin.write(b"b2339455342180c7cc1e9bba3e9f181f7baa5167\n")
g.stdin.flush()
obj_info_two = g.stdout.readline()
self.assertEqual(obj_info, obj_info_two)
# read data - have to read it in one large chunk
size = int(obj_info.split()[2])
g.stdout.read(size)
g.stdout.read(1)
# now we should be able to read a new object
g.stdin.write(b"b2339455342180c7cc1e9bba3e9f181f7baa5167\n")
g.stdin.flush()
self.assertEqual(g.stdout.readline(), obj_info)
# same can be achieved using the respective command functions
hexsha, typename, size = self.git.get_object_header(hexsha)
hexsha, typename_two, size_two, _ = self.git.get_object_data(hexsha)
self.assertEqual(typename, typename_two)
self.assertEqual(size, size_two)
def test_version(self):
v = self.git.version_info
self.assertIsInstance(v, tuple)
for n in v:
self.assertIsInstance(n, int)
# END verify number types
def test_cmd_override(self):
prev_cmd = self.git.GIT_PYTHON_GIT_EXECUTABLE
exc = GitCommandNotFound
try:
            # set it to something that doesn't exist, assure it raises
type(self.git).GIT_PYTHON_GIT_EXECUTABLE = osp.join(
"some", "path", "which", "doesn't", "exist", "gitbinary")
self.assertRaises(exc, self.git.version)
finally:
type(self.git).GIT_PYTHON_GIT_EXECUTABLE = prev_cmd
# END undo adjustment
def test_refresh(self):
# test a bad git path refresh
self.assertRaises(GitCommandNotFound, refresh, "yada")
# test a good path refresh
which_cmd = "where" if is_win else "which"
path = os.popen("{0} git".format(which_cmd)).read().strip().split('\n')[0]
refresh(path)
def test_options_are_passed_to_git(self):
        # This works because any command after git --version is ignored
git_version = self.git(version=True).NoOp()
git_command_version = self.git.version()
self.assertEqual(git_version, git_command_version)
def test_persistent_options(self):
git_command_version = self.git.version()
# analog to test_options_are_passed_to_git
self.git.set_persistent_git_options(version=True)
git_version = self.git.NoOp()
self.assertEqual(git_version, git_command_version)
# subsequent calls keep this option:
git_version_2 = self.git.NoOp()
self.assertEqual(git_version_2, git_command_version)
# reset to empty:
self.git.set_persistent_git_options()
self.assertRaises(GitCommandError, self.git.NoOp)
def test_single_char_git_options_are_passed_to_git(self):
input_value = 'TestValue'
output_value = self.git(c='user.name=%s' % input_value).config('--get', 'user.name')
self.assertEqual(input_value, output_value)
def test_change_to_transform_kwargs_does_not_break_command_options(self):
self.git.log(n=1)
def test_insert_after_kwarg_raises(self):
# This isn't a complete add command, which doesn't matter here
self.assertRaises(ValueError, self.git.remote, 'add', insert_kwargs_after='foo')
def test_env_vars_passed_to_git(self):
editor = 'non_existent_editor'
with mock.patch.dict('os.environ', {'GIT_EDITOR': editor}): # @UndefinedVariable
self.assertEqual(self.git.var("GIT_EDITOR"), editor)
@with_rw_directory
def test_environment(self, rw_dir):
# sanity check
self.assertEqual(self.git.environment(), {})
# make sure the context manager works and cleans up after itself
with self.git.custom_environment(PWD='/tmp'):
self.assertEqual(self.git.environment(), {'PWD': '/tmp'})
self.assertEqual(self.git.environment(), {})
old_env = self.git.update_environment(VARKEY='VARVALUE')
# The returned dict can be used to revert the change, hence why it has
# an entry with value 'None'.
self.assertEqual(old_env, {'VARKEY': None})
self.assertEqual(self.git.environment(), {'VARKEY': 'VARVALUE'})
new_env = self.git.update_environment(**old_env)
self.assertEqual(new_env, {'VARKEY': 'VARVALUE'})
self.assertEqual(self.git.environment(), {})
path = osp.join(rw_dir, 'failing-script.sh')
with open(path, 'wt') as stream:
stream.write("#!/usr/bin/env sh\n"
"echo FOO\n")
os.chmod(path, 0o777)
rw_repo = Repo.init(osp.join(rw_dir, 'repo'))
remote = rw_repo.create_remote('ssh-origin', "ssh://git@server/foo")
with rw_repo.git.custom_environment(GIT_SSH=path):
try:
remote.fetch()
except GitCommandError as err:
if sys.version_info[0] < 3 and is_darwin:
self.assertIn('ssh-orig', str(err))
self.assertEqual(err.status, 128)
else:
self.assertIn('FOO', str(err))
def test_handle_process_output(self):
from git.cmd import handle_process_output
line_count = 5002
count = [None, 0, 0]
def counter_stdout(line):
count[1] += 1
def counter_stderr(line):
count[2] += 1
cmdline = [sys.executable, fixture_path('cat_file.py'), str(fixture_path('issue-301_stderr'))]
proc = subprocess.Popen(cmdline,
stdin=None,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=False,
creationflags=cmd.PROC_CREATIONFLAGS,
)
handle_process_output(proc, counter_stdout, counter_stderr, finalize_process)
self.assertEqual(count[1], line_count)
self.assertEqual(count[2], line_count)
|
[
"60698561+artisakov@users.noreply.github.com"
] |
60698561+artisakov@users.noreply.github.com
|
d3b93125a8b6cff22b783911d384af6ebc4e2a4e
|
60dab7cc9f0f0304acd2069c804752d1679386fd
|
/datatank_py/DTTriangularMesh2D.py
|
44735768d069f1a57f1cc76d2cbde456bcd0648b
|
[
"BSD-3-Clause"
] |
permissive
|
amaxwell/datatank_py
|
d459578d395e7a08329c722801dcebe8cc1c871f
|
69404b23e456b23db8ef2e59b484283f40dbb9ec
|
refs/heads/master
| 2021-06-24T09:10:27.365004
| 2020-12-19T23:35:14
| 2020-12-19T23:35:14
| 17,044,589
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,707
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This software is under a BSD license. See LICENSE.txt for details.
import numpy as np
class DTTriangularMesh2D(object):
"""2D triangular mesh object."""
dt_type = ("2D Triangular Mesh",)
"""Type strings allowed by DataTank"""
def __init__(self, grid, values):
"""
:param grid: :class:`datatank_py.DTTriangularGrid2D.DTTriangularGrid2D` instance
:param values: vector or list of values in nodal order
"""
super(DTTriangularMesh2D, self).__init__()
values = np.squeeze(values)
assert grid.number_of_points() == len(values)
self._grid = grid
self._values = values
def __dt_type__(self):
return "2D Triangular Mesh"
def grid(self):
""":returns: a :class:`datatank_py.DTTriangularGrid2D.DTTriangularGrid2D` instance"""
return self._grid
def bounding_box(self):
""":returns: a :class:`datatank_py.DTRegion2D.DTRegion2D` instance"""
return self._grid.bounding_box()
def write_with_shared_grid(self, datafile, name, grid_name, time, time_index):
"""Allows saving a single grid and sharing it amongst different time
values of a variable.
:param datafile: a :class:`datatank_py.DTDataFile.DTDataFile` open for writing
:param name: the mesh variable's name
:param grid_name: the grid name to be shared (will not be visible in DataTank)
:param time: the time value for this step (DataTank's ``t`` variable)
:param time_index: the corresponding integer index of this time step
This is an advanced technique, but it can give a significant space savings in
a data file. It's not widely implemented, since it's not clear yet if this
is the best API, but the following example shows how it's used::
#!/usr/bin/env python
import numpy as np
from datatank_py.DTDataFile import DTDataFile
from datatank_py.DTTriangularGrid2D import DTTriangularGrid2D
from datatank_py.DTTriangularMesh2D import DTTriangularMesh2D
# files that exist in the current directory
grid_filename = "grid.txt"
# this is a time-varying list of depths at each node
depth_filename = "depths.txt"
# function that returns a DTTriangularGrid2D
grid = parse_grid_from_path(grid_filename)
# this can be any string; the user won't see it
shared_grid_name = grid_filename
with DTDataFile("Output.dtbin", truncate=True) as dtf:
# a bunch of this is related to parsing the textfile
with open(depth_filename, "rU") as asciivalues:
# here we have some state variables, but the time ones are relevant
passed_header = False
accumulated_values = []
# this is a time extracted from the file (a floating point value)
timeval = None
# this is the zero-based index of the timeval
time_index = 0
# this is the initial value of the timeval variable
base_timeval = None
for lineidx, line in enumerate(asciivalues):
line = line.strip()
if line.startswith("TS"):
# If we've already seen a timeval, a "TS" marker means that we're starting
# another block of depth values so we're going to save the previous
# timestep to disk.
if timeval is not None:
assert passed_header is True
# save the t0 if we haven't already done so
if base_timeval is None:
base_timeval = timeval
# create a DTTriangularMesh2D as usual, with grid and values
# note that a 32-bit float will save significant space over
# a double, if you can live with the reduced precision.
mesh = DTTriangularMesh2D(grid, np.array(accumulated_values, dtype=np.float32))
# This is the floating point time value that will be used for
# DataTank's time slider. Here I'm using hours.
dttime_hours = (timeval - base_timeval) / 3600.
# Now, save it off. The variable in the file will be visible as "Depth",
# and write_with_shared_grid() will take care of saving the grid for the
# first time and then saving the name on subsequent time steps.
#
# The dttime_hours variable is our slider time, and time_index is passed
# so that write_with_shared_grid() can create the correct variable name,
# i.e., "Depth_0, Depth_1, Depth_2, … Depth_N" for successive time steps.
#
mesh.write_with_shared_grid(dtf, "Depth", shared_grid_name, dttime_hours, time_index)
#
# This code shows what write_with_shared_grid() is really doing in our specific
# example:
#
# dtf.write(mesh, "Depth_%d" % (time_index), time=(timeval - base_timeval))
# dtf.write_anonymous(shared_grid_name, "Depth_%d" % (time_index))
# dtf.write_anonymous(np.array(accumulated_values).astype(np.float32), "Depth_%d_V" % (time_index))
# dtf.write_anonymous(np.array((timeval - base_timeval,)), "Depth_%d_time" % (time_index))
time_index += 1
# update our state variables and continue parsing the file
ts, zero, time_str = line.split()
timeval = float(time_str)
# this will be the start of a new vector of depth values
accumulated_values = []
passed_header = True
elif passed_header and not line.startswith("ENDDS"):
# here we're just saving off an individual depth value for a node
accumulated_values.append(float(line))
else:
print "Ignored: %s" % (line)
"""
if grid_name not in datafile:
datafile.write_anonymous(self._grid, grid_name)
datafile.write_anonymous(self.__dt_type__(), "Seq_" + name)
varname = "%s_%d" % (name, time_index)
datafile.write_anonymous(grid_name, varname)
datafile.write_anonymous(self._values, varname + "_V")
datafile.write_anonymous(np.array((time,)), varname + "_time")
def __str__(self):
return self.__dt_type__() + ":\n grid = " + str(self._grid)
def __dt_write__(self, datafile, name):
datafile.write_anonymous(self._values, name + "_V")
datafile.write_anonymous(self._grid, name)
@classmethod
def from_data_file(self, datafile, name):
name = datafile.resolve_name(name)
values = datafile[name + "_V"]
grid = datafile[name]
        assert values is not None, "DTTriangularMesh2D: no such variable %s in %s" % (name + "_V", datafile.path())
        assert grid is not None, "DTTriangularMesh2D: no such variable %s in %s" % (name, datafile)
        return DTTriangularMesh2D(grid, values)
|
[
"amaxwell@mac.com"
] |
amaxwell@mac.com
|
a6155ef023589c254e65453a1624e089ee5200d3
|
5d614198b61ab7e4287c17db1c13ab1c7745dc94
|
/Classes.py
|
fd0f8bedd5d5decda61185aae4b564ee5d5b5de7
|
[] |
no_license
|
cesarak2/EM624_assignments
|
1bb31971fdd07465c20d832272f88fd824704e59
|
7f84e872364ff8aaafe3daf687913e8d3c366bed
|
refs/heads/master
| 2021-09-07T19:33:42.808252
| 2018-02-27T23:27:53
| 2018-02-27T23:27:53
| 119,113,851
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 914
|
py
|
class Student(object):
"""
Description of the class
    Attributes:
        name: str, the name of the student
        course: str, the course the student is taking
        age: int, set later via set_my_age()
"""
def __init__(self, name, course):
self.__name = name
self._course = course
self.__age = None
def print_my_name(self):
"""
This function prints the name of the student
:return: None
"""
print self.__name
def what_is_my_age(self, age):
"""
:param age:
:return:
"""
print "Student " + str(self.__name) + " has the age of " + str(age) + ""
def set_my_age(self, age):
"""
:param age:
:return:
"""
self.__age = age
print "Updated the age..."
def get_my_age(self):
"""
:return:
"""
return self.__age
# design patterns
# I am X bootstrap
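
# A minimal usage sketch (not part of the original file; the name, course and
# age values below are illustrative only):
if __name__ == "__main__":
    student = Student("Ada", "EM624")
    student.print_my_name()
    student.what_is_my_age(20)
    student.set_my_age(21)
    print student.get_my_age()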
|
[
"cesarak2+github@gmail.com"
] |
cesarak2+github@gmail.com
|
c52088881bbfb8044d419632a30838c99d52f2b4
|
7fd5c148da0549124acbc98b2166066244c0813a
|
/testing/test_parm_yaml.py
|
3a3310319e8690c4f4af76a35e0d2866ec91ac26
|
[] |
no_license
|
FPP-1454737567/FPP-homework
|
b1ff33233509423cee2fe040b453562f61122353
|
ad89a5c1575afb0ecd89ccf188df91ec6a1b41bd
|
refs/heads/main
| 2023-03-10T23:01:20.763549
| 2021-02-28T08:37:07
| 2021-02-28T08:37:07
| 320,301,749
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 517
|
py
|
# @Time : 2020/12/13
# @Author : FPP
import pytest
import yaml
def get_datas():
with open("data.yml") as f:
datas = yaml.safe_load(f)
print(datas)
add_datas = datas["datas"]
add_ids = datas["myid"]
return [add_datas, add_ids]
def add_function(a, b):
return a + b
@pytest.mark.parametrize("a,b,expected",
get_datas()[0],
ids=get_datas()[1])
def test_add(a, b, expected):
assert add_function(a, b) == expected
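
# A sketch of the data.yml layout this test assumes (the real file is not shown
# in this snippet): "datas" holds [a, b, expected] triples and "myid" holds the
# matching test ids.
#
# datas:
#   - [1, 1, 2]
#   - [3, 5, 8]
# myid:
#   - "ones"
#   - "fib"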
|
[
"1454737567@qq.com"
] |
1454737567@qq.com
|
fc592a2a675b1f77409e611c942ec11c431e62ba
|
849e95a72f4f380d6b31573a0a13e9eccd288838
|
/legal-api/src/legal_api/models/party_role.py
|
ead552b83329bfbcb0478b9f16c94e55b44cfa10
|
[
"Apache-2.0"
] |
permissive
|
bcgov/lear
|
d9b27e2b44ba607ca13878357a62a0623d54ddee
|
d90f11a7b14411b02c07fe97d2c1fc31cd4a9b32
|
refs/heads/main
| 2023-09-01T11:26:11.058427
| 2023-08-31T20:25:24
| 2023-08-31T20:25:24
| 168,396,249
| 13
| 117
|
Apache-2.0
| 2023-09-14T20:52:02
| 2019-01-30T18:49:09
|
Python
|
UTF-8
|
Python
| false
| false
| 6,487
|
py
|
# Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module holds data for party roles in a business."""
from __future__ import annotations
from datetime import datetime
from enum import Enum
from sqlalchemy import Date, cast, or_
from .db import db # noqa: I001
from .party import Party # noqa: I001,F401,I003 pylint: disable=unused-import; needed by the SQLAlchemy rel
class PartyRole(db.Model):
"""Class that manages data for party roles related to a business."""
class RoleTypes(Enum):
"""Render an Enum of the role types."""
APPLICANT = 'applicant'
COMPLETING_PARTY = 'completing_party'
CUSTODIAN = 'custodian'
DIRECTOR = 'director'
INCORPORATOR = 'incorporator'
LIQUIDATOR = 'liquidator'
PROPRIETOR = 'proprietor'
PARTNER = 'partner'
__versioned__ = {}
__tablename__ = 'party_roles'
id = db.Column(db.Integer, primary_key=True)
role = db.Column('role', db.String(30), default=RoleTypes.DIRECTOR)
appointment_date = db.Column('appointment_date', db.DateTime(timezone=True))
cessation_date = db.Column('cessation_date', db.DateTime(timezone=True))
business_id = db.Column('business_id', db.Integer, db.ForeignKey('businesses.id'))
filing_id = db.Column('filing_id', db.Integer, db.ForeignKey('filings.id'))
party_id = db.Column('party_id', db.Integer, db.ForeignKey('parties.id'))
# relationships
party = db.relationship('Party')
def save(self):
"""Save the object to the database immediately."""
db.session.add(self)
db.session.commit()
@property
def json(self) -> dict:
"""Return the party member as a json object."""
party = {
**self.party.json,
'appointmentDate': datetime.date(self.appointment_date).isoformat(),
'cessationDate': datetime.date(self.cessation_date).isoformat() if self.cessation_date else None,
'role': self.role
}
return party
@classmethod
def find_by_internal_id(cls, internal_id: int) -> PartyRole:
"""Return a party role by the internal id."""
party_role = None
if internal_id:
party_role = cls.query.filter_by(id=internal_id).one_or_none()
return party_role
@classmethod
def find_party_by_name(cls, business_id: int, first_name: str, # pylint: disable=too-many-arguments; one too many
last_name: str, middle_initial: str, org_name: str) -> Party:
"""Return a Party connected to the given business_id by the given name."""
party_roles = cls.query.filter_by(business_id=business_id).all()
party = None
# the given name to find
search_name = ''
if org_name:
search_name = org_name
elif middle_initial:
search_name = ' '.join((first_name.strip(), middle_initial.strip(), last_name.strip()))
else:
search_name = ' '.join((first_name.strip(), last_name.strip()))
for role in party_roles:
# the name of the party for each role
name = role.party.name
if name and name.strip().upper() == search_name.strip().upper():
party = role.party
break
return party
@staticmethod
def get_parties_by_role(business_id: int, role: str) -> list:
"""Return all people/oraganizations with the given role for this business (ceased + current)."""
members = db.session.query(PartyRole). \
filter(PartyRole.business_id == business_id). \
filter(PartyRole.role == role). \
all()
return members
@staticmethod
def get_active_directors(business_id: int, end_date: datetime) -> list:
"""Return the active directors as of given date."""
directors = db.session.query(PartyRole). \
filter(PartyRole.business_id == business_id). \
filter(PartyRole.role == PartyRole.RoleTypes.DIRECTOR.value). \
filter(cast(PartyRole.appointment_date, Date) <= end_date). \
filter(or_(PartyRole.cessation_date.is_(None), cast(PartyRole.cessation_date, Date) > end_date)). \
all()
return directors
@staticmethod
def get_party_roles(business_id: int, end_date: datetime, role: str = None) -> list:
"""Return the parties that match the filter conditions."""
party_roles = db.session.query(PartyRole). \
filter(PartyRole.business_id == business_id). \
filter(cast(PartyRole.appointment_date, Date) <= end_date). \
filter(or_(PartyRole.cessation_date.is_(None), cast(PartyRole.cessation_date, Date) > end_date))
if role is not None:
party_roles = party_roles.filter(PartyRole.role == role.lower())
party_roles = party_roles.all()
return party_roles
@staticmethod
def get_party_roles_by_party_id(business_id: int, party_id: int) -> list:
"""Return the parties that match the filter conditions."""
party_roles = db.session.query(PartyRole). \
filter(PartyRole.business_id == business_id). \
filter(PartyRole.party_id == party_id). \
all()
return party_roles
@staticmethod
def get_party_roles_by_filing(filing_id: int, end_date: datetime, role: str = None) -> list:
"""Return the parties that match the filter conditions."""
party_roles = db.session.query(PartyRole). \
filter(PartyRole.filing_id == filing_id). \
filter(cast(PartyRole.appointment_date, Date) <= end_date). \
filter(or_(PartyRole.cessation_date.is_(None), cast(PartyRole.cessation_date, Date) > end_date))
if role is not None:
party_roles = party_roles.filter(PartyRole.role == role.lower())
party_roles = party_roles.all()
return party_roles
|
[
"noreply@github.com"
] |
bcgov.noreply@github.com
|
7b1116460997af5e119696e3f90fb8615fd4e3de
|
4cbf3ab1162509a5322dc9c31838e6db81e8ea3a
|
/koordinat/admin.py
|
0bb9c6cfef7fdcd333b4b742500f1a9215e06a07
|
[] |
no_license
|
RyanDritama/bridge-crack-monitoring
|
425a27fed2345cdb2715a4a4dd7c061fb1208ff7
|
e9edfd17e505e255199ab0de03b251ec7585af30
|
refs/heads/main
| 2023-06-29T01:47:32.160256
| 2021-07-28T11:06:10
| 2021-07-28T11:06:10
| 390,318,070
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 125
|
py
|
from django.contrib import admin
from .models import Koordinat
admin.site.register(Koordinat)
# Register your models here.
|
[
"noreply@github.com"
] |
RyanDritama.noreply@github.com
|
473dd365fb3ca3dd368745e5d30fe6e70bbc1266
|
7c763618a2b37cbff09f5b61e2d14e70abae60d8
|
/stack_balanced_parenthesis.py
|
3a9bfd8a8062f078dc6a442e1a85698e1cd0bcc6
|
[] |
no_license
|
shubhamg14/DataStructures-Algorithms
|
5f669db5a0b5459634885d3759bc44b28a49736d
|
7c54277d9cd5b4aef9ace91e4c77fe9bd135c142
|
refs/heads/master
| 2020-05-24T18:22:47.241861
| 2019-05-18T21:54:49
| 2019-05-18T21:54:49
| 187,408,732
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 913
|
py
|
from stack import Stack
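
# The `stack` module is not included in this snippet; the code below assumes a
# minimal LIFO interface roughly like this sketch:
#
# class Stack(object):
#     def __init__(self):
#         self._items = []
#     def push(self, item):
#         self._items.append(item)
#     def pop(self):
#         return self._items.pop()
#     def is_empty(self):
#         return not self._items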
def is_match(p1, p2):
if p1 == "(" and p2 == ")":
return True
if p1 == "{" and p2 == "}":
return True
if p1 == "[" and p2 == "]":
return True
else:
return False
def is_parenthesis_balanced(paren_string):
s = Stack()
is_balanced = True
index = 0
while index < len(paren_string) and is_balanced:
parenthesis = paren_string[index]
if parenthesis in '({[':
s.push(parenthesis)
else:
if s.is_empty():
is_balanced = False
else:
top_element = s.pop()
if not is_match(top_element, parenthesis):
is_balanced = False
index += 1
if s.is_empty() and is_balanced:
return True
else:
return False
print (is_parenthesis_balanced('{()}'))
|
[
"noreply@github.com"
] |
shubhamg14.noreply@github.com
|
3e95fdd18d9acec82483b21c2815e10ae2e465dd
|
2e86a37cfa3f882b3dd93433bc2cc86e569bc7a6
|
/web_app/recommendation/recommender.py
|
b4afdec245b94eef0b52ea4aab8b4cdc7ee9efd7
|
[] |
no_license
|
peterle93/Recommendations-with-IBM
|
8a2f2d890b25f0a308e43ca24702312c3aa2f043
|
2f0758e20097eed8fb5dceb58b197e81072592d7
|
refs/heads/main
| 2023-03-26T13:51:44.730291
| 2021-03-14T03:53:35
| 2021-03-14T03:53:35
| 322,931,043
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,881
|
py
|
import pandas as pd
import numpy as np
# The web app uses Plotly, so matplotlib.pyplot is intentionally not imported here
from recommendation.data_clean import Data_Clean
from recommendation.rbrecommender import RBRecommender
from recommendation.ucfrecommender import UCFRecommender
from recommendation.mfrecommender import MFRecommender
from recommendation.cbrecommender import CBRecommender
class Recommender():
'''
    Top-level recommender that combines rank-based, user-based collaborative filtering, content-based, and matrix-factorization recommenders
'''
def __init__(self, interact_pth='data/user-item-interactions.csv', articles_pth='data/articles_community.csv',top_n=10):
self.top_n = top_n
dc=Data_Clean(interact_pth, articles_pth)
self.ucfr=UCFRecommender(dc.interacts_clean)
self.rbr=RBRecommender(dc.interacts_clean)
self.cbr=CBRecommender(dc.articles_clean,dc.interacts_clean)
self.mfr=MFRecommender(dc.articles_clean,dc.interacts_clean)
self.user_with_few_articles = self.ucfr.user_item.index[self.ucfr.user_item.sum(axis=1)<3].values
def recommend_articles(self, user_id, top_n=10):
'''
Description:
            According to the user's type:
                new user: RBRecommender
                existing user with enough read articles: UCFRecommender/MFRecommender
                existing user with only a few read articles: CBRecommender
Args:
user_id
Return:
Recs: list of recommendations
'''
if top_n != 10:
self.top_n = top_n
if user_id in self.cbr.users:
if user_id in self.user_with_few_articles:
recs=self.cbr.make_content_recs(user_id, self.top_n)
if len(recs) == 0:
recs=self.rbr.get_top_articles()
else:
_, recs=self.ucfr.user_advance_recs(user_id, self.top_n)
else:
recs=self.rbr.get_top_articles()
return recs
def mf_calculate_error(self):
return self.mfr.calculate_error()
'''
# Web server use ploly to draw curve, so comment out the following.
def draw_curve(self):
latent_factors_num,test_accuracy,train_accuracy =self.mfr.calculate_error()
self.mfr.draw_curve(latent_factors_num,test_accuracy,train_accuracy)
'''
if __name__ == '__main__':
rec=Recommender()
# Quick spot check just use it to test your functions
# normal users
rec_names =rec.recommend_articles(20)
print("The top 10 recommendations for user 20 are the following article names:")
print(rec_names)
# user with few articles.
rec_names =rec.recommend_articles(141)
print("The top 10 recommendations for user 2 are the following article names:")
print(rec_names)
# new user id 6000
rec_names =rec.recommend_articles(6000)
print("The top 10 recommendations for new user 6000 are the following article names:")
print(rec_names)
#rec.draw_curve()
|
[
"le.peter1993@gmail.com"
] |
le.peter1993@gmail.com
|
65a977859bc72062caed63bb6ea59ebeb4f24d3c
|
d02c7b6097573d469d4fc42dffbcb43f72b193fa
|
/examples/counter/urls.py
|
83c9e566e14b7e7ce65e180d3a47b6ded1a3a02c
|
[] |
no_license
|
wmatyskiewicz/django-serverpush
|
5febeedee6ca6834c6cfb487432bb8e9cdd2357c
|
51c67df5a4b7157b801d1d53aa63bdf16382a8e1
|
refs/heads/master
| 2020-07-01T15:39:30.183320
| 2014-04-17T07:44:13
| 2014-04-17T07:44:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 134
|
py
|
from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
url(r'^$', 'counter.demoapp.views.list'),
)
|
[
"ziga@hamsworld.net"
] |
ziga@hamsworld.net
|
22e2585734491967052aa67dc5f1eb1ebb10b18f
|
92d19be0e7825a8ec99a876530fa943cd3b3739c
|
/telegram_flask_bot.py
|
c295f1992cb069fd274b43aa67f78def85f1fca6
|
[
"MIT"
] |
permissive
|
bondgeodima/first
|
36e11c0d9eb7739270549812b4a9e00520094dde
|
5fb772cbffd065a1ee17135c369c2878b28122dd
|
refs/heads/master
| 2021-07-17T01:44:03.909795
| 2021-04-01T08:47:04
| 2021-04-01T08:47:04
| 68,470,987
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 764
|
py
|
from flask import Flask, request
import requests
app = Flask(__name__)
def send_message(chat_id, text):
method = "sendMessage"
token = "1383386139:AAGWeMlF9BW26ZwUwnVuk2pQm6nOvUADxyw"
url = f"https://api.telegram.org/bot{token}/{method}"
data = {"chat_id": chat_id, "text": text}
requests.post(url, data=data)
@app.route("/", methods=["GET", "POST"])
def receive_update():
if request.method == "POST":
print(request.json)
chat_id = request.json["message"]["chat"]["id"]
m_text = request.json["message"]["text"]
# send_message(chat_id, "pong")
send_message(chat_id, m_text)
return {"ok": True}
if __name__ == '__main__':
#app.run()
app.run(host='127.0.0.1', port=5000, debug=True, )
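
# Note (not part of the original file): for Telegram to POST updates to this
# Flask route, a webhook pointing at a publicly reachable URL must be
# registered once, roughly like the sketch below (the URL is a placeholder):
#
# requests.post("https://api.telegram.org/bot{}/setWebhook".format(token),
#               data={"url": "https://your-public-host.example/"})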
|
[
"bondgeodima@gmail.com"
] |
bondgeodima@gmail.com
|
961970383721ee8e81d947897598f984e6316067
|
eb4b2609a0ce2f74569a3691a5ec2f1dcb8112ba
|
/demo/simple_app.py
|
00c6a17ac90931fdfc07852dc5d0f38c9aa4bdc3
|
[
"MIT"
] |
permissive
|
ra2003/Calendarium
|
15ca05ff142bffbbefe82a8c015f904b60210a05
|
4c7fa27ed70851dc607ee293c84179e9356b8e2f
|
refs/heads/master
| 2022-02-14T14:15:44.882525
| 2019-08-28T08:46:58
| 2019-08-28T08:46:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,378
|
py
|
#!/usr/bin/python3
import tkinter as tk
from tkinter import ttk
from tkinter import messagebox
from calendarium import Calendarium
class Main(ttk.Frame):
def __init__(self, parent):
super().__init__()
self.parent = parent
self.text = tk.StringVar()
self.init_ui()
def init_ui(self):
self.pack(fill=tk.BOTH, expand=1)
f = ttk.Frame()
self.start_date = Calendarium(self,"Start Date")
self.end_date = Calendarium(self,"End Date")
self.start_date.get_calendarium(f,)
self.end_date.get_calendarium(f,)
w = ttk.Frame()
ttk.Button(w, text="Print Date", command=self.on_callback).pack()
ttk.Button(w, text="Set Today", command=self.on_reset).pack()
ttk.Button(w, text="Compare", command=self.on_compare).pack()
ttk.Button(w, text="Close", command=self.on_close).pack()
f.pack(side=tk.LEFT, fill=tk.BOTH, expand=1)
w.pack(side=tk.RIGHT, fill=tk.BOTH, expand=1)
def on_open(self):
pass
#self.start_date.set_today()
#self.end_date.set_today()
def on_callback(self,):
if self.start_date.get_date(self)==False:return
if self.end_date.get_date(self)==False:
return
else:
msg = "{0}: {1}\n{2}: {3}".format(self.start_date.name,self.start_date.get_date(self),
self.end_date.name,self.end_date.get_date(self))
messagebox.showinfo(self.parent.title(), msg, parent=self)
def on_reset(self):
self.start_date.set_today()
self.end_date.set_today()
def on_compare(self):
if self.start_date.get_date(self)==False:
return
else:
d1 = self.start_date.get_date(self)
if self.end_date.get_date(self)==False:
return
else:
d2 = self.end_date.get_date(self)
if d1 > d2:
msg = "{0} is greater than {1} :".format(self.start_date.name,self.end_date.name)
elif d1 < d2:
msg = "{0} is less than {1} :".format(self.start_date.name,self.end_date.name)
else:
msg = "{0} is equal than {1} :".format(self.start_date.name,self.end_date.name)
messagebox.showinfo(self.parent.title(), msg, parent=self)
def on_close(self):
self.parent.on_exit()
class App(tk.Tk):
"""Start here"""
def __init__(self):
super().__init__()
self.protocol("WM_DELETE_WINDOW", self.on_exit)
self.set_title()
self.set_style()
frame = Main(self,)
frame.pack(fill=tk.BOTH, expand=1)
frame.on_open()
def set_style(self):
self.style = ttk.Style()
#('winnative', 'clam', 'alt', 'default', 'classic', 'vista', 'xpnative')
self.style.theme_use("clam")
def set_title(self):
s = "{0}".format('My App')
self.title(s)
def on_exit(self):
"""Close all"""
if messagebox.askokcancel(self.title(), "Do you want to quit?", parent=self):
self.destroy()
if __name__ == '__main__':
app = App()
app.mainloop()
|
[
"noreply@github.com"
] |
ra2003.noreply@github.com
|
2db38cfeec04bd93ee0dff2f0a9c4a1cf5e1c111
|
255e19ddc1bcde0d3d4fe70e01cec9bb724979c9
|
/dockerized-gists/a1312d211c110ff3855d/snippet.py
|
f27dd11226e4e07b0fa62f88066af4631a465cb1
|
[
"MIT"
] |
permissive
|
gistable/gistable
|
26c1e909928ec463026811f69b61619b62f14721
|
665d39a2bd82543d5196555f0801ef8fd4a3ee48
|
refs/heads/master
| 2023-02-17T21:33:55.558398
| 2023-02-11T18:20:10
| 2023-02-11T18:20:10
| 119,861,038
| 76
| 19
| null | 2020-07-26T03:14:55
| 2018-02-01T16:19:24
|
Python
|
UTF-8
|
Python
| false
| false
| 5,920
|
py
|
#!/usr/bin/env python2
"""
Automate your browser via telnet.
Requirements:
* Firefox
* MozRepl add-on (https://addons.mozilla.org/en-US/firefox/addon/mozrepl/)
- activate the add-on (under Tools -> MozRepl, "Start" and "Activate on startup")
Documentation of gBrowser:
* https://developer.mozilla.org/en-US/docs/XUL/tabbrowser (reference)
* https://developer.mozilla.org/en-US/docs/Code_snippets/Tabbed_browser (code snippets)
# from jpl2 import firefox as ff
written by Jabba Laci
https://github.com/jabbalaci
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
import json
import re
import socket
import sys
import telnetlib
import time
class Mozrepl(object):
"""
based on https://github.com/bard/mozrepl/wiki/Pyrepl
"""
HOST = 'localhost'
PORT = 4242
prompt = [r'repl\d*> '] # list of regular expressions
def __init__(self, ip=HOST, port=PORT):
self.ip = ip
self.port = port
def __enter__(self):
self.tn = telnetlib.Telnet(self.ip, self.port)
self.tn.expect(Mozrepl.prompt)
return self
def __exit__(self, type, value, traceback):
self.tn.close()
del self.tn
def cmd(self, command):
"""
Execute the command and fetch its result.
"""
self.tn.write(command.encode() + b"\n")
return self.tn.expect(Mozrepl.prompt)
def get_text_result(self, command, sep=''):
"""
Execute the command and fetch its result as text.
"""
lines = self.cmd(command)[2].decode("utf8").split("\n")
if re.search(Mozrepl.prompt[0].strip(), lines[-1]):
lines = lines[:-1]
return sep.join(lines)
@classmethod
def is_installed(cls):
"""
Test if MozRepl is installed.
We simply try to connect to localhost:4242 where
MozRepl should be listening.
"""
try:
with Mozrepl() as mr:
pass
return True
except socket.error:
return False
#############################################################################
def open_url_in_curr_tab(url):
"""
Open a URL in the *current* tab.
"""
with Mozrepl() as mr:
cmd = "content.location.href = '{url}'".format(url=url)
mr.cmd(cmd)
def get_curr_tab_url():
"""
URL of the current tab.
"""
with Mozrepl() as mr:
result = mr.cmd("content.location.href")
return result[2].split()[0].replace('"', '')
def open_new_empty_tab():
"""
Open a new empty tab and put the focus on it.
"""
with Mozrepl() as mr:
mr.cmd('gBrowser.addTab()')
mr.cmd('length = gBrowser.tabContainer.childNodes.length')
mr.cmd('gBrowser.selectedTab = gBrowser.tabContainer.childNodes[length-1]')
def put_focus_on_tab(n):
"""
Put the focus on the selected tab.
"""
if not (0 <= n < get_number_of_tabs()):
print("Warning! Incorrect tab number!")
return
# else
with Mozrepl() as mr:
mr.cmd('gBrowser.selectedTab = gBrowser.tabContainer.childNodes[{n}]'.format(n=n))
def open_url_in_new_tab(url):
"""
Open the given URL in a new tab.
webbrowser.open_new_tab puts the focus on Firefox.
This one doesn't.
"""
open_new_empty_tab()
open_url_in_curr_tab(url)
def get_curr_tab_html():
"""
HTML source of the current tab.
If the current page is big, don't use
this method on it, it'll take much time.
"""
with Mozrepl() as mr:
result = mr.cmd('content.document.body.innerHTML')
html = result[2].decode("utf8").split('\n')
if html[0].strip() == '"':
html = html[1:]
if re.search(Mozrepl.prompt[0], html[-1]):
html = html[:-1]
if html[-1].strip() == '"':
html = html[:-1]
return ''.join(html)
def close_curr_tab():
"""
Close the current tab.
"""
with Mozrepl() as mr:
mr.cmd('gBrowser.removeCurrentTab()')
def get_number_of_tabs():
"""
Number of tabs in the browser.
"""
with Mozrepl() as mr:
result = mr.get_text_result('gBrowser.tabContainer.childNodes.length')
return int(result)
def get_curr_tab_title():
"""
Title of the page in the current tab.
"""
with Mozrepl() as mr:
result = mr.get_text_result('document.title')
return result
def get_tab_list():
cmd = \
"""
String.prototype.format = function() {
var formatted = this;
for(arg in arguments) {
formatted = formatted.replace("{" + arg + "}", arguments[arg]);
}
return formatted;
};
var all_tabs = gBrowser.mTabContainer.childNodes;
var tab_list = [];
for (var i = 0; i < all_tabs.length; ++i ) {
var tab = gBrowser.getBrowserForTab(all_tabs[i]).contentDocument;
if(tab.location != "about:blank")
tab_list.push({"url":tab.location, "title":tab.title});
}
for (var i=0; i<tab_list.length; ++i) {
var title = tab_list[i].title;
title = title.replace(/"/g, "'");
var item = '{"index": {0}, "title": "{1}", "url": "{2}"}'.format(i, title, tab_list[i].url);
repl.print(item);
}
"""
with Mozrepl() as mr:
result = mr.get_text_result(cmd, sep='\n')
li = []
for e in result.split('\n'):
li.append(json.loads(e))
return li
#############################################################################
if __name__ == "__main__":
if not Mozrepl.is_installed():
print('Cannot connect to {host}:{port}'.format(host=Mozrepl.HOST, port=Mozrepl.PORT))
print('Make sure that the MozRepl Firefox add-on is installed and activated.')
sys.exit(1)
else:
li = ["nsfw", "legs", "pantyhose"]
for e in li:
open_url_in_new_tab("http://www.reddit.com/r/{}".format(e))
|
[
"gistshub@gmail.com"
] |
gistshub@gmail.com
|
7a4051effcd4d7d715a73c0ff50a3b60c05a5460
|
5993170b5a055ce60f207af8e95414909379561c
|
/frontend/view/config.py
|
27463473aac1dde0e70b96d271d9a908d23b516f
|
[] |
no_license
|
vyakunin/zakupki
|
29b1879be79e23a0616d802103fdb85891e2fcd0
|
396614e2fb8c22f934a2aab12ea878378c979b22
|
refs/heads/master
| 2020-04-06T11:54:21.298112
| 2012-04-28T10:34:56
| 2012-04-28T10:34:56
| 32,123,985
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 327
|
py
|
#!/usr/bin/python2.5
# encoding: utf-8
"""
config.py
Copyright (c) Sergey Babenko and Vladimir Yakunin 2011.
All rights reserved.
"""
class Config(object):
# TODO: invent a better way to deal with this.
# Probably we may store it in DataStore
actuality_date = "29 ноября 2011"
start_date = "1 января 2007"
|
[
"mc.vertix@gmail.com"
] |
mc.vertix@gmail.com
|
13e51cf7b1950166defe7ad6a138d5944de27cba
|
6a94ad9e53d81e09894e7a9c7c27edc1b6ff8123
|
/reverseflow/generator.py
|
fbb2d65bb0481cf2a514294473b02f40ccd9a74c
|
[] |
no_license
|
zenna/reverseflow
|
c59ed2470dae743202acc2a3a2af0db60d157d5a
|
7a9f2961fb59e608d0f0cae4f8b10e83cc36a20d
|
refs/heads/master
| 2021-05-04T06:20:41.426689
| 2016-10-13T17:31:58
| 2016-10-13T17:31:58
| 66,391,115
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,347
|
py
|
"""Generate random tensorflow graphs"""
from rf.util import *
import tensorflow as tf
import numpy as np
import random
import pdb
class Choice:
def __init__(self, transform, prob, **kwargs):
assert prob > 0
self.transform = transform
self.prob = prob
self.kwargs = kwargs
## Predicates
## ==========
def check_empty(g):
return g.get_operations() == []
def two_equiv_tensors(tensor_groups):
"""Get two tensors with same shape and dtype"""
for group in tensor_groups.values():
if len(group) > 1:
return True
return False
## Transforms
## ==========
def create_var(g, dtype, shape):
tf.placeholder(dtype=dtype, shape=shape)
return False
def create_const(g, const_gen):
tf.constant(const_gen())
return False
def apply_op(g, op, args):
op(*args)
return False
def stop_signal(g):
return True
## suggestions
## ===========
def create_vars(g):
n = num_ph(g)
if n == 0:
return [Choice(create_var, 1.0, dtype=tf.float32, shape=(128,128))]
else:
return [Choice(create_var, 1.0, dtype=tf.float32, shape=(128,128)),
Choice(create_const, 0.5, const_gen=lambda: np.random.rand(10, 10))]
def maybe_stop(g):
if len(all_tensors_namescope(g, 'fwd_g')) > 10.0:
return [Choice(stop_signal, 1.0)]
else:
return []
def apply_elem_op(g):
tensors = all_tensors_namescope(g, 'fwd_g')
valid_tensors = []
for t in tensors:
if in_namescope(t, "fwd_g") and (t.op.type == "placeholder" or t.op.type == "Identity"):
valid_tensors.append(t)
elif in_namescope(t, "random_graph"):
valid_tensors.append(t)
pdb.set_trace()
tensor_groups = group_equiv_tensors(valid_tensors)
if two_equiv_tensors(tensor_groups):
ops = [tf.add, tf.sub, tf.mul]
for v in tensor_groups.values():
print("V is", len(v))
if len(v) > 1:
a, b = np.random.choice(v, (2,), replace=False)
op = np.random.choice(ops)
return [Choice(apply_op, 2.0, op=op, args=(a, b))]
assert False
else:
return []
def gen_graph(g, suggestions, max_iterations=1000):
"""
    Generate a tensorflow graph
g :: tf.Graph - graph to append to, if None creates new graph
"""
np.random.seed(0)
random.seed(0)
print("Generating Graph")
with g.as_default():
with g.name_scope("random_graph"):
for i in range(max_iterations):
# pdb.set_trace()
choices = []
for suggest in suggestions:
choices = choices + suggest(g)
weights = [c.prob for c in choices]
print(weights)
probs = weights / np.sum(weights)
curr_choice = np.random.choice(choices, p=probs)
print(i," ", curr_choice.prob)
stop_now = curr_choice.transform(g, **curr_choice.kwargs)
if stop_now:
print("Caught stop signal, stopping")
break
print(summary(g))
detailed_summary(g)
return g
# g = tf.Graph()
# gen_graph(g, [create_vars, maybe_stop, apply_elem_op])
# print(summary(g))
# writer = tf.train.SummaryWriter('/home/zenna/repos/inverse/log', g)
|
[
"zennatavares@gmail.com"
] |
zennatavares@gmail.com
|
08f678546969bfacb9cf47a6133749b69d114030
|
47c3a25497269cfe4fde41bb01856acb6566d280
|
/book_management/models.py
|
faf7f7111a6733f350fc9165c5171c64f306f4f9
|
[] |
no_license
|
sreekanth-kc/Books-inventory-System
|
d6bf847bd33828593513e46d833e7493ea7ae23f
|
45501ef4a8c9ba67c326e43af965210d38eae1cd
|
refs/heads/master
| 2022-07-30T05:22:08.011554
| 2020-05-19T15:58:36
| 2020-05-19T15:58:36
| 265,243,456
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,170
|
py
|
import uuid
from django.contrib.auth.models import AbstractUser
from django.db import models
from rest_framework.authtoken.models import Token
from Books_Inventory import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
class AppUser(AbstractUser, models.Model):
"""
    Model class for managing user details.
"""
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
email = models.EmailField(unique=True)
password = models.CharField(max_length=100)
class Meta:
db_table = 'user'
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(sender, instance=None, created=False, **kwargs):
if created:
Token.objects.create(user=instance)
class Book(models.Model):
"""
    Model class for managing book details.
"""
book_id = models.CharField(primary_key=True, max_length=100, editable=False)
book_name = models.CharField(max_length=100, blank=False)
author_name = models.CharField(max_length=100, blank=False)
book_count = models.IntegerField(editable=True)
class Meta:
db_table = 'book_details'
|
[
"sreekanthc@qburst.com"
] |
sreekanthc@qburst.com
|
e047f38b9cc1f44a36290382d061ca90d7d3df9c
|
c546730456a2b72865bbb93764be89602f370d8e
|
/oceans/synop.py
|
17e92dd4a2422081c002d57092bc11658f991d17
|
[
"BSD-3-Clause"
] |
permissive
|
anhlpham/python-oceans
|
164b46afe21f5240a71a7515884b315f8b597704
|
591e85d1e6421337f031ea65ddf152f2e3e71486
|
refs/heads/master
| 2022-04-03T12:52:17.574913
| 2020-02-12T20:30:07
| 2020-02-12T20:30:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,880
|
py
|
import numpy as np
def scaloa(xc, yc, x, y, t=None, corrlen=None, err=None, zc=None):
"""
Scalar objective analysis. Interpolates t(x, y) into tp(xc, yc)
Assumes spatial correlation function to be isotropic and Gaussian in the
form of: C = (1 - err) * np.exp(-d**2 / corrlen**2) where:
d : Radial distance from the observations.
Parameters
----------
corrlen : float
Correlation length.
err : float
Random error variance (epsilon in the papers).
Return
------
tp : array
Gridded observations.
ep : array
Normalized mean error.
Examples
--------
See https://ocefpaf.github.io/python4oceanographers/blog/2014/10/27/OI/
Notes
-----
The funcion `scaloa` assumes that the user knows `err` and `corrlen` or
that these parameters where chosen arbitrary. The usual guess are the
first baroclinic Rossby radius for `corrlen` and 0.1 e 0.2 to the sampling
error.
"""
n = len(x)
x, y = np.reshape(x, (1, n)), np.reshape(y, (1, n))
# Squared distance matrix between the observations.
d2 = (np.tile(x, (n, 1)).T - np.tile(x, (n, 1))) ** 2 + (
np.tile(y, (n, 1)).T - np.tile(y, (n, 1))
) ** 2
nv = len(xc)
xc, yc = np.reshape(xc, (1, nv)), np.reshape(yc, (1, nv))
# Squared distance between the observations and the grid points.
dc2 = (np.tile(xc, (n, 1)).T - np.tile(x, (nv, 1))) ** 2 + (
np.tile(yc, (n, 1)).T - np.tile(y, (nv, 1))
) ** 2
# Correlation matrix between stations (A) and cross correlation (stations
# and grid points (C)).
A = (1 - err) * np.exp(-d2 / corrlen ** 2)
C = (1 - err) * np.exp(-dc2 / corrlen ** 2)
if 0: # NOTE: If the parameter zc is used (`scaloa2.m`)
A = (1 - d2 / zc ** 2) * np.exp(-d2 / corrlen ** 2)
C = (1 - dc2 / zc ** 2) * np.exp(-dc2 / corrlen ** 2)
# Add the diagonal matrix associated with the sampling error. We use the
# diagonal because the error is assumed to be random. This means it just
# correlates with itself at the same place.
A = A + err * np.eye(len(A))
# Gauss-Markov to get the weights that minimize the variance (OI).
tp = None
    if t is not None:
t = np.reshape(t, (n, 1))
tp = np.dot(C, np.linalg.solve(A, t))
if 0: # NOTE: `scaloa2.m`
mD = np.sum(np.linalg.solve(A, t)) / np.sum(
np.sum(np.linalg.inv(A))
)
t = t - mD
tp = C * (np.linalg.solve(A, t))
tp = tp + mD * np.ones(tp.shape)
    if t is None:
print("Computing just the interpolation errors.") # noqa
# Normalized mean error. Taking the squared root you can get the
# interpolation error in percentage.
ep = 1 - np.sum(C.T * np.linalg.solve(A, C.T), axis=0) / (1 - err)
return tp, ep
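
# A minimal usage sketch (not part of the original module): interpolate a
# synthetic scalar field onto a small grid. The sampling positions and the
# corrlen/err values below are illustrative assumptions only.
if __name__ == "__main__":
    np.random.seed(42)
    x = np.random.uniform(0, 10, 50)
    y = np.random.uniform(0, 10, 50)
    t = np.sin(x) + np.cos(y)  # observations at the (x, y) positions
    xc, yc = np.meshgrid(np.linspace(0, 10, 10), np.linspace(0, 10, 10))
    tp, ep = scaloa(xc.ravel(), yc.ravel(), x, y, t=t, corrlen=2.0, err=0.1)
    print(tp.shape, ep.shape)  # (100, 1) gridded values, (100,) normalized errors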
|
[
"ocefpaf@gmail.com"
] |
ocefpaf@gmail.com
|
42a8c0b06bee2d39d2f246bb2c072f3cf3f1dbc7
|
3a42b62133900d84ebd71bcde6115d3e3c003d18
|
/test_cg.py
|
dd7f0547bb436950976e83a5c70c69620da41540
|
[] |
no_license
|
4Lisandr/cgmc
|
0d7bc479a4bff98fadd1f47e4e49b7efa4370a36
|
641dfb9a2b672f75d321ff1c13980c1567237040
|
refs/heads/master
| 2023-08-24T11:50:15.366286
| 2021-10-11T10:00:21
| 2021-10-11T10:00:21
| 407,466,774
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 461
|
py
|
import pandas as pd
import requests
coingecko = "https://www.coingecko.com/en"
r = requests.get(coingecko)
fields = ["Coin", "Price", "Mkt Cap", "1h","24h","7d"]
table = pd.read_html(r.text)[0][fields]
# keep only the coin symbol
table["Coin"] = table ["Coin"].apply(lambda x: x.split(" ")[2])
for s in fields[1], fields[2]:
table[s] = table [s].apply(lambda x: x.replace(",","").replace("$",""))
print(table)
name ="TopCMC_test.csv"
table.to_csv(name, index=False)
|
[
"sumy.ua@gmail.com"
] |
sumy.ua@gmail.com
|
c68f272bb4279ab85f248f79876c827125fbf5f3
|
6d6a79cfb3cc7e9db5bd84c92ce815da52ddae58
|
/PX4Flow_I2C.py
|
4d62529657ba71b224023a6cee203b5fae59b704
|
[] |
no_license
|
kyle-kelly/PX4Flow_python
|
d4d86e1be8a41789cf0b41e2a07aa547d3a0a382
|
9765144dbddbcbfbdc5573fa2cc180c8ceca7163
|
refs/heads/master
| 2021-01-25T06:24:43.794343
| 2017-10-21T17:45:10
| 2017-10-21T17:45:10
| 93,566,120
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,236
|
py
|
import smbus
import time
class PX4Flow_I2C(object):
"""
Class to hold I2C frame data from PX4Flow
typedef struct i2c_frame
{
uint16_t frame_count;// counts created I2C frames [#frames]
int16_t pixel_flow_x_sum;// latest x flow measurement in pixels*10 [pixels]
int16_t pixel_flow_y_sum;// latest y flow measurement in pixels*10 [pixels]
int16_t flow_comp_m_x;// x velocity*1000 [meters/sec]
int16_t flow_comp_m_y;// y velocity*1000 [meters/sec]
int16_t qual;// Optical flow quality / confidence [0: bad, 255: maximum quality]
int16_t gyro_x_rate; // latest gyro x rate [rad/sec]
int16_t gyro_y_rate; // latest gyro y rate [rad/sec]
int16_t gyro_z_rate; // latest gyro z rate [rad/sec]
uint8_t gyro_range; // gyro range [0 .. 7] equals [50 deg/sec .. 2000 deg/sec]
uint8_t sonar_timestamp;// time since last sonar update [milliseconds]
int16_t ground_distance;// Ground distance in meters*1000 [meters]. Positive value: distance known. Negative value: Unknown distance
} i2c_frame;
typedef struct i2c_integral_frame
{
uint16_t frame_count_since_last_readout;//number of flow measurements since last I2C readout [#frames]
int16_t pixel_flow_x_integral;//accumulated flow in radians*10000 around x axis since last I2C readout [rad*10000]
int16_t pixel_flow_y_integral;//accumulated flow in radians*10000 around y axis since last I2C readout [rad*10000]
int16_t gyro_x_rate_integral;//accumulated gyro x rates in radians*10000 since last I2C readout [rad*10000]
int16_t gyro_y_rate_integral;//accumulated gyro y rates in radians*10000 since last I2C readout [rad*10000]
int16_t gyro_z_rate_integral;//accumulated gyro z rates in radians*10000 since last I2C readout [rad*10000]
uint32_t integration_timespan;//accumulation timespan in microseconds since last I2C readout [microseconds]
uint32_t sonar_timestamp;// time since last sonar update [microseconds]
int16_t ground_distance;// Ground distance in meters*1000 [meters*1000]
int16_t gyro_temperature;// Temperature * 100 in centi-degrees Celsius [degcelsius*100]
uint8_t quality;// averaged quality of accumulated flow values [0:bad quality;255: max quality]
} __attribute__((packed)) i2c_integral_frame;
"""
def __init__(self, bus, address):
self.name = "PX4Flow"
self.bus = bus
self.address = address
"""Initialize with negative values"""
#I2C frame values
self.frame_count = -1 #counts created I2C frames [#frames]
self.pixel_flow_x_sum = -1 #latest x flow measurement in pixels*10 [pixels]
self.pixel_flow_y_sum = -1 #latest y flow measurement in pixels*10 [pixels]
self.flow_comp_m_x = -1 #x velocity*1000 [meters/sec]
self.flow_comp_m_y = -1 #y velocity*1000 [meters/sec]
self.qual = -1 #Optical flow quality / confidence [0: bad, 255: maximum quality]
self.gyro_x_rate = -1 #latest gyro x rate [rad/sec]
self.gyro_y_rate = -1 #latest gyro y rate [rad/sec]
self.gyro_z_rate = -1 #latest gyro z rate [rad/sec]
self.gyro_range = -1 #gyro range [0 .. 7] equals [50 deg/sec .. 2000 deg/sec]
self.sonar_timestamp = -1 #time since last sonar update [milliseconds]
self.ground_distance = -1 #Ground distance in meters*1000 [meters]. Positive value: distance known. Negative value: Unknown distance
#Integral I2C frame values
self.frame_count_since_last_readout = -1 #number of flow measurements since last I2C readout [#frames]
self.pixel_flow_x_integral = -1 #accumulated flow in radians*10000 around x axis since last I2C readout [rad*10000]
self.pixel_flow_y_integral = -1 #accumulated flow in radians*10000 around y axis since last I2C readout [rad*10000]
self.gyro_x_rate_integral = -1 #accumulated gyro x rates in radians*10000 since last I2C readout [rad*10000]
self.gyro_y_rate_integral = -1 #accumulated gyro y rates in radians*10000 since last I2C readout [rad*10000]
self.gyro_z_rate_integral = -1 #accumulated gyro z rates in radians*10000 since last I2C readout [rad*10000]
self.integration_timespan = -1 #accumulation timespan in microseconds since last I2C readout [microseconds]
self.sonar_timestamp = -1 # time since last sonar update [microseconds]
self.ground_distance = -1 # Ground distance in meters*1000 [meters*1000]
self.gyro_temperature = -1 # Temperature * 100 in centi-degrees Celsius [degcelsius*100]
self.quality = -1 # averaged quality of accumulated flow values [0:bad quality;255: max quality]
def update(self):
"""Send 0x0 to PX4FLOW module and receive back 22 bytes of data in registers 0x00-0x15"""
self.bus.write_byte(self.address, 0x0)
i2c_frame = self.bus.read_i2c_block_data(self.address, 0x00, 22)
self.frame_count = i2c_frame[0] | (i2c_frame[1] << 8)
self.pixel_flow_x_sum = self.twos_comp(i2c_frame[2] | (i2c_frame[3] << 8), 16)
self.pixel_flow_y_sum = self.twos_comp(i2c_frame[4] | (i2c_frame[5] << 8), 16)
self.flow_comp_m_x = self.twos_comp(i2c_frame[6] | (i2c_frame[7] << 8), 16)
self.flow_comp_m_y = self.twos_comp(i2c_frame[8] | (i2c_frame[9] << 8), 16)
self.qual = self.twos_comp(i2c_frame[10] | (i2c_frame[11] << 8), 16)
self.gyro_x_rate = self.twos_comp(i2c_frame[12] | (i2c_frame[13] << 8), 16)
self.gyro_y_rate = self.twos_comp(i2c_frame[14] | (i2c_frame[15] << 8), 16)
self.gyro_z_rate = self.twos_comp(i2c_frame[16] | (i2c_frame[17] << 8), 16)
self.gyro_range = i2c_frame[18]
self.sonar_timestamp = i2c_frame[19]
self.ground_distance = self.twos_comp(i2c_frame[20] | (i2c_frame[21] << 8), 16)
def integral_update(self):
"""Send 0x16 to PX4FLOW module and receive back 25 bytes of data in registers 0x16-0x2E"""
self.bus.write_byte(self.address, 0x16)
i2c_integral_frame = self.bus.read_i2c_block_data(self.address, 0x16, 25)
self.frame_count_since_last_readout = i2c_integral_frame[0] | (i2c_integral_frame[1] << 8)
self.pixel_flow_x_integral = self.twos_comp(i2c_integral_frame[2] | (i2c_integral_frame[3] << 8), 16)
self.pixel_flow_y_integral = self.twos_comp(i2c_integral_frame[4] | (i2c_integral_frame[5] << 8), 16)
self.gyro_x_rate_integral = self.twos_comp(i2c_integral_frame[6] | (i2c_integral_frame[7] << 8), 16)
self.gyro_y_rate_integral = self.twos_comp(i2c_integral_frame[8] | (i2c_integral_frame[9] << 8), 16)
self.gyro_z_rate_integral = self.twos_comp(i2c_integral_frame[10] | (i2c_integral_frame[11] << 8), 16)
self.integration_timespan = i2c_integral_frame[12] | (i2c_integral_frame[13] << 8) | (i2c_integral_frame[14] << 16) | (i2c_integral_frame[15] << 24)
self.sonar_timestamp = i2c_integral_frame[16] | (i2c_integral_frame[17] << 8) | (i2c_integral_frame[18] << 16) | (i2c_integral_frame[19] << 24)
self.ground_distance = self.twos_comp(i2c_integral_frame[20] | (i2c_integral_frame[21] << 8), 16)
self.gyro_temperature = self.twos_comp(i2c_integral_frame[22] | (i2c_integral_frame[23] << 8), 16)
self.quality = i2c_integral_frame[24]
def twos_comp(self, val, bits):
"""compute the 2's complement of int value val"""
if (val & (1 << (bits - 1))) != 0: # if sign bit is set e.g., 8bit: 128-255
val = val - (1 << bits) # compute negative value
return val # return positive value as is
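
# A minimal usage sketch (not part of the original module). The I2C bus number
# and the 0x42 device address are assumptions; adjust them for your wiring.
if __name__ == "__main__":
    px4flow = PX4Flow_I2C(smbus.SMBus(1), 0x42)
    while True:
        px4flow.update()
        px4flow.integral_update()
        print(px4flow.pixel_flow_x_sum, px4flow.pixel_flow_y_sum,
              px4flow.ground_distance)
        time.sleep(0.1)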
|
[
"kkelly.667@gmail.com"
] |
kkelly.667@gmail.com
|
7571dcccdfd3671f28bc3e9a8266832a0a5b8be7
|
e17d214b24d2e648ee5c0f888c7679b3a75a9c99
|
/madlib.py
|
91cf6af14f2dcb412f6ba2e04fafd2fa25634dde
|
[] |
no_license
|
AngelDelunadev/python-101
|
012b7f0e0ec001411b067721363f6edba5ceebbd
|
78971790a5d071602b0a0bc8ea63e084ca0da046
|
refs/heads/main
| 2023-02-11T04:58:35.314751
| 2021-01-13T16:26:29
| 2021-01-13T16:26:29
| 329,095,531
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 250
|
py
|
print("Please fill in the blanks below: ")
print("__(name)___'s favorite subject in school is __(subject)___.")
name = input("What is the name? ")
subject= input("what is the subject? ")
print("%s's favorite subject in school is %s."%(name, subject))
|
[
"angelluna2016@gmail.com"
] |
angelluna2016@gmail.com
|
157b38ddeaad12bf914205f92359e1d4d0b674be
|
12c02706f25294e975e1864e8488513d192e69ec
|
/Códigos_2/sistema-masa-resorte(1).py
|
54a10abc339188b00cc0299bc87060002b1dc096
|
[] |
no_license
|
Dishonestink/Dishonestink
|
b2dc6e6757f7c8edb6a24351dd967bc6e7f0b9b2
|
61a13d076c31a0b9d5f1bdd629a90d248728a242
|
refs/heads/main
| 2023-07-19T06:41:10.011884
| 2021-09-22T01:22:50
| 2021-09-22T01:22:50
| 367,212,672
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,115
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Oct 24 18:15:30 2020
@author: Admin
"""
import numpy as np
import matplotlib.pyplot as plt
m1 = 1.0
m2 = 0.5
m3 = 2.0
k1 = 500.0
k2 = 100.0
k3 = 200.0
k4 = 1000.0
b1 = 1.0
b2 = 3.0
def funciones(t,x,i):
#x --> [x1,v1,x2,v2,x3,v3]
x1 = x[0]
v1 = x[1]
x2 = x[2]
v2 = x[3]
x3 = x[4]
v3 = x[5]
if(i==0):
f = v1
elif(i==1):
f = (-k1*x1+k2*(x2-x1)+b1*(v2-v1))/m1
elif(i==2):
f = v2
elif(i==3):
f = (-k2*(x2-x1)-b1*(v2-v1)+k3*(x3-x2)+b2*(v3-v2))/m2
elif(i==4):
f = v3
elif(i==5):
f = (-k3*(x3-x2)-b2*(v3-v2)-k4*x3)/m3
return f
def rk4(fi_in,n,dt,t):
k1 = np.zeros(n)
k2 = np.zeros(n)
k3 = np.zeros(n)
k4 = np.zeros(n)
fi_out = np.zeros(n)
for i in range(n):
k1[i] = funciones(t,fi_in,i)
for i in range(n):
k2[i] = funciones(t+0.5*dt,fi_in+0.5*k1*dt,i)
for i in range(n):
k3[i] = funciones(t+0.5*dt,fi_in+0.5*k2*dt,i)
for i in range(n):
k4[i] = funciones(t+dt,fi_in+k3*dt,i)
for i in range(n):
fi_out[i] = fi_in[i] + (k1[i]+2.0*k2[i]+2.0*k3[i]+k4[i])*dt/6.0
return fi_out
A = np.array([[0,1,0,0,0,0],
[-(k1+k2)/m1,-b1/m1,k2/m1,b1/m1,0,0],
[0,0,0,1,0,0],
[k2/m2,b1/m2,-(k2+k3)/m2,-(b1+b2)/m2,k3/m2,b2/m2],
[0,0,0,0,0,1],
[0,0,k3/m3,b2/m3,-(k3+k4)/m3,-b2/m3]])
eigen_val,eigen_vec = np.linalg.eig(A)
#condiciones iniciales
x1_0 = 0.1
v1_0 = 0.0
x2_0 = 0.0
v2_0 = 0.0
x3_0 = 0.0
v3_0 = 0.0
B = [x1_0,v1_0,x2_0,v2_0,x3_0,v3_0]
minv = np.linalg.inv(eigen_vec)
C = minv@B
dt = 0.05
tf = 5.0 #Tiempo de simulación
it = int(tf/dt)
t = np.zeros(it+1)
x = np.zeros((it+1,6))
x_rk4 = np.zeros((it+1,6))
err = np.zeros((it+1,6))
#x --> [x1,v1,x2,v2,x3,v3]
x[0,:] = [x1_0,v1_0,x2_0,v2_0,x3_0,v3_0]
x_rk4[0,:] = [x1_0,v1_0,x2_0,v2_0,x3_0,v3_0]
for i in range(1,it+1):
t[i] = t[i-1] + dt
for k in range(6):
for j in range(6):
alpha = np.real(eigen_val[j])
beta = np.imag(eigen_val[j])
a = np.real(C[j]*eigen_vec[k,j])
b = np.imag(C[j]*eigen_vec[k,j])
x[i,k] = x[i,k] + np.exp(alpha*t[i])*(a*np.cos(beta*t[i])-b*np.sin(beta*t[i]))
x_rk4[i,:] = rk4(x_rk4[i-1,:],6,dt,t[i-1])
ac = np.zeros(it+1)
for i in range(1,it+1):
ac[i] = x[i,1]/t[i]
print(ac)
print(len(ac))
err = abs(x-x_rk4)
# plt.plot(t,x[:,0],"-b",label="x1_analítica")
# plt.plot(t,x[:,2],"-g",label="x2_analítica")
# plt.plot(t,x[:,4],"-y",label="x3_analítica")
# plt.legend(loc="upper right")
# plt.xlabel('Tiempo(s)')
# plt.ylabel('Posición de las masas (m)')
# plt.grid()
plt.plot(t,x[:,0],"-k",label="x1_analítica")
plt.plot(t,x_rk4[:,0],"--k",label="x1_rk4")
plt.legend(loc="upper right")
plt.xlabel('Tiempo(s)')
plt.ylabel('Posición de las masas (m)')
plt.grid()
# plt.plot(t,err[:,0],"-k",label="Error en x1")
# plt.legend(loc="upper right")
# plt.xlabel('Tiempo(s)')
# plt.ylabel('Error absoluto (m)')
# plt.grid()
|
[
"dishonestink.2001@gmail.com"
] |
dishonestink.2001@gmail.com
|
cc3a3e754f397f503bf522f5399f45aa1f67c86a
|
c31d37f23786328e0124d1d92a1abb58b3a23e68
|
/Writeafunction.py
|
254d7677ff438689df3d5b96ec8a1e2309f50746
|
[] |
no_license
|
Santiago78op/hackerrank
|
6cfdbdb04e49ddcc8eb6fa72327d4ede4bf49523
|
4040d9012cc340bc7c3e4e67b46153e910a5222e
|
refs/heads/main
| 2023-02-27T17:03:26.889350
| 2021-02-06T19:08:55
| 2021-02-06T19:08:55
| 336,616,034
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 133
|
py
|
def is_leap(year):
leap = False
if not year % 4 and (year % 100 or not year % 400):
leap = True
return leap
|
[
"2993696170101@ingenieria.usac.edu.gt"
] |
2993696170101@ingenieria.usac.edu.gt
|
7754a204926e6e907b58c9854ddc2a349a3796cf
|
196babffc00795575104960f98f64f614911b72f
|
/기능개발.py
|
2a8ae32fb344a8aaa12cce78ff48325cf14fd04b
|
[] |
no_license
|
do-park/programmers
|
9c4ff5289eb5c42aaba9ff33612dfd32eded4baa
|
054603f5a551e4e3d3f4b48d6b04a094b70c0661
|
refs/heads/master
| 2022-06-12T03:12:07.154184
| 2022-06-10T05:22:32
| 2022-06-10T05:22:32
| 250,500,372
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 729
|
py
|
# 코딩테스트 연습 > 스택/큐 > 기능개발
from collections import deque
from math import ceil
def solution(progresses, speeds):
completes = deque()
for progress, speed in zip(progresses, speeds):
completes.append(ceil((100 - progress) / speed))
print(completes)
answer = []
deploy, count = completes.popleft(), 1
while completes:
complete = completes.popleft()
if complete <= deploy:
count += 1
else:
answer.append(count)
deploy, count = complete, 1
answer.append(count)
return answer
print(solution([93, 30, 55], [1, 30, 5])) # [2, 1]
print(solution([95, 90, 99, 99, 80, 99], [1, 1, 1, 1, 1, 1])) # [1, 3, 2]
|
[
"dohee.pa@gmail.com"
] |
dohee.pa@gmail.com
|
1092e202125466531c8ebdc1e3aa56b0901d94e6
|
f5c6015c3c57b4b8bed95b4f55fc8233c5a5697a
|
/Django - Kelas Terbuka/07-Menggunakan_URL_pada_App/Django_Project/blog/urls.py
|
46c57dfbdada47135aa2505dd3ed9445716d4857
|
[] |
no_license
|
rasyidev/learn_django3
|
0f8c072dbc7ebd2b92b2fb56de47a564aff163d3
|
b12041b560eaf4012bb358667ffd26cf6784469a
|
refs/heads/main
| 2023-03-15T17:07:20.081457
| 2021-03-02T06:03:29
| 2021-03-02T06:03:29
| 335,824,399
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 123
|
py
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.index),
path('recent/', views.recent)
]
|
[
"habib.rasyid11@gmail.com"
] |
habib.rasyid11@gmail.com
|
b0c8ebe0e54a04e4890f8d2c85dcc7fe6fc82bc5
|
fdf51c5834c0f9d854018be9cfc280030ceb52b0
|
/weather/weather/items.py
|
a4c45cba68b9c4d037ee454ab4da7914bb0568bd
|
[] |
no_license
|
gangyu0716/spider_project
|
70987f1353ae55633fbc7d2c68c79d315f1db9d2
|
ec475d9def7dcd31c13496c14ca479f2ecf97bf9
|
refs/heads/master
| 2021-04-15T06:06:58.742717
| 2019-07-29T08:05:41
| 2019-07-29T08:05:48
| 126,314,799
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 444
|
py
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class WeatherItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
date = scrapy.Field()
week = scrapy.Field()
img = scrapy.Field()
temperature = scrapy.Field()
weather = scrapy.Field()
wind = scrapy.Field()
|
[
"yu_gang@apowertec.com"
] |
yu_gang@apowertec.com
|
f25e867061cc00a6a48dd776f96286d0422ddb92
|
79bef927639937a2ae611293497e981a04d54f9f
|
/huicong/middlewares/useragent_middlewares.py
|
307fbab0213d1cbc36a4dea2244ae7999edf6e3f
|
[] |
no_license
|
msean/crawl_huicong_web
|
aeb2f63bec11322beaad4b54b46731e1ac754497
|
4c9e1444cb46c013b61230a19c7619b23358188d
|
refs/heads/master
| 2020-04-17T16:54:05.782398
| 2019-01-22T13:17:40
| 2019-01-22T13:17:40
| 166,760,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,794
|
py
|
import logging
import random
from scrapy.downloadermiddlewares.useragent import UserAgentMiddleware
class RotateUserAgentMiddleware(UserAgentMiddleware):
def __init__(self, user_agent=''):
self.user_agent = user_agent
super().__init__(user_agent)
def process_request(self, request, spider):
ua = random.choice(self.user_agent_list)
if ua:
logging.info('Current UserAgent: '+ua)
request.headers.setdefault('User-Agent', ua)
user_agent_list = [
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 "
"(KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
"Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 "
"(KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 "
"(KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 "
"(KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 "
"(KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 "
"(KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 "
"(KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 "
"(KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 "
"(KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 "
"(KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 "
"(KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 "
"(KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 "
"(KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 "
"(KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 "
"(KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 "
"(KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 "
"(KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 "
"(KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24"
]
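# --- Hypothetical settings.py entry (not part of this file) ---
# Activating the middleware usually means disabling Scrapy's built-in
# UserAgentMiddleware and registering this class; the priority value 400
# is an assumption.
#
# DOWNLOADER_MIDDLEWARES = {
#     'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': None,
#     'huicong.middlewares.useragent_middlewares.RotateUserAgentMiddleware': 400,
# }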
|
[
"m_wuhua@126.com"
] |
m_wuhua@126.com
|
581791cfa265823f721e7727a7ce6c5d779656a8
|
853d4cec42071b76a80be38c58ffe0fbf9b9dc34
|
/venv/Lib/site-packages/networkx/algorithms/community/community_utils.py
|
4c372fa31ead346c902fa49f657b2843459e478d
|
[] |
no_license
|
msainTesting/TwitterAnalysis
|
5e1646dbf40badf887a86e125ef30a9edaa622a4
|
b1204346508ba3e3922a52380ead5a8f7079726b
|
refs/heads/main
| 2023-08-28T08:29:28.924620
| 2021-11-04T12:36:30
| 2021-11-04T12:36:30
| 424,242,582
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 894
|
py
|
"""Helper functions for community-finding algorithms."""
__all__ = ["is_partition"]
def is_partition(G, communities):
"""Returns *True* if `communities` is a partition of the nodes of `G`.
A partition of a universe set is a family of pairwise disjoint sets
whose union is the entire universe set.
Parameters
----------
G : NetworkX graph.
communities : list or iterable of sets of nodes
If not a list, the iterable is converted internally to a list.
If it is an iterator it is exhausted.
"""
# Alternate implementation:
# return all(sum(1 if v in c else 0 for c in communities) == 1 for v in G)
if not isinstance(communities, list):
communities = list(communities)
nodes = {n for c in communities for n in c if n in G}
return len(G) == len(nodes) == sum(len(c) for c in communities)
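

# --- Usage sketch (not part of the original module; the graph is illustrative) ---
if __name__ == "__main__":
    import networkx as nx

    G = nx.path_graph(4)  # nodes 0, 1, 2, 3
    print(is_partition(G, [{0, 1}, {2, 3}]))     # True: disjoint sets covering every node
    print(is_partition(G, [{0, 1}, {1, 2, 3}]))  # False: node 1 appears in two sets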
|
[
"msaineti@icloud.com"
] |
msaineti@icloud.com
|
de2051f36005099f0b51e74c2c08c7a8aa6f698a
|
41960a07a8118cf99236972c1e94a36ef00c55c9
|
/progress_analyzer/migrations/0007_auto_20180213_1614.py
|
11fd9a49ab46bc723d77d247abb129f88cad53f2
|
[
"MIT"
] |
permissive
|
wahello/jvb
|
7288f3fc8099dbbdfa5cdac86b251d4d739f4dca
|
c87fdf49ae040668323d1a034aa407cfe23c4a1d
|
refs/heads/bug/aa_ranges/chart2
| 2022-11-29T12:58:37.561674
| 2019-06-17T10:50:17
| 2019-06-17T10:50:17
| 211,479,327
| 0
| 1
|
MIT
| 2022-11-19T02:54:15
| 2019-09-28T09:52:28
| null |
UTF-8
|
Python
| false
| false
| 423
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2018-02-13 16:14
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('progress_analyzer', '0006_auto_20180213_1109'),
]
operations = [
migrations.RenameModel(
old_name='OtherStats',
new_name='OtherStatsCumulative',
),
]
|
[
"atulk@s7inc.co"
] |
atulk@s7inc.co
|
36ff95f7833f6cd320ebdc4632a2b862b6c5d5a2
|
83e5ceee5aeed92ce1c1c14e21197744ca96a96e
|
/Name.py
|
d9c0293a7a9c9d29cad368b06d13e0795857f463
|
[] |
no_license
|
SanjanaManjegowda/PythonAssignments
|
3899aa76263e34c86e7b2ddcd8e481469b093310
|
d33fc0d117294d0d96d2bac097099ff1f9b2a70c
|
refs/heads/master
| 2020-08-11T16:02:43.995737
| 2019-12-04T16:57:46
| 2019-12-04T16:57:46
| 214,592,272
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 59
|
py
|
name = input("Enter Your Name")
print("Hello", name, "!!!")
|
[
"noreply@github.com"
] |
SanjanaManjegowda.noreply@github.com
|
eab86eb956920bb3b12c74729b6008b7268c2fc5
|
c001bd1576c064eef69eef172d99594377bcc7eb
|
/lambda_pumpkin.py
|
74f99c74a44487cb79e5a1b1d1cc64c44bd31f31
|
[] |
no_license
|
nfultz/pumpkin
|
88d4d06b866b4366ab5e33b3d1c8276f8220fca6
|
65c4a16a6830da4701803c101173e274d47e38e0
|
refs/heads/master
| 2021-06-29T20:53:07.531771
| 2020-07-14T22:03:28
| 2020-07-14T22:03:28
| 84,591,865
| 7
| 2
| null | 2017-10-06T22:09:55
| 2017-03-10T19:02:48
|
Shell
|
UTF-8
|
Python
| false
| false
| 442
|
py
|
import boto3
def lambda_handler(event, context):
    # Instance to resize and the EC2 instance type to switch it to
    instanceID = event['instanceID']
    instanceType = event['instanceType']
    ec2 = boto3.resource('ec2')
    instance = ec2.Instance(instanceID)
    # The instance must be stopped before its type can be modified
    instance.stop()
    instance.wait_until_stopped()
    instance.modify_attribute(InstanceType={"Value": instanceType})
    instance.start()
    instance.wait_until_running()
    return instance.public_dns_name
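

# --- Hypothetical local invocation (not part of the original handler) ---
# The instance ID and type are placeholders; running this needs valid AWS
# credentials and an existing EC2 instance.
if __name__ == "__main__":
    sample_event = {"instanceID": "i-0123456789abcdef0", "instanceType": "t3.large"}
    print(lambda_handler(sample_event, None))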
|
[
"nfultz@gmail.com"
] |
nfultz@gmail.com
|
4239b540f62907401da42ff6044c087742f0f322
|
f52ddcafb4a33c2cbf9ee49505cae3c84c0c1f3c
|
/setup.py
|
ae2fe0d2a55d50cd98212e2719d5928a32e8a9a6
|
[] |
no_license
|
kalyanchatterjee/image_flip
|
dd693fce04bfb2c11beb4f85a5dd48b22f37050d
|
aeb1d34f8bfd4225b8d37ce26e1c1e6ad6e2829d
|
refs/heads/master
| 2020-06-12T02:03:45.792115
| 2019-06-27T20:56:42
| 2019-06-27T20:56:42
| 194,160,437
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 302
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 27 14:07:53 2019
@author: kalyan.chatterjee
"""
import sys
try:
from PIL import Image
except ImportError:
sys.exit("""You need the Pillow module!
Install it by running "pip install Pillow"
""")
|
[
"Kalyan.Chatterjee@cantire.com"
] |
Kalyan.Chatterjee@cantire.com
|
acd444692cb0f73b17cb72f114939466ca2ca538
|
deab03581c41836901ebaa7a5c72b2e21ec6be86
|
/algos/sorts.py
|
eb6781723abb637622655e8fe075edfddc6fff38
|
[] |
no_license
|
alvinburgos/contest-stuff
|
bf352061419d0695d7f322e4aa9574e48900d655
|
01cf26d66a07fab61540614e43284c549605e02e
|
refs/heads/master
| 2016-08-04T07:55:00.583083
| 2014-12-08T18:40:38
| 2014-12-08T18:40:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 761
|
py
|
import random
def insertsort(a, i, j):
    # Insertion sort on the slice a[i:j]
    for x in range(i + 1, j):
        tmp = a[x]
        while x > i and tmp < a[x - 1]:
            a[x] = a[x - 1]
            x -= 1
        a[x] = tmp


def merge(a, i, m, j):
    # Merge the sorted runs a[i:m] and a[m:j] back into a[i:j]
    l = []
    k1 = i
    k2 = m
    while k1 < m and k2 < j:
        if a[k1] < a[k2]:
            l.append(a[k1])
            k1 += 1
        else:
            l.append(a[k2])
            k2 += 1
    while k1 < m:
        l.append(a[k1])
        k1 += 1
    while k2 < j:
        l.append(a[k2])
        k2 += 1
    for x in range(i, j):
        a[x] = l[x - i]


def mergesort(a, i, j):
    # Small slices go to insertion sort; otherwise split, recurse, and merge
    if j - i <= 10:
        insertsort(a, i, j)
    else:
        m = (i + j) // 2  # integer midpoint
        mergesort(a, i, m)
        mergesort(a, m, j)
        merge(a, i, m, j)
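

# --- Usage sketch (illustrative) ---
if __name__ == "__main__":
    data = [random.randint(0, 99) for _ in range(50)]
    expected = sorted(data)
    mergesort(data, 0, len(data))
    assert data == expected
    print(data)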
|
[
"ajmburgospp@gmail.com"
] |
ajmburgospp@gmail.com
|