blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
288
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 684
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 25
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 128
12.7k
| extension
stringclasses 142
values | content
stringlengths 128
8.19k
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
55066d351456806918e16de4f90fa5a40c4fc112
|
3ab97361bdc0c1b392d46c299a0293e16be577fa
|
/home/migrations/0002_load_initial_data.py
|
206a0bf2e653e72367f7fdaa30d0dc0414a2394b
|
[] |
no_license
|
crowdbotics-apps/mason-21411
|
2f191039aedb0f66c2acc9fb5c6e022a00a1ba5a
|
1456b83bde0c15151ac78218ae2fd3391ddb8c31
|
refs/heads/master
| 2022-12-30T03:26:44.899130
| 2020-10-12T19:05:12
| 2020-10-12T19:05:12
| 303,487,539
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,274
|
py
|
from django.db import migrations
def create_customtext(apps, schema_editor):
CustomText = apps.get_model("home", "CustomText")
customtext_title = "mason"
CustomText.objects.create(title=customtext_title)
def create_homepage(apps, schema_editor):
HomePage = apps.get_model("home", "HomePage")
homepage_body = """
<h1 class="display-4 text-center">mason</h1>
<p class="lead">
This is the sample application created and deployed from the Crowdbotics app.
You can view list of packages selected for this application below.
</p>"""
HomePage.objects.create(body=homepage_body)
def create_site(apps, schema_editor):
Site = apps.get_model("sites", "Site")
custom_domain = "mason-21411.botics.co"
site_params = {
"name": "mason",
}
if custom_domain:
site_params["domain"] = custom_domain
Site.objects.update_or_create(defaults=site_params, id=1)
class Migration(migrations.Migration):
dependencies = [
("home", "0001_initial"),
("sites", "0002_alter_domain_unique"),
]
operations = [
migrations.RunPython(create_customtext),
migrations.RunPython(create_homepage),
migrations.RunPython(create_site),
]
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
efbac9d802b7d3a3214f5eec619b410d173accbd
|
9451bdaa89cef69a8216c9ca9c7ac8e5b7d7c22e
|
/7multi_process/multi_process.py
|
fd1f8d9b8081f0ec4d63fc57aa3c2eb7a24339e0
|
[] |
no_license
|
PaulYoung1024/lou-plus-python
|
87c97334a5b2aa4c8dc3bc0071a7ab41fe553682
|
7cc8e85eb25bb72e79a7dd9d25046bc1b4f9f3c6
|
refs/heads/master
| 2020-04-30T08:36:36.466716
| 2019-03-20T11:57:47
| 2019-03-20T11:57:47
| 176,721,830
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 565
|
py
|
import time
from multiprocessing import Process
def io_task():
time.sleep(1)
def main():
start_time=time.time()
'''
for i in range(5):
io_task()
'''
#child task list
process_list=[]
for i in range(5):
process_list.append(Process(target=io_task))
#start all child process
for process in process_list:
process.start()
for process in process_list:
process.join()
end_time=time.time()
print("program run time:{:.2f}".format(end_time-start_time))
if __name__=='__main__':
main()
|
[
"ubuntu@localhost.localdomain"
] |
ubuntu@localhost.localdomain
|
7d4489bec75756fcdc079f5f14ff4f709be1837b
|
5fa91971a552de35422698ad3e371392fd5eb48a
|
/docs/mcpi/algorytmy/mcpi-lpi02.py
|
5881d83e5b722c10f474aaac3deba06ea22d8b2e
|
[
"MIT",
"CC-BY-SA-4.0"
] |
permissive
|
koduj-z-klasa/python101
|
64b0bf24da6c7fc29c0d3c5a74ce7975d648b760
|
accfca2a8a0f2b9eba884bffe31be6d1e73fb615
|
refs/heads/master
| 2022-06-06T09:29:01.688553
| 2022-05-22T19:50:09
| 2022-05-22T19:50:09
| 23,770,911
| 45
| 182
|
MIT
| 2022-03-31T10:40:13
| 2014-09-07T21:01:09
|
Python
|
UTF-8
|
Python
| false
| false
| 2,565
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import random
from time import sleep
import mcpi.minecraft as minecraft # import modułu minecraft
import mcpi.block as block # import modułu block
import local.minecraftstuff as mcstuff
os.environ["USERNAME"] = "Steve" # nazwa użytkownika
os.environ["COMPUTERNAME"] = "mykomp" # nazwa komputera
mc = minecraft.Minecraft.create("192.168.1.10") # połączenie z serwerem
def plac(x, y, z, roz=10, gracz=False):
"""Funkcja wypełnia sześcienny obszar od podanej pozycji
powietrzem i opcjonalnie umieszcza gracza w środku.
Parametry: x, y, z - współrzędne pozycji początkowej,
roz - rozmiar wypełnianej przestrzeni,
gracz - czy umieścić gracza w środku
Wymaga: globalnych obiektów mc i block.
"""
podloga = block.STONE
wypelniacz = block.AIR
# kamienna podłoże
mc.setBlocks(x, y - 1, z, x + roz, y - 1, z + roz, podloga)
# czyszczenie
mc.setBlocks(x, y, z, x + roz, y + roz, z + roz, wypelniacz)
# umieść gracza w środku
if gracz:
mc.player.setPos(x + roz / 2, y + roz / 2, z + roz / 2)
def model(promien, x, y, z):
"""
Fukcja buduje obrys kwadratu, którego środek to punkt x, y, z
oraz koło wpisane w ten kwadrat
"""
mcfig = mcstuff.MinecraftDrawing(mc)
obrys = block.SANDSTONE
wypelniacz = block.AIR
mc.setBlocks(x - promien, y, z - promien, x +
promien, y, z + promien, obrys)
mc.setBlocks(x - promien + 1, y, z - promien + 1, x +
promien - 1, y, z + promien - 1, wypelniacz)
mcfig.drawHorizontalCircle(0, 0, 0, promien, block.GRASS)
def liczbaPi():
r = float(raw_input("Podaj promień koła: "))
model(r, 0, 0, 0)
# pobieramy ilość punktów w kwadracie
ileKw = int(raw_input("Podaj ilość losowanych punktów: "))
ileKo = 0 # ilość punktów w kole
blok = block.SAND
for i in range(ileKw):
x = round(random.uniform(-r, r))
y = round(random.uniform(-r, r))
print x, y
if abs(x)**2 + abs(y)**2 <= r**2:
ileKo += 1
# umieść blok w MC Pi
mc.setBlock(x, 10, y, blok)
mc.postToChat("W kole = " + str(ileKo) + " W Kwadracie = " + str(ileKw))
pi = 4 * ileKo / float(ileKw)
mc.postToChat("Pi w przyblizeniu: {:.10f}".format(pi))
def main():
mc.postToChat("LiczbaPi") # wysłanie komunikatu do mc
plac(-50, 0, -50, 100)
mc.player.setPos(20, 20, 0)
liczbaPi()
return 0
if __name__ == '__main__':
main()
|
[
"xinulsw@gmail.com"
] |
xinulsw@gmail.com
|
41348a2dd0b811a9e5820c028fb4d84d67ede459
|
7ee1fd7584f8770cd2381d85f797bf85cb9b4b67
|
/usuarios/applications/users/functions.py
|
7518ab24ad5b24eb53709d36fd01737178f8f777
|
[] |
no_license
|
neunapp/usuariosdj
|
3171160fdf6898d07d6b353d034c70801e4bc21b
|
3fe69b7357757baa5d799b614f232d75ed659502
|
refs/heads/master
| 2022-12-01T16:51:00.432272
| 2020-09-17T14:28:21
| 2020-09-17T14:28:21
| 237,993,639
| 4
| 2
| null | 2022-11-22T05:17:26
| 2020-02-03T15:10:33
|
Python
|
UTF-8
|
Python
| false
| false
| 206
|
py
|
# funciones extra de la aplicacion users
import random
import string
def code_generator(size=6, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for _ in range(size))
|
[
"csantacruz1127@gmail.com"
] |
csantacruz1127@gmail.com
|
03a11a480258f5444025f7b60cf63fcf26362edb
|
c33844f13a625f9b3c908ea8816696a0b38abbac
|
/expenditure/migrations/0001_initial.py
|
dc5f3853c79d4d290e1e897dfb0381cd9729c7c3
|
[] |
no_license
|
jkimuli/clinica
|
d6ddb72b4ba56e142f12b77593d2c4e68b3e3237
|
03cf956781ff5a50d692ca07799a73c36111a0aa
|
refs/heads/master
| 2022-12-21T20:13:39.454392
| 2020-11-09T08:13:25
| 2020-11-09T08:13:25
| 21,300,924
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,097
|
py
|
# Generated by Django 2.2.5 on 2020-06-17 16:16
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Expense',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('expense_date', models.DateField(auto_now_add=True, verbose_name='Date Expense Incurred')),
('particulars', models.TextField(verbose_name='Particulars')),
('amount', models.DecimalField(decimal_places=2, max_digits=12, verbose_name='Amount')),
('incurred_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Employee Name')),
],
options={
'verbose_name_plural': 'Expenses',
},
),
]
|
[
"jkimuli@gmail.com"
] |
jkimuli@gmail.com
|
e4e82580ad98bbcb298532c026df83965c2ea339
|
05148c0ea223cfc7ed9d16234ab3e6bb40885e9d
|
/Packages/matplotlib-2.2.2/lib/mpl_examples/images_contours_and_fields/triplot_demo.py
|
d3a65762d021e09952a9512f7aa6c3df6982239b
|
[
"MIT"
] |
permissive
|
NightKirie/NCKU_NLP_2018_industry3
|
9ee226e194287fd9088429f87c58c874e050a8b3
|
23ac13644b140587e23cfeffb114c7c6f46f17a2
|
refs/heads/master
| 2021-06-05T05:33:09.510647
| 2018-07-05T10:19:47
| 2018-07-05T10:19:47
| 133,680,341
| 1
| 4
|
MIT
| 2020-05-20T16:29:54
| 2018-05-16T14:43:38
|
Python
|
UTF-8
|
Python
| false
| false
| 4,772
|
py
|
"""
============
Triplot Demo
============
Creating and plotting unstructured triangular grids.
"""
import matplotlib.pyplot as plt
import matplotlib.tri as tri
import numpy as np
###############################################################################
# Creating a Triangulation without specifying the triangles results in the
# Delaunay triangulation of the points.
# First create the x and y coordinates of the points.
n_angles = 36
n_radii = 8
min_radius = 0.25
radii = np.linspace(min_radius, 0.95, n_radii)
angles = np.linspace(0, 2 * np.pi, n_angles, endpoint=False)
angles = np.repeat(angles[..., np.newaxis], n_radii, axis=1)
angles[:, 1::2] += np.pi / n_angles
x = (radii * np.cos(angles)).flatten()
y = (radii * np.sin(angles)).flatten()
# Create the Triangulation; no triangles so Delaunay triangulation created.
triang = tri.Triangulation(x, y)
# Mask off unwanted triangles.
triang.set_mask(np.hypot(x[triang.triangles].mean(axis=1),
y[triang.triangles].mean(axis=1))
< min_radius)
###############################################################################
# Plot the triangulation.
plt.figure()
plt.gca().set_aspect('equal')
plt.triplot(triang, 'bo-', lw=1)
plt.title('triplot of Delaunay triangulation')
###############################################################################
# You can specify your own triangulation rather than perform a Delaunay
# triangulation of the points, where each triangle is given by the indices of
# the three points that make up the triangle, ordered in either a clockwise or
# anticlockwise manner.
xy = np.asarray([
[-0.101, 0.872], [-0.080, 0.883], [-0.069, 0.888], [-0.054, 0.890],
[-0.045, 0.897], [-0.057, 0.895], [-0.073, 0.900], [-0.087, 0.898],
[-0.090, 0.904], [-0.069, 0.907], [-0.069, 0.921], [-0.080, 0.919],
[-0.073, 0.928], [-0.052, 0.930], [-0.048, 0.942], [-0.062, 0.949],
[-0.054, 0.958], [-0.069, 0.954], [-0.087, 0.952], [-0.087, 0.959],
[-0.080, 0.966], [-0.085, 0.973], [-0.087, 0.965], [-0.097, 0.965],
[-0.097, 0.975], [-0.092, 0.984], [-0.101, 0.980], [-0.108, 0.980],
[-0.104, 0.987], [-0.102, 0.993], [-0.115, 1.001], [-0.099, 0.996],
[-0.101, 1.007], [-0.090, 1.010], [-0.087, 1.021], [-0.069, 1.021],
[-0.052, 1.022], [-0.052, 1.017], [-0.069, 1.010], [-0.064, 1.005],
[-0.048, 1.005], [-0.031, 1.005], [-0.031, 0.996], [-0.040, 0.987],
[-0.045, 0.980], [-0.052, 0.975], [-0.040, 0.973], [-0.026, 0.968],
[-0.020, 0.954], [-0.006, 0.947], [ 0.003, 0.935], [ 0.006, 0.926],
[ 0.005, 0.921], [ 0.022, 0.923], [ 0.033, 0.912], [ 0.029, 0.905],
[ 0.017, 0.900], [ 0.012, 0.895], [ 0.027, 0.893], [ 0.019, 0.886],
[ 0.001, 0.883], [-0.012, 0.884], [-0.029, 0.883], [-0.038, 0.879],
[-0.057, 0.881], [-0.062, 0.876], [-0.078, 0.876], [-0.087, 0.872],
[-0.030, 0.907], [-0.007, 0.905], [-0.057, 0.916], [-0.025, 0.933],
[-0.077, 0.990], [-0.059, 0.993]])
x = np.degrees(xy[:, 0])
y = np.degrees(xy[:, 1])
triangles = np.asarray([
[67, 66, 1], [65, 2, 66], [ 1, 66, 2], [64, 2, 65], [63, 3, 64],
[60, 59, 57], [ 2, 64, 3], [ 3, 63, 4], [ 0, 67, 1], [62, 4, 63],
[57, 59, 56], [59, 58, 56], [61, 60, 69], [57, 69, 60], [ 4, 62, 68],
[ 6, 5, 9], [61, 68, 62], [69, 68, 61], [ 9, 5, 70], [ 6, 8, 7],
[ 4, 70, 5], [ 8, 6, 9], [56, 69, 57], [69, 56, 52], [70, 10, 9],
[54, 53, 55], [56, 55, 53], [68, 70, 4], [52, 56, 53], [11, 10, 12],
[69, 71, 68], [68, 13, 70], [10, 70, 13], [51, 50, 52], [13, 68, 71],
[52, 71, 69], [12, 10, 13], [71, 52, 50], [71, 14, 13], [50, 49, 71],
[49, 48, 71], [14, 16, 15], [14, 71, 48], [17, 19, 18], [17, 20, 19],
[48, 16, 14], [48, 47, 16], [47, 46, 16], [16, 46, 45], [23, 22, 24],
[21, 24, 22], [17, 16, 45], [20, 17, 45], [21, 25, 24], [27, 26, 28],
[20, 72, 21], [25, 21, 72], [45, 72, 20], [25, 28, 26], [44, 73, 45],
[72, 45, 73], [28, 25, 29], [29, 25, 31], [43, 73, 44], [73, 43, 40],
[72, 73, 39], [72, 31, 25], [42, 40, 43], [31, 30, 29], [39, 73, 40],
[42, 41, 40], [72, 33, 31], [32, 31, 33], [39, 38, 72], [33, 72, 38],
[33, 38, 34], [37, 35, 38], [34, 38, 35], [35, 37, 36]])
###############################################################################
# Rather than create a Triangulation object, can simply pass x, y and triangles
# arrays to triplot directly. It would be better to use a Triangulation object
# if the same triangulation was to be used more than once to save duplicated
# calculations.
plt.figure()
plt.gca().set_aspect('equal')
plt.triplot(x, y, triangles, 'go-', lw=1.0)
plt.title('triplot of user-specified triangulation')
plt.xlabel('Longitude (degrees)')
plt.ylabel('Latitude (degrees)')
plt.show()
|
[
"qwer55113322@gmail.com"
] |
qwer55113322@gmail.com
|
56cbe3a325b7e39dbfcee37c3ce40d0b704f5713
|
e23a4f57ce5474d468258e5e63b9e23fb6011188
|
/120_design_patterns/006_adapter/examples/adapter_005.py
|
f7aff08228bdda9c77bd6d64d410d32bb64c4da1
|
[] |
no_license
|
syurskyi/Python_Topics
|
52851ecce000cb751a3b986408efe32f0b4c0835
|
be331826b490b73f0a176e6abed86ef68ff2dd2b
|
refs/heads/master
| 2023-06-08T19:29:16.214395
| 2023-05-29T17:09:11
| 2023-05-29T17:09:11
| 220,583,118
| 3
| 2
| null | 2023-02-16T03:08:10
| 2019-11-09T02:58:47
|
Python
|
UTF-8
|
Python
| false
| false
| 4,088
|
py
|
"""
The Adapter pattern is a structural design pattern. It allows a Client to access
functionalities of a Supplier.
Without an Adapter the Client can not access such functionalities.
This pattern can be implemented with an OBJECT approach or a CLASS approach.
"""
# Client
class Smartphone(object):
max_input_voltage = 5
@classmethod
def outcome(cls, input_voltage):
if input_voltage > cls.max_input_voltage:
print("Input voltage: {}V -- BURNING!!!".format(input_voltage))
else:
print("Input voltage: {}V -- Charging...".format(input_voltage))
def charge(self, input_voltage):
"""Charge the phone with the given input voltage."""
self.outcome(input_voltage)
# Supplier
class Socket(object):
output_voltage = None
class EUSocket(Socket):
output_voltage = 230
class USSocket(Socket):
output_voltage = 120
################################################################################
# Approach A: OBJECT Adapter. The adapter encapsulates client and supplier.
################################################################################
class EUAdapter(object):
"""EUAdapter encapsulates client (Smartphone) and supplier (EUSocket)."""
input_voltage = EUSocket.output_voltage
output_voltage = Smartphone.max_input_voltage
class USAdapter(object):
"""USAdapter encapsulates client (Smartphone) and supplier (USSocket)."""
input_voltage = USSocket.output_voltage
output_voltage = Smartphone.max_input_voltage
################################################################################
# Approach B: CLASS Adapter. Adapt the Client through multiple inheritance.
################################################################################
class CannotTransformVoltage(Exception):
"""Exception raised by the SmartphoneAdapter.
This exception represents the fact that an adapter could not provide the
right voltage to the Smartphone if the voltage of the Socket is wrong."""
pass
class SmartphoneAdapter(Smartphone, Socket):
@classmethod
def transform_voltage(cls, input_voltage):
if input_voltage == cls.output_voltage:
return cls.max_input_voltage
else:
raise CannotTransformVoltage(
"Can\'t transform {0}-{1}V. This adapter transforms {2}-{1}V.".format(
input_voltage, cls.max_input_voltage, cls.output_voltage
)
)
@classmethod
def charge(cls, input_voltage):
try:
voltage = cls.transform_voltage(input_voltage)
cls.outcome(voltage)
except CannotTransformVoltage as e:
print(e)
class SmartphoneEUAdapter(SmartphoneAdapter, EUSocket):
"""System (smartphone + adapter) for a European Socket.
Note: SmartphoneAdapter already inherited from Smartphone and Socket, but by
re-inheriting from EUSocket we redefine all the stuff inherited from Socket.
"""
pass
class SmartphoneUSAdapter(SmartphoneAdapter, USSocket):
"""System (smartphone + adapter) for an American Socket."""
pass
def main():
print("Smartphone without adapter")
smartphone = Smartphone()
smartphone.charge(EUSocket.output_voltage)
smartphone.charge(USSocket.output_voltage)
print("\nSmartphone with EU adapter (object adapter approach)")
smartphone.charge(EUAdapter.output_voltage)
print("\nSmartphone with US adapter (object adapter approach)")
smartphone.charge(USAdapter.output_voltage)
print("\nSmartphone with EU adapter (class adapter approach)")
smarthone_with_eu_adapter = SmartphoneEUAdapter()
smarthone_with_eu_adapter.charge(EUSocket.output_voltage)
smarthone_with_eu_adapter.charge(USSocket.output_voltage)
print("\nSmartphone with US adapter (class adapter approach)")
smarthone_with_us_adapter = SmartphoneUSAdapter()
smarthone_with_us_adapter.charge(EUSocket.output_voltage)
smarthone_with_us_adapter.charge(USSocket.output_voltage)
if __name__ == "__main__":
main()
|
[
"sergejyurskyj@yahoo.com"
] |
sergejyurskyj@yahoo.com
|
76705674841dca7e1c0fdbec51fb4f0b7f989140
|
49f61714a6f78d984fd2194d6064d84e891bc5b7
|
/2019-1/220/users/2882/codes/1736_2496.py
|
34aaeee55371a5a36435ec18f6e128cfa904b6b2
|
[] |
no_license
|
psbarros/Variaveis3
|
b5c4e1517e7d94a846ee03791d25d5821a1c651c
|
3dcf6f810709ce03c78335acf9533e008a2ae125
|
refs/heads/master
| 2023-06-13T07:05:00.878430
| 2021-07-06T17:51:37
| 2021-07-06T17:51:37
| 383,549,597
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 131
|
py
|
num=int(input("Numero: "))
i=0
soma=0
while (num != -1):
soma=soma+num
num=int(input("Numero: "))
i=i+1
print(soma)
|
[
"psb@icomp.ufam.edu.br"
] |
psb@icomp.ufam.edu.br
|
29ab49aa05b4049510072ab0452bce50284ecf40
|
ea1373d9a58ad198c15d35a6daddb4e06d21aa39
|
/application-code/show3d_balls.py
|
4d086af1e8e3e1d75769cd87bd256539b3669351
|
[] |
no_license
|
screnary/VoxSegNet
|
bb2778dfc460dfafdbd923f79755f7f0776dc36f
|
264f2efc0a589018a1fc68c111626beacbe095a5
|
refs/heads/master
| 2020-09-04T03:41:47.067129
| 2019-11-07T06:34:29
| 2019-11-07T06:34:29
| 219,649,857
| 5
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,889
|
py
|
""" Original Author: Haoqiang Fan """
import numpy as np
import ctypes as ct
import cv2
import sys
import os
import pdb
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
showsz=800
mousex,mousey=0.5,0.5
ix,iy=-1,-1
zoom=1.0
changed=True
dragging=False
def onmouse(event,x,y,flags,param): # *args
# args=[event,x,y,flags,param]
global mousex,mousey,changed,dragging,ix,iy
# pdb.set_trace()
if event == cv2.EVENT_LBUTTONDOWN:
dragging = True
# init ix,iy when push down left button
iy = y
ix = x
# pdb.set_trace()
#当左键按下并移动时旋转图形,event可以查看移动,flag查看是否按下
elif event==cv2.EVENT_MOUSEMOVE and flags==cv2.EVENT_FLAG_LBUTTON:
if dragging == True:
dx=y-iy
dy=x-ix
mousex+=dx/float(showsz) # 控制旋转角
mousey+=dy/float(showsz)
changed=True
#当鼠标松开时停止绘图
elif event == cv2.EVENT_LBUTTONUP:
dragging == False
cv2.namedWindow('show3d')
cv2.moveWindow('show3d',0,0)
cv2.setMouseCallback('show3d',onmouse)
# dll=np.ctypeslib.load_library(os.path.join(BASE_DIR, 'render_balls_so'),'.')
# pdb.set_trace()
dll=np.ctypeslib.load_library(os.path.join(BASE_DIR, 'render'),'.')
def showpoints(xyz,c_gt=None, c_pred=None ,waittime=0,showrot=False,
magnifyBlue=0,freezerot=False,background=(0,0,0),
normalizecolor=True,ballradius=8,savedir='show3d.png'):
global showsz,mousex,mousey,zoom,changed
xyz=xyz-xyz.mean(axis=0)
radius=((xyz**2).sum(axis=-1)**0.5).max()
xyz/=(radius*2.2)/showsz
if c_gt is None:
c0=np.zeros((len(xyz),),dtype='float32')+255 # green
c1=np.zeros((len(xyz),),dtype='float32')+255 # red
c2=np.zeros((len(xyz),),dtype='float32')+255 # blue
else:
c0=c_gt[:,0]
c1=c_gt[:,1]
c2=c_gt[:,2]
if normalizecolor:
c0/=(c0.max()+1e-14)/255.0
c1/=(c1.max()+1e-14)/255.0
c2/=(c2.max()+1e-14)/255.0
c0=np.require(c0,'float32','C')
c1=np.require(c1,'float32','C')
c2=np.require(c2,'float32','C')
show=np.zeros((showsz,showsz,3),dtype='uint8')
def render():
rotmat=np.eye(3)
if not freezerot:
xangle=(mousey-0.5)*np.pi*0.25
# xangle=(mousey-0.5)*np.pi*1.2
else:
xangle=0
rotmat=rotmat.dot(np.array([
[1.0,0.0,0.0],
[0.0,np.cos(xangle),-np.sin(xangle)],
[0.0,np.sin(xangle),np.cos(xangle)],
]))
if not freezerot:
yangle=(mousex-0.5)*np.pi*0.25
# yangle=(mousex-0.5)*np.pi*1.2
else:
yangle=0
rotmat=rotmat.dot(np.array([
[np.cos(yangle),0.0,-np.sin(yangle)],
[0.0,1.0,0.0],
[np.sin(yangle),0.0,np.cos(yangle)],
]))
rotmat*=zoom
nxyz=xyz.dot(rotmat)+[showsz/2,showsz/2,0]
ixyz=nxyz.astype('int32')
show[:]=background
dll.render_ball(
ct.c_int(show.shape[0]),
ct.c_int(show.shape[1]),
show.ctypes.data_as(ct.c_void_p),
ct.c_int(ixyz.shape[0]),
ixyz.ctypes.data_as(ct.c_void_p),
c0.ctypes.data_as(ct.c_void_p),
c1.ctypes.data_as(ct.c_void_p),
c2.ctypes.data_as(ct.c_void_p),
ct.c_int(ballradius)
)
if magnifyBlue>0:
show[:,:,0]=np.maximum(show[:,:,0],np.roll(show[:,:,0],1,axis=0))
if magnifyBlue>=2:
show[:,:,0]=np.maximum(show[:,:,0],np.roll(show[:,:,0],-1,axis=0))
show[:,:,0]=np.maximum(show[:,:,0],np.roll(show[:,:,0],1,axis=1))
if magnifyBlue>=2:
show[:,:,0]=np.maximum(show[:,:,0],np.roll(show[:,:,0],-1,axis=1))
if showrot:
# cv2.putText(show,'xangle %d' % (int(xangle/np.pi*180)),(30,showsz-30),0,0.5,cv2.cv.CV_RGB(255,0,0))
# cv2.putText(show,'yangle %d' % (int(yangle/np.pi*180)),(30,showsz-50),0,0.5,cv2.cv.CV_RGB(255,0,0))
# cv2.putText(show,'zoom %d%%' % (int(zoom*100)),(30,showsz-70),0,0.5,cv2.cv.CV_RGB(255,0,0))
cv2.putText(show,'xangle %d' % (int(xangle/np.pi*180)),(30,showsz-30),0,0.5,(255,0,0))
cv2.putText(show,'yangle %d' % (int(yangle/np.pi*180)),(30,showsz-50),0,0.5,(255,0,0))
cv2.putText(show,'zoom %d%%' % (int(zoom*100)),(30,showsz-70),0,0.5,(255,0,0))
changed=True
while True:
if changed:
render()
changed=False
cv2.imshow('show3d',show)
if waittime==0:
cmd=cv2.waitKey(10) % 256
else:
cmd=cv2.waitKey(waittime) % 256
if cmd==ord('q'):
break
elif cmd==ord('Q'):
sys.exit(0)
if cmd==ord('t') or cmd == ord('p'):
if cmd == ord('t'):
if c_gt is None:
c0=np.zeros((len(xyz),),dtype='float32')+255
c1=np.zeros((len(xyz),),dtype='float32')+255
c2=np.zeros((len(xyz),),dtype='float32')+255
else:
c0=c_gt[:,0]
c1=c_gt[:,1]
c2=c_gt[:,2]
else:
if c_pred is None:
c0=np.zeros((len(xyz),),dtype='float32')+255
c1=np.zeros((len(xyz),),dtype='float32')+255
c2=np.zeros((len(xyz),),dtype='float32')+255
else:
c0=c_pred[:,0]
c1=c_pred[:,1]
c2=c_pred[:,2]
if normalizecolor:
c0/=(c0.max()+1e-14)/255.0
c1/=(c1.max()+1e-14)/255.0
c2/=(c2.max()+1e-14)/255.0
c0=np.require(c0,'float32','C')
c1=np.require(c1,'float32','C')
c2=np.require(c2,'float32','C')
changed = True
if cmd==ord('j'): # rotate
mousey-=10/float(showsz)
changed=True
elif cmd==ord('l'):
mousey+=10/float(showsz)
changed=True
if cmd==ord('i'):
mousex-=10/float(showsz)
changed=True
elif cmd==ord('k'):
mousex+=10/float(showsz)
changed=True
if cmd==ord('n'): # near
zoom*=1.1
changed=True
elif cmd==ord('m'): # far
zoom/=1.1
changed=True
elif cmd==ord('r'):
zoom=1.0
changed=True
elif cmd==ord('s'):
cv2.imwrite(savedir,show)
if cmd==ord('f'):
freezerot = not freezerot
if waittime!=0:
break
return cmd
if __name__=='__main__':
np.random.seed(100)
showpoints(np.random.randn(2500,3))
|
[
"screnary@qq.com"
] |
screnary@qq.com
|
3a3f131e46c372e5a1ffb23605dd5ea5ac592973
|
4652255cc1adaed1cf808f5475aa06265ee2016a
|
/fluiddb/security/test/test_oauth.py
|
d9a3fda028a68004273e684c46f1c20fd2a3f13a
|
[
"Apache-2.0"
] |
permissive
|
fluidinfo/fluiddb
|
ca55b640ce44be53614068caade373046bdf30e4
|
b5a8c8349f3eaf3364cc4efba4736c3e33b30d96
|
refs/heads/master
| 2021-01-11T08:16:01.635285
| 2016-03-27T21:11:58
| 2016-03-27T21:11:58
| 54,848,235
| 3
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,626
|
py
|
from random import sample
from fluiddb.data.exceptions import UnknownUserError
from fluiddb.data.system import createSystemData
from fluiddb.data.user import getUsers, ALPHABET
from fluiddb.model.oauth import (
OAuthAccessToken, OAuthRenewalToken, OAuthConsumerAPI,
UnknownConsumerError)
from fluiddb.model.user import UserAPI, getUser
from fluiddb.security.exceptions import InvalidOAuthTokenError
from fluiddb.security.oauth import SecureOAuthConsumerAPI
from fluiddb.testing.basic import FluidinfoTestCase
from fluiddb.testing.resources import (
ConfigResource, DatabaseResource, CacheResource)
class SecureOAuthConsumerAPITest(FluidinfoTestCase):
resources = [('cache', CacheResource()),
('config', ConfigResource()),
('store', DatabaseResource())]
def setUp(self):
super(SecureOAuthConsumerAPITest, self).setUp()
createSystemData()
secret = ''.join(sample(ALPHABET, 16))
self.config.set('oauth', 'access-secret', secret)
def testRenewToken(self):
"""
L{SecureOAuthConsumerAPI.renewToken} generates a new
L{OAuthRenewalToken} and L{OAuthAccessToken}, given a valid
L{OAuthRenewalToken}.
"""
UserAPI().create([
(u'consumer', u'secret', u'Consumer', u'consumer@example.com'),
(u'user', u'secret', u'User', u'user@example.com')])
consumer = getUser(u'consumer')
user = getUser(u'user')
api = OAuthConsumerAPI()
api.register(consumer)
token = api.getRenewalToken(consumer, user).encrypt()
encryptedRenewalToken, encryptedAccessToken = (
SecureOAuthConsumerAPI().renewToken(u'consumer', token))
renewalToken = OAuthRenewalToken.decrypt(consumer,
encryptedRenewalToken)
accessToken = OAuthAccessToken.decrypt(consumer, encryptedAccessToken)
self.assertTrue(isinstance(renewalToken, OAuthRenewalToken))
self.assertIdentical(consumer, renewalToken.consumer)
self.assertIdentical(user, renewalToken.user)
self.assertTrue(isinstance(accessToken, OAuthAccessToken))
self.assertIdentical(consumer, accessToken.consumer)
self.assertIdentical(user, accessToken.user)
def testRenewTokenWithUnknownConsumer(self):
"""
L{SecureOAuthConsumerAPI.renewToken} raises an L{UnknownConsumerError}
if an L{OAuthRenewalToken} for an unknown consumer is used to generate
a new L{OAuthAccessToken}.
"""
UserAPI().create([
(u'consumer', u'secret', u'Consumer', u'consumer@example.com'),
(u'user', u'secret', u'User', u'user@example.com')])
consumer = getUser(u'consumer')
user = getUser(u'user')
api = OAuthConsumerAPI()
api.register(consumer)
token = api.getRenewalToken(consumer, user).encrypt()
getUsers(usernames=[u'consumer']).remove()
self.assertRaises(UnknownConsumerError,
SecureOAuthConsumerAPI().renewToken, u'consumer',
token)
def testRenewTokenWithUnknownUser(self):
"""
L{SecureOAuthConsumerAPI.renewToken} raises an L{UnknownUserError} if
an L{OAuthRenewalToken} for an unknown L{User} is used to generate a
new L{OAuthAccessToken}.
"""
UserAPI().create([
(u'consumer', u'secret', u'Consumer', u'consumer@example.com'),
(u'user', u'secret', u'User', u'user@example.com')])
consumer = getUser(u'consumer')
user = getUser(u'user')
api = OAuthConsumerAPI()
api.register(consumer)
token = api.getRenewalToken(consumer, user).encrypt()
getUsers(usernames=[u'user']).remove()
self.assertRaises(UnknownUserError,
SecureOAuthConsumerAPI().renewToken, u'consumer',
token)
def testRenewTokenWithInvalidRenewalToken(self):
"""
L{SecureOAuthConsumerAPI.renewToken} raises an
L{InvalidOAuthTokenError} if the specified encrypted
L{OAuthRenewalToken} can't be decrypted.
"""
UserAPI().create([
(u'consumer', u'secret', u'Consumer', u'consumer@example.com'),
(u'user', u'secret', u'User', u'user@example.com')])
consumer = getUser(u'consumer')
OAuthConsumerAPI().register(consumer)
self.assertRaises(InvalidOAuthTokenError,
SecureOAuthConsumerAPI().renewToken, u'consumer',
'invalid')
|
[
"terry@jon.es"
] |
terry@jon.es
|
b129a9f14e65ef5524a7b3b9c62c43528fa7a56e
|
8df1237388352d29c894403feaf91e800edef6bf
|
/Algorithms/717.1-bit-and-2-bit-characters/1-bit-and-2-bit-characters_1.py
|
c29bf6ba7a65751ccf0ecf07f91a5fd4ce738556
|
[
"MIT"
] |
permissive
|
GaLaPyPy/leetcode-solutions
|
8cfa5d220516683c6e18ff35c74d84779975d725
|
40920d11c584504e805d103cdc6ef3f3774172b3
|
refs/heads/master
| 2023-06-19T22:28:58.956306
| 2021-07-19T00:20:56
| 2021-07-19T00:20:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 243
|
py
|
class Solution:
def isOneBitCharacter(self, bits: List[int]) -> bool:
return False if not bits or bits == [[1, 0]] else True if bits == [0] else self.isOneBitCharacter(bits[1:]) if bits[0] == 0 else self.isOneBitCharacter(bits[2:])
|
[
"zoctopus@qq.com"
] |
zoctopus@qq.com
|
deee26ca9e685631ff63c4376f9e435f66452620
|
cadb6dceb7bb67ce47ef48b2c83f480a65d6b01a
|
/s3prl/problem/hear/gtzan_music_speech.py
|
5cd6274c2963c8c9e15792e2f29f92bd1933caa8
|
[
"Apache-2.0",
"CC-BY-NC-4.0"
] |
permissive
|
s3prl/s3prl
|
52ec2ae4df5a61c786c122085603aa9c5e8c2681
|
76a9432b824f6ae3eae09a35a67782c4ed582832
|
refs/heads/main
| 2023-08-17T02:26:57.524087
| 2023-06-10T17:12:27
| 2023-06-10T17:12:27
| 196,905,457
| 1,549
| 398
|
Apache-2.0
| 2023-09-14T13:07:05
| 2019-07-15T01:54:52
|
Python
|
UTF-8
|
Python
| false
| false
| 2,662
|
py
|
import logging
from s3prl.corpus.hear import hear_scene_kfolds
from s3prl.util.configuration import default_cfg, field
from .scene import HearScene
logger = logging.getLogger(__name__)
NUM_FOLDS = 10
class GtzanMusicSpeech(HearScene):
@default_cfg(
**HearScene.setup.default_except(
corpus=dict(
CLS=field(
hear_scene_kfolds,
"\nThe corpus class. You can add the **kwargs right below this CLS key",
str,
),
dataset_root=field(
"???",
"The root path of the corpus",
str,
),
test_fold="???",
num_folds=NUM_FOLDS,
),
train_sampler=dict(
batch_size=32,
),
task=dict(
prediction_type="multiclass",
scores=["top1_acc", "mAP", "d_prime", "aucroc"],
),
)
)
@classmethod
def setup(cls, **cfg):
super().setup(**cfg)
@default_cfg(
**HearScene.train.default_except(
trainer=dict(
valid_metric="top1_acc",
valid_higher_better=True,
)
)
)
@classmethod
def train(cls, **cfg):
super().train(**cfg)
@default_cfg(**HearScene.inference.default_cfg)
@classmethod
def inference(cls, **cfg):
super().inference(**cfg)
@default_cfg(
**HearScene.run.default_except(
stages=["setup", "train", "inference"],
start_stage="setup",
final_stage="inference",
setup=setup.default_cfg.deselect("workspace", "resume"),
train=train.default_cfg.deselect("workspace", "resume"),
inference=inference.default_cfg.deselect("workspace", "resume"),
)
)
@classmethod
def run(cls, **cfg):
super().run(**cfg)
@default_cfg(
num_fold=field(NUM_FOLDS, "The number of folds to run cross validation", int),
**run.default_except(
workspace=field(
"???",
"The root workspace for all folds.\n"
"Each fold will use a 'fold_{id}' sub-workspace under this root workspace",
),
setup=dict(
corpus=dict(
test_fold=field(
"TBD", "This will be auto-set by 'run_cross_validation'"
)
)
),
),
)
@classmethod
def cross_validation(cls, **cfg):
super().cross_validation(**cfg)
|
[
"leo19941227@gmail.com"
] |
leo19941227@gmail.com
|
f79f3465381def312720a2205e28a051021ff1bd
|
3c82ea78607e530811e0e837503ce26717e5fd04
|
/TopQuarkAnalysis/Configuration/python/patRefSel_eventCleaning_cff.py
|
5676a6c747cd5e1df39cdbd39d2ec009084dd6f1
|
[] |
no_license
|
fhoehle/OldCMSSWPackages
|
7065c1cd2944c56fe2731c9be49dc2bd1d9781ce
|
cb655adfd0d59c9c8143b2e3db1ea950110df8f6
|
refs/heads/master
| 2016-09-06T18:05:43.458784
| 2014-08-21T10:54:47
| 2014-08-21T10:54:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 480
|
py
|
import FWCore.ParameterSet.Config as cms
from CommonTools.RecoAlgos.HBHENoiseFilter_cfi import *
# s. https://hypernews.cern.ch/HyperNews/CMS/get/JetMET/1196.html
HBHENoiseFilter.minIsolatedNoiseSumE = 999999.
HBHENoiseFilter.minNumIsolatedNoiseChannels = 999999
HBHENoiseFilter.minIsolatedNoiseSumEt = 999999.
from TopQuarkAnalysis.Configuration.patRefSel_eventCleaning_cfi import scrapingFilter
eventCleaning = cms.Sequence(
HBHENoiseFilter
+ scrapingFilter
)
|
[
"hoehle@physik.rwth-aachen.de"
] |
hoehle@physik.rwth-aachen.de
|
b1b040d767296d8c2901283ddd4759ac1033adbc
|
25ebc03b92df764ff0a6c70c14c2848a49fe1b0b
|
/daily/20200112/example_python/00varargs.py
|
ad017fc8a47ce3cd6acaaafb2268930d71dc5a06
|
[] |
no_license
|
podhmo/individual-sandbox
|
18db414fafd061568d0d5e993b8f8069867dfcfb
|
cafee43b4cf51a321f4e2c3f9949ac53eece4b15
|
refs/heads/master
| 2023-07-23T07:06:57.944539
| 2023-07-09T11:45:53
| 2023-07-09T11:45:53
| 61,940,197
| 6
| 0
| null | 2022-10-19T05:01:17
| 2016-06-25T11:27:04
|
Python
|
UTF-8
|
Python
| false
| false
| 141
|
py
|
def f(name: str, *args: str) -> None:
print(name, args)
# TypeError: f() got multiple values for argument 'name'
f(1, 2, 3, name="foo")
|
[
"ababjam61+github@gmail.com"
] |
ababjam61+github@gmail.com
|
338224b306fe6bd7bfb403460e852eaca4b28dee
|
da64994d73d250d19a30381de7462c5729372f81
|
/apps/student/migrations/0023_auto_20191125_2256.py
|
e8d84833aad8f99e902435dfa2d29e34a791bfa8
|
[] |
no_license
|
Mid0Riii/psybackend
|
2f872c1dd21e97ba0a46efa10f2b3246ac8bb2b5
|
2cd477f01111a816b17725a00ffa77a156dec7b0
|
refs/heads/master
| 2023-03-26T07:55:17.580161
| 2021-03-14T01:45:19
| 2021-03-14T01:45:19
| 305,083,821
| 0
| 1
| null | 2021-03-14T01:45:20
| 2020-10-18T11:15:48
|
Python
|
UTF-8
|
Python
| false
| false
| 578
|
py
|
# Generated by Django 2.2.5 on 2019-11-25 14:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('student', '0022_auto_20191123_1526'),
]
operations = [
migrations.AlterField(
model_name='studentbasic',
name='stu_level',
field=models.CharField(blank=True, choices=[('二级', '二级'), ('三级', '三级'), ('中科院', '中科院'), ('心理辅导师', '心理辅导师')], default='空', max_length=16, null=True, verbose_name='级别'),
),
]
|
[
"1534296263@qq.com"
] |
1534296263@qq.com
|
ef4d6dfcc404eef043eb20246f5b4cda418d1972
|
eaa43160aeeaa3cb4c7c9f52d8ed01f9abdf85e5
|
/tests/db/sql/clauses/test_array_agg.py
|
b827c5049998aee470f52ac1b088f9ea6f219154
|
[
"MIT"
] |
permissive
|
furious-luke/polecat
|
4fd2a2f859b9a77d9d004b32bc1bf8f907fea2ba
|
7be5110f76dc42b15c922c1bb7d49220e916246d
|
refs/heads/master
| 2022-07-31T16:38:45.791129
| 2021-05-06T01:05:03
| 2021-05-06T01:05:03
| 179,440,367
| 4
| 0
|
MIT
| 2022-07-05T21:28:34
| 2019-04-04T07:00:55
|
Python
|
UTF-8
|
Python
| false
| false
| 457
|
py
|
from unittest.mock import MagicMock
import pytest
from polecat.db.sql.expression.array_agg import ArrayAgg
from polecat.db.sql.sql import Sql
from .conftest import SqlTermTester
def test_to_sql():
term = ArrayAgg('test')
sql = Sql(term.to_sql())
assert str(sql) == 'array_agg("test")'
@pytest.mark.parametrize('test_func', SqlTermTester.ALL_TESTS)
def test_sql_term_methods(test_func):
term = ArrayAgg(MagicMock())
test_func(term)
|
[
"furious.luke@gmail.com"
] |
furious.luke@gmail.com
|
1958ba72f578a180ae656aab8c407494a1874f58
|
cbc5e26bb47ae69e80a3649c90275becf25ce404
|
/xlsxwriter/test/comparison/test_chart_up_down_bars01.py
|
393b2df0723766bdd319a942f072296d1d44a69f
|
[
"BSD-2-Clause-Views",
"BSD-3-Clause",
"MIT"
] |
permissive
|
mst-solar-car/kicad-bom-generator
|
c3549409c3139f787ad28391372b5cb03791694a
|
2aae905056d06f3d25343a8d784049c141d05640
|
refs/heads/master
| 2021-09-07T14:00:40.759486
| 2018-02-23T23:21:13
| 2018-02-23T23:21:13
| 107,868,801
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,690
|
py
|
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2017, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'chart_up_down_bars01.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test the creation of an XlsxWriter file with up-down bars."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({'type': 'line'})
chart.axis_ids = [46808448, 49289856]
data = [
[1, 2, 3, 4, 5],
[2, 4, 6, 8, 10],
[3, 6, 9, 12, 15],
]
worksheet.write_column('A1', data[0])
worksheet.write_column('B1', data[1])
worksheet.write_column('C1', data[2])
chart.set_up_down_bars()
chart.add_series({
'categories': '=Sheet1!$A$1:$A$5',
'values': '=Sheet1!$B$1:$B$5',
})
chart.add_series({
'categories': '=Sheet1!$A$1:$A$5',
'values': '=Sheet1!$C$1:$C$5',
})
worksheet.insert_chart('E9', chart)
workbook.close()
self.assertExcelEqual()
|
[
"mwrb7d@mst.edu"
] |
mwrb7d@mst.edu
|
07401c1e6a187b32c1074eb2c08dd8cb41916ac7
|
b095173b2dbc77c8ad61c42403258c76169b7a63
|
/src/sagemaker/cli/compatibility/v2/modifiers/deprecated_params.py
|
1cc2f6dca060824a2f9117889c88e84f5fe907bd
|
[
"Apache-2.0"
] |
permissive
|
aws/sagemaker-python-sdk
|
666665e717cfb76698ba3ea7563b45344634264d
|
8d5d7fd8ae1a917ed3e2b988d5e533bce244fd85
|
refs/heads/master
| 2023-09-04T01:00:20.663626
| 2023-08-31T15:29:19
| 2023-08-31T15:29:19
| 110,621,895
| 2,050
| 1,255
|
Apache-2.0
| 2023-09-14T17:37:15
| 2017-11-14T01:03:33
|
Python
|
UTF-8
|
Python
| false
| false
| 2,482
|
py
|
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Classes to remove deprecated parameters."""
from __future__ import absolute_import
from sagemaker.cli.compatibility.v2.modifiers import matching
from sagemaker.cli.compatibility.v2.modifiers.modifier import Modifier
TF_NAMESPACES = ("sagemaker.tensorflow", "sagemaker.tensorflow.estimator")
class TensorFlowScriptModeParameterRemover(Modifier):
"""A class to remove ``script_mode`` from TensorFlow estimators (because it's the only mode)."""
def node_should_be_modified(self, node):
"""Checks if the ``ast.Call`` node instantiates a TensorFlow estimator.
TensorFlow estimator would use``script_mode`` set. This looks for the following formats:
- ``TensorFlow``
- ``sagemaker.tensorflow.TensorFlow``
Args:
node (ast.Call): a node that represents a function call. For more,
see https://docs.python.org/3/library/ast.html#abstract-grammar.
Returns:
bool: If the ``ast.Call`` is instantiating a TensorFlow estimator with ``script_mode``.
"""
is_tf_constructor = matching.matches_name_or_namespaces(node, "TensorFlow", TF_NAMESPACES)
return is_tf_constructor and self._has_script_mode_param(node)
def _has_script_mode_param(self, node):
"""Checks if the ``ast.Call`` node's keywords include ``script_mode``."""
for kw in node.keywords:
if kw.arg == "script_mode":
return True
return False
def modify_node(self, node):
"""Modifies the ``ast.Call`` node's keywords to remove ``script_mode``.
Args:
node (ast.Call): a node that represents a TensorFlow constructor.
Returns:
ast.AST: the original node, which has been potentially modified.
"""
for kw in node.keywords:
if kw.arg == "script_mode":
node.keywords.remove(kw)
return node
|
[
"noreply@github.com"
] |
aws.noreply@github.com
|
c97534d7ad5b129aa5e2e092a9d772af4853ed9a
|
ffadf9541d01cf9af20c419759d48b1eb01bfd35
|
/pachong/PCdemo1/day16/刘士豪20200414/梨视频爬取.py
|
c6ded9d8a6124d9c59c2e87f6c0e2f3f0748bb42
|
[] |
no_license
|
1987617587/lsh_py
|
b1bb1016eaafcba03bbc4a5310c1db04ae227af4
|
80eb5175cd0e5b3c6c5e2ebb906bb78d9a8f9e0d
|
refs/heads/master
| 2021-01-02T05:14:31.330287
| 2020-06-20T05:18:23
| 2020-06-20T05:18:23
| 239,498,994
| 2
| 1
| null | 2020-06-07T23:09:56
| 2020-02-10T11:46:47
|
Python
|
UTF-8
|
Python
| false
| false
| 6,029
|
py
|
# author:lsh
# datetime:2020/4/14 17:32
'''
.::::. _oo0oo_
.::::::::. o8888888o
::::::::::: 88" . "88
..:::::::::::' (| -_- |)
'::::::::::::' 0\ = /0
.:::::::::: ___/`---'\___
'::::::::::::::.. .' \\| |# '.
..::::::::::::. / \\||| : |||# \
``:::::::::::::::: / _||||| -:- |||||- \
::::``:::::::::' .:::. | | \\\ - #/ | |
::::' ':::::' .::::::::. | \_| ''\---/'' |_/ |
.::::' :::: .:::::::'::::. \ .-\__ '-' ___/-. /
.:::' ::::: .:::::::::' ':::::. ___'. .' /--.--\ `. .'___
.::' :::::.:::::::::' ':::::. ."" '< `.___\_<|>_/___.' >' "".
.::' ::::::::::::::' ``::::. | | : `- \`.;`\ _ /`;.`/ - ` : | |
...::: ::::::::::::' ``::. \ \ `_. \_ __\ /__ _/ .-` / /
```` ':. ':::::::::' ::::.. `-.____`.___ \_____/___.-`___.-'
'.:::::' ':'````.. `=---='
女神保佑 永无BUG 佛祖保佑 永无BUG
'''
import os
import re
import requests
import time
import random
import json
import xlwt # excel文档的写操作
def down_video(url,path):
with requests.get(url,stream=True) as response: # 使用字节流的方式下载
print('开始下载视频……')
# 数据块的大小
chunk_size = 10240
# 获取视频的大小
content_size = int(response.headers['content-length'])
print(f'content_size:{content_size}')
with open(path,'wb')as file:
n = 1
for chunk in response.iter_content(chunk_size=chunk_size):
loaded = n*chunk_size / content_size
print(f'已下载:{loaded:%}')
n += 1
file.write(chunk)
print("下载成功")
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.87 Safari/537.36',
'Referer': 'https://www.pearvideo.com/category_8',
'Cookie':'__secdyid=84f00d239b16acbdcfb9cde101a17af47b0d99ea9a3a759a021586856771; JSESSIONID=9BC77A296B4EA4B8841EC7857515E446; PEAR_UUID=63dcb5e7-c77d-4312-b9ee-b74afd0cb7cd; PV_WWW=srv-pv-prod-portal4; _uab_collina=158685677096523995133632; UM_distinctid=1717808bfe41ee-08084333e12f5a-b791b36-1fa400-1717808bfe642d; CNZZDATA1260553744=996779612-1586856014-https%253A%252F%252Fwww.baidu.com%252F%7C1586856014; Hm_lvt_9707bc8d5f6bba210e7218b8496f076a=1586856772; __ads_session=zQ/j+u9ZdQk+8LohtQA=; Hm_lpvt_9707bc8d5f6bba210e7218b8496f076a=1586856796'
}
headers2 = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.87 Safari/537.36',
'Referer': 'https://www.pearvideo.com/',
'Cookie':'__secdyid=84f00d239b16acbdcfb9cde101a17af47b0d99ea9a3a759a021586856771; JSESSIONID=9BC77A296B4EA4B8841EC7857515E446; PEAR_UUID=63dcb5e7-c77d-4312-b9ee-b74afd0cb7cd; PV_WWW=srv-pv-prod-portal4; _uab_collina=158685677096523995133632; UM_distinctid=1717808bfe41ee-08084333e12f5a-b791b36-1fa400-1717808bfe642d; CNZZDATA1260553744=996779612-1586856014-https%253A%252F%252Fwww.baidu.com%252F%7C1586856014; Hm_lvt_9707bc8d5f6bba210e7218b8496f076a=1586856772; __ads_session=zQ/j+u9ZdQk+8LohtQA=; Hm_lpvt_9707bc8d5f6bba210e7218b8496f076a=1586856796'
}
# url = 'https://www.pearvideo.com/category_loading.jsp?reqType=5&categoryId=8&start=12&mrd=0.5616803019773882&filterIds=1668776,1668820,1667839,1667761,1668006,1667974,1667846,1667598,1667770,1667134,1667302,1667266'
# 把上面路由优化,修改参数,进行多页爬取
pageCount = 1
# url = 'https://www.pearvideo.com/category_8'
url = 'https://www.pearvideo.com/category_loading.jsp'
# for page in range(5):
page = 0
while page<= pageCount:
print(f'page:{page}')
params = {
'reqType': '5',
'categoryId': '8',
'start': page*12,
}
page+=1
response = requests.get(url, params=params, headers=headers)
html = response.text
# print(html)
pat = re.compile(r'<a.*?href="(.*?)" class="vervideo-lilink actplay">')
ls = pat.findall(html)
# ['video_1668820', 'video_1668776', 'living_1667761', 'video_1667839', 'video_1668006', 'video_1667974', 'video_1667846', 'video_1667598', 'video_1667770', 'video_1667134', 'video_1667302', 'video_1667266']
print(ls)
for detail_url in ls:
video_detail_url = 'https://www.pearvideo.com/'+detail_url
print(f'video_detail_url:{video_detail_url}')
headers2['Referer'] = 'https://www.pearvideo.com/'+detail_url
print(headers2)
response = requests.get(video_detail_url,headers=headers2)
print(response.text)
detail_html = response.text
pat = re.compile(r'.*?srcUrl="(.*?)"')
mp4_url = pat.findall(detail_html)[0]
print(f'mp4_url:{mp4_url}')
# 开始下载视频
down_video(mp4_url,'videos/'+os.path.basename(mp4_url))
time.sleep(random.random())
|
[
"1987617587@qq.com"
] |
1987617587@qq.com
|
a8bf7488a6744e53a99e1ccd19d70c3f88be861b
|
9a832a66a8a4b17021b2e0f7e3b40362879a7012
|
/arabel/asgi.py
|
dbdc7b43f4e9f73ea21c38c7c9520aa6d284df7f
|
[
"MIT"
] |
permissive
|
niconoe/arabel
|
dcdd29cb14bafef8cf19853ad87b28fc30bc709d
|
ef0ffb1ef3e104799ac95dea55e48aae93b399d5
|
refs/heads/master
| 2023-05-04T22:07:53.244933
| 2021-05-26T14:58:08
| 2021-05-26T14:58:08
| 334,908,666
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 389
|
py
|
"""
ASGI config for arabel project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'arabel.settings')
application = get_asgi_application()
|
[
"nicolas@niconoe.eu"
] |
nicolas@niconoe.eu
|
e56c0a6f34577794fddf1e0e344ffa44cd7c5504
|
7807d8d9d109a3e272fffed91bf841201da39256
|
/trans_ALDS1_1_B/oreo0320_ALDS1_1_B_kotonoha.py
|
fb7da8b5cfb0a8448ae7e1cfdeee8e2de5cd6b7f
|
[] |
no_license
|
y-akinobu/AOJ_to_Kotonoha
|
0e8df43393964fcdd5df06c75545091bd6c0c2e2
|
5a694a55a3d85e3fbc4a07b57edc4374556db9a1
|
refs/heads/main
| 2023-02-05T15:33:16.581177
| 2020-12-30T16:14:44
| 2020-12-30T16:14:44
| 325,524,216
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 226
|
py
|
# mathモジュールを用いる
import math
# map(整数,入力された文字列を空白で分割した列)を展開し順にaとbとする
a,b = map(int,input().split())
# math.gcd(a,b)を出力する
print(math.gcd(a,b))
|
[
"sx2_vn_yuka@outlook.jp"
] |
sx2_vn_yuka@outlook.jp
|
5ebae5874d208063c395bbf6d6118ed2aca27b8c
|
aa6e1dd07a71a73bc08574b76f9e57a3ce8c8286
|
/077.Test_BeeWare/helloworld/macOS/Hello World/Hello World.app/Contents/Resources/app_packages/rubicon/objc/__init__.py
|
15b6e36cede428eead64cb153f86979a662adfe7
|
[
"MIT",
"BSD-3-Clause"
] |
permissive
|
IvanaXu/PyTools
|
0aff5982f50bb300bfa950405192c78473b69537
|
358ae06eef418fde35f424909d4f13049ca9ec7b
|
refs/heads/master
| 2023-06-07T21:45:44.242363
| 2023-06-06T16:00:25
| 2023-06-06T16:00:25
| 163,940,845
| 60
| 8
|
MIT
| 2022-12-23T02:49:05
| 2019-01-03T07:54:16
|
Python
|
UTF-8
|
Python
| false
| false
| 2,188
|
py
|
# Examples of valid version strings
# __version__ = '1.2.3.dev1' # Development release 1
# __version__ = '1.2.3a1' # Alpha Release 1
# __version__ = '1.2.3b1' # Beta Release 1
# __version__ = '1.2.3rc1' # RC Release 1
# __version__ = '1.2.3' # Final Release
# __version__ = '1.2.3.post1' # Post Release 1
__version__ = '0.4.0'
# Import commonly used submodules right away.
# The first few imports are only included for clarity. They are not strictly necessary, because the from-imports below
# also import the types and runtime modules and implicitly add them to the rubicon.objc namespace.
from . import types # noqa: F401
from . import runtime # noqa: F401
from . import api # noqa: F401
# The import of collections is important, however. The classes from collections are not meant to be used directly,
# instead they are registered with the runtime module (using the for_objcclass decorator) so they are used in place of
# ObjCInstance when representing Foundation collections in Python. If this module is not imported, the registration
# will not take place, and Foundation collections will not support the expected methods/operators in Python!
from . import collections # noqa: F401
# Note to developers: when modifying any of the import lists below, please:
# * Keep each list in alphabetical order
# * Update the corresponding list in the documentation at docs/reference/rubicon-objc.rst
# Thank you!
from .types import ( # noqa: F401
CFIndex, CFRange, CGFloat, CGGlyph, CGPoint, CGPointMake, CGRect, CGRectMake, CGSize, CGSizeMake, NSEdgeInsets,
NSEdgeInsetsMake, NSInteger, NSMakePoint, NSMakeRect, NSMakeSize, NSPoint, NSRange, NSRect, NSSize, NSTimeInterval,
NSUInteger, NSZeroPoint, UIEdgeInsets, UIEdgeInsetsMake, UIEdgeInsetsZero, UniChar, unichar,
)
from .runtime import SEL, send_message, send_super # noqa: F401
from .api import ( # noqa: F401
Block, NSArray, NSDictionary, NSMutableArray, NSMutableDictionary, NSObject, NSObjectProtocol, ObjCBlock,
ObjCClass, ObjCInstance, ObjCMetaClass, ObjCProtocol, at, ns_from_py, objc_classmethod, objc_const, objc_ivar,
objc_method, objc_property, objc_rawmethod, py_from_ns,
)
|
[
"1440420407@qq.com"
] |
1440420407@qq.com
|
f92f5f95e7c1c9b68e7a522938c2afe1c00e0d01
|
b38e64f47f84e3aa984f813d1fbeef2717e2bc3d
|
/characters/migrations/0003_class_race.py
|
fbdf75700c0bc20661afb443e1d24f7dabb0b948
|
[
"MIT"
] |
permissive
|
luiz158/django-tutorial-v2
|
2abf28939060750e031fca95429c1d6cbb738f46
|
3d128301357e687542c6627f9d8eca026e04faaa
|
refs/heads/master
| 2021-12-04T04:18:59.535664
| 2015-01-06T21:54:45
| 2015-01-06T21:54:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,052
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('characters', '0002_alignment'),
]
operations = [
migrations.CreateModel(
name='Class',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True, serialize=False)),
('name', models.CharField(max_length=200)),
('description', models.TextField()),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Race',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True, serialize=False)),
('name', models.CharField(max_length=200)),
('description', models.TextField()),
],
options={
},
bases=(models.Model,),
),
]
|
[
"mpirnat@gmail.com"
] |
mpirnat@gmail.com
|
df6b7919f0f4d731c68b54914240fed2aaa8fe5b
|
91d1a6968b90d9d461e9a2ece12b465486e3ccc2
|
/rds_write_3/db-parameter-group_copy.py
|
ee699d9102789428290b58d77fa4169647f2cf6c
|
[] |
no_license
|
lxtxl/aws_cli
|
c31fc994c9a4296d6bac851e680d5adbf7e93481
|
aaf35df1b7509abf5601d3f09ff1fece482facda
|
refs/heads/master
| 2023-02-06T09:00:33.088379
| 2020-12-27T13:38:45
| 2020-12-27T13:38:45
| 318,686,394
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,218
|
py
|
#!/usr/bin/python
# -*- codding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_three_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/rds/copy-db-parameter-group.html
if __name__ == '__main__':
"""
create-db-parameter-group : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/rds/create-db-parameter-group.html
delete-db-parameter-group : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/rds/delete-db-parameter-group.html
describe-db-parameter-groups : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/rds/describe-db-parameter-groups.html
modify-db-parameter-group : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/rds/modify-db-parameter-group.html
reset-db-parameter-group : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/rds/reset-db-parameter-group.html
"""
parameter_display_string = """
# source-db-parameter-group-identifier : The identifier or ARN for the source DB parameter group. For information about creating an ARN, see Constructing an ARN for Amazon RDS in the Amazon RDS User Guide .
Constraints:
Must specify a valid DB parameter group.
Must specify a valid DB parameter group identifier, for example my-db-param-group , or a valid ARN.
# target-db-parameter-group-identifier : The identifier for the copied DB parameter group.
Constraints:
Canât be null, empty, or blank
Must contain from 1 to 255 letters, numbers, or hyphens
First character must be a letter
Canât end with a hyphen or contain two consecutive hyphens
Example: my-db-parameter-group
# target-db-parameter-group-description : A description for the copied DB parameter group.
"""
add_option_dict = {}
add_option_dict["parameter_display_string"] = parameter_display_string
# ex: add_option_dict["no_value_parameter_list"] = "--single-parameter"
write_three_parameter("rds", "copy-db-parameter-group", "source-db-parameter-group-identifier", "target-db-parameter-group-identifier", "target-db-parameter-group-description", add_option_dict)
|
[
"hcseo77@gmail.com"
] |
hcseo77@gmail.com
|
b73428382fda2e1ca751a76df524f3bcf515ef26
|
7967a3ee1c0ba80d2c4be404f5b779882cd24439
|
/playground/config/apps.py
|
8a5b10b147866b35f9dd3f69a721de35112665cc
|
[
"MIT"
] |
permissive
|
LeeHanYeong/django-quill-editor
|
bf0e7150afedc890652b20b288b27c635c777a5f
|
f49eabd65503462f0a9081626dfc66d2d7ddce36
|
refs/heads/master
| 2023-06-24T07:26:55.664214
| 2023-02-07T08:14:56
| 2023-02-07T08:30:14
| 245,107,729
| 184
| 44
|
MIT
| 2023-06-09T09:35:05
| 2020-03-05T08:27:55
|
Python
|
UTF-8
|
Python
| false
| false
| 185
|
py
|
from django.contrib.admin.apps import AdminConfig as DefaultAdminConfig
__all__ = ("AdminConfig",)
class AdminConfig(DefaultAdminConfig):
default_site = "config.admin.AdminSite"
|
[
"dev@lhy.kr"
] |
dev@lhy.kr
|
10219f3aa82ee083283d3ab5e26f3e0d93b5266b
|
cf70b91df736d472d31a54c79bbdf124b7713adc
|
/docs/conf.py
|
00d03b6ce5f611f322a30ce6a1b3ec1349168f29
|
[
"MIT"
] |
permissive
|
andycasey/mcfa
|
1e8a46d7c313988c4c0c57f214bd5268f8e35ae7
|
8c4135e665e47006e9ca725e8bfc67315508366e
|
refs/heads/master
| 2021-06-16T02:27:01.686167
| 2019-09-30T02:18:17
| 2019-09-30T02:18:17
| 142,864,003
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,654
|
py
|
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath("../"))
# -- Project information -----------------------------------------------------
project = 'mcfa'
copyright = '2018, Andrew R. Casey et al.'
author = 'Andrew R. Casey et al.'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = ''
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'mcfadoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'mcfa.tex', 'mcfa Documentation',
'Andrew R. Casey', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'mcfa', 'mcfa Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'mcfa', 'mcfa Documentation',
author, 'mcfa', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
autodoc_mock_imports = ['numpy', 'scipy', 'sklearn', 'tqdm']
|
[
"andycasey@gmail.com"
] |
andycasey@gmail.com
|
3dffc5c20f744079690693c532bc36ee2d591421
|
ef3a7391b0a5c5d8e276355e97cbe4de621d500c
|
/venv/Lib/site-packages/caffe2/quantization/server/group_norm_dnnlowp_op_test.py
|
973576bc6ed334f54d6524e629e153233989b982
|
[
"Apache-2.0"
] |
permissive
|
countBMB/BenjiRepo
|
143f6da5d198ea6f06404b4559e1f4528b71b3eb
|
79d882263baaf2a11654ca67d2e5593074d36dfa
|
refs/heads/master
| 2022-12-11T07:37:04.807143
| 2019-12-25T11:26:29
| 2019-12-25T11:26:29
| 230,090,428
| 1
| 1
|
Apache-2.0
| 2022-12-08T03:21:09
| 2019-12-25T11:05:59
|
Python
|
UTF-8
|
Python
| false
| false
| 4,621
|
py
|
from __future__ import absolute_import, division, print_function, unicode_literals
import collections
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, dyndep, utils, workspace
from caffe2.quantization.server import utils as dnnlowp_utils
from dnnlowp_test_utils import check_quantized_results_close
from hypothesis import given
dyndep.InitOpsLibrary("//caffe2/caffe2/quantization/server:dnnlowp_ops")
workspace.GlobalInit(["caffe2", "--caffe2_omp_num_threads=11"])
class DNNLowPOpGroupNormTest(hu.HypothesisTestCase):
@given(
N=st.integers(0, 4),
G=st.integers(2, 4),
K=st.integers(2, 12),
H=st.integers(4, 16),
W=st.integers(4, 16),
order=st.sampled_from(["NCHW", "NHWC"]),
in_quantized=st.booleans(),
out_quantized=st.booleans(),
weight_quantized=st.booleans(),
**hu.gcs_cpu_only
)
def test_dnnlowp_group_norm(
self,
N,
G,
K,
H,
W,
order,
in_quantized,
out_quantized,
weight_quantized,
gc,
dc,
):
C = G * K
X = np.random.rand(N, C, H, W).astype(np.float32) * 5.0 - 1.0
if order == "NHWC":
X = utils.NCHW2NHWC(X)
gamma = np.random.rand(C).astype(np.float32) * 2.0 - 1.0
beta = np.random.randn(C).astype(np.float32) - 0.5
Output = collections.namedtuple("Output", ["Y", "op_type", "engine"])
outputs = []
op_engine_list = [
("GroupNorm", ""),
("GroupNorm", "DNNLOWP"),
("Int8GroupNorm", "DNNLOWP"),
]
for op_type, engine in op_engine_list:
net = core.Net("test_net")
do_quantize = "DNNLOWP" in engine and in_quantized
do_dequantize = "DNNLOWP" in engine and out_quantized
do_quantize_weight = (
engine == "DNNLOWP" and weight_quantized and len(outputs) > 0
)
if do_quantize:
quantize = core.CreateOperator(
"Quantize", ["X"], ["X_q"], engine=engine, device_option=gc
)
net.Proto().op.extend([quantize])
if do_quantize_weight:
int8_given_tensor_fill, gamma_q_param = dnnlowp_utils.create_int8_given_tensor_fill(
gamma, "gamma_q"
)
net.Proto().op.extend([int8_given_tensor_fill])
X_min = 0 if X.size == 0 else X.min()
X_max = 0 if X.size == 0 else X.max()
X_q_param = dnnlowp_utils.choose_quantization_params(X_min, X_max)
int8_bias_tensor_fill = dnnlowp_utils.create_int8_bias_tensor_fill(
beta, "beta_q", X_q_param, gamma_q_param
)
net.Proto().op.extend([int8_bias_tensor_fill])
group_norm = core.CreateOperator(
op_type,
[
"X_q" if do_quantize else "X",
"gamma_q" if do_quantize_weight else "gamma",
"beta_q" if do_quantize_weight else "beta",
],
["Y_q" if do_dequantize else "Y"],
dequantize_output=0 if do_dequantize else 1,
group=G,
order=order,
is_test=True,
engine=engine,
device_option=gc,
)
if do_quantize_weight:
# When quantized weight is provided, we can't rescale the
# output dynamically by looking at the range of output of each
# batch, so here we provide the range of output observed from
# fp32 reference implementation
dnnlowp_utils.add_quantization_param_args(group_norm, outputs[0][0])
net.Proto().op.extend([group_norm])
if do_dequantize:
dequantize = core.CreateOperator(
"Dequantize", ["Y_q"], ["Y"], engine=engine, device_option=gc
)
net.Proto().op.extend([dequantize])
self.ws.create_blob("X").feed(X, device_option=gc)
self.ws.create_blob("gamma").feed(gamma, device_option=gc)
self.ws.create_blob("beta").feed(beta, device_option=gc)
self.ws.run(net)
outputs.append(
Output(Y=self.ws.blobs["Y"].fetch(), op_type=op_type, engine=engine)
)
check_quantized_results_close(outputs, atol_scale=2.0)
|
[
"bengmen92@gmail.com"
] |
bengmen92@gmail.com
|
b546b3ddc0e7dfe417126377133ce316d35c7020
|
e13c98f36c362717fdf22468b300321802346ef5
|
/documents/migrations/0001_initial.py
|
6c30b613f871a3e0cbc4328170b19e5c35b4b42e
|
[] |
no_license
|
alexmon1989/libraries_portal
|
2415cc49de33459266a9f18ed8bb34ac99d3eb7c
|
277081e09f6347c175775337bffba074a35f3b92
|
refs/heads/master
| 2021-01-23T07:25:53.884795
| 2018-12-25T14:29:29
| 2018-12-25T14:29:29
| 80,501,603
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,144
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-06 13:45
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('home', '0008_profile_address'),
]
operations = [
migrations.CreateModel(
name='AnotherPerson',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Имя')),
],
options={
'verbose_name_plural': 'Другие пресоны',
'verbose_name': 'Другая пресона',
},
),
migrations.CreateModel(
name='Document',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Название')),
('author', models.CharField(max_length=255, verbose_name='Автор')),
('notes', models.TextField(verbose_name='Примечания')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('another_persons', models.ManyToManyField(blank=True, to='documents.AnotherPerson', verbose_name='Другие персоны')),
],
),
migrations.CreateModel(
name='DocumentType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Название')),
],
),
migrations.CreateModel(
name='Rubric',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Название')),
],
options={
'verbose_name_plural': 'Рубрики',
'verbose_name': 'Рубрика',
},
),
migrations.AddField(
model_name='document',
name='document_type',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='documents.DocumentType', verbose_name='Тип документа'),
),
migrations.AddField(
model_name='document',
name='library',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='home.Library', verbose_name='Библиотека'),
),
migrations.AddField(
model_name='document',
name='rubrics',
field=models.ManyToManyField(blank=True, to='documents.Rubric', verbose_name='Рубрики'),
),
]
|
[
"alex.mon1989@gmail.com"
] |
alex.mon1989@gmail.com
|
25d7024a3f721b5139b7b411e7d035a562642b33
|
a17b30a9ed9e18e89433aadbb54ac94e7ea12045
|
/tests/test_feature.py
|
79ae55a811979865532f3a906c12e46cbdee7443
|
[
"BSD-3-Clause"
] |
permissive
|
HuttleyLab/mutationorigin
|
b327fcbc9d9403006f2e9f8166b73185698d7dca
|
614aa0bc25531a1a0bc24f94ad0ca0fc101aa88a
|
refs/heads/develop
| 2021-06-30T01:37:05.359810
| 2020-02-10T02:26:55
| 2020-02-10T02:26:55
| 210,751,092
| 1
| 3
|
BSD-3-Clause
| 2021-01-14T03:22:40
| 2019-09-25T03:53:11
|
Python
|
UTF-8
|
Python
| false
| false
| 3,705
|
py
|
from unittest import TestCase, main
from mutation_origin.feature import (isproximal, get_feature_indices,
feature_indices_upto, seq_2_features,
seq_feature_labels_upto)
__author__ = "Gavin Huttley"
__copyright__ = "Copyright 2014, Gavin Huttley"
__credits__ = ["Yicheng Zhu", "Cheng Soon Ong", "Gavin Huttley"]
__license__ = "BSD"
__version__ = "0.3"
__maintainer__ = "Gavin Huttley"
__email__ = "Gavin.Huttley@anu.edu.au"
__status__ = "Development"
class TestEncoder(TestCase):
def test_isproximal(self):
"""given a dimension value, return columns in a feature matrix with
defined dimensional neighborhood"""
self.assertTrue(isproximal([1]))
self.assertTrue(isproximal([0, 1]))
self.assertTrue(isproximal([1, 0]))
self.assertTrue(isproximal([0, 1, 2]))
self.assertTrue(isproximal([2, 3, 4]))
self.assertFalse(isproximal([0, 2]))
self.assertFalse(isproximal([0, 3, 4]))
self.assertFalse(isproximal([0, 1, 3]))
self.assertFalse(isproximal([1, 2, 4]))
def test_get_feature_indices(self):
"""selecting feature indices"""
# case of single positions
got = get_feature_indices(2, 1)
self.assertEqual(got, [(0,), (1,), (2,), (3,)])
got = get_feature_indices(2, 2)
self.assertEqual(got, [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)])
got = get_feature_indices(2, 2, proximal=True)
self.assertEqual(got, [(0, 1), (1, 2), (2, 3)])
def test_feature_indices_upto(self):
"""correctly produces all feature indices"""
got = feature_indices_upto(4, 1, proximal=False)
self.assertEqual(got, [(0,), (1,), (2,), (3,), (4,), (5,), (6,), (7,)])
got = feature_indices_upto(2, 2, proximal=False)
self.assertEqual(got, [(0,), (1,), (2,), (3,),
(0, 1), (0, 2), (0, 3),
(1, 2), (1, 3),
(2, 3)])
got = feature_indices_upto(2, 2, proximal=True)
self.assertEqual(got, [(0,), (1,), (2,), (3,),
(0, 1), (1, 2), (2, 3)])
got = feature_indices_upto(2, 3, proximal=False)
self.assertEqual(got, [(0,), (1,), (2,), (3,),
(0, 1), (0, 2), (0, 3),
(1, 2), (1, 3),
(2, 3),
(0, 1, 2), (0, 1, 3),
(0, 2, 3), (1, 2, 3)])
def test_seq_2_features(self):
"""convert a sequence to string features"""
seq = "CAGA"
indices = feature_indices_upto(2, 1, proximal=False)
got = seq_2_features(seq, indices)
self.assertEqual(got, ['C', 'A', 'G', 'A'])
indices = feature_indices_upto(2, 2, proximal=False)
got = seq_2_features(seq, indices)
self.assertEqual(got, ['C', 'A', 'G', 'A',
'CA', 'CG', 'CA',
'AG', 'AA', 'GA'])
indices = feature_indices_upto(2, 2, proximal=True)
got = seq_2_features(seq, indices)
self.assertEqual(got, ['C', 'A', 'G', 'A',
'CA', 'AG', 'GA'])
def test_seq_feature_labels_upto(self):
"""construction of sequence feature labels"""
# the possible labels for a given dimension
# is just the k-mers for that dimension
for dim in range(1, 5):
got = seq_feature_labels_upto(dim)
for i in range(1, dim + 1):
self.assertEqual(4**i, len(got[i].classes_))
if __name__ == '__main__':
main()
|
[
"Gavin.Huttley@anu.edu.au"
] |
Gavin.Huttley@anu.edu.au
|
2cf68c4c488cae807414c8d867e8fab21bbd5f08
|
935e6fc5f32dfd6dcd7db4c2ef2b6083e9dcd867
|
/examples/whbm/lstm_task.py
|
4637ae76103386793980ff053a3f90f6a6978202
|
[] |
no_license
|
zkmartin/tsframe
|
0e805d76c89f647aa102e0acd5d75831f5dd808f
|
b522180f8179b52c4b1e510882813da912243e33
|
refs/heads/master
| 2020-03-21T13:14:08.559186
| 2018-06-12T14:03:00
| 2018-06-12T14:03:00
| 138,594,568
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 793
|
py
|
import tensorflow as tf
import core
from tframe import console
import model_lib as models
def main(_):
console.start('WHBM task (LSTM model)')
# Configurations
th = core.th
th.model = models.lstm
th.num_blocks = 1
th.memory_depth = 2
th.hidden_dim = 100
th.epoch = 50000
th.learning_rate = 1e-4
th.batch_size = 8
th.num_steps = 100
th.val_preheat = 500
th.validation_per_round = 2
th.print_cycle = 2
# th.train = False
th.smart_train = True
th.max_bad_apples = 4
th.lr_decay = 0.5
th.save_model = True
th.overwrite = True
th.export_note = True
th.summary = True
th.monitor = False
description = ''
th.mark = '{}x{}{}'.format(th.num_blocks, th.memory_depth, description)
core.activate()
if __name__ == '__main__':
tf.app.run()
|
[
"willi4m@zju.edu.cn"
] |
willi4m@zju.edu.cn
|
8df4ca9b5e0fe7752819761c02071cc06fc4b1a2
|
68e76ef27df38b0fe2c1c993a9c15896563f950d
|
/2 Практика Робот/robot-tasks-master/task_27.py
|
adb36b2f4c4d8b0d56eece184d7dc0228bed4853
|
[] |
no_license
|
Jumas-Cola/mipt_cs_on_python3_answers
|
72e9341656daa4afa35f8d39de917eb5471ee132
|
a2d128c4ce391bdeea6d20eb955855ad5bc5a0b4
|
refs/heads/master
| 2020-03-27T23:44:09.088994
| 2019-07-29T13:55:35
| 2019-07-29T13:55:35
| 147,341,552
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 242
|
py
|
#!/usr/bin/python3
from pyrob.api import *
@task
def task_7_5():
n = 0
i = 0
move_right()
while not wall_is_on_the_right():
if i==n:
fill_cell()
n+=1
i=0
move_right()
i+=1
if __name__ == '__main__':
run_tasks()
|
[
"kbbyfl91@gmail.com"
] |
kbbyfl91@gmail.com
|
8f90c43fdb9608255f7aee58ef6c166d69b5e865
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p00001/s858862768.py
|
ef31f0508caf06e2a57f4dcb7d9882d5a5cb5fd8
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 137
|
py
|
#coding: utf-8
temp = []
for i in range(10):
N = input()
temp.append(N)
temp.sort()
print temp[-1]
print temp[-2]
print temp[-3]
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
1aece47b2e3ac6f10e5639dc6cb4c83423d6d772
|
d01a8a10cb6b5bdde50e5f522cb8bd5012910393
|
/footfolder_25603/wsgi.py
|
615846566d9480ca8e337c50ac41ebfff8a07680
|
[] |
no_license
|
crowdbotics-apps/footfolder-25603
|
7231ab4c4db7f082ff1488bc69e00f637950b3a6
|
82be1f77f7f2658ab6244a1913b0a8b8109452b4
|
refs/heads/master
| 2023-04-02T20:48:15.819920
| 2021-04-09T21:13:24
| 2021-04-09T21:13:24
| 356,399,532
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 409
|
py
|
"""
WSGI config for footfolder_25603 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'footfolder_25603.settings')
application = get_wsgi_application()
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
d9442ec4991157e8ecacec81963170be831c63f4
|
3a28b1a12d0710c06f6360381ad8be6cf3707907
|
/modular_model/triHPC/triHPCThermo/HPCFeed2SelfCstmLiqEtlp_pP.py
|
50a3ca49480d7c661fda3be8fe4136376d2ddf41
|
[] |
no_license
|
WheatZhang/DynamicModelling
|
6ce1d71d3b55176fd4d77a6aedbaf87e25ce4d02
|
ea099245135fe73e8c9590502b9c8b87768cb165
|
refs/heads/master
| 2020-06-15T14:12:50.373047
| 2019-07-05T01:37:06
| 2019-07-05T01:37:06
| 195,319,788
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 247
|
py
|
def LiqEtlp_pP(P,T,x_N2):
x = (P-5.40396585e+02)/1.23902400e-01
y = (T--1.78102248e+02)/2.35833333e-03
z = (x_N2-9.97848887e-01)/4.61257558e-04
output = \
1*-7.45932272e-01
liq_etlp = output*1.00000000e+00+0.00000000e+00
return liq_etlp
|
[
"1052632241@qq.com"
] |
1052632241@qq.com
|
03e81ca2b65853869b3c82f32c0a5025d22b0183
|
b99d44bc1eea1681185429dab0c238e9fa45dc2e
|
/datatypes/Data/Functor/pure.src.py
|
11f50c806d872d8946d83dd63337787f3565129f
|
[] |
no_license
|
Testing-PureScript-Python/datatypes
|
0933218ed21329be440a0218ee996b206b9ecf50
|
838cbca1a87cb4eea8a22c796799fb7fad69191d
|
refs/heads/master
| 2021-01-05T21:33:40.556174
| 2020-02-24T05:02:32
| 2020-02-24T05:02:32
| 241,143,733
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,528
|
py
|
from py_sexpr.terms import *
from py_sexpr.stack_vm.emit import module_code
res = block( "No document"
, assign_star( "$foreign"
, call( var('import_module')
, "datatypes.ffi.Data.Functor" ) )
, assign_star( "ps_Control_Semigroupoid"
, call( var('import_module')
, "datatypes.Control.Semigroupoid.pure" ) )
, assign_star( "ps_Data_Function"
, call( var('import_module')
, "datatypes.Data.Function.pure" ) )
, assign_star( "ps_Data_Unit"
, call( var('import_module')
, "datatypes.Data.Unit.pure" ) )
, assign_star( "ps_Functor"
, define( None
, ["ps_map", ".this"]
, block( set_item( var(".this")
, "map"
, var("ps_map") )
, var(".this") ) ) )
, assign_star( "ps_map"
, define( None
, ["ps_dict"]
, block( ret( get_item( var("ps_dict")
, "map" ) ) ) ) )
, assign_star( "ps_mapFlipped"
, define( None
, ["ps_dictFunctor"]
, block( ret( define( None
, ["ps_fa"]
, block( ret( define( None
, [ "ps_f" ]
, block( ret( call( call( call( var( "ps_map" )
, var( "ps_dictFunctor" ) )
, var( "ps_f" ) )
, var( "ps_fa" ) ) ) ) ) ) ) ) ) ) ) )
, assign_star( "ps_void"
, define( None
, ["ps_dictFunctor"]
, block( ret( call( call( var("ps_map")
, var( "ps_dictFunctor" ) )
, call( get_item( var( "ps_Data_Function" )
, "const" )
, get_item( var( "ps_Data_Unit" )
, "unit" ) ) ) ) ) ) )
, assign_star( "ps_voidLeft"
, define( None
, ["ps_dictFunctor"]
, block( ret( define( None
, ["ps_f"]
, block( ret( define( None
, [ "ps_x" ]
, block( ret( call( call( call( var( "ps_map" )
, var( "ps_dictFunctor" ) )
, call( get_item( var( "ps_Data_Function" )
, "const" )
, var( "ps_x" ) ) )
, var( "ps_f" ) ) ) ) ) ) ) ) ) ) ) )
, assign_star( "ps_voidRight"
, define( None
, ["ps_dictFunctor"]
, block( ret( define( None
, ["ps_x"]
, block( ret( call( call( var( "ps_map" )
, var( "ps_dictFunctor" ) )
, call( get_item( var( "ps_Data_Function" )
, "const" )
, var( "ps_x" ) ) ) ) ) ) ) ) ) )
, assign_star( "ps_functorFn"
, new( var("ps_Functor")
, call( get_item( var("ps_Control_Semigroupoid")
, "compose" )
, get_item( var("ps_Control_Semigroupoid")
, "semigroupoidFn" ) ) ) )
, assign_star( "ps_functorArray"
, new( var("ps_Functor")
, get_item(var("$foreign"), "arrayMap") ) )
, assign_star( "ps_flap"
, define( None
, ["ps_dictFunctor"]
, block( ret( define( None
, ["ps_ff"]
, block( ret( define( None
, [ "ps_x" ]
, block( ret( call( call( call( var( "ps_map" )
, var( "ps_dictFunctor" ) )
, define( None
, [ "ps_f" ]
, block( ret( call( var( "ps_f" )
, var( "ps_x" ) ) ) ) ) )
, var( "ps_ff" ) ) ) ) ) ) ) ) ) ) ) )
, assign( "exports"
, record( ("Functor", var("ps_Functor"))
, ("map", var("ps_map"))
, ("mapFlipped", var("ps_mapFlipped"))
, ("void", var("ps_void"))
, ("voidRight", var("ps_voidRight"))
, ("voidLeft", var("ps_voidLeft"))
, ("flap", var("ps_flap"))
, ("functorFn", var("ps_functorFn"))
, ("functorArray", var("ps_functorArray")) ) ) )
res = module_code(res, filename="C:\\Users\\twshe\\Desktop\\mydb\\com-haskell\\testing\\datatypes\\.spago\\prelude\\v4.1.1\\src\\Data\\Functor.purs", name="datatypes.Data.Functor.pure")
|
[
"twshere@outlook.com"
] |
twshere@outlook.com
|
5c21a1f652a75a295576e53ff349f00acfdecbdf
|
9af29df29012ff521074aa1f3a6eacfe7f3eb6a9
|
/449/main.py
|
16040660dbe2ad5bb44c5084a592f09b3f83d7fe
|
[] |
no_license
|
rh01/spring-go-code
|
e3a0cd1246683aa1bed8b29baa1c8b5ea87253c6
|
706ad785cbc4abb3566710c6e2904fe743c1cc6f
|
refs/heads/master
| 2021-04-13T02:36:54.892843
| 2020-03-22T06:52:08
| 2020-03-22T06:52:08
| 249,129,346
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,301
|
py
|
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Codec:
def serialize(self, root):
"""Encodes a tree to a single string.
:type root: TreeNode
:rtype: str
"""
res = []
def encode(node):
if root is None:
res.append("#")
return
else:
res.append(str(node.val))
res.append()
encode(node.left)
encode(node.right)
encode(root)
return "_".join(res)
def deserialize(self, data):
"""Decodes your encoded data to tree.
:type data: str
:rtype: TreeNode
"""
ds = data.split("_")
l = len(ds)
def decode(level):
if level >= l:
return
if ds[level] == "#":
return None
node = TreeNode(ds[level])
node.left(level+1)
node.right(level+1)
return node
return decode(0)
# Your Codec object will be instantiated and called as such:
# codec = Codec()
# codec.deserialize(codec.serialize(root))
|
[
"1048157315@qq.com"
] |
1048157315@qq.com
|
1d6041b82892195a800cd204f526e36d4a3cbb2b
|
23f8076467b0cc276a91a3ee0a6fbd6e30d3905d
|
/resource/trac-plugins/workfloweditorplugin/workfloweditor/workfloweditor_admin.py
|
2a3d481fc0c5eefd49d67bd1a17ef4200bf1bb60
|
[] |
no_license
|
okamototk/kanonconductor
|
b6f7d04b2e5c5acd6fd4ee6f6d2f05a4b02bce45
|
e6335c264e7a9a2e961e9a72db3660c2da1c24e3
|
refs/heads/master
| 2021-01-13T02:11:45.895523
| 2012-11-22T14:03:02
| 2012-11-22T14:03:02
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,149
|
py
|
# -*- coding: utf-8 -*-
from trac.core import *
from trac.web.chrome import ITemplateProvider, add_stylesheet, add_script
from trac.admin import IAdminPanelProvider
from trac.web.api import ITemplateStreamFilter, IRequestHandler
from trac.web.chrome import Chrome
from api import LocaleUtil
class WorkflowEditorAdmin(Component):
implements(ITemplateProvider, ITemplateStreamFilter, IAdminPanelProvider)
# ITemplateProvider method
def get_htdocs_dirs(self):
from pkg_resources import resource_filename
return [('workfloweditor', resource_filename(__name__, 'htdocs'))]
# ITemplateProvider method
def get_templates_dirs(self):
from pkg_resources import resource_filename
return [resource_filename(__name__, 'templates')]
# ITemplateStreamFilter method
def filter_stream(self, req, method, filename, stream, data):
return stream
# IAdminPanelProvider method
def get_admin_panels(self, req):
if req.perm.has_permission('TRAC_ADMIN'):
# localization
locale = LocaleUtil().get_locale(req)
if (locale == 'ja'):
yield ('ticket', u'チケットシステム', 'workfloweditor', u'ワークフロー')
else:
yield ('ticket', 'Ticket System', 'workfloweditor', 'Workflow')
# IAdminPanelProvider method
def render_admin_panel(self, req, cat, page, path_info):
req.perm.assert_permission('TRAC_ADMIN')
add_script(req, 'workfloweditor/js/jquery.jqGrid.js')
add_script(req, 'workfloweditor/js/grid/jqModal.js')
add_script(req, 'workfloweditor/js/grid/jqDnR.js')
add_script(req, 'workfloweditor/js/grid/jquery.tablednd.js')
add_script(req, 'workfloweditor/js/ui/ui.core.js')
add_script(req, 'workfloweditor/js/ui/ui.tabs.pack.js')
add_script(req, 'workfloweditor/js/workfloweditor.js')
add_stylesheet(req, 'workfloweditor/css/grid.css')
add_stylesheet(req, 'workfloweditor/css/jqModal.css')
add_stylesheet(req, 'workfloweditor/css/ui.tabs.css')
add_stylesheet(req, 'workfloweditor/css/workfloweditor.css')
if req.method == 'POST':
self._update_config(req)
page_param = {}
self._create_page_param(req, page_param)
# localization
locale = LocaleUtil().get_locale(req)
if (locale == 'ja'):
add_script(req, 'workfloweditor/js/workfloweditor-locale-ja.js')
page_template = 'workfloweditor_admin_ja.html'
else:
page_template = 'workfloweditor_admin.html'
return page_template, {'template': page_param}
def _update_config(self, req):
# get ticket-workflow section
section = self.config._sections['ticket-workflow']
# delete old data
for (name, value) in section.options():
self.config.remove('ticket-workflow', name)
# parse input data
input_string = req.args['workflow_config']
config_list = input_string.split('\n')
for config_string in config_list:
if config_string.find('=') == -1:
continue
(name, value) = config_string.split('=', 1)
# set to memory
section.set(name.strip(), value.strip())
# save to file
self.config.save()
def _create_page_param(self, req, page_param):
# page_param['workflow_config']
# sort config for display
section = self.config._sections['ticket-workflow']
name_list = []
for (name, value) in section.options():
name_list.append(name)
name_list.sort()
# create config data for display
ret_val = ''
for name in name_list:
ret_val += name + '=' + section.get(name) + '\n'
page_param['workflow_config'] = ret_val
# page_param['workflow_default_config']
# localization
locale = LocaleUtil().get_locale(req)
if (locale == 'ja'):
init_file = 'trac_jp.ini'
else:
init_file = 'trac.ini'
# read defalut config
template = Chrome(self.env).load_template(init_file, 'text')
stream = template.generate()
default_config = stream.render('text')
page_param['workflow_default_config'] = default_config.decode('utf-8')
class WorkflowChangeHandler(Component):
implements(IRequestHandler)
# IRequestHandler method
def match_request(self, req):
match = False
if req.path_info == '/admin/ticket/workfloweditor/edit':
match = True
return match
# IRequestHandler method
def process_request(self, req):
req.send_response(200)
req.send_header('Content-Type', 'content=text/html; charset=UTF-8')
req.end_headers()
req.write("OK")
|
[
"devnull@localhost"
] |
devnull@localhost
|
e83b9ea8504433bfed386da9ae633fcd91098dd9
|
73c01a3f052f8ef63890ec3c2e28403ad41e9a71
|
/td/migrations/0004_auto_20170207_1445.py
|
47bae81cbf8dd662e87669a0574724e21c14fb19
|
[] |
no_license
|
Jokey90/aho
|
4c007c65c819efb726a732a8f36067c5a0226100
|
8bcd41e9ef7d40f07499429f385d4fec590636f6
|
refs/heads/master
| 2020-03-21T22:28:36.395996
| 2018-06-29T09:25:05
| 2018-06-29T09:25:05
| 139,128,834
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,958
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-02-07 14:45
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('main', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('td', '0003_proxytracking'),
]
operations = [
migrations.CreateModel(
name='Item',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type', models.CharField(choices=[('pts', 'ПТС'), ('sts', 'СТС'), ('diag_card', 'Диагностическая карта'), ('fuel_card', 'Топливная карта'), ('corp_card', 'Корпоративная карта'), ('osago', 'Полис ОСАГО'), ('kasko', 'Полис КАСКО'), ('key-car', 'Ключ от автомобиля'), ('key-alarm', 'Ключ от сигнализации'), ('key-card', 'Ключ-карта'), ('key-parking', 'Пропуск на парковку')], max_length=50, verbose_name='Тип ключа')),
('number', models.CharField(blank=True, default='', max_length=50, null=True, verbose_name='Номер')),
('pin', models.CharField(blank=True, default='', max_length=50, null=True, verbose_name='Пин код')),
('start_date', models.DateField(blank=True, null=True, verbose_name='Дата начала')),
('end_date', models.DateField(blank=True, null=True, verbose_name='Дата окончания')),
('company', models.CharField(blank=True, default='', max_length=50, null=True, verbose_name='Компания')),
('comment', models.CharField(blank=True, default='', max_length=255, null=True, verbose_name='Комментарий')),
('active', models.BooleanField(default=True, verbose_name='Активен')),
('scan', models.FileField(blank=True, null=True, upload_to='scans/documents/', verbose_name='Скан')),
],
options={
'verbose_name': 'Атрибут',
'verbose_name_plural': 'Атрибуты',
},
),
migrations.CreateModel(
name='ItemTracking',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField(verbose_name='Дата передачи')),
('add_date', models.DateTimeField(auto_now_add=True)),
('added_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='td.Proxy', verbose_name='Доверенность')),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.Employee', verbose_name='Кому выдана')),
],
options={
'verbose_name': 'Выдача документов/ключей',
'verbose_name_plural': 'Выдачи документов/ключей',
},
),
migrations.RemoveField(
model_name='key',
name='car',
),
migrations.RemoveField(
model_name='key',
name='owner',
),
migrations.RemoveField(
model_name='car',
name='corp_card_number',
),
migrations.RemoveField(
model_name='car',
name='corp_card_pin',
),
migrations.RemoveField(
model_name='car',
name='corp_card_place',
),
migrations.RemoveField(
model_name='car',
name='corp_card_scan',
),
migrations.RemoveField(
model_name='car',
name='diag_card_date',
),
migrations.RemoveField(
model_name='car',
name='diag_card_number',
),
migrations.RemoveField(
model_name='car',
name='diag_card_place',
),
migrations.RemoveField(
model_name='car',
name='diag_card_scan',
),
migrations.RemoveField(
model_name='car',
name='fuel_card_date',
),
migrations.RemoveField(
model_name='car',
name='fuel_card_number',
),
migrations.RemoveField(
model_name='car',
name='fuel_card_pin',
),
migrations.RemoveField(
model_name='car',
name='fuel_card_place',
),
migrations.RemoveField(
model_name='car',
name='fuel_card_scan',
),
migrations.RemoveField(
model_name='car',
name='kasko_company',
),
migrations.RemoveField(
model_name='car',
name='kasko_date',
),
migrations.RemoveField(
model_name='car',
name='kasko_number',
),
migrations.RemoveField(
model_name='car',
name='kasko_place',
),
migrations.RemoveField(
model_name='car',
name='kasko_scan',
),
migrations.RemoveField(
model_name='car',
name='osago_company',
),
migrations.RemoveField(
model_name='car',
name='osago_date',
),
migrations.RemoveField(
model_name='car',
name='osago_number',
),
migrations.RemoveField(
model_name='car',
name='osago_place',
),
migrations.RemoveField(
model_name='car',
name='osago_scan',
),
migrations.RemoveField(
model_name='car',
name='pts_number',
),
migrations.RemoveField(
model_name='car',
name='pts_place',
),
migrations.RemoveField(
model_name='car',
name='pts_scan',
),
migrations.RemoveField(
model_name='car',
name='sts_number',
),
migrations.RemoveField(
model_name='car',
name='sts_place',
),
migrations.RemoveField(
model_name='car',
name='sts_scan',
),
migrations.DeleteModel(
name='Key',
),
migrations.AddField(
model_name='item',
name='car',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='td.Car', verbose_name='Автомобиль'),
),
]
|
[
"Kishkurno_AS@dsdf.cds.ru"
] |
Kishkurno_AS@dsdf.cds.ru
|
efc9bfc1f5bb128ff465c9c98e06e75c9ccd4d0f
|
7c3116ca951c1c989fcc6cd673993ce6b1d4be5a
|
/modules/iconcache_connector.py
|
626ff1385dbd7b7fefbfaf8cae8c755cde714200
|
[
"Apache-2.0"
] |
permissive
|
Kimwonkyung/carpe
|
c8c619c29350d6edc464dbd9ba85aa3b7f847b8a
|
58a8bf7a7fc86a07867890c2ce15c7271bbe8e78
|
refs/heads/master
| 2022-12-15T13:51:47.678875
| 2020-09-11T05:25:43
| 2020-09-11T05:25:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,068
|
py
|
# -*- coding: utf-8 -*-
import os
from modules import manager
from modules import interface
from modules import logger
from modules.windows_iconcache import IconCacheParser as ic
from dfvfs.lib import definitions as dfvfs_definitions
class IconCacheConnector(interface.ModuleConnector):
NAME = 'iconcache_connector'
DESCRIPTION = 'Module for iconcache_connector'
_plugin_classes = {}
def __init__(self):
super(IconCacheConnector, self).__init__()
def Connect(self, configuration, source_path_spec, knowledge_base):
print('[MODULE]: IconCacheConnector Connect')
this_file_path = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'schema' + os.sep
# 모든 yaml 파일 리스트
yaml_list = [this_file_path + 'lv1_os_win_icon_cache.yaml']
# 모든 테이블 리스트
table_list = ['lv1_os_win_icon_cache']
# 모든 테이블 생성
if not self.check_table_from_yaml(configuration, yaml_list, table_list):
return False
try:
if source_path_spec.parent.type_indicator != dfvfs_definitions.TYPE_INDICATOR_TSK_PARTITION:
par_id = configuration.partition_list['p1']
else:
par_id = configuration.partition_list[getattr(source_path_spec.parent, 'location', None)[1:]]
if par_id == None:
return False
owner = ''
query = f"SELECT name, parent_path, extension FROM file_info WHERE par_id='{par_id}' " \
f"and extension = 'db' and size > 24 and name regexp 'iconcache_[0-9]' and ("
for user_accounts in knowledge_base._user_accounts.values():
for hostname in user_accounts.values():
if hostname.identifier.find('S-1-5-21') == -1:
continue
query += f"parent_path like '%{hostname.username}%' or "
query = query[:-4] + ");"
#print(query)
iconcache_files = configuration.cursor.execute_query_mul(query)
#print(f'iconcache_files: {len(iconcache_files)}')
if len(iconcache_files) == 0:
return False
insert_iconcache_info = []
for iconcache in iconcache_files:
iconcache_path = iconcache[1][iconcache[1].find('/'):] + '/' + iconcache[0] # document full path
fileExt = iconcache[2]
fileName = iconcache[0]
owner = iconcache[1][iconcache[1].find('/'):].split('/')[2]
# Windows.old 폴더 체크
if 'Windows.old' in iconcache_path:
fileExt = iconcache[2]
fileName = iconcache[0]
owner = iconcache[1][iconcache[1].find('/'):].split('/')[3] + "(Windows.old)"
output_path = configuration.root_tmp_path + os.sep + configuration.case_id + os.sep + configuration.evidence_id + os.sep + par_id
img_output_path = output_path + os.sep + "iconcache_img" + os.sep + owner + os.sep + fileName[:-3]
self.ExtractTargetFileToPath(
source_path_spec=source_path_spec,
configuration=configuration,
file_path=iconcache_path,
output_path=output_path)
fn = output_path + os.path.sep + fileName
app_path = os.path.abspath(os.path.dirname(__file__)) + os.path.sep + "windows_iconcache"
results = ic.main(fn, app_path, img_output_path)
if not results:
os.remove(output_path + os.sep + fileName)
continue
for i in range(len(results["ThumbsData"])):
if i == 0:
continue
result = results["ThumbsData"][i]
filename = result[0]
filesize = result[1]
imagetype = result[2]
data = result[3]
sha1 = result[4]
tmp = []
tmp.append(par_id)
tmp.append(configuration.case_id)
tmp.append(configuration.evidence_id)
tmp.append(owner)
tmp.append(filename)
tmp.append(filesize)
tmp.append(imagetype)
tmp.append(data)
tmp.append(sha1)
insert_iconcache_info.append(tuple(tmp))
os.remove(output_path + os.sep + fileName)
# IconCache
print('[MODULE]: IconCache')
query = "Insert into lv1_os_win_icon_cache values (%s, %s, %s, %s, %s, %s, %s, %s, %s);"
configuration.cursor.bulk_execute(query, insert_iconcache_info)
print('[MODULE]: IconCache Complete')
except Exception as e:
print("IconCache Connector Error", e)
manager.ModulesManager.RegisterModule(IconCacheConnector)
|
[
"jbc0729@gmail.com"
] |
jbc0729@gmail.com
|
9bc346a7ae74d0660986cc88c38cd2578d71e903
|
741333ced9ea1b326997dc031e5de27529bad04a
|
/glue_vispy_viewers/extern/vispy/testing/__init__.py
|
d8a7266c45d1e6c776b6e2397a6158aff13fb7d4
|
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
jzuhone/glue-vispy-viewers
|
f1b7f506d3263c4b0c2f4032d4940b931b2c1ada
|
d940705f4ba95f8d7a9a74d37fb68c71080b490a
|
refs/heads/master
| 2020-06-20T19:10:02.866527
| 2019-06-24T11:40:39
| 2019-06-24T11:40:39
| 197,217,964
| 0
| 0
|
BSD-2-Clause
| 2019-07-16T15:14:53
| 2019-07-16T15:14:52
| null |
UTF-8
|
Python
| false
| false
| 2,415
|
py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Testing
=======
This module provides functions useful for running tests in vispy.
Tests can be run in a few ways:
* From Python, you can import ``vispy`` and do ``vispy.test()``.
* From the source root, you can do ``make test`` which wraps to
a call to ``python make test``.
There are various diffrent testing "modes", including:
* "full": run all tests.
* any backend name (e.g., "glfw"): run application/GL tests using a
specific backend.
* "nobackend": run tests that do not require a backend.
* "examples": run repo examples to check for errors and warnings.
* "flake": check style errors.
Examples get automatically tested unless they have a special comment toward
the top ``# vispy: testskip``. Examples that should be tested should be
formatted so that 1) a ``Canvas`` class is defined, or a ``canvas`` class
is instantiated; and 2) the ``app.run()`` call is protected by a check
if ``__name__ == "__main__"``. This makes it so that the event loop is not
started when running examples in the test suite -- the test suite instead
manually updates the canvas (using ``app.process_events()``) for under one
second to ensure that things like timer events are processed.
For examples on how to test various bits of functionality (e.g., application
functionality, or drawing things with OpenGL), it's best to look at existing
examples in the test suite.
The code base gets automatically tested by Travis-CI (Linux) and AppVeyor
(Windows) on Python 2.6, 2.7, 3.4. There are multiple testing modes that
use e.g. full dependencies, minimal dependencies, etc. See ``.travis.yml``
to determine what automatic tests are run.
"""
from ._testing import (SkipTest, requires_application, requires_ipython, # noqa
requires_img_lib, # noqa
has_backend, requires_pyopengl, # noqa
requires_scipy, has_matplotlib, # noqa
save_testing_image, TestingCanvas, has_pyopengl, # noqa
run_tests_if_main,
assert_is, assert_in, assert_not_in, assert_equal,
assert_not_equal, assert_raises, assert_true, # noqa
raises) # noqa
from ._runners import test # noqa
|
[
"thomas.robitaille@gmail.com"
] |
thomas.robitaille@gmail.com
|
84d070f945255f5301c9b2a7602b9f80389ce53e
|
45f93a9d47204d76b8bf25a71dfb79403e75c33c
|
/next-greater-element-i.py
|
36f563052966cec348db92db4c5eb4254417f009
|
[] |
no_license
|
tahmid-tanzim/problem-solving
|
0173bce1973ac3e95441a76c10324c0e1b0a57c3
|
6ddb51de6772130f209474e76f39ca2938f444f0
|
refs/heads/master
| 2023-06-25T02:18:03.690263
| 2023-06-20T06:58:46
| 2023-06-20T06:58:46
| 137,173,850
| 4
| 1
| null | 2022-03-30T08:28:41
| 2018-06-13T06:44:25
|
Python
|
UTF-8
|
Python
| false
| false
| 1,340
|
py
|
#!/usr/bin/python3
# https://leetcode.com/problems/next-greater-element-i/
from typing import List
# Time O(n)
# Space O(1)
def nextGreaterElement(nums1: List[int], nums2: List[int]) -> List[int]:
nums1Index = {}
for i, n1 in enumerate(nums1):
nums1Index[n1] = i
nums1[i] = -1
n = len(nums2)
stack = []
for i in range(n):
while len(stack) > 0 and nums2[i] > stack[-1]:
value = stack.pop()
index = nums1Index[value]
nums1[index] = nums2[i]
if nums2[i] in nums1Index:
stack.append(nums2[i])
return nums1
if __name__ == '__main__':
inputs = (
{
"nums1": [4, 1, 2],
"nums2": [1, 3, 4, 2],
"expected": [-1, 3, -1]
},
{
"nums1": [2, 4],
"nums2": [1, 2, 3, 4],
"expected": [3, -1]
},
)
test_passed = 0
for idx, val in enumerate(inputs):
output = nextGreaterElement(val["nums1"], val["nums2"])
if output == val['expected']:
print(f"{idx}. CORRECT Answer\nOutput: {output}\nExpected: {val['expected']}\n")
test_passed += 1
else:
print(f"{idx}. WRONG Answer\nOutput:{output}\nExpected:{val['expected']}\n")
print(f"Passed: {test_passed:3}/{idx + 1}\n")
|
[
"tahmid.tanzim@gmail.com"
] |
tahmid.tanzim@gmail.com
|
c1e42865ba3ad91fb08d55eeef704a910a0e7138
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02771/s406306655.py
|
4ada1942e5bf7fc9e5cd7254eff15a8aba906b50
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 170
|
py
|
A, B, C = map(int,input().split())
if A == B and B != C:
print('Yes')
elif C == B and C != A:
print('Yes')
elif A == C and C != B:
print('Yes')
else:
print('No')
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
6f101ccbdad195f757a4123805328db4ef6ba9c5
|
ef5f98cdaca58bc9c1ba1a94a1ccf7bebc3f1260
|
/ir_actions_act_url.py
|
d2968068725bcdde7011beb4ddd33709c85f326e
|
[
"MIT"
] |
permissive
|
tonygalmiche/is_plastigray
|
512ad911b3118c6aa2aab49f64ad7871fb80f195
|
774feea510fc0854776016dbbbc7472ebd1248c5
|
refs/heads/master
| 2023-07-25T21:49:56.284434
| 2023-07-18T13:15:28
| 2023-07-18T13:15:28
| 24,811,999
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,794
|
py
|
# -*- coding: utf-8 -*-
from openerp import models, fields, api, _
from openerp.http import request
class ir_actions_act_url(models.Model):
_inherit = 'ir.actions.act_url'
# def get_soc(self, cr, uid):
# user = self.pool['res.users'].browse(cr, uid, [uid])[0]
# soc = user.company_id.partner_id.is_code
# return soc
def get_company(self, cr, uid):
user = self.pool['res.users'].browse(cr, uid, [uid])[0]
company = user.company_id
return company
def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'):
if not context: context = {}
results = super(ir_actions_act_url, self).read(cr, uid, ids, fields=fields, context=context, load=load)
if load=='_classic_read' and len(ids) == 1:
if results[0]['name']==u'is_url_parc_presses_action':
user = self.pool['res.users'].browse(cr, uid, [uid], context=context)[0]
soc = user.company_id.is_code_societe
ip = request.httprequest.environ['REMOTE_ADDR']
url='http://raspberry-theia/atelier.php?atelier=inj&soc='+str(soc)+'&uid='+str(uid)
results[0].update({'url': url})
if results[0]['name']==u'is_url_parc_presses_new_action':
user = self.pool['res.users'].browse(cr, uid, [uid], context=context)[0]
soc = user.company_id.is_code_societe
ip = request.httprequest.environ['REMOTE_ADDR']
url='http://raspberry-theia4/atelier.php?atelier=inj&soc='+str(soc)+'&uid='+str(uid)
results[0].update({'url': url})
if results[0]['name']==u'is_url_parc_assemblage_action':
user = self.pool['res.users'].browse(cr, uid, [uid], context=context)[0]
soc = user.company_id.is_code_societe
ip = request.httprequest.environ['REMOTE_ADDR']
url='http://raspberry-theia4/atelier.php?atelier=ass&soc='+str(soc)+'&uid='+str(uid)
results[0].update({'url': url})
if results[0]['name']==u'is_url_indicateur_rebuts_action':
user = self.pool['res.users'].browse(cr, uid, [uid], context=context)[0]
soc = user.company_id.is_code_societe
ip = request.httprequest.environ['REMOTE_ADDR']
url='http://odoo/odoo-theia/rebuts.php?soc='+str(soc)+'&uid='+str(uid)
results[0].update({'url': url})
if results[0]['name']==u'is_url_indicateur_trs_action':
user = self.pool['res.users'].browse(cr, uid, [uid], context=context)[0]
soc = user.company_id.is_code_societe
ip = request.httprequest.environ['REMOTE_ADDR']
url='http://odoo/odoo-theia/trs.php?soc='+str(soc)+'&uid='+str(uid)
results[0].update({'url': url})
#if results[0]['name']==u'is_url_indicateur_trs_new_action':
# user = self.pool['res.users'].browse(cr, uid, [uid], context=context)[0]
# soc = user.company_id.is_code_societe
# ip = request.httprequest.environ['REMOTE_ADDR']
# url='http://odoo/odoo-theia/trs-new.php?soc='+str(soc)+'&uid='+str(uid)
# results[0].update({'url': url})
if results[0]['name']==u'is_url_planning_action':
ip = request.httprequest.environ['REMOTE_ADDR']
company=self.get_company(cr,uid)
soc=company.partner_id.is_code
url=company.is_url_intranet_odoo or ''
url=url+'/odoo-erp/planning/?soc='+str(soc)+'&uid='+str(uid)
results[0].update({'url': url})
if results[0]['name']==u'is_url_analyse_cbn_action':
ip = request.httprequest.environ['REMOTE_ADDR']
company=self.get_company(cr,uid)
soc=company.partner_id.is_code
url=company.is_url_intranet_odoo or ''
url=url+'/odoo-erp/cbn/Sugestion_CBN.php?Soc='+str(soc)+'&product_id=&uid='+str(uid)
results[0].update({'url': url})
if results[0]['name']==u'is_url_pic_3_ans_action':
ip = request.httprequest.environ['REMOTE_ADDR']
company=self.get_company(cr,uid)
soc=company.partner_id.is_code
url=company.is_url_intranet_odoo or ''
url=url+'/odoo-erp/analyses/pic-3-ans.php?Soc='+str(soc)+'&uid='+str(uid)
results[0].update({'url': url})
if results[0]['name']==u'is_url_pic_3_mois':
ip = request.httprequest.environ['REMOTE_ADDR']
company=self.get_company(cr,uid)
soc=company.partner_id.is_code
url=company.is_url_intranet_odoo or ''
url=url+'/odoo-erp/analyses/pic-3-mois.php?Soc='+str(soc)+'&uid='+str(uid)
results[0].update({'url': url})
if results[0]['name']==u'is_url_theia':
company=self.get_company(cr,uid)
soc=company.partner_id.is_code
url=company.is_url_odoo_theia or ''
#url='http://odoo-cpi1'
#if soc=='3':
# url='http://odoo-theia3'
#if soc=='4':
# url='http://odoo-theia4'
results[0].update({'url': url})
if results[0]['name']==u'is_url_theia_suivi_prod':
company=self.get_company(cr,uid)
soc=company.partner_id.is_code
url=company.is_url_intranet_theia or ''
url=url+'/atelier.php?soc='+soc
results[0].update({'url': url})
#if results[0]['name']==u'is_url_theia_rebuts':
# company=self.get_company(cr,uid)
# soc=company.partner_id.is_code
# url=company.is_url_intranet_odoo or ''
# url=url+'/odoo-cpi/rebuts.php?soc='+soc
# results[0].update({'url': url})
#if results[0]['name']==u'is_url_theia_trs':
# company=self.get_company(cr,uid)
# soc=company.partner_id.is_code
# url=company.is_url_intranet_odoo or ''
# url=url+'/odoo-cpi/trs.php?soc='+soc
# results[0].update({'url': url})
return results
|
[
"tony.galmiche@infosaone.com"
] |
tony.galmiche@infosaone.com
|
c6431345804aab5097e1f006eec9f898f72ba44b
|
bc8509d57a162fb685da06a98c67dc8130d96316
|
/src/slim/nets/cyclegan_test.py
|
593af0862b804778ed17a102fa23ecd5566d776a
|
[
"Apache-2.0"
] |
permissive
|
Ptolemy-DL/Ptolemy
|
2065e2d157d641010567062410bee4608691d059
|
f72a531286d17c69e0e2e84d0ad8a5b0587e2e08
|
refs/heads/master
| 2023-05-29T08:58:18.328258
| 2021-06-15T09:28:16
| 2021-06-15T09:28:16
| 284,590,756
| 115
| 5
|
NOASSERTION
| 2020-10-24T04:18:51
| 2020-08-03T03:06:35
|
Python
|
UTF-8
|
Python
| false
| false
| 4,398
|
py
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.contrib.slim.nets.cyclegan."""
from __future__ import absolute_import, division, print_function
import tensorflow as tf
from nets import cyclegan
# TODO(joelshor): Add a test to check generator endpoints.
class CycleganTest(tf.test.TestCase):
def test_generator_inference(self):
"""Check one inference step."""
img_batch = tf.zeros([2, 32, 32, 3])
model_output, _ = cyclegan.cyclegan_generator_resnet(img_batch)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
sess.run(model_output)
def _test_generator_graph_helper(self, shape):
"""Check that generator can take small and non-square inputs."""
output_imgs, _ = cyclegan.cyclegan_generator_resnet(tf.ones(shape))
self.assertAllEqual(shape, output_imgs.shape.as_list())
def test_generator_graph_small(self):
self._test_generator_graph_helper([4, 32, 32, 3])
def test_generator_graph_medium(self):
self._test_generator_graph_helper([3, 128, 128, 3])
def test_generator_graph_nonsquare(self):
self._test_generator_graph_helper([2, 80, 400, 3])
def test_generator_unknown_batch_dim(self):
"""Check that generator can take unknown batch dimension inputs."""
img = tf.placeholder(tf.float32, shape=[None, 32, None, 3])
output_imgs, _ = cyclegan.cyclegan_generator_resnet(img)
self.assertAllEqual([None, 32, None, 3], output_imgs.shape.as_list())
def _input_and_output_same_shape_helper(self, kernel_size):
img_batch = tf.placeholder(tf.float32, shape=[None, 32, 32, 3])
output_img_batch, _ = cyclegan.cyclegan_generator_resnet(
img_batch, kernel_size=kernel_size)
self.assertAllEqual(img_batch.shape.as_list(),
output_img_batch.shape.as_list())
def input_and_output_same_shape_kernel3(self):
self._input_and_output_same_shape_helper(3)
def input_and_output_same_shape_kernel4(self):
self._input_and_output_same_shape_helper(4)
def input_and_output_same_shape_kernel5(self):
self._input_and_output_same_shape_helper(5)
def input_and_output_same_shape_kernel6(self):
self._input_and_output_same_shape_helper(6)
def _error_if_height_not_multiple_of_four_helper(self, height):
self.assertRaisesRegexp(
ValueError,
'The input height must be a multiple of 4.',
cyclegan.cyclegan_generator_resnet,
tf.placeholder(tf.float32, shape=[None, height, 32, 3]))
def test_error_if_height_not_multiple_of_four_height29(self):
self._error_if_height_not_multiple_of_four_helper(29)
def test_error_if_height_not_multiple_of_four_height30(self):
self._error_if_height_not_multiple_of_four_helper(30)
def test_error_if_height_not_multiple_of_four_height31(self):
self._error_if_height_not_multiple_of_four_helper(31)
def _error_if_width_not_multiple_of_four_helper(self, width):
self.assertRaisesRegexp(
ValueError,
'The input width must be a multiple of 4.',
cyclegan.cyclegan_generator_resnet,
tf.placeholder(tf.float32, shape=[None, 32, width, 3]))
def test_error_if_width_not_multiple_of_four_width29(self):
self._error_if_width_not_multiple_of_four_helper(29)
def test_error_if_width_not_multiple_of_four_width30(self):
self._error_if_width_not_multiple_of_four_helper(30)
def test_error_if_width_not_multiple_of_four_width31(self):
self._error_if_width_not_multiple_of_four_helper(31)
if __name__ == '__main__':
tf.test.main()
|
[
"ygan10@ur.rochester.edu"
] |
ygan10@ur.rochester.edu
|
7f02a5306eb52284c806b3ab7df0859ec1c2c51c
|
8e69eee9b474587925e22413717eb82e4b024360
|
/v2.5.7/toontown/speedchat/TTSCSellbotInvasionMenu.py
|
8118a9bd04309cc70834ec5261fd1357faa3fcfd
|
[
"MIT"
] |
permissive
|
TTOFFLINE-LEAK/ttoffline
|
afaef613c36dc3b70514ccee7030ba73c3b5045b
|
bb0e91704a755d34983e94288d50288e46b68380
|
refs/heads/master
| 2020-06-12T15:41:59.411795
| 2020-04-17T08:22:55
| 2020-04-17T08:22:55
| 194,348,185
| 5
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,641
|
py
|
from otp.otpbase import PythonUtil
from otp.speedchat.SCMenu import SCMenu
from otp.speedchat.SCMenuHolder import SCMenuHolder
from otp.speedchat.SCStaticTextTerminal import SCStaticTextTerminal
from toontown.speedchat.TTSCIndexedTerminal import TTSCIndexedTerminal
from otp.otpbase import OTPLocalizer
SellbotInvasionMenu = [
(
OTPLocalizer.SellbotInvasionMenuSections[0], range(30400, 30404))]
class TTSCSellbotInvasionMenu(SCMenu):
def __init__(self):
SCMenu.__init__(self)
self.__messagesChanged()
def destroy(self):
SCMenu.destroy(self)
def clearMenu(self):
SCMenu.clearMenu(self)
def __messagesChanged(self):
self.clearMenu()
try:
lt = base.localAvatar
except:
return
for section in SellbotInvasionMenu:
if section[0] == -1:
for phrase in section[1]:
if phrase not in OTPLocalizer.SpeedChatStaticText:
print 'warning: tried to link Winter phrase %s which does not seem to exist' % phrase
break
self.append(SCStaticTextTerminal(phrase))
else:
menu = SCMenu()
for phrase in section[1]:
if phrase not in OTPLocalizer.SpeedChatStaticText:
print 'warning: tried to link Halloween phrase %s which does not seem to exist' % phrase
break
menu.append(SCStaticTextTerminal(phrase))
menuName = str(section[0])
self.append(SCMenuHolder(menuName, menu))
|
[
"s0mberdemise@protonmail.com"
] |
s0mberdemise@protonmail.com
|
1d602b92c8fee5c4b37ab8b37affd34b151a01b7
|
9a343c495459e79dc408a102730bcaeac7fa8886
|
/CMDB_SYSTEM/autoclient/src/plugins/board.py
|
3da6198756f35073241b05ffde6ee417b3325017
|
[
"MIT"
] |
permissive
|
MMingLeung/Python_Study
|
62d3ae92bf6760de0804aa5792f53fb3799486a2
|
4ff1d02d2b6dd54e96f7179fa000548936b691e7
|
refs/heads/master
| 2022-12-27T12:53:05.186800
| 2018-03-07T04:34:36
| 2018-03-07T04:34:36
| 92,124,981
| 3
| 1
|
MIT
| 2021-06-10T18:35:33
| 2017-05-23T03:28:52
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 1,639
|
py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import os
from lib.conf.config import settings
class Board(object):
def __init__(self):
pass
@classmethod
def initial(cls):
return cls()
def process(self, command_func, debug):
if debug:
output = open(os.path.join(settings.BASE_DIR, 'debug_files/board.out'), 'r', encoding='utf-8').read()
else:
output = command_func("sudo dmidecode -t1")
return self.parse(output)
'''
SMBIOS 2.7 present.
Handle 0x0001, DMI type 1, 27 bytes
System Information
Manufacturer: Parallels Software International Inc.
Product Name: Parallels Virtual Platform
Version: None
Serial Number: Parallels-1A 1B CB 3B 64 66 4B 13 86 B0 86 FF 7E 2B 20 30
UUID: 3BCB1B1A-6664-134B-86B0-86FF7E2B2030
Wake-up Type: Power Switch
SKU Number: Undefined
Family: Parallels VM
'''
def parse(self, content):
result = {}
key_map = {
'Manufacturer': 'manufacturer',
'Product Name': 'model',
'Serial Number': 'sn',
}
#循环遍历切分的信息
for item in content.split('\n'):
#根据":"切分
row_data = item.strip().split(':')
#如果列表长度是2
if len(row_data) == 2:
#所需信息的key与遍历的数据匹配
if row_data[0] in key_map:
#赋值
result[key_map[row_data[0]]] = row_data[1].strip() if row_data[1] else row_data[1]
return result
|
[
"mingmingleung1991@gmail.com"
] |
mingmingleung1991@gmail.com
|
d54177f734b5221d7c6a4fe45641dab66c10a92b
|
7bd5ca970fbbe4a3ed0c7dadcf43ba8681a737f3
|
/codeforces/cf251-275/cf255/b2.py
|
ce0f660a579831dc53f02607d6e65c334c0b0e0d
|
[] |
no_license
|
roiti46/Contest
|
c0c35478cd80f675965d10b1a371e44084f9b6ee
|
c4b850d76796c5388d2e0d2234f90dc8acfaadfa
|
refs/heads/master
| 2021-01-17T13:23:30.551754
| 2017-12-10T13:06:42
| 2017-12-10T13:06:42
| 27,001,893
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 317
|
py
|
# -*- coding: utf-8 -*-
import sys,copy,math,heapq,itertools as it,fractions,re,bisect,collections as coll
s = raw_input()
k = int(raw_input())
w = map(int, raw_input().split())
c = chr(w.index(max(w)) + ord("a"))
s += c * k
ans = 0
for i, si in enumerate(s):
ans += w[ord(s[i]) - ord("a")] * (i + 1)
print ans
|
[
"roiti46@gmail.com"
] |
roiti46@gmail.com
|
7e8ffd682bae9bbaff7485ca16af4ec980de96df
|
89a90707983bdd1ae253f7c59cd4b7543c9eda7e
|
/programming_python/Gui/Tools/queuetest-gui.py
|
af7ab12cc6f3f174c8f6247680d767335d9e5fb5
|
[] |
no_license
|
timothyshull/python_reference_code
|
692a7c29608cadfd46a6cc409a000023e95b9458
|
f3e2205dd070fd3210316f5f470d371950945028
|
refs/heads/master
| 2021-01-22T20:44:07.018811
| 2017-03-17T19:17:22
| 2017-03-17T19:17:22
| 85,346,735
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,094
|
py
|
# GUI that displays data produced and queued by worker threads
import _thread
import queue
import time
dataQueue = queue.Queue() # infinite size
def producer(id):
for i in range(5):
time.sleep(0.1)
print('put')
dataQueue.put('[producer id=%d, count=%d]' % (id, i))
def consumer(root):
try:
print('get')
data = dataQueue.get(block=False)
except queue.Empty:
pass
else:
root.insert('end', 'consumer got => %s\n' % str(data))
root.see('end')
root.after(250, lambda: consumer(root)) # 4 times per sec
def makethreads():
for i in range(4):
_thread.start_new_thread(producer, (i,))
if __name__ == '__main__':
# main GUI thread: spawn batch of worker threads on each mouse click
from tkinter.scrolledtext import ScrolledText
root = ScrolledText()
root.pack()
root.bind('<Button-1>', lambda event: makethreads())
consumer(root) # start queue check loop in main thread
root.mainloop() # pop-up window, enter tk event loop
|
[
"timothyshull@gmail.com"
] |
timothyshull@gmail.com
|
459925a74c6e9585cf8d4b23622b67d49986ab33
|
51aa2894c317f60726fe9a778999eb7851b6be3e
|
/140_gui/pyqt_pyside/examples/Advanced_Python_Scripting/008_Class_widget/006_Tree_widget_QTreeWidget.py
|
7c346ad7464b09da172de7525381e20a3b937bde
|
[] |
no_license
|
pranaymate/Python_Topics
|
dd7b288ab0f5bbee71d57080179d6481aae17304
|
33d29e0a5bf4cde104f9c7f0693cf9897f3f2101
|
refs/heads/master
| 2022-04-25T19:04:31.337737
| 2020-04-26T00:36:03
| 2020-04-26T00:36:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,436
|
py
|
from PySide.QtGui import *
from PySide.QtCore import *
import os
path = os.path.dirname(os.path.dirname(__file__))
class simpleWindow(QWidget):
def __init__(self):
super(simpleWindow, self).__init__()
ly = QHBoxLayout()
self.setLayout(ly)
self.tree = QTreeWidget()
ly.addWidget(self.tree)
self.tree.header().hide()
# connect
self.tree.itemChanged.connect(self.action) # signal itemChanged vozvrachaet item i kolonky v kotoroj proizoshlo izmenenie
# start
self.resize(500,400)
self.updateTree()
def updateTree(self):
self.tree.blockSignals(True) # eto komanda blokiryet vse signalu, signalu ne bydyt emititsja shto bu ne slychilos'
self.fillTree()
self.tree.blockSignals(False) # eto komanda rablokiryet blokirovky signalov
def fillTree(self, parent=None, root=None):
if not parent:
parent = self.tree.invisibleRootItem()
if not root:
root = path
for f in os.listdir(root):
if f[0] in ['.', '_']: continue # iskluchaet papki s tochkoj i podchorkivaniem v peredi v nazvanii
item = QTreeWidgetItem()
item.setText(0, f) # TreeWidget podderzivaet kolonki, i mu dolznu ykazat' v kakyjy kolonky mu kladjom etot tekst.
parent.addChild(item)
fullpath = os.path.join(root, f)
if os.path.isdir(fullpath):
self.fillTree(item, fullpath)
item.setExpanded(1)
else:
item.setFlags(Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsEditable)
item.setData(0, Qt.UserRole, {'path': os.path.normpath(fullpath)}) # pozvoljaet polozit v jachejky lybue dannue.
# Kontejner kyda mu mozem vremmeno polozit te dannue, kotorue nam nyzno
# takze sychestvyet klass QVariant, kotoruj pozvoljaet soderzat' lyboj tip dannuh
#
def action(self, item):
print item
print item.text(0)
s = item.data(0, Qt.UserRole)
# print s
if __name__ == '__main__':
app = QApplication([])
w = simpleWindow()
w.show()
app.exec_()
|
[
"sergejyurskyj@yahoo.com"
] |
sergejyurskyj@yahoo.com
|
99843c7e7a1a0218b814ee58d5be89d0b2b70fee
|
b456cbde5527e5ef1617c4670f79052d9d9b1e0c
|
/fundemo/filter_demo.py
|
69e7002d9dbac921461db5d82938cf9a06002ac5
|
[] |
no_license
|
srikanthpragada/PYTHON_07_JAN_2019_DEMO
|
071269dd59b954ecb69d33b63b0bece1f29d8563
|
f8e211da5460072b34526b50eebfe4df242c11a4
|
refs/heads/master
| 2020-04-16T03:31:44.032616
| 2019-02-16T12:27:31
| 2019-02-16T12:27:31
| 165,234,686
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 171
|
py
|
def positive(n):
return n >= 0
nums = [10, -10, 20, 5, -7, 15]
for n in filter(positive, nums):
print(n)
for n in filter(lambda v: v >= 0, nums):
print(n)
|
[
"srikanthpragada@gmail.com"
] |
srikanthpragada@gmail.com
|
994a21e1b5351bb8803115122538763a18ca4061
|
8ce656578e04369cea75c81b529b977fb1d58d94
|
/cabinet/management/commands/install_agent_document_categories.py
|
f7066c4849307c4450974b965c055fa0181053d4
|
[] |
no_license
|
JJvzd/django_exp
|
f9a08c40a6a7535777a8b5005daafe581d8fe1dc
|
b1df4681e67aad49a1ce6426682df66b81465cb6
|
refs/heads/master
| 2023-05-31T13:21:24.178394
| 2021-06-22T10:19:43
| 2021-06-22T10:19:43
| 379,227,324
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,460
|
py
|
from django.core.management import BaseCommand
from clients.models import AgentDocumentCategory
class Command(BaseCommand):
help = 'Ининцализация документов агента'
def handle(self, *args, **options):
data = [
{
'name': 'Согласие на обработку персональных данных.',
'for_physical_person': True,
'for_individual_entrepreneur': True,
'for_organization': True,
'type': 'doc_ConfirmPersonalData',
},
{
'name': 'Устав',
'for_organization': True,
'type': 'doc_Charter',
},
{
'name': 'Копия паспорта',
'for_individual_entrepreneur': True,
'for_physical_person': True,
'type': 'doc_Passport',
},
{
'name': 'Скан свидетельства ИНН',
'for_individual_entrepreneur': True,
'for_organization': True,
'for_physical_person': True,
'type': 'doc_ScanINN',
},
{
'name': 'Скан свидетельства ОГРН.',
'for_individual_entrepreneur': True,
'for_organization': True,
'type': 'doc_ScanOGRN',
},
{
'name': 'Решение о назначение или приказ о заключение крупных сделок',
'for_organization': True,
'type': 'doc_BigDeal',
},
{
'name': 'Документ, подтверждающий применение режима налогообложения',
'for_individual_entrepreneur': True,
'for_organization': True,
'type': 'doc_ConfirmTax',
},
{
'name': 'Документ, подтверждающий применение режима налогообложения',
'for_individual_entrepreneur': True,
'for_organization': True,
'type': 'doc_ConfirmTax',
},
{
'name': 'Карточка компании.',
'for_individual_entrepreneur': True,
'for_organization': True,
'for_physical_person': True,
'type': 'doc_CompanyInfo',
},
{
'name': 'Договор ТХ для ознакомления.',
'for_individual_entrepreneur': True,
'for_organization': True,
'for_physical_person': True,
'auto_generate': True,
'type': 'doc_ContractExample',
},
{
'name': 'Договор ТХ',
'for_individual_entrepreneur': True,
'for_organization': True,
'for_physical_person': True,
'auto_generate': True,
'type': 'doc_Contract',
},
]
for (order, params) in enumerate(data):
AgentDocumentCategory.objects.update_or_create(
type=params.pop('type'),
defaults=dict(order=order, **params),
)
|
[
"javad@MacBook-Pro-Namig.local"
] |
javad@MacBook-Pro-Namig.local
|
d1f9a4f51096ddd9ae18d5367d377658e2bb5622
|
11fe265badfae33041cf5fcdde1aa08aa1ab6839
|
/LeetCode/290.WordPattern/Solution.py
|
304580b9b2cf4a2bf6f2f11f799295df3b755fb2
|
[] |
no_license
|
CharlotteKuang/algorithm
|
04344a0f3d2675625843d47fbf7ea8ef5621ccc8
|
e5750c2eee33fcc94d5be1643576ace541036bf5
|
refs/heads/master
| 2021-01-19T09:12:48.102258
| 2016-03-21T05:46:11
| 2016-03-21T05:46:11
| 33,219,348
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 742
|
py
|
class Solution(object):
def wordPattern(self, pattern, str):
"""
:type pattern: str
:type str: str
:rtype: bool
"""
strArr = str.split(' ')
if len(strArr) != len(pattern): return False
count = 0
strDict = {}
for p in pattern:
if not p in strDict:
strDict[p] = strArr[count]
else:
if strDict[p] != strArr[count]:
return False
count += 1
count = 0
pDict = {}
for s in strArr:
if not s in pDict:
pDict[s] = pattern[count]
else:
if pDict[s] != pattern[count]:
return False
count += 1
return True
if __name__ == '__main__':
sol = Solution()
#pattern, string = "abba","dog dog dog dog"
pattern = "abba"
string = "dog cat cat dog"
print sol.wordPattern(pattern, string)
|
[
"charlottekuang430@gmail.com"
] |
charlottekuang430@gmail.com
|
d60e472f2c0a6abc4f19eb90c7e3b6c0f3fdef2b
|
8dfe4b53fae92795405d789d52148d1291836afa
|
/python/python学习/day1/习题二-找出tmp目录下所有相同大小的文件.py
|
732d9fc54c4c063aaf8d66a4aec841a91b161712
|
[] |
no_license
|
ymyjohnny/python
|
e07c54a88954e090cf3d30a4c6f6ac46353063fb
|
b483fd55e577d4dcceb5762bddf833df23874f3a
|
refs/heads/master
| 2021-01-10T01:10:19.038424
| 2019-07-02T02:40:23
| 2019-07-02T02:40:23
| 45,223,843
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 713
|
py
|
import os
from os import path
from os.path import join, getsize
d_size ={}
for root, dirs, files in os.walk('/tmp'):
for file in files:
# if '-' not in file:
# continue
# filenameall = os.path.join(dirs, file)
filenameall = path.join(root,file)
#print filenameall
try:
sizef = os.stat(filenameall)
except:
continue
if sizef.st_size == 0:
continue
#print sizef.st_size
d_size.setdefault(sizef.st_size, []).append(filenameall)
#print d_size
for k,v in d_size.items():
if len(v) <= 1:
continue
print v
#print d_size
|
[
"ymyjohnny@adsame.com"
] |
ymyjohnny@adsame.com
|
b18236cb57a00898c5431242292ced8deffcd973
|
7bb747cb9a36b83fa1ba7e907f6198065f9bcab5
|
/models/GAN3D.py
|
785d3f7dd0b21e453601356a27467c3d221b18a4
|
[
"MIT"
] |
permissive
|
stjordanis/3D-GAN-pytorch
|
87531716b1574798558f741fa84a80aeaacc7479
|
e3f640dbb8335cde239334b3b1ad143acd784c56
|
refs/heads/master
| 2022-04-17T12:52:03.358450
| 2020-04-15T16:48:25
| 2020-04-15T16:48:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,379
|
py
|
import torch
import torch.nn as nn
from torchsummary import summary
"""
Implementation based on original paper NeurIPS 2016
https://papers.nips.cc/paper/6096-learning-a-probabilistic-latent-space-of-object-shapes-via-3d-generative-adversarial-modeling.pdf
"""
class Discriminator(torch.nn.Module):
def __init__(self, in_channels=3, dim=64, out_conv_channels=512):
super(Discriminator, self).__init__()
conv1_channels = int(out_conv_channels / 8)
conv2_channels = int(out_conv_channels / 4)
conv3_channels = int(out_conv_channels / 2)
self.out_conv_channels = out_conv_channels
self.out_dim = int(dim / 16)
self.conv1 = nn.Sequential(
nn.Conv3d(
in_channels=in_channels, out_channels=conv1_channels, kernel_size=4,
stride=2, padding=1, bias=False
),
nn.BatchNorm3d(conv1_channels),
nn.LeakyReLU(0.2, inplace=True)
)
self.conv2 = nn.Sequential(
nn.Conv3d(
in_channels=conv1_channels, out_channels=conv2_channels, kernel_size=4,
stride=2, padding=1, bias=False
),
nn.BatchNorm3d(conv2_channels),
nn.LeakyReLU(0.2, inplace=True)
)
self.conv3 = nn.Sequential(
nn.Conv3d(
in_channels=conv2_channels, out_channels=conv3_channels, kernel_size=4,
stride=2, padding=1, bias=False
),
nn.BatchNorm3d(conv3_channels),
nn.LeakyReLU(0.2, inplace=True)
)
self.conv4 = nn.Sequential(
nn.Conv3d(
in_channels=conv3_channels, out_channels=out_conv_channels, kernel_size=4,
stride=2, padding=1, bias=False
),
nn.BatchNorm3d(out_conv_channels),
nn.LeakyReLU(0.2, inplace=True)
)
self.out = nn.Sequential(
nn.Linear(out_conv_channels * self.out_dim * self.out_dim * self.out_dim, 1),
nn.Sigmoid(),
)
def forward(self, x):
x = self.conv1(x)
x = self.conv2(x)
x = self.conv3(x)
x = self.conv4(x)
# Flatten and apply linear + sigmoid
x = x.view(-1, self.out_conv_channels * self.out_dim * self.out_dim * self.out_dim)
x = self.out(x)
return x
class Generator(torch.nn.Module):
def __init__(self, in_channels=512, out_dim=64, out_channels=1, noise_dim=200, activation="sigmoid"):
super(Generator, self).__init__()
self.in_channels = in_channels
self.out_dim = out_dim
self.in_dim = int(out_dim / 16)
conv1_out_channels = int(self.in_channels / 2.0)
conv2_out_channels = int(conv1_out_channels / 2)
conv3_out_channels = int(conv2_out_channels / 2)
self.linear = torch.nn.Linear(noise_dim, in_channels * self.in_dim * self.in_dim * self.in_dim)
self.conv1 = nn.Sequential(
nn.ConvTranspose3d(
in_channels=in_channels, out_channels=conv1_out_channels, kernel_size=(4, 4, 4),
stride=2, padding=1, bias=False
),
nn.BatchNorm3d(conv1_out_channels),
nn.ReLU(inplace=True)
)
self.conv2 = nn.Sequential(
nn.ConvTranspose3d(
in_channels=conv1_out_channels, out_channels=conv2_out_channels, kernel_size=(4, 4, 4),
stride=2, padding=1, bias=False
),
nn.BatchNorm3d(conv2_out_channels),
nn.ReLU(inplace=True)
)
self.conv3 = nn.Sequential(
nn.ConvTranspose3d(
in_channels=conv2_out_channels, out_channels=conv3_out_channels, kernel_size=(4, 4, 4),
stride=2, padding=1, bias=False
),
nn.BatchNorm3d(conv3_out_channels),
nn.ReLU(inplace=True)
)
self.conv4 = nn.Sequential(
nn.ConvTranspose3d(
in_channels=conv3_out_channels, out_channels=out_channels, kernel_size=(4, 4, 4),
stride=2, padding=1, bias=False
)
)
if activation == "sigmoid":
self.out = torch.nn.Sigmoid()
else:
self.out = torch.nn.Tanh()
def project(self, x):
"""
projects and reshapes latent vector to starting volume
:param x: latent vector
:return: starting volume
"""
return x.view(-1, self.in_channels, self.in_dim, self.in_dim, self.in_dim)
def forward(self, x):
x = self.linear(x)
x = self.project(x)
x = self.conv1(x)
x = self.conv2(x)
x = self.conv3(x)
x = self.conv4(x)
return self.out(x)
def test_gan3d():
noise_dim = 200
in_channels = 512
dim = 64 # cube volume
model_generator = Generator(in_channels=512, out_dim=dim, out_channels=1, noise_dim=noise_dim)
noise = torch.rand(1, noise_dim)
generated_volume = model_generator(noise)
print("Generator output shape", generated_volume.shape)
model_discriminator = Discriminator(in_channels=1, dim=dim, out_conv_channels=in_channels)
out = model_discriminator(generated_volume)
print("Discriminator output", out)
summary(model_generator, (1, noise_dim))
summary(model_discriminator, (1, 64, 64, 64))
test_gan3d()
|
[
"noreply@github.com"
] |
stjordanis.noreply@github.com
|
3203f96749773440ba87ed366bd845ea5a43a2c9
|
f6080f777407734e4b42a0100df57f40f17f3ad2
|
/DSA 450 GFG/reverse_linked_list_iterative.py
|
791cbfa29840d79af71d950be8fd0124e0a84ec3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
MannyP31/CompetitiveProgrammingQuestionBank
|
abf136410a9a76b186995b0e851cb3869938b5f5
|
23fd547b06a65ae0f5246d2500c4c81fab1a92e0
|
refs/heads/master
| 2023-08-24T23:06:13.740397
| 2021-09-30T06:38:34
| 2021-09-30T06:38:34
| 358,646,838
| 2
| 0
|
MIT
| 2021-09-30T06:38:35
| 2021-04-16T15:43:10
|
C++
|
UTF-8
|
Python
| false
| false
| 960
|
py
|
#https://leetcode.com/problems/reverse-linked-list/
# Iterative method
#Approach :
# Store the head in a temp variable called current .
# curr = head , prev = null
# Now for a normal linked list , the current will point to the next node and so on till null
# For reverse linked list, the current node should point to the previous node and the first node here will point to null
# Keep iterating the linkedlist until the last node and keep changing the next of the current node to prev node and also
# update the prev node to current node and current node to next node
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
class Solution:
def reverseList(self, head):
curr = head
prev = None
while(curr != None):
next = curr.next
curr.next = prev
prev = curr
curr = next
return prev
|
[
"dsrao0712@gmail.com"
] |
dsrao0712@gmail.com
|
010fa572fce96bbea946684f5f4c6dd203f393d4
|
66a530b297725b1a2d1c95f95883145c04614ae1
|
/0x06-python-classes/4-square.py
|
b06dd0ffb43c9324bf62690a848170202420b3da
|
[] |
no_license
|
Yagomfh/holbertonschool-higher_level_programming
|
4e6f28186eae18eaba60017fe49ac446a02cbdc5
|
1d15597a6040a8ee15b08447c478d0a2e79b5854
|
refs/heads/main
| 2023-04-23T18:23:28.096644
| 2021-05-18T08:12:27
| 2021-05-18T08:12:27
| 319,253,389
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,031
|
py
|
#!/usr/bin/python3
"""Define a class Square."""
class Square:
"""A class Square."""
def __init__(self, size=0):
"""Initialize a new Square.
Args:
size (int): The size of the new square.
Raises:
TypeError: if size is not an int
ValueError: if size is < 0
"""
if type(size) != int:
raise TypeError('size must be an integer')
if size < 0:
raise ValueError('size must be >= 0')
self.__size = size
def area(self):
"""Calculates the area of a square.
Returns: the size raise to the power of two
"""
return self.__size ** 2
@property
def size(self):
"""Get/set the current size of the square."""
return self.__size
@size.setter
def size(self, value):
if type(value) != int:
raise TypeError('size must be an integer')
if value < 0:
raise ValueError('size must be >= 0')
self.__size = value
|
[
"yagomfh@gmail.com"
] |
yagomfh@gmail.com
|
125b0fb3e4af48e20e3f8383e2f0dfe78cb291fe
|
d57b51ec207002e333b8655a8f5832ed143aa28c
|
/.history/gos_20200614063006.py
|
3b1d50fca3f5888ff5d4a21fa47bff0e281d9930
|
[] |
no_license
|
yevheniir/python_course_2020
|
b42766c4278a08b8b79fec77e036a1b987accf51
|
a152d400ab4f45d9d98d8ad8b2560d6f0b408c0b
|
refs/heads/master
| 2022-11-15T07:13:24.193173
| 2020-07-11T15:43:26
| 2020-07-11T15:43:26
| 278,890,802
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,556
|
py
|
# # Імпорт фажливих бібліотек
# from BeautifulSoup import BeautifulSoup
# import urllib2
# import re
# # Створення функції пошуку силок
# def getLinks(url):
# # отримання та присвоєння контенту сторінки в змінну
# html_page = urllib2.urlopen(url)
# # Перетворення контенту в обєкт бібліотеки BeautifulSoup
# soup = BeautifulSoup(html_page)
# # створення пустого масиву для лінків
# links = []
# # ЗА ДОПОМОГОЮ ЧИКЛУ ПРОХЛДИМСЯ ПО ВСІХ ЕЛЕМЕНТАХ ДЕ Є СИЛКА
# for link in soup.findAll('a', attrs={'href': re.compile("^http://")}):
# # Додаємо всі силки в список
# links.append(link.get('href'))
# # повертаємо список
# return links
# -----------------------------------------------------------------------------------------------------------
# # # Імпорт фажливих бібліотек
# import subprocess
# # Створення циклу та використання функції range для генерації послідовних чисел
# for ping in range(1,10):
# # генерування IP адреси базуючись на номері ітерації
# address = "127.0.0." + str(ping)
# # виклик функції call яка робить запит на IP адрес та запис відповіді в змінну
# res = subprocess.call(['ping', '-c', '3', address])
# # За допомогою умовних операторів перевіряємо відповідь та виводимо результат
# if res == 0:
# print "ping to", address, "OK"
# elif res == 2:
# print "no response from", address
# else:
# print "ping to", address, "failed!"
# -----------------------------------------------------------------------------------------------------------
# # Імпорт фажливих бібліотек
# import requests
# # Ітеруємося по масиву з адресами зображень
# for i, pic_url in enumerate(["http://x.com/nanachi.jpg", "http://x.com/nezuko.jpg"]):
# # Відкриваємо файл базуючись на номері ітерації
# with open('pic{0}.jpg'.format(i), 'wb') as handle:
# # Отримуємо картинку
# response = requests.get(pic_url, stream=True)
# # Використовуючи умовний оператор перевіряємо чи успішно виконався запит
# if not response.ok:
# print(response)
# # Ітеруємося по байтах картинки та записуємо батчаси в 1024 до файлу
# for block in response.iter_content(1024):
# # Якщо байти закінчилися, завершуємо алгоритм
# if not block:
# break
# # Записуємо байти в файл
# handle.write(block)
# -----------------------------------------------------------------------------------------------------------
# # Створюємо клас для рахунку
# class Bank_Account:
# # В конструкторі ініціалізуємо рахунок як 0
# def __init__(self):
# self.balance=0
# print("Hello!!! Welcome to the Deposit & Withdrawal Machine")
# # В методі депозит, використовуючи функцію input() просимо ввести суму поповенння та додаємо цю суму до рахунку
# def deposit(self):
# amount=float(input("Enter amount to be Deposited: "))
# self.balance += amount
# print("\n Amount Deposited:",amount)
# # В методі депозит, використовуючи функцію input() просимо ввести суму отримання та віднімаємо цю суму від рахунку
# def withdraw(self):
# amount = float(input("Enter amount to be Withdrawn: "))
# # За допомогою умовного оператора перевіряємо чи достатнього грошей на рахунку
# if self.balance>=amount:
# self.balance-=amount
# print("\n You Withdrew:", amount)
# else:
# print("\n Insufficient balance ")
# # Виводимо бааланс на екран
# def display(self):
# print("\n Net Available Balance=",self.balance)
# # Створюємо рахунок
# s = Bank_Account()
# # Проводимо операції з рахунком
# s.deposit()
# s.withdraw()
# s.display()
# -----------------------------------------------------------------------------------------------------------
# # Створюємо рекурсивну функцію яка приймає десяткове число
# def decimalToBinary(n):
# # перевіряємо чи число юільше 1
# if(n > 1):
# # Якщо так, ділемо на 2 юез остачі та рекурсивно викликаємо функцію
# decimalToBinary(n//2)
# # Якщо ні, виводимо на остачу ділення числа на 2
# print(n%2, end=' ')
# # Створюємо функцію яка приймає бінарне число
# def binaryToDecimal(binary):
# # Створюємо додаткову змінну
# binary1 = binary
# # Ініціалізуємо ще 3 змінню даючи їм значення 0
# decimal, i, n = 0, 0, 0
# # Ітеруємося до тих пір поки передане нами число не буде 0
# while(binary != 0):
# # Отримуємо остачу від ділення нашого чила на 10 на записуємо в змінну
# dec = binary % 10
# # Додаємо до результату суму попереднього результату та добуток від dec та піднесення 2 до степеня номеру ітерації
# decimal = decimal + dec * pow(2, i)
# # Змінюємо binary
# binary = binary//10
# # Додаємо 1 до кількості ітерацій
# i += 1
# # Виводимо результат
# print(decimal)
# -----------------------------------------------------------------------------------------------------------
# # Імпорт фажливих бібліотек
# import re
# # В умовному операторі перевіряємо чи підходить введена пошта під знайдений з інтернету regex
# if re.match(r"[^@]+@[^@]+\.[^@]+", "nanachi@gmail.com"):
# # Якщо так, виводиму valid
# print("valid")
# -----------------------------------------------------------------------------------------------------------
# # Створення функції яка приймає текст для шифрування та здвиг
# def encrypt(text,s):
# # Створення змінної для результату
# result = ""
# # Ітеруємося по тексту використовуючи range та довжину тексту
# for i in range(len(text)):
# # Беремо літеру базуючись на номері ітерації
# char = text[i]
# # Перевіряємо чи ця літера велика
# if (char.isupper()):
# # Кодуємо літеру базуючись на її номері
# result += chr((ord(char) + s-65) % 26 + 65)
# else:
# # Кодуємо літеру базуючись на її номері
# result += chr((ord(char) + s - 97) % 26 + 97)
# # Повертаємо результат
# return result
# -----------------------------------------------------------------------------------------------------------
numbers = ["0502342349", "0500897897", "0992342349"]
result = {}
for num in numbers:
result[num[:3]] = []
for num in numbers:
result[num[:3]].append()
print(result)
|
[
"yevheniira@intelink-ua.com"
] |
yevheniira@intelink-ua.com
|
06c45efbe18b822fa8d30305bf3275b8c87b3b59
|
4ade586eb178b3cfb80f4fcabef023b1f1001d0c
|
/tips/DFS/arc_031_2.py
|
0abc82e408dc433f1ca32f30a0ad15e637d377b9
|
[] |
no_license
|
TakeruEndo/atcoder
|
e3c5ef8ca802aa4995c471deca7b25daf56a06ef
|
5c812377096ae255b2fa51b3a29c1b2ea686ad57
|
refs/heads/master
| 2022-06-02T08:05:55.003353
| 2020-05-02T08:59:32
| 2020-05-02T08:59:32
| 234,338,212
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,280
|
py
|
import sys
sys.setrecursionlimit(1000000)
# 4方向への移動bベクトル
dx = [1, 0, -1, 0]
dy = [0, 1, 0, -1]
def dfs(h, w, type_):
if type_ == 'first':
pass
else:
d[h][w] = 1
global count
count += 1
# 4方向を探索
for dire in range(4):
nh = h + dy[dire]
nw = w + dx[dire]
# 場外アウト、壁ならスルー
if nh < 0 or nh >= H or nw < 0 or nw >= W:
continue
if d[nh][nw] == 1:
continue
if field[nh][nw] == 'x':
continue
dfs(nh, nw, 'other')
if __name__=='__main__':
H, W = 10, 10
field = []
# 陸地のカウント数
land_c = 0
# 適当な陸地座標
lx, ly = 0, 0
for i in range(H):
field.append(list(input()))
# oの数をカウントする
o_count = 0
for h in range(H):
for w in range(W):
if field[h][w] == 'o':
o_count += 1
for h in range(H):
for w in range(W):
if field[h][w] == 'x':
d = [[0]*W for i in range(H)]
count = 0
dfs(h, w, 'first')
if count-1 == o_count:
print('YES')
sys.exit()
print('NO')
|
[
"sitentaduro1118@gmail.com"
] |
sitentaduro1118@gmail.com
|
ec3c89592d4d8c69c233e4564f3b388d26f5e4f4
|
91fb65972d69ca25ddd892b9d5373919ee518ee7
|
/pibm-training/sample-programs/exception_002.py
|
74b1fd23e26fd08cf017cd245626f8fdde4a1499
|
[] |
no_license
|
zeppertrek/my-python-sandpit
|
c36b78e7b3118133c215468e0a387a987d2e62a9
|
c04177b276e6f784f94d4db0481fcd2ee0048265
|
refs/heads/master
| 2022-12-12T00:27:37.338001
| 2020-11-08T08:56:33
| 2020-11-08T08:56:33
| 141,911,099
| 0
| 0
| null | 2022-12-08T04:09:28
| 2018-07-22T16:12:55
|
Python
|
UTF-8
|
Python
| false
| false
| 251
|
py
|
#exception_002.py
while True:
try:
n = input("Please enter an integer: ")
n = int(n)
break
except ValueError:
print("No valid integer! Please try again ...")
print ("Great, you successfully entered an integer!")
|
[
"zeppertrek@gmail.com"
] |
zeppertrek@gmail.com
|
708ea2020f4815fbe87b1959cbb649477a0ad86b
|
b28c2e04e2a093a7e83b214c877ea30978ff862e
|
/twitter_clustering/fetch_tweets.py
|
52d24a15e40e4d372cfb9cb8f5584ff41e3151f6
|
[] |
no_license
|
markmo/experiments
|
ec00dcb6219cd422873ae3a018fc2bc8cadedd5c
|
f7d3f25dfef2472ec1b5bed30be7b46daa448257
|
refs/heads/master
| 2020-05-31T17:55:21.537201
| 2011-04-12T20:53:41
| 2011-04-12T20:53:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,094
|
py
|
# -*- coding: utf-8 =*-
import codecs
# import json
import sys
import twitter
HASHTAG = sys.argv[1]
PAGES = int(sys.argv[2])
RPP = int(sys.argv[3])
# http://search.twitter.com/search.json?pages=15&rpp=100&q=%23webstock&show_user=true
# Twitter will only return a max of approx. 1500 results (rpp * pages),
# showing a mix of recent and popular results
t = twitter.Twitter(domain='search.twitter.com')
search_results = []
for page in range(1, PAGES):
search_results.append(t.search(q=HASHTAG, rpp=RPP, show_user=False))
# print json.dumps(tweets, sort_keys=True, indent=1)
f = codecs.open('./output/tweets', 'w', encoding="UTF-8")
count = 0
for result in search_results:
for t in result['results']:
count += 1
f.write(''.join([
t['from_user_id_str'], '\t',
t['from_user'], '\t',
' '.join(t['text'].splitlines()), '\n' # creates a list from text breaking at line boundaries, and joins them back up using the object of the join method as the delimiter
]))
print "Wrote %i records." % (count)
f.close()
|
[
"markmo@me.com"
] |
markmo@me.com
|
71a3bd14e7095ed858fc0f5c4537109f5b7edef0
|
ad13583673551857615498b9605d9dcab63bb2c3
|
/output/instances/nistData/union/duration-decimal/Schema+Instance/NISTXML-SV-IV-union-duration-decimal-enumeration-4-4.py
|
31399b0d101328ef12787056cf9e2b96842cc399
|
[
"MIT"
] |
permissive
|
tefra/xsdata-w3c-tests
|
397180205a735b06170aa188f1f39451d2089815
|
081d0908382a0e0b29c8ee9caca6f1c0e36dd6db
|
refs/heads/main
| 2023-08-03T04:25:37.841917
| 2023-07-29T17:10:13
| 2023-07-30T12:11:13
| 239,622,251
| 2
| 0
|
MIT
| 2023-07-25T14:19:04
| 2020-02-10T21:59:47
|
Python
|
UTF-8
|
Python
| false
| false
| 616
|
py
|
from output.models.nist_data.union.duration_decimal.schema_instance.nistschema_sv_iv_union_duration_decimal_enumeration_4_xsd.nistschema_sv_iv_union_duration_decimal_enumeration_4 import NistschemaSvIvUnionDurationDecimalEnumeration4
from output.models.nist_data.union.duration_decimal.schema_instance.nistschema_sv_iv_union_duration_decimal_enumeration_4_xsd.nistschema_sv_iv_union_duration_decimal_enumeration_4 import NistschemaSvIvUnionDurationDecimalEnumeration4Type
obj = NistschemaSvIvUnionDurationDecimalEnumeration4(
value=NistschemaSvIvUnionDurationDecimalEnumeration4Type.VALUE_MINUS_6_6957506428
)
|
[
"tsoulloftas@gmail.com"
] |
tsoulloftas@gmail.com
|
ce27f610b1f9156233d4b024e14ac4d729503599
|
1375f57f96c4021f8b362ad7fb693210be32eac9
|
/kubernetes/test/test_v1_replication_controller_status.py
|
b142b6da9d514589c5adfdf4056cc4689abbee8d
|
[
"Apache-2.0"
] |
permissive
|
dawidfieluba/client-python
|
92d637354e2f2842f4c2408ed44d9d71d5572606
|
53e882c920d34fab84c76b9e38eecfed0d265da1
|
refs/heads/master
| 2021-12-23T20:13:26.751954
| 2017-10-06T22:29:14
| 2017-10-06T22:29:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 973
|
py
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_replication_controller_status import V1ReplicationControllerStatus
class TestV1ReplicationControllerStatus(unittest.TestCase):
""" V1ReplicationControllerStatus unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1ReplicationControllerStatus(self):
"""
Test V1ReplicationControllerStatus
"""
model = kubernetes.client.models.v1_replication_controller_status.V1ReplicationControllerStatus()
if __name__ == '__main__':
unittest.main()
|
[
"mehdy@google.com"
] |
mehdy@google.com
|
f2f2341b156aee87b251c8e69a037bcec17fe382
|
a8062308fb3bf6c8952257504a50c3e97d801294
|
/problems/N1015_Smallest_Integer_Divisible_By_K.py
|
ecc44268c08d271cd52b6ffb21cb2292647e28db
|
[] |
no_license
|
wan-catherine/Leetcode
|
650d697a873ad23c0b64d08ad525bf9fcdb62b1b
|
238995bd23c8a6c40c6035890e94baa2473d4bbc
|
refs/heads/master
| 2023-09-01T00:56:27.677230
| 2023-08-31T00:49:31
| 2023-08-31T00:49:31
| 143,770,000
| 5
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,431
|
py
|
class Solution(object):
"""
num = a*K + r ==> num%K == r%K
num_1 = num*10 + 1 = 10*a*K + 10*r + 1 ==> num_1%K == (10*r + 1)%K
so for all num, num%K == (10*(last r) + 1)%K
r = num%K ==> r < K
so the while at most run K times, then :
1. it find num which num%K == 0
2. there is a cycle of r , we use used to check.
"""
def smallestRepunitDivByK(self, K):
"""
:type K: int
:rtype: int
"""
n = 1
res = 1
used = set()
while True:
r = n % K
if not r:
return res
if r in used:
return -1
used.add(r)
n = 10*r + 1
res += 1
"""
Here we check K % 2 and K % 5 , then all other will be divisible.
we have 1, 2, ..., k-3, k-2, k-1 remainder.
now if the num isn't divisible, it means ri, rj in those remainders are equal ==>
(10*ri + 1)%K == (10*rj + 1)%K
10*ri + 1 + a*K = 10*rj + 1
(rj-ri) = a*K/10 ==> 0 < a < 10
if K%2== 0 or K%5 == 0 , a can be 5 or 2, then rj can be same as ri.
so it won't be divisible.
"""
def smallestRepunitDivByK_(self, K):
if K % 2 == 0 or K % 5 == 0:
return -1
n = 1
res = 1
while True:
r = n % K
if not r:
return res
n = 10*r + 1
res += 1
|
[
"rarry2012@gmail.com"
] |
rarry2012@gmail.com
|
9df1c852a8bc6e84f68ce40d11133309ae3de95a
|
d75cbad7a79e24b49f405c6529633ea65c9b286d
|
/basic_linear_regression.py
|
37b7c00364080d4167bba791a2f3655a849f9e08
|
[] |
no_license
|
aj2622/ML_HW1
|
bc49e61781f108c66dfd598423915e27c72f7b3a
|
7497f8d71f6b731fc232058d6a0597af4884a53f
|
refs/heads/master
| 2020-04-22T08:49:52.188349
| 2017-10-31T14:23:02
| 2017-10-31T14:23:02
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,108
|
py
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
if __name__ == '__main__':
test_data = pd.read_csv('data/4_test.csv')
train_data = pd.read_csv('data/4_train.csv')
train_t = np.array(train_data['t'].values)
test_t = np.array(test_data['t'].values)
train_x = np.array(train_data['x'].values)
test_x = np.array(test_data['x'].values)
train_elist = []
test_elist = []
for order in range(0, 10):
Xm = np.zeros((len(train_x), order+1))
Xm[:, 0] = 1
for i in range(1, order+1):
Xm[:, i] = train_x**i
Xt = np.transpose(Xm)
w = np.dot(np.dot(np.linalg.inv(np.dot(Xt, Xm)), Xt), train_t)
w = np.flip(w, 0)
p = np.poly1d(w)
train_y = p(train_x)
test_y = p(test_x)
train_elist.append(np.sqrt(np.mean((train_y - train_t) ** 2)))
test_elist.append(np.sqrt(np.mean((test_y - test_t) ** 2)))
plt.plot(train_elist, marker='o')
plt.plot(test_elist, 'r-', marker='o')
plt.xlim(-1, 10)
plt.xlabel('M')
plt.ylabel('RMS Error')
plt.show()
|
[
"ya70201@gmail.com"
] |
ya70201@gmail.com
|
dc217de412ac4cd7870e2b1f1bb8531cab88a313
|
accfe90fbd439fa4ef0655114a5b3d9d771ca837
|
/euler53.py
|
f0facc7f50f6052f742c862cf253746770a693ac
|
[] |
no_license
|
hackingmath/Project-Euler
|
57641ba6e82706e5a3585e8072c70db81d97d984
|
83863e5351ba4425bd803ae0d01efeb824ffa7ca
|
refs/heads/master
| 2022-02-22T09:06:10.215593
| 2019-07-13T16:21:47
| 2019-07-13T16:21:47
| 111,737,031
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 452
|
py
|
'''Problem 53: Combinatoric selections'''
import time
t1 = time.time()
def fact(n):
if n <= 1:
return 1
return n*fact(n-1)
def combinations(n,r):
'''returns nCr'''
return fact(n)/(fact(r)*fact(n-r))
tot = 0
for n in range(1,101):
for r in range(1,101):
if combinations(n,r)>1000000:
tot += 1
print(tot)
t2 = time.time()
print(t2-t1)
#4075 Correct in 0.34 seconds
|
[
"noreply@github.com"
] |
hackingmath.noreply@github.com
|
a9f2c564853bc7441c44a4c883e66286c4babe0a
|
4d9bdaf3deb0f78dc99eb87b8cd4386e8f3069de
|
/scripts/PostStacksFiltering/genMissingLoci.py
|
139e11d32999f22d8bad689cab5607c7e34c1d16
|
[] |
no_license
|
mfisher5/PCod-Compare-repo
|
5b6f688097feb279a26cd12f43bf91f9a5a83e15
|
2320781656836c1d9534be1a406b1cae4c47ebe1
|
refs/heads/master
| 2020-03-07T12:33:25.000207
| 2018-06-10T23:05:23
| 2018-06-10T23:05:23
| 127,480,623
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,424
|
py
|
### This file generates Eleni's Missing Loci filtering file ###
## MF 2/24/2017
## Arguments:
##-- population map
#########################
import sys
popmap = open(sys.argv[1], "r")
script = open("FilterLoci_by_MissingValues.py", "w")
## Start writing the MAF filtering script ##
script.write("### This scripts removes loci with too much missing data (you set the threshold)\n#Adjusted from Eleni's script (June 15,2015) to take arguments\n#MF 2/28/2017\n\n#################\n\n\nimport sys\n\n")
script.write("# Open your files for reading and writing\ngenotypes_file = open(sys.argv[1],'r')\nclean_output_file = open(sys.argv[2],'w')\nblacklisted_output_file = open(sys.argv[3], 'w')\n\n")
## Count the missing genotypes in each population
#--- generate the same ordered dictionary used in the MAF filtering
import collections
PopDict = collections.OrderedDict()
PopList = []
for line in popmap:
linelist = line.strip().split()
newpop = linelist[1]
if newpop not in PopDict:
PopDict[newpop] = 1
PopList.append(newpop)
elif newpop in PopDict:
count = PopDict[newpop]
count += 1
PopDict[newpop] = count
#--- print message
length = str(len(PopList))
print "You have " + length + " populations."
print "These are your populations, with the number of samples in each:"
print PopDict
#--- initiate the for loop
script.write("\n#run for loop to counting missing genotypes by locus for each population\n\ncount = 0\nbad_count = 0\n\nfor mystring in genotypes_file: # Read in each line in the file as a string\n")
script.write("\tif count == 0:\n\t\tgenotypes_header = mystring\n\t\tclean_output_file.write(genotypes_header)\n\t\tblacklisted_output_file.write(genotypes_header)\n\t\tcount += 1\n")
script.write("\telse:\n\t\tcount += 1\n\t\toverall_percent_missingdata = []\n\t\t" + r"stripped_string = mystring.strip('\n')" + "\n\t\t" + "locus_name = stripped_string.split(',')[0]" + "\n")
#--- use dictionary to generate column indices (should be the same order as in MAF filtering)
PopList = PopDict.keys()
column = 1
column_indices = {}
for pop in PopList:
n_samples = PopDict[pop]
new_column = column + n_samples
column_indices[pop] = [column, new_column]
column = new_column
for pop in PopList:
cols = column_indices[pop]
newstr = pop + " = stripped_string.split(',')[" + str(cols[0]) + ":" + str(cols[1]) + "]"
script.write("\t\t" + newstr + "\n")
script.write("\n#per population counts\n")
#--- count missing data in each population
for pop in PopList:
newstr = "#next pop" + "\n\t\tCount_MissingGenotypesByLocus_" + pop + " = float(" + pop + ".count('0000'))" + "\n\t\tNumberOf_" + pop + "_individuals = float(len(" + pop + "))\n\t\tPercent_MissingData_" + pop + " = float(Count_MissingGenotypesByLocus_" + pop + "/NumberOf_" + pop + "_individuals)\n\t\t" + "overall_percent_missingdata.append(Percent_MissingData_" + pop + ")\n"
script.write(newstr)
#--- write good loci to one file, bad loci to another
script.write("\n#write loci to appropriate file\n\t\tif all(i < 0.50 for i in overall_percent_missingdata):\n\t\t\tclean_output_file.write(mystring)\n\t\telse: \n\t\t\tblacklisted_output_file.write(mystring)\n\t\t\tbad_count += 1")
#--- print output when finished
script.write("\n#print output\nn_loci = str(count - 1)\n" + "print 'processed ' + n_loci + ' loci'" + "\n" + "print 'Number of loci removed: ' + str(bad_count)")
script.close()
popmap.close()
|
[
"mfisher5@uw.edu"
] |
mfisher5@uw.edu
|
4a29737770e7086e49a02c6b2ad4f68990dfe713
|
dc7c62f22e5b7da4691d2bdf9a1da2f3ba9edd75
|
/Course_case/2018_11_13/triangular2.py
|
d06a373b587ee46f8804945adac150cc981320b4
|
[] |
no_license
|
xiaohaiguicc/CS5001
|
563c17637f06f0074ccb743db4f0bdd2a326f978
|
51698ba8bfc2201639e6f4d358e0fc531780d2fc
|
refs/heads/master
| 2020-04-06T17:32:55.046301
| 2018-12-20T23:53:05
| 2018-12-20T23:53:05
| 157,664,298
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 384
|
py
|
import sys
def main(number):
print(add_up(number, 0, 0))
def add_up(number, counter, total):
if(counter == number):
return total
else:
counter += 1
total = total + counter
print(total)
return add_up(number, counter, total)
# Tail recursion should maintain some values so more than one arguments
main(int(sys.argv[1]))
|
[
"xiaohaiguicc@gmail.com"
] |
xiaohaiguicc@gmail.com
|
a8c8e4c916d17a090cfcb804aa6462fed27bdbb2
|
50948d4cb10dcb1cc9bc0355918478fb2841322a
|
/azure-mgmt-network/azure/mgmt/network/v2018_10_01/models/p2_svpn_profile_parameters_py3.py
|
27b1bcd7ec17e77255ff1d4ced6bc9eb6304a6ed
|
[
"MIT"
] |
permissive
|
xiafu-msft/azure-sdk-for-python
|
de9cd680b39962702b629a8e94726bb4ab261594
|
4d9560cfd519ee60667f3cc2f5295a58c18625db
|
refs/heads/master
| 2023-08-12T20:36:24.284497
| 2019-05-22T00:55:16
| 2019-05-22T00:55:16
| 187,986,993
| 1
| 0
|
MIT
| 2020-10-02T01:17:02
| 2019-05-22T07:33:46
|
Python
|
UTF-8
|
Python
| false
| false
| 1,204
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class P2SVpnProfileParameters(Model):
"""Vpn Client Parameters for package generation.
:param authentication_method: VPN client Authentication Method. Possible
values are: 'EAPTLS' and 'EAPMSCHAPv2'. Possible values include: 'EAPTLS',
'EAPMSCHAPv2'
:type authentication_method: str or
~azure.mgmt.network.v2018_10_01.models.AuthenticationMethod
"""
_attribute_map = {
'authentication_method': {'key': 'authenticationMethod', 'type': 'str'},
}
def __init__(self, *, authentication_method=None, **kwargs) -> None:
super(P2SVpnProfileParameters, self).__init__(**kwargs)
self.authentication_method = authentication_method
|
[
"lmazuel@microsoft.com"
] |
lmazuel@microsoft.com
|
0a76d52c519c2b9ff4438e8567ebfbc0b5e3cb2e
|
d8f16b24ba0db0abdcecbbce1cffdb2406a0e652
|
/onlineshopping/myshopping/migrations/0007_auto_20200310_1644.py
|
85939b0c355318b8cf5b90416ea6310e30142333
|
[] |
no_license
|
roshan-pokhrel2/djangoecommerce
|
5c482221dd8dbd043fa8239345797444a4db2224
|
21b52e33396af9b0e2af338f0dd3186026f40edb
|
refs/heads/master
| 2021-04-04T22:09:58.792737
| 2020-03-20T08:19:38
| 2020-03-20T08:19:38
| 248,493,578
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 471
|
py
|
# Generated by Django 3.0.3 on 2020-03-10 16:44
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('myshopping', '0006_order_orderitem'),
]
operations = [
migrations.RemoveField(
model_name='orderitem',
name='item',
),
migrations.DeleteModel(
name='Order',
),
migrations.DeleteModel(
name='OrderItem',
),
]
|
[
"you@example.com"
] |
you@example.com
|
19a3eee629fa47e078d018c096fe5cbfebf9e3ca
|
9b527131c291b735a163226d1daac2397c25b712
|
/Lecture5/roma_changing_signs.py
|
c0d266b142cfb3c1ac106e243bb79b49848d36dc
|
[] |
no_license
|
arnabs542/BigO-Coding-material
|
dbc8895ec6370933069b2e40e0610d4b05dddcf2
|
3b31bddb1240a407aa22f8eec78956d06b42efbc
|
refs/heads/master
| 2022-03-19T18:32:53.667852
| 2019-11-27T23:55:04
| 2019-11-27T23:55:04
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 760
|
py
|
if __name__ == '__main__':
n, k = map(int, input().split())
arr = list(map(int, input().split()))
i = 0
current_sum = 0
done = False
while k > 0:
if arr[i] < 0:
k -= 1
arr[i] *= (-1)
current_sum += arr[i]
elif arr[i] == 0:
k = 0
current_sum += sum(arr[i:])
done = True
else: #arr[i] > 0
if k % 2 == 0:
k = 0
current_sum += sum(arr[i:])
done = True
else: #k odd
k = 0
current_sum = max(current_sum - arr[i-1] + arr[i] - arr[i-1],
current_sum - arr[i])
if i < n-1 and k > 0:
i += 1
else:
break
if k > 0:
if k % 2 != 0:
current_sum -= arr[n-1]
arr[n-1] *= (-1)
current_sum += arr[n-1]
if i <= n-1 and not done:
current_sum += sum(arr[i+1:])
print(current_sum)
|
[
"tranhoangkhuongvn@gmail.com"
] |
tranhoangkhuongvn@gmail.com
|
56291aa4a0f783a753deddf4f50d79d3d9595070
|
d1f971b9fa0edfa633b62887cf9d173d6a86a440
|
/concepts/Exercises/fractions_example.py
|
9c1d73c4e52a77b235af4df8a39a46bdb6118ce0
|
[] |
no_license
|
papan36125/python_exercises
|
d45cf434c15aa46e10967c13fbe9658915826478
|
748eed2b19bccf4b5c700075675de87c7c70c46e
|
refs/heads/master
| 2020-04-28T10:01:10.361108
| 2019-05-10T13:45:35
| 2019-05-10T13:45:35
| 175,187,760
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 161
|
py
|
import fractions
# Output: 3/2
print(fractions.Fraction(1.5))
# Output: 5
print(fractions.Fraction(5))
# Output: 1/3
print(fractions.Fraction(1,3))
|
[
"noreply@github.com"
] |
papan36125.noreply@github.com
|
02214ea3f8469684440d875077648f0e178654d6
|
4142b8c513d87361da196631f7edd82f11465abb
|
/python/round690/1462A.py
|
a873968e7e5d1a7c9aafc73f818dd309e372ef2b
|
[] |
no_license
|
npkhanhh/codeforces
|
b52b66780426682ea1a3d72c66aedbe6dc71d7fe
|
107acd623b0e99ef0a635dfce3e87041347e36df
|
refs/heads/master
| 2022-02-08T17:01:01.731524
| 2022-02-07T10:29:52
| 2022-02-07T10:29:52
| 228,027,631
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 302
|
py
|
from sys import stdin
for _ in range(int(stdin.readline())):
n = int(stdin.readline())
a = list(map(int, stdin.readline().split()))
res = []
for i in range(n // 2):
res.append(a[i])
res.append(a[-i - 1])
if n % 2 == 1:
res.append(a[n // 2])
print(*res)
|
[
"npkhanh93@gmail.com"
] |
npkhanh93@gmail.com
|
64d8680345a04444e6e646f2a2095d2dc6a64b52
|
256746f29f9995accd4fee35b9b8981264ca2e37
|
/Ch05/2017-8-21_2.py
|
e37173b77da869c3f53193f47a2a80dc66a3786c
|
[] |
no_license
|
Vagacoder/Python_for_everyone
|
adadd55561b2200d461afbc1752157ad7326698e
|
b2a1d1dcbc3cce5499ecc68447e1a04a8e59dc66
|
refs/heads/master
| 2021-06-22T00:26:02.169461
| 2019-05-25T16:06:04
| 2019-05-25T16:06:04
| 114,508,951
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,016
|
py
|
## Ch05 P 5.26
def read_digit(code):
bar1 = read_bar(code[0])
bar2 = read_bar(code[1])
bar3 = read_bar(code[2])
bar4 = read_bar(code[3])
bar5 = read_bar(code[4])
number = bar1 * 7 + bar2 * 4 + bar3 * 2 + bar4 * 1 + bar5 * 0
if number == 11:
number = 0
return number
def read_bar(bar):
if bar == '|':return 1
elif bar == ':':return 0
def read_zipcode(zip):
if len(zip) != 32 or zip[0] != '|' or zip[-1] != '|':
return 'Wrong bar code!'
digit1 = read_digit(zip[1:6])
digit2 = read_digit(zip[6:11])
digit3 = read_digit(zip[11:16])
digit4 = read_digit(zip[16:21])
digit5 = read_digit(zip[21:26])
check = read_digit(zip[26:31])
sum = digit1+digit2+digit3+digit4+digit5
if (sum + check) %10 != 0:
return 'Wrong check bar code!'
else:
return str(digit1) + str(digit2) + str(digit3) + str(digit4) + str(digit5)
print(read_zipcode('||:|::::||::::||||::::|:|:::|:||'))
|
[
"noreply@github.com"
] |
Vagacoder.noreply@github.com
|
3bfd0f28ba277e393000a71c1308dee534790150
|
d57148c74b79954ff762ce3a02c1b0ef3e79d6a1
|
/libs/smartmeshsdk-REL-1.3.0.1/libs/VManagerSDK/vmanager/models/user_list_element.py
|
bcb914e1417f2a8286c6556ef01fbd45bcd0d182
|
[
"BSD-3-Clause"
] |
permissive
|
realms-team/solmanager
|
62fb748b140361cf620b7dd8ff6df755afd42bbe
|
95fa049df041add5f8d37c053ef560d0e5d06dff
|
refs/heads/master
| 2020-04-11T10:00:21.086457
| 2018-11-20T15:49:27
| 2018-11-20T15:49:27
| 40,271,406
| 0
| 0
|
BSD-3-Clause
| 2018-11-20T15:49:28
| 2015-08-05T22:15:39
|
Python
|
UTF-8
|
Python
| false
| false
| 3,057
|
py
|
# coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
class UserListElement(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
UserListElement - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'user_id': 'str'
}
self.attribute_map = {
'user_id': 'userId'
}
self._user_id = None
@property
def user_id(self):
"""
Gets the user_id of this UserListElement.
User identifier
:return: The user_id of this UserListElement.
:rtype: str
"""
return self._user_id
@user_id.setter
def user_id(self, user_id):
"""
Sets the user_id of this UserListElement.
User identifier
:param user_id: The user_id of this UserListElement.
:type: str
"""
self._user_id = user_id
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
[
"twatteyne@gmail.com"
] |
twatteyne@gmail.com
|
d6bf95c21fddc999194bb12a05edb9c1c0d457aa
|
2ed6e49f6bb841a36f51e7562a71788682f2f282
|
/backend/backend/git_utils.py
|
56837bde860f73ea789a3c21d8e06a4b3571e8bf
|
[
"Apache-2.0",
"MIT",
"CC-BY-4.0"
] |
permissive
|
arthur-flam/qaboard
|
7fcaf3c351bd94d457dd14b9f9c6793a583d6841
|
7a11c3c2279595f87bc113c7d383d11241d83946
|
refs/heads/master
| 2022-10-07T13:55:13.356189
| 2020-06-05T07:07:05
| 2020-06-05T07:07:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,173
|
py
|
from git import Repo
from git import RemoteProgress
from git.exc import NoSuchPathError
class Repos():
"""Holds data for multiple repositories."""
def __init__(self, git_server, clone_directory):
self._repos = {}
self.git_server = git_server
self.clone_directory = clone_directory
def __getitem__(self, project_path):
"""
Return a git-python Repo object representing a clone
of $QABOARD_GIT_SERVER/project_path at $QABOARD_DATA
project_path: the full git repository namespace, eg dvs/psp_swip
"""
clone_location = str(self.clone_directory / project_path)
try:
repo = Repo(clone_location)
except NoSuchPathError:
try:
print(f'Cloning <{project_path}> to {self.clone_directory}')
repo = Repo.clone_from(
# for now we expect everything to be on gitlab-srv via http
f'git@{self.git_server}:{project_path}',
str(clone_location)
)
except Exception as e:
print(f'[ERROR] Could not clone: {e}. Please set $QABOARD_DATA to a writable location and verify your network settings')
raise(e)
self._repos[project_path] = repo
return self._repos[project_path]
def git_pull(repo):
"""Updates the repo and warms the cache listing the latests commits.."""
class MyProgressPrinter(RemoteProgress):
def update(self, op_code, cur_count, max_count=100.0, message="[No message]"):
# print('...')
# print(op_code, cur_count, max_count, (cur_count or 0)/max_count, message)
pass
try:
for fetch_info in repo.remotes.origin.fetch(progress=MyProgressPrinter()):
# print(f"Updated {fetch_info.ref} to {fetch_info.commit}")
pass
except Exception as e:
print(e)
def find_branch(commit_hash, repo):
"""Tries to get from which branch a commit comes from. It's a *guess*."""
std_out = repo.git.branch(contains=commit_hash, remotes=True)
branches = [l.split(' ')[-1] for l in std_out.splitlines()]
important_branches = ['origin/release', 'origin/master', 'origin/develop']
for b in important_branches:
if b in branches:
return b
if branches:
return branches[0]
return 'unknown'
|
[
"arthur.flam@samsung.com"
] |
arthur.flam@samsung.com
|
9254d9688bf09016bb2da20222a8382e55e09309
|
22d4bdff084db5becb8c76d5d8c3ce6ea095d3d8
|
/tcapy/vis/computationresults.py
|
a9064c413d1e8b0f07ce94559ac9eeb040c73b7c
|
[
"Apache-2.0"
] |
permissive
|
sbnair/tcapy
|
1768220657bdd4d3bdc0f2e8248e971c76ed4953
|
380d49139d7af9fd4cf63d406029833c9a41cc70
|
refs/heads/master
| 2021-05-17T14:13:22.206884
| 2020-03-27T17:10:06
| 2020-03-27T17:10:06
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,549
|
py
|
from __future__ import print_function
__author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
#
# Copyright 2018 Cuemacro Ltd. - http//www.cuemacro.com / @cuemacro
#
# See the License for the specific language governing permissions and limitations under the License.
#
import abc
from tcapy.util.utilfunc import UtilFunc
from tcapy.vis.displaylisteners import PlotRender
ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
class ComputationResults(ABC):
"""Abstract class holds the results of a computation in a friendly format, splitting out the various dataset which can be used
for charts. Also converts these datasets to Plotly Figure objects, ready to be plotted in HTML documents.
"""
def __init__(self, dict_of_df, computation_request, text_preamble=''):
self._plot_render = PlotRender()
self._util_func = UtilFunc()
self.text_preamble = text_preamble
self._computation_request = computation_request
self._rendered = False
@abc.abstractmethod
def render_computation_charts(self):
"""Takes the various dataframes computation results output, and then renders these as Plotly JSON charts (data and
all their graphical properties), which are easy to plot later.
Returns
-------
"""
pass
##### Other data (eg. text)
@property
def text_preamble(self):
return self.__text_preamble
@text_preamble.setter
def text_preamble(self, text_preamble):
self.__text_preamble = text_preamble
|
[
"saeedamen@hotmail.com"
] |
saeedamen@hotmail.com
|
a74d2f981d3af22b80dfd2637fd66240f655a8e6
|
96dcea595e7c16cec07b3f649afd65f3660a0bad
|
/tests/components/mysensors/test_light.py
|
8d4ce445779881be401c869f41257377a2583ea5
|
[
"Apache-2.0"
] |
permissive
|
home-assistant/core
|
3455eac2e9d925c92d30178643b1aaccf3a6484f
|
80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743
|
refs/heads/dev
| 2023-08-31T15:41:06.299469
| 2023-08-31T14:50:53
| 2023-08-31T14:50:53
| 12,888,993
| 35,501
| 20,617
|
Apache-2.0
| 2023-09-14T21:50:15
| 2013-09-17T07:29:48
|
Python
|
UTF-8
|
Python
| false
| false
| 8,086
|
py
|
"""Provide tests for mysensors light platform."""
from __future__ import annotations
from collections.abc import Callable
from unittest.mock import MagicMock, call
from mysensors.sensor import Sensor
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_RGB_COLOR,
ATTR_RGBW_COLOR,
DOMAIN as LIGHT_DOMAIN,
)
from homeassistant.core import HomeAssistant
async def test_dimmer_node(
hass: HomeAssistant,
dimmer_node: Sensor,
receive_message: Callable[[str], None],
transport_write: MagicMock,
) -> None:
"""Test a dimmer node."""
entity_id = "light.dimmer_node_1_1"
state = hass.states.get(entity_id)
assert state
assert state.state == "off"
# Test turn on
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{"entity_id": entity_id},
blocking=True,
)
assert transport_write.call_count == 1
assert transport_write.call_args == call("1;1;1;1;2;1\n")
receive_message("1;1;1;0;2;1\n")
receive_message("1;1;1;0;3;100\n")
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state
assert state.state == "on"
assert state.attributes[ATTR_BRIGHTNESS] == 255
transport_write.reset_mock()
# Test turn on brightness
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{"entity_id": entity_id, "brightness": 128},
blocking=True,
)
assert transport_write.call_count == 1
assert transport_write.call_args == call("1;1;1;1;3;50\n")
receive_message("1;1;1;0;2;1\n")
receive_message("1;1;1;0;3;50\n")
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state
assert state.state == "on"
assert state.attributes[ATTR_BRIGHTNESS] == 128
transport_write.reset_mock()
# Test turn off
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_off",
{"entity_id": entity_id},
blocking=True,
)
assert transport_write.call_count == 1
assert transport_write.call_args == call("1;1;1;1;2;0\n")
receive_message("1;1;1;0;2;0\n")
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state
assert state.state == "off"
async def test_rgb_node(
hass: HomeAssistant,
rgb_node: Sensor,
receive_message: Callable[[str], None],
transport_write: MagicMock,
) -> None:
"""Test a rgb node."""
entity_id = "light.rgb_node_1_1"
state = hass.states.get(entity_id)
assert state
assert state.state == "off"
# Test turn on
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{"entity_id": entity_id},
blocking=True,
)
assert transport_write.call_count == 1
assert transport_write.call_args == call("1;1;1;1;2;1\n")
receive_message("1;1;1;0;2;1\n")
receive_message("1;1;1;0;3;100\n")
receive_message("1;1;1;0;40;ffffff\n")
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state
assert state.state == "on"
assert state.attributes[ATTR_BRIGHTNESS] == 255
assert state.attributes[ATTR_RGB_COLOR] == (255, 255, 255)
transport_write.reset_mock()
# Test turn on brightness
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{"entity_id": entity_id, "brightness": 128},
blocking=True,
)
assert transport_write.call_count == 1
assert transport_write.call_args == call("1;1;1;1;3;50\n")
receive_message("1;1;1;0;2;1\n")
receive_message("1;1;1;0;3;50\n")
receive_message("1;1;1;0;40;ffffff\n")
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state
assert state.state == "on"
assert state.attributes[ATTR_BRIGHTNESS] == 128
assert state.attributes[ATTR_RGB_COLOR] == (255, 255, 255)
transport_write.reset_mock()
# Test turn off
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_off",
{"entity_id": entity_id},
blocking=True,
)
assert transport_write.call_count == 1
assert transport_write.call_args == call("1;1;1;1;2;0\n")
receive_message("1;1;1;0;2;0\n")
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state
assert state.state == "off"
transport_write.reset_mock()
# Test turn on rgb
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{"entity_id": entity_id, ATTR_RGB_COLOR: (255, 0, 0)},
blocking=True,
)
assert transport_write.call_count == 2
assert transport_write.call_args_list[0] == call("1;1;1;1;2;1\n")
assert transport_write.call_args_list[1] == call("1;1;1;1;40;ff0000\n")
receive_message("1;1;1;0;2;1\n")
receive_message("1;1;1;0;3;50\n")
receive_message("1;1;1;0;40;ff0000\n")
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state
assert state.state == "on"
assert state.attributes[ATTR_BRIGHTNESS] == 128
assert state.attributes[ATTR_RGB_COLOR] == (255, 0, 0)
async def test_rgbw_node(
hass: HomeAssistant,
rgbw_node: Sensor,
receive_message: Callable[[str], None],
transport_write: MagicMock,
) -> None:
"""Test a rgbw node."""
entity_id = "light.rgbw_node_1_1"
state = hass.states.get(entity_id)
assert state
assert state.state == "off"
# Test turn on
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{"entity_id": entity_id},
blocking=True,
)
assert transport_write.call_count == 1
assert transport_write.call_args == call("1;1;1;1;2;1\n")
receive_message("1;1;1;0;2;1\n")
receive_message("1;1;1;0;3;100\n")
receive_message("1;1;1;0;41;ffffffff\n")
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state
assert state.state == "on"
assert state.attributes[ATTR_BRIGHTNESS] == 255
assert state.attributes[ATTR_RGBW_COLOR] == (255, 255, 255, 255)
transport_write.reset_mock()
# Test turn on brightness
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{"entity_id": entity_id, "brightness": 128},
blocking=True,
)
assert transport_write.call_count == 1
assert transport_write.call_args == call("1;1;1;1;3;50\n")
receive_message("1;1;1;0;2;1\n")
receive_message("1;1;1;0;3;50\n")
receive_message("1;1;1;0;41;ffffffff\n")
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state
assert state.state == "on"
assert state.attributes[ATTR_BRIGHTNESS] == 128
assert state.attributes[ATTR_RGBW_COLOR] == (255, 255, 255, 255)
transport_write.reset_mock()
# Test turn off
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_off",
{"entity_id": entity_id},
blocking=True,
)
assert transport_write.call_count == 1
assert transport_write.call_args == call("1;1;1;1;2;0\n")
receive_message("1;1;1;0;2;0\n")
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state
assert state.state == "off"
transport_write.reset_mock()
# Test turn on rgbw
await hass.services.async_call(
LIGHT_DOMAIN,
"turn_on",
{"entity_id": entity_id, ATTR_RGBW_COLOR: (255, 0, 0, 0)},
blocking=True,
)
assert transport_write.call_count == 2
assert transport_write.call_args_list[0] == call("1;1;1;1;2;1\n")
assert transport_write.call_args_list[1] == call("1;1;1;1;41;ff000000\n")
receive_message("1;1;1;0;2;1\n")
receive_message("1;1;1;0;3;50\n")
receive_message("1;1;1;0;41;ff000000\n")
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state
assert state.state == "on"
assert state.attributes[ATTR_BRIGHTNESS] == 128
assert state.attributes[ATTR_RGBW_COLOR] == (255, 0, 0, 0)
|
[
"noreply@github.com"
] |
home-assistant.noreply@github.com
|
dcabca991e405722f266c26a79a6218eee88a8e5
|
bcda330eab528871da7fe007851f9554f2e19e66
|
/dfp/get_orders.py
|
839da7a3b615c878b631f55ed7c0aa7da9f8bd90
|
[
"MIT"
] |
permissive
|
gmalta/dfp-prebid-setup
|
2b80cc6ac53240c8e2caec6abadb6df349ada6ae
|
d965f9a70e56a8444ecd80566028f09964b51d04
|
refs/heads/master
| 2023-08-19T02:02:30.265693
| 2021-09-22T19:47:17
| 2021-09-22T19:47:17
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,087
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from googleads import ad_manager
from dfp.client import get_client
logger = logging.getLogger(__name__)
def get_order_by_name(order_name):
"""
Gets an order by name from DFP.
Args:
order_name (str): the name of the DFP order
Returns:
a DFP order, or None
"""
dfp_client = get_client()
order_service = dfp_client.GetService('OrderService', version='v202108')
# Filter by name.
query = 'WHERE name = :name'
values = [{
'key': 'name',
'value': {
'xsi_type': 'TextValue',
'value': order_name
}
}]
statement = ad_manager.FilterStatement(query, values)
response = order_service.getOrdersByStatement(statement.ToStatement())
no_order_found = False
try:
no_order_found = True if len(response['results']) < 1 else False
except (AttributeError, KeyError):
no_order_found = True
if no_order_found:
return None
else:
order = response['results'][0]
logger.info(u'Found an order with name "{name}".'.format(name=order['name']))
return order
def get_all_orders(print_orders=False):
"""
Logs all orders in DFP.
Returns:
None
"""
dfp_client = get_client()
# Initialize appropriate service.
order_service = dfp_client.GetService('OrderService', version='v202108')
# Create a statement to select orders.
statement = ad_manager.FilterStatement()
print('Getting all orders...')
# Retrieve a small amount of orders at a time, paging
# through until all orders have been retrieved.
while True:
response = order_service.getOrdersByStatement(statement.ToStatement())
if 'results' in response and len(response['results']) > 0:
for order in response['results']:
msg = u'Found an order with name "{name}".'.format(name=order['name'])
if print_orders:
print(msg)
statement.offset += ad_manager.SUGGESTED_PAGE_LIMIT
else:
print('No additional orders found.')
break
def main():
get_all_orders(print_orders=True)
if __name__ == '__main__':
main()
|
[
"kevin.jennison1@gmail.com"
] |
kevin.jennison1@gmail.com
|
65837fe0fa38a44d96ddeb2ef1f74ed692cc0f06
|
fa14db78eac812bed2acf5801f4cbae87753d5aa
|
/setup.py
|
003a7ee178ac36569d02a6cea310d9bc66e5f341
|
[] |
no_license
|
rogerlew/dictset
|
61abcf14313d29150d4e320c4a28fce73bcaa8ca
|
97ece69bf4ceb265c36f6ea79f98b6553b9ccc1c
|
refs/heads/master
| 2016-09-03T07:41:45.098506
| 2011-05-13T19:41:57
| 2011-05-13T19:41:57
| 32,486,726
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,422
|
py
|
# Copyright (c) 2011, Roger Lew [see LICENSE.txt]
# This software is funded in part by NIH Grant P20 RR016454.
##from distutils.core import setup
from setuptools import setup
setup(name='dictset',
version='0.3.1.2',
description='A specialized Python container datatype for managing collections of sets.',
author='Roger Lew',
author_email='rogerlew@gmail.com',
license = "BSD",
classifiers=["Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Natural Language :: English",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.0",
"Programming Language :: Python :: 3.1",
"Programming Language :: Python :: 3.2",
"Topic :: Scientific/Engineering :: Information Analysis",
"Topic :: Scientific/Engineering :: Mathematics",
"Topic :: Software Development :: Libraries :: Python Modules"],
url='http://code.google.com/p/dictset/',
py_modules=['dictset'],
)
"""setup.py sdist upload --identity="Roger Lew" --sign"""
|
[
"rogerlew@gmail.com"
] |
rogerlew@gmail.com
|
3c9255c8322cd490ba9c22476e6343bc1b6cc27b
|
95fe42bbae799ef76d21af95d51807d0f6e29638
|
/conf/settings.py
|
5e6a56205a322686ea3cb3edd7e8ad75798da3ab
|
[] |
no_license
|
PhyCosmos/Back-end
|
8a8eda71a30eee3de7a58acb9829004db5664845
|
1826db6efaef5114267e8e684fc66f2316280259
|
refs/heads/main
| 2023-06-21T16:08:53.287483
| 2021-06-07T17:14:08
| 2021-06-07T17:14:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,394
|
py
|
"""
Django settings for conf project.
Generated by 'django-admin startproject' using Django 3.2.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.getenv("SECRET_KEY")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
]
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "conf.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
WSGI_APPLICATION = "conf.wsgi.application"
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql",
"NAME": os.getenv("DATABASE_NAME"),
"USER": os.getenv("DATABASE_USER"),
"PASSWORD": os.getenv("DATABASE_PASSWORD"),
"HOST": os.getenv("DATABASE_HOST"),
"PORT": os.getenv("DATABASE_PORT"),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "Asia/Seoul"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = "/static/"
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
|
[
"ehsgnl123@naver.com"
] |
ehsgnl123@naver.com
|
34a1c01f9276612864b317d7f6156216a0bbf4c8
|
723e52a46ef0b3b3927a842799fa8b99d7a678a6
|
/generic_crud/serializers.py
|
dee90946baae87a30031eae803273dbc2f1bc630
|
[] |
no_license
|
MilanTagline2021/rest-api-views
|
aa2f17279405f839ad86f03e0eea034d82c6bbb5
|
5f1b7743d84ce754d4e69192fdb9f0526945e801
|
refs/heads/master
| 2023-08-21T02:27:37.917621
| 2021-10-21T10:59:38
| 2021-10-21T10:59:38
| 419,275,142
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 219
|
py
|
from rest_framework import serializers
from generic_crud.models import Student
class StudentSerializers(serializers.ModelSerializer):
class Meta:
model = Student
fields = ['id','name','roll','city']
|
[
"milans.tagline@gmail.com"
] |
milans.tagline@gmail.com
|
55f888fe74738900a474126796c6ce311ab676a7
|
5d32d0e65aa3bfa677fd1b8c92569e07e9b82af1
|
/Section 7 - Dictionaries/buildDictResults v2a.py
|
2d965ef573e752a8df80252691d83a5278a6d5d0
|
[
"CC0-1.0"
] |
permissive
|
pdst-lccs/lccs-python
|
b74ef2a02ac8ad2637f713fff5559f4e56c9827d
|
95cb7ece05716521e9951d7a40de8fb20a88021f
|
refs/heads/master
| 2023-05-28T00:46:57.313972
| 2023-05-22T10:16:43
| 2023-05-22T10:16:43
| 240,501,524
| 21
| 18
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 550
|
py
|
# Event: LCCS Python Fundamental Skills Workshop
# Date: Dec 2018
# Author: Joe English, PDST
# eMail: computerscience@pdst.ie
# Purpose: A program to demonstrate how to build a dictionary
# Version 2a. A dictionary to store multiple results for a student
results = {}
name = input("Enter student name: ")
results['name'] = name
while True:
subject = input ("Enter subject name: ")
if subject == "":
break
mark = input ("Enter percentage mark for "+subject+": ")
results[subject] = mark
print(results)
|
[
"noreply@github.com"
] |
pdst-lccs.noreply@github.com
|
73f7a9ecc236f542ada437f4643ea62163cf9f9a
|
bb33e6be8316f35decbb2b81badf2b6dcf7df515
|
/source/res/scripts/client/gui/clans/restrictions.py
|
66ef025b31ed78f0d1115478c56bf6c0174e879a
|
[] |
no_license
|
StranikS-Scan/WorldOfTanks-Decompiled
|
999c9567de38c32c760ab72c21c00ea7bc20990c
|
d2fe9c195825ececc728e87a02983908b7ea9199
|
refs/heads/1.18
| 2023-08-25T17:39:27.718097
| 2022-09-22T06:49:44
| 2022-09-22T06:49:44
| 148,696,315
| 103
| 39
| null | 2022-09-14T17:50:03
| 2018-09-13T20:49:11
|
Python
|
UTF-8
|
Python
| false
| false
| 6,260
|
py
|
# Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/clans/restrictions.py
import weakref
from constants import CLAN_MEMBER_FLAGS
from debug_utils import LOG_DEBUG, LOG_WARNING
from account_helpers import isOutOfWallet, isClanEnabled
from gui.clans.settings import error, success, CLIENT_CLAN_RESTRICTIONS as _CCR
from gui.clans.settings import isValidPattern
from helpers import dependency
from skeletons.gui.shared import IItemsCache
MAY_SEE_TREASURY = CLAN_MEMBER_FLAGS.LEADER | CLAN_MEMBER_FLAGS.VICE_LEADER | CLAN_MEMBER_FLAGS.TREASURER
class ClanMemberPermissions(object):
def __init__(self, bwRoleMask):
self.__roleMask = bwRoleMask
def canChangeSettings(self):
return self.__checkFlags(CLAN_MEMBER_FLAGS.MAY_CHANGE_SETTINGS)
def canChangeRole(self):
return self.__checkFlags(CLAN_MEMBER_FLAGS.MAY_CHANGE_ROLE)
def canActivateReserves(self):
return self.__checkFlags(CLAN_MEMBER_FLAGS.MAY_ACTIVATE_ORDER)
def canEditRecruiterProfile(self):
return self.__checkFlags(CLAN_MEMBER_FLAGS.MAY_EDIT_RECRUIT_PROFILE)
def canChangeCommander(self):
return self.__checkFlags(CLAN_MEMBER_FLAGS.MAY_CHANGE_COMMANDER)
def canHandleClanInvites(self):
return self.__checkFlags(CLAN_MEMBER_FLAGS.MAY_HANDLE_INVITES)
def canRemoveMembers(self):
return self.__checkFlags(CLAN_MEMBER_FLAGS.MAY_REMOVE_MEMBERS)
def canRemoveClan(self):
return self.__checkFlags(CLAN_MEMBER_FLAGS.MAY_REMOVE_CLAN)
def canTrade(self):
return self.__checkFlags(MAY_SEE_TREASURY)
def canExchangeMoney(self):
return self.__checkFlags(CLAN_MEMBER_FLAGS.MAY_EXCHANGE_MONEY)
def canSendApplication(self):
return self.isValidAccountType()
def canRevokeApplication(self):
LOG_DEBUG('Application revoking is not supported')
return False
def canAcceptInvite(self):
return self.isValidAccountType()
def canDeclineInvite(self):
return self.isValidAccountType()
def canSeeClans(self):
return True
@dependency.replace_none_kwargs(itemsCache=IItemsCache)
def isValidAccountType(self, itemsCache=None):
attrs = itemsCache.items.stats.attributes if itemsCache is not None else 0
return not (isOutOfWallet(attrs) and not isClanEnabled(attrs))
def __checkFlags(self, flags):
return self.__roleMask & flags != 0
class DefaultClanMemberPermissions(ClanMemberPermissions):
def __init__(self):
super(DefaultClanMemberPermissions, self).__init__(0)
class BaseAccountClanLimits(object):
def canHandleClanInvites(self, clan):
return error(_CCR.DEFAULT)
def canSendApplication(self, clan):
return error(_CCR.DEFAULT)
def canRevokeApplication(self, clan):
return error(_CCR.DEFAULT)
def canAcceptApplication(self, clan):
return error(_CCR.DEFAULT)
def canDeclineApplication(self, clan):
return error(_CCR.DEFAULT)
def canSendInvite(self, clan):
return error(_CCR.DEFAULT)
def canRevokeInvite(self, clan):
return error(_CCR.DEFAULT)
def canAcceptInvite(self, clan):
return error(_CCR.DEFAULT)
def canDeclineInvite(self, clan):
return error(_CCR.DEFAULT)
def canSearchClans(self, pattern):
return error(_CCR.DEFAULT)
def canSeeTreasury(self, clan):
return error(_CCR.DEFAULT)
class DefaultAccountClanLimits(BaseAccountClanLimits):
pass
class AccountClanLimits(BaseAccountClanLimits):
def __init__(self, profile):
super(AccountClanLimits, self).__init__()
self.__profile = weakref.proxy(profile)
def canSeeTreasury(self, clan):
return self.__checkPermissions('canExchangeMoney', clan)
def canSendApplication(self, clan):
if self.__profile.isInClan():
if self.__profile.getClanDbID() == clan.getDbID():
return error(_CCR.OWN_CLAN)
return error(_CCR.ALREADY_IN_CLAN)
if self.__profile.hasClanInvite(clan.getDbID()):
return error(_CCR.CLAN_INVITE_ALREADY_RECEIVED)
if self.__profile.isClanApplicationSent(clan.getDbID()):
return error(_CCR.CLAN_APPLICATION_ALREADY_SENT)
if self.__profile.isInvitesLimitReached():
return error(_CCR.SENT_INVITES_LIMIT_REACHED)
if not clan.canAcceptsJoinRequests():
return error(_CCR.CLAN_CONSCRIPTION_CLOSED)
if not self.__profile.getPermissions(clan).isValidAccountType():
return error(_CCR.FORBIDDEN_ACCOUNT_TYPE)
if not clan.hasFreePlaces():
return error(_CCR.CLAN_IS_FULL)
return error(_CCR.CLAN_ENTER_COOLDOWN) if self.__profile.isInClanEnterCooldown() else self.__checkPermissions('canSendApplication', clan)
def canRevokeApplication(self, clan):
return self.__checkPermissions('canRevokeApplication', clan)
def canHandleClanInvites(self, clan):
return self.__checkPermissions('canHandleClanInvites', clan)
def canAcceptApplication(self, clan):
return self.__checkPermissions('canHandleClanInvites', clan)
def canDeclineApplication(self, clan):
return self.__checkPermissions('canHandleClanInvites', clan)
def canSendInvite(self, clan):
return self.__checkPermissions('canHandleClanInvites', clan)
def canRevokeInvite(self, clan):
return self.__checkPermissions('canHandleClanInvites', clan)
def canAcceptInvite(self, clan):
return self.__checkPermissions('canAcceptInvite', clan)
def canDeclineInvite(self, clan):
return self.__checkPermissions('canDeclineInvite', clan)
def canSearchClans(self, pattern):
return error(_CCR.SEARCH_PATTERN_INVALID) if not isValidPattern(pattern) else self.__checkPermissions('canSeeClans')
def __checkPermissions(self, permName, clan=None):
perms = self.__profile.getPermissions(clan)
if not hasattr(perms, permName):
LOG_WARNING('There is error while checking account clan permissions', clan, permName)
return error(_CCR.DEFAULT)
return error(_CCR.DEFAULT) if not getattr(perms, permName)() else success()
|
[
"StranikS_Scan@mail.ru"
] |
StranikS_Scan@mail.ru
|
c8505627362494931737ca5085fe06aca508dd70
|
731c6170829acf912143f9e65d86f45bce9102ea
|
/test/test_device.py
|
caf9bb0a60e0bc05ed0e29dd1b6a4314f93db19f
|
[] |
no_license
|
Oculus-Dei/Oculus-Dei
|
9774eae334a86d8e2b4a91d6da889f546a695ee2
|
cde8cec5898e11aa274c609e1d4106b6e51f7896
|
refs/heads/master
| 2021-01-19T06:36:32.709805
| 2016-07-31T17:30:10
| 2016-07-31T17:30:10
| 60,718,426
| 3
| 0
| null | 2016-07-12T17:01:53
| 2016-06-08T17:53:48
|
Python
|
UTF-8
|
Python
| false
| false
| 175
|
py
|
# encoding: utf-8
"""
Created by misaka-10032 (longqic@andrew.cmu.edu).
TODO: purpose
"""
import ocd.device
def test_device():
d = ocd.device.Device()
assert True
|
[
"longqicai@gmail.com"
] |
longqicai@gmail.com
|
61cf66905bc0a97d62d0b3be655a9527446f7069
|
ad3e8de09b8a314f4a6d9a59c54a421805776dc5
|
/chapter2_notebook.py
|
a82b3519e837bab532e53b551c2ce2e4337e76e6
|
[] |
no_license
|
georstef/Python_ObjectOrientedProgramming
|
6401a96195238fcc1624c519984e37e0894f92a7
|
6c00f30b31e8754dd18aa45bb42033c4d15b46ae
|
refs/heads/master
| 2020-12-24T17:25:58.577160
| 2015-11-08T19:01:50
| 2015-11-08T19:01:50
| 15,637,218
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,332
|
py
|
import datetime
# global (singleton)
last_id = 0
class Note:
def __init__(self, memo, tags=''):
self.memo = memo
self.tags = tags
self.creation_date = datetime.date.today()
global last_id
last_id += 1
self.id = last_id
def match(self, filter):
'''
returns true if filter exists in memo or tags
'''
return (filter in self.memo) or (filter in self.tags)
class Notebook:
def __init__(self):
self.notes = []
def new_note(self, memo, tags = ''):
self.notes.append(Note(memo, tags))
def find_note(self, note_id):
for note in self.notes:
if str(note.id) == str(note_id):
return note
return None
def modify_memo(self, note_id, memo):
try:
self.find_note(note_id).memo = memo
return True
except AttributeError:
print('Note not found.')
return False
def modify_tags(self, note_id, tags):
try:
self.find_note(note_id).tags = tags
return True
except AttributeError:
print('Note not found.')
return False
def search(self, filter):
return [note for note in self.notes if note.match(filter)]
|
[
"georstef@gmail.com"
] |
georstef@gmail.com
|
2d5465cffaec39f1e38615d0dc066898635e8dfc
|
7775a073201f568022bbb4ed3d04cb1639ae5e65
|
/AtguiguShop/apps/trade/migrations/0001_initial.py
|
e2f1b9b40d2d911ffa6afe615e9928b8c1f80b8c
|
[] |
no_license
|
a289237642/rest-api
|
25db777fa1ca85fee77f86b8ae92d3656ce2ef40
|
fd2c70245cb12212dcd2fd8899f789c3e41d1af2
|
refs/heads/master
| 2020-04-24T14:12:13.312539
| 2019-03-14T08:41:27
| 2019-03-14T08:41:27
| 172,012,413
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,218
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-06-18 11:28
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('goods', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='OrderGoods',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('goods_num', models.IntegerField(default=0, verbose_name='商品数量')),
('add_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='添加时间')),
],
options={
'verbose_name_plural': '订单商品详情',
'verbose_name': '订单商品详情',
},
),
migrations.CreateModel(
name='OrderInfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('order_sn', models.CharField(blank=True, max_length=30, null=True, unique=True, verbose_name='订单号')),
('trade_no', models.CharField(blank=True, max_length=100, null=True, unique=True, verbose_name='交易号')),
('pay_status', models.CharField(blank=True, choices=[('PAYING', '待支付'), ('TRADE_SUCESS', '支付成功'), ('TRADE_CLOSE', '支付关闭'), ('TRADE_FAIL', '支付失败'), ('TRADE_FINSHED', '交易结束')], default='PAYING', max_length=30, null=True, verbose_name='订单状态')),
('post_script', models.CharField(max_length=200, verbose_name='订单留言')),
('order_mount', models.FloatField(default=0.0, verbose_name='订单金额')),
('pay_time', models.DateTimeField(blank=True, null=True, verbose_name='支付时间')),
('signer_name', models.CharField(max_length=30, verbose_name='签收人')),
('signer_mobile', models.CharField(max_length=11, verbose_name='联系电话')),
('address', models.CharField(max_length=200, verbose_name='收货地址')),
('add_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='添加时间')),
],
options={
'verbose_name_plural': '订单',
'verbose_name': '订单',
},
),
migrations.CreateModel(
name='ShopingCart',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nums', models.IntegerField(default=0, verbose_name='商品数量')),
('add_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='添加时间')),
('goods', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='goods.Goods', verbose_name='商品')),
],
options={
'verbose_name_plural': '购物车',
'verbose_name': '购物车',
},
),
]
|
[
"a289237642@163.com"
] |
a289237642@163.com
|
4c9ab3dc477a8746e0cb8fbe150a54692002b6d8
|
6b2a8dd202fdce77c971c412717e305e1caaac51
|
/solutions_5686275109552128_1/Python/SiaKateb/B.py
|
6a3e7e9c2dee218a9b3e27a1dcc9f988f0fd6609
|
[] |
no_license
|
alexandraback/datacollection
|
0bc67a9ace00abbc843f4912562f3a064992e0e9
|
076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf
|
refs/heads/master
| 2021-01-24T18:27:24.417992
| 2017-05-23T09:23:38
| 2017-05-23T09:23:38
| 84,313,442
| 2
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 286
|
py
|
__author__ = 'siavash'
T = input()
for t in range(T):
D = input()
P = list(map(int, raw_input().split(" ")))
ans = max(P)
Z = 2
while Z < ans:
ans = min(ans, sum([(x - 1) // Z for x in P]) + Z)
Z += 1
print 'Case #{0}: {1}'.format(t + 1, ans)
|
[
"eewestman@gmail.com"
] |
eewestman@gmail.com
|
a0c388980789e8279aead795a5456b5d20c71d8c
|
3bae1ed6460064f997264091aca0f37ac31c1a77
|
/apps/cloud_api_generator/generatedServer/tasklets/disk/getXMLSchema/disk_getXMLSchema.py
|
7fb7c7b36663e1883b9bbf6c12744ce01caf7615
|
[] |
no_license
|
racktivity/ext-pylabs-core
|
04d96b80ac1942754257d59e91460c3a141f0a32
|
53d349fa6bee0ccead29afd6676979b44c109a61
|
refs/heads/master
| 2021-01-22T10:33:18.523799
| 2017-06-08T09:09:28
| 2017-06-08T09:09:28
| 54,314,984
| 0
| 0
| null | 2017-06-08T09:09:29
| 2016-03-20T11:55:01
|
Python
|
UTF-8
|
Python
| false
| false
| 180
|
py
|
__author__ = 'aserver'
__tags__ = 'disk', 'getXMLSchema'
__priority__= 3
def main(q, i, params, tags):
params['result'] = ''
def match(q, i, params, tags):
return True
|
[
"devnull@localhost"
] |
devnull@localhost
|
adbdceca5ed229f3715c5b02449a90573cf11f36
|
673e829dda9583c8dd2ac8d958ba1dc304bffeaf
|
/data/multilingual/Latn.MCD/Serif_12/pdf_to_json_test_Latn.MCD_Serif_12.py
|
e2a6c3fafba9bcc5590f3fd798d4299201704f19
|
[
"BSD-3-Clause"
] |
permissive
|
antoinecarme/pdf_to_json_tests
|
58bab9f6ba263531e69f793233ddc4d33b783b7e
|
d57a024fde862e698d916a1178f285883d7a3b2f
|
refs/heads/master
| 2021-01-26T08:41:47.327804
| 2020-02-27T15:54:48
| 2020-02-27T15:54:48
| 243,359,934
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 305
|
py
|
import pdf_to_json as p2j
import json
url = "file:data/multilingual/Latn.MCD/Serif_12/udhr_Latn.MCD_Serif_12.pdf"
lConverter = p2j.pdf_to_json.pdf_to_json_converter()
lConverter.mImageHashOnly = True
lDict = lConverter.convert(url)
print(json.dumps(lDict, indent=4, ensure_ascii=False, sort_keys=True))
|
[
"antoine.carme@laposte.net"
] |
antoine.carme@laposte.net
|
9bd9447b16887d8c853d33373cfe76d2145cc3ee
|
1ba2f5282084f967aed5df2f614b79366ea8070c
|
/sort/bubble_sort/bubble_sort.py
|
bb27adb5aa11000f9146b1eae2954c22a32fddb3
|
[] |
no_license
|
rohitmi0023/cs_programs
|
fcdb45a7ff3291c6fa0b44cfdbfd76d826bd805f
|
7396daf0c2a65574da674c6dfef91a09138034ac
|
refs/heads/master
| 2023-07-17T08:47:54.806128
| 2021-09-01T16:36:52
| 2021-09-01T16:36:52
| 205,328,085
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 805
|
py
|
def bubble_sort_func(lists):
# In total we have to make length - 1 iterations
for i in range(1, len(lists)):
# In an iteration we have to further make length - i iterations
for j in range(0, len(lists) - i):
# Comparing the current and the next element
if lists[j] > lists[j + 1]:
# swapping if the current element is bigger than the next one
lists[j], lists[j + 1] = lists[j + 1], lists[j]
return lists
# User defined length of the list
SIZE = input("Enter the number of elements ")
# Creating an empty list
LISTS = []
for k in range(0, SIZE):
element = input('Enter the number: ')
# appending each user defined value into the list
LISTS.append(element)
print("Sorted array is: ", bubble_sort_func(LISTS))
|
[
"you@example.com"
] |
you@example.com
|
53d3bdc39fa2dc67b61a5ac0caf041498f56ccdb
|
6206620e1a20c2d8847e63be974f90408c0cfa3c
|
/Advanced_python/polymorphism/method_overriding.py
|
1bedabce0838dc15938ad1f2989ed30fdd8f7934
|
[] |
no_license
|
vinuv296/luminar_python_programs
|
c1eb4f91201634c999b427dd13b79968486e9e9e
|
08c078e25c63b71e97c60a6b2ddd3911e2c915d2
|
refs/heads/master
| 2023-04-05T05:02:47.023113
| 2021-04-19T10:18:18
| 2021-04-19T10:18:18
| 358,550,878
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 207
|
py
|
class Parent:
def properties(self):
print("10 lakh rs,2 Car")
def mary(self):
print("with raju")
class Child(Parent):
def mary(self):
print("With Gopi")
c=Child()
c.mary()
|
[
"vinuv296@gmail.com"
] |
vinuv296@gmail.com
|
b8dbb70f4b3a26af37196a9a463f6061d6e2e864
|
bfa4447ec5e92017aec95ee6d349d91b5733afca
|
/test/test_assign/files/otter-correct/student/tests/q1.py
|
bf7a73d9e1cf2f82d664f62d60f74fae56746002
|
[
"BSD-3-Clause"
] |
permissive
|
ucbds-infra/otter-grader
|
4020c14614fc62a93ce564c6b8ad88269defac97
|
e6ece6b53ef2291f2724ff9965f09d910ad10e7e
|
refs/heads/master
| 2023-08-23T22:46:15.793814
| 2023-08-18T21:53:52
| 2023-08-18T21:53:52
| 208,363,438
| 112
| 62
|
BSD-3-Clause
| 2023-09-12T00:01:41
| 2019-09-13T23:40:57
|
Python
|
UTF-8
|
Python
| false
| false
| 526
|
py
|
OK_FORMAT = True
test = { 'name': 'q1',
'points': 3.75,
'suites': [ { 'cases': [ {'code': '>>> isinstance(x, int)\nTrue', 'hidden': False, 'locked': False},
{'code': '>>> None\n', 'hidden': False, 'locked': False},
{'code': '>>> 0 < x < 100\nTrue', 'hidden': False, 'locked': False}],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'}]}
|
[
"cpyles@berkeley.edu"
] |
cpyles@berkeley.edu
|
ed2e5a15b0308028be8c4dcf2f25b68e7a37e18a
|
771b8d8b60783ed8181de344e418691bd2cf882d
|
/nb_hook/migrations/0001_initial.py
|
18adea7e3bab930b9cade0de8bcf78deae8ae901
|
[] |
no_license
|
mozilla/mozilla_webhook_sync
|
3c99eaa43e36e21b5a55e95c9e75c613fbef6aaa
|
8e955e1f6416bbb4e04246f0bbc67acab6e73af3
|
refs/heads/master
| 2023-09-03T22:53:22.737607
| 2017-11-06T11:22:40
| 2017-11-06T11:22:40
| 66,299,225
| 0
| 0
| null | 2016-10-18T21:54:49
| 2016-08-22T18:48:53
|
Python
|
UTF-8
|
Python
| false
| false
| 679
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-08-19 22:45
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='TestHook',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('content', models.TextField(blank=True, null=True)),
],
),
]
|
[
"walter@fissionstrategy.com"
] |
walter@fissionstrategy.com
|
7a0b281c0707828a0e8c7afce58a3cbce2546ca8
|
faf793376991092615975a559c6bed4e093acc44
|
/SECTION 25 first step with Django/180 to install Django.py
|
0ce11cffde7701e2493656f21e382c11f53f4e95
|
[] |
no_license
|
jdiaz-dev/practicing-python
|
2385f2541759cfc9ed221b62030c28e8cf6ddde4
|
139b7dd4332e9ab3dd73abee0308cff41f4657fe
|
refs/heads/master
| 2023-04-05T06:13:53.590830
| 2023-03-19T16:06:00
| 2023-03-19T16:06:00
| 320,443,146
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 771
|
py
|
"""
--there are many ways to install Django
--first: podemos montar un entorno virual de Python, que permite saltar de una version a otra de python y saltar de otra version a otra de Django, esto esta bien; pero la instalación se complicaría, y no es necesario para una entorno local de desarrollo
--la instalación de Django se hace directamente en el sistema
command to instal Django:
py -m pip install Django==3.1.4
to check version of Django
py -m django --version
to check disponible commands
py manage.py help
to migrate the project
py manage.py migrate
--it generate a database sqlite with functionalities by default of django
to run server
py manage.py runserver
"""
|
[
"lordgelsin26@gmail.com"
] |
lordgelsin26@gmail.com
|
4ce3a8efd95d236d73a447758148f57878a4bfdb
|
ebc7c4d82eed374060bf6bbc7df76930412ba26a
|
/plait/api.py
|
98476b79c0cb686fc7f39bb0936a197961f50a2a
|
[
"MIT"
] |
permissive
|
dustinlacewell/plait
|
6878b2124069373fd2cafdcf5ba1ca628bda64ec
|
b57bc353298401af41b286fbefa6120b236be102
|
refs/heads/master
| 2021-01-10T12:23:34.931115
| 2015-12-04T21:55:30
| 2015-12-04T21:55:30
| 45,318,025
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 980
|
py
|
import inspect
from twisted.internet import reactor
from twisted.internet.threads import blockingCallFromThread as blockingCFT
from plait.task import thread_locals
class RemoteCallError(Exception): pass
def run(cmd, fail=False):
"""
Execute a command on the remote host.
Blocks by calling into the main reactor thread. The result is a CFTResult
object which will contain the stdout of the operation. It will also have
a stderr attribute which if not empty indicates the remote command failed.
"""
worker = thread_locals.worker
# block until result is available or main thread dies
result = blockingCFT(reactor, worker.execFromThread, cmd)
if result.failed and fail:
exception = RemoteCallError(result.stderr)
exception.result = result
stack = inspect.stack()[1]
exception.error = stack[1], stack[2]
raise exception
return result
def sudo(cmd, *args, **kwargs):
return run("sudo " + cmd)
|
[
"dlacewell@gmail.com"
] |
dlacewell@gmail.com
|
6f7783056afc32c59c56147ec14e1af860df6a49
|
b48e2c61292ad9c6621bee95cd70265911d6d636
|
/tests/test_marketstack/test_import.py
|
8e369050c5f3d39e3e813414f40f454211bfeaca
|
[
"BSD-3-Clause"
] |
permissive
|
kvh/snapflow-stocks
|
9f544769b14887338e35df7c6592a035c7bddd6c
|
2531749d86b9ca8a47b3443605d6a9f69c219a03
|
refs/heads/master
| 2023-06-04T15:47:59.157490
| 2021-06-18T04:15:22
| 2021-06-18T04:15:22
| 320,654,176
| 1
| 0
|
BSD-3-Clause
| 2020-12-15T20:08:19
| 2020-12-11T18:33:51
|
Python
|
UTF-8
|
Python
| false
| false
| 1,807
|
py
|
import os
from snapflow import graph, produce
def ensure_api_key() -> str:
api_key = os.environ.get("MARKETSTACK_ACCESS_KEY")
if api_key is not None:
return api_key
api_key = input("Enter Marketstack access key: ")
return api_key
def test_eod():
from snapflow_stocks import module as stocks
api_key = ensure_api_key()
g = graph()
# Initial graph
prices = g.create_node(
stocks.functions.marketstack_import_eod_prices,
params={"access_key": api_key, "tickers": ["AAPL"]},
)
blocks = produce(prices, execution_timelimit_seconds=1, modules=[stocks])
records = blocks[0].as_records()
assert len(records) >= 100
def test_tickers():
from snapflow_stocks import module as stocks
api_key = ensure_api_key()
g = graph()
# Initial graph
tickers = g.create_node(
stocks.functions.marketstack_import_tickers,
params={"access_key": api_key, "exchanges": ["XNAS"]},
)
blocks = produce(tickers, execution_timelimit_seconds=1, modules=[stocks])
records = blocks[0].as_records()
assert len(records) >= 100
def test_tickers_into_eod():
from snapflow_stocks import module as stocks
api_key = ensure_api_key()
g = graph()
# Initial graph
tickers = g.create_node(
stocks.functions.marketstack_import_tickers,
params={"access_key": api_key, "exchanges": ["XNAS"]},
)
prices = g.create_node(
stocks.functions.marketstack_import_eod_prices,
params={"access_key": api_key},
inputs={"tickers_input": tickers},
)
blocks = produce(prices, execution_timelimit_seconds=1, modules=[stocks])
records = blocks[0].as_records()
assert len(records) >= 100
if __name__ == "__main__":
test_tickers_into_eod()
|
[
"kenvanharen@gmail.com"
] |
kenvanharen@gmail.com
|
020896c5168cdb397dd61350967710c264a45466
|
ad3fd0595c60cd10130676203ca695781ba59fa5
|
/RicardoFabbri18022013_2257_gml_fb/scripts/rdfFBEgoGML.py
|
6c096e21a08746b169e5094ef97e3471bd11bb1c
|
[
"CC0-1.0"
] |
permissive
|
labmacambira/fbEgoGML
|
e3d5b499df5ad48613f5071cae8965a5b22b3315
|
3b725b0a6c1998788bcb9b6ebcc966ed4cb083fd
|
refs/heads/master
| 2021-05-30T15:07:38.155574
| 2015-12-11T10:13:15
| 2015-12-11T10:13:15
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,911
|
py
|
import social as S, percolation as P, os
import importlib
#importlib.reload(P.rdf)
importlib.reload(S)
importlib.reload(S.fb)
importlib.reload(S.fb.read)
importlib.reload(S.fb.gml2rdf)
c=P.utils.check
umbrella_dir="fbEgoGML/"
fpath="./publishing/fb4/"
dpath="../data/fb/gml/"
scriptpath=os.path.realpath(__file__)
fnames_=[
("AntonioAnzoategui18022013_182134.gml",None,"100003608428288","antonio.anzoateguifabbri"),
("BrunoMialich31012013_2126.gml",None,"10000045475708","bruno.mialich"),
("CalebLuporini13042013.gml",None,"1110305437","calebml"),
("CalebLuporini19022013.gml",None,"1110305437","calebml"),
("CamilaBatista23022014.gml",None,"100001707143512","camila.batista.3382"),
("DanielPenalva18022013.gml",None,"100000077490764","barthor.la.zule"),
# ("RafaelReinehr09042013_1148.gml",None,"814059950","reinehr"), #gml better
("GabiThume19022013_0440.gml",None,"100002011676407","gabithume"),
("GrahamForrest28012013.gml",None,0,0),
("LailaManuelle17012013_0258.gml",None,"1713144485","laila.manuelle"),
("LarissaAnzoategui20022013_0207.gml",None,"1760577842","larissa.chogui"),
("LuisCirne07032013.gml",None,"717903828","lufcirne"),
("MariliaMelloPisani10042013_0255.gml",None,"100000812625301","marilia.pisani"),
("Mirtes16052013.gml",None,0,0),
("PedroPauloRocha10032013.gml",None,"836944624","dpedropaulorocha"),
("PeterForrest28012013_1602.gml",None,"770029747","peter.forrest.18"), # ateh aqui ok
("RafaelReinehr09042013_1148.gml",None,"814059950","reinehr"), #gml better
("RamiroGiroldo20022013_0149.gml",None,"100001810878626","ramiro.giroldo"),
("RenatoFabbri03032013.gml",None,"781909429","renato.fabbri"),
("RenatoFabbri11072013.gml",None,"781909429","renato.fabbri"),
("RenatoFabbri18042013.gml",None,"781909429","renato.fabbri"),
("RenatoFabbri20012013.gml",None,"781909429","renato.fabbri"),
("RenatoFabbri29112012_0521.gml",None,"781909429","renato.fabbri"),
("RicardoFabbri18022013_2257.gml",None,"1011765","ricardofabbri"),
("RitaWu08042013.gml",None,"100009639240215",0),
("RonaldCosta12062013.gml",None,"1457302032","scherolt"),
("ThaisTeixeira19022013_062820.gml",None,"100001089120349","thais.t.fabbri"),
("VilsonVieira18022013.gml",None,"529899682","aut0mata"),
("ViniciusSampaio18022013_2050.gml",None,"529899682","sampaio.vinicius"),
]
c("largou")
for fnames in fnames_[22:]:
aa=S.fb.triplifyGML(dpath=dpath,
fname=fnames[0],
fnamei=None,
fpath=fpath,
scriptpath=scriptpath,
uid=fnames[2],
sid=fnames[3],
fb_link=None,
ego=True,
umbrella_dir=umbrella_dir)
|
[
"renato.fabbri@gmail.com"
] |
renato.fabbri@gmail.com
|
b8ed560f29c16f3eb2bbbe66bd280997880c9fad
|
6fa831a9ac84ab220aad1195365640cabf3eeb88
|
/tools/calibrate/min_quad.py
|
10eff68e71089a0ffedda59e1d153540a3f30fe1
|
[] |
no_license
|
d4niele/maia
|
1e68faae43687a4d12f29fff7fe94424e9713da1
|
178087336a32856c5f46e364bf164dc1e229b59d
|
refs/heads/master
| 2020-05-24T16:07:53.666392
| 2019-07-02T22:42:50
| 2019-07-02T22:42:50
| 187,349,363
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 217
|
py
|
import numpy as np
from matplotlib import pyplot as plt
N = 10
xs = np.random.random(N)
ys = np.random.random(N)
trend = np.polyfit(xs,ys,1)
plt.plot(xs,ys,'o')
trendpoly = np.poly1d(trend)
plt.plot(xs,trendpoly(xs))
|
[
"you@example.com"
] |
you@example.com
|
767ade35eef66aad7a14b9a9d020379e637aa45e
|
fd379769378d129ae2f038a01f85391034491d61
|
/python/inversions.py
|
67c0c16bafa80a0b69c63aab4ce145dfca63c857
|
[
"MIT"
] |
permissive
|
drusk/algorithms
|
824593d489904f6efa3ccf2a44fab69aafec9cd4
|
c8bdc1c1aff6386e37c023bf1f4984e5addbcab5
|
refs/heads/master
| 2021-01-10T19:33:45.092215
| 2013-12-17T19:59:54
| 2013-12-17T19:59:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 728
|
py
|
"""
Count inversions in a list of numbers.
"""
__author__ = "David Rusk <drusk@uvic.ca>"
def main():
input_file = ("/home/drusk/Documents/Courses/online/algorithms1/"
"assignments/a1/IntegerArray.txt")
numbers = []
with open(input_file, "rb") as filehandle:
for line in filehandle.readlines():
numbers.append(int(line))
print "Read %d numbers." % len(numbers)
# Just implementing the naive way to test other code
inversions = 0
for index, num in enumerate(numbers):
for other_num in numbers[index + 1:]:
if other_num < num:
inversions += 1
print "Inversions: %d" % inversions
if __name__ == "__main__":
main()
|
[
"drusk@uvic.ca"
] |
drusk@uvic.ca
|
5de26745bb13ad2ae79516eb18e726a7104d168a
|
51d0377511a5da902033fb9d80184db0e096fe2c
|
/30-case-studies-in-statistical-thinking/5-earthquakes-and-oil-mining-in-oklahoma/01-eda-plotting-earthquakes-over-time.py
|
fd64e420d5acd064b26dfb6c353d44084515012c
|
[] |
no_license
|
sashakrasnov/datacamp
|
c28c6bda178163337baed646220b2f7dcc36047d
|
759f4cec297883907e21118f24a3449d84c80761
|
refs/heads/master
| 2021-12-07T02:54:51.190672
| 2021-09-17T21:05:29
| 2021-09-17T21:05:29
| 157,093,632
| 6
| 5
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,291
|
py
|
'''
EDA: Plotting earthquakes over time
Make a plot where the y-axis is the magnitude and the x-axis is the time of all earthquakes in Oklahoma between 1980 and the first half of 2017. Each dot in the plot represents a single earthquake. The time of the earthquakes, as decimal years, is stored in the Numpy array time, and the magnitudes in the Numpy array mags.
'''
import numpy as np
import pandas as pd
import dc_stat_think as dcst
import matplotlib.pyplot as plt
df = pd.read_csv('../datasets/oklahoma_earthquakes_1950-2017.csv', comment='#', index_col='time', parse_dates=True, usecols=['time','mag'])
time = np.array([d.timestamp() / 31556925.9747 + 1970 for d in df['1980-01':'2017-06'].index.to_pydatetime()])
mags = df['1980-01':'2017-06'].mag.values
'''
INSTRUCTIONS
* Plot the magnitude (mags) versus time (time) using plt.plot() with keyword arguments marker='.' and linestyle='none'. Also use the keyword argument alpha=0.1 to make the points transparent to better visualize overlapping points.
* Label the x-axis 'time (year)', y-axis 'magnitude', and show the plot.
'''
# Plot time vs. magnitude
_ = plt.plot(time, mags, marker='.', linestyle='none', alpha=0.1)
# Label axes and show the plot
_ = plt.xlabel('time (year)')
_ = plt.ylabel('magnitude')
plt.show()
|
[
"a@skrasnov.com"
] |
a@skrasnov.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.