blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
288
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 684
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 25
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 128
12.7k
| extension
stringclasses 142
values | content
stringlengths 128
8.19k
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2a190607625a690d7574afa6ac51c511908c664d
|
2ddc456deb713a182692f04a0f58b6219c683c3d
|
/manage.py
|
83e6cb6b4ad6924c454de15cd1c2a04e74b6ca2b
|
[] |
no_license
|
rabithakuri/blog
|
feabb6ffcac216abd6c081544f95dd46d773a76e
|
59af4161fa751b7c5b49ac97100b3175f2e32d44
|
refs/heads/master
| 2023-01-03T17:11:53.568997
| 2020-10-28T03:44:41
| 2020-10-28T03:44:41
| 307,582,816
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 655
|
py
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django's command-line administrative tasks."""
    # Make sure the project settings module is resolvable before Django boots.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'intern_project.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as err:
        # Re-raise with a hint about the usual causes (missing install / venv).
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from err
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
|
[
"you@example.com"
] |
you@example.com
|
c5ec3cebc6acd55c78f200d64e7195209a6a3380
|
8eeef634531bddfd99bad995517a6dc2750e5815
|
/tests/TestServidorHTTPConfiguracion.py
|
4d385d32ef1d5c2c8e778218724c9a2373bb1195
|
[] |
no_license
|
GabrielMartinMoran/balizaIntegracionContinua
|
99c882b9376520d832ef72c3e98194835aaf1b37
|
7f82c917793716fef30d30d0cd0a2d98ffda6d47
|
refs/heads/master
| 2021-06-08T14:14:44.112718
| 2021-04-27T18:06:17
| 2021-04-27T18:06:17
| 153,819,687
| 0
| 0
| null | 2021-04-27T18:07:03
| 2018-10-19T17:37:59
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 1,808
|
py
|
#----------------------------- IMPORTAMOS EL DIRECTORIO src ---------------------------
import os
import sys
#IMPORTAMOS DEL PADRE
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../src')))
#--------------------------------------------------------------------------------------
import unittest
import requests
from ServidorHTTPConfiguracion import ServidorHTTPConfiguracion
from ConfiguracionCI import ConfiguracionCI
from ConfiguracionRed import ConfiguracionRed
HOST = "localhost"
PUERTO_1 = 8081
PUERTO_2 = 8082
class TestServidorHTTPConfiguracion(unittest.TestCase):
    """Integration tests for the configuration HTTP server.

    Each test starts a real server on localhost, issues a GET carrying
    configuration values as query parameters, and checks that those
    values were written into the corresponding configuration object.
    """

    def test_establecemos_una_configuracion_para_travis_y_corroboramos(self):
        # Server exposes only the CI configuration (no network config).
        ci_config = ConfiguracionCI()
        server = ServidorHTTPConfiguracion(HOST, PUERTO_1, ci_config, None)
        requests.get("http://"+HOST+":"+str(PUERTO_1)+"/set_configuracion_ci?usuario=USUARIO&repositorio=REPOSITORIO&token=TOKEN&APIurl=http://test.url&servidorCI=Travis")
        # The query parameters must now be reflected by the config object.
        self.assertEqual("USUARIO", ci_config.get_usuario())
        self.assertEqual("REPOSITORIO", ci_config.get_repositorio())
        self.assertEqual("TOKEN", ci_config.get_token())
        server.detener()

    def test_establecemos_una_configuracion_de_red_y_corroboramos(self):
        # Server exposes only the network configuration (no CI config).
        red_config = ConfiguracionRed()
        server = ServidorHTTPConfiguracion(HOST, PUERTO_2, None, red_config)
        requests.get("http://"+HOST+":"+str(PUERTO_2)+"/set_configuracion_red?SSID=SSID&clave=CLAVE")
        self.assertEqual("SSID", red_config.get_SSID())
        self.assertEqual("CLAVE", red_config.get_clave())
        server.detener()
def main():
    # Discover and run every TestCase defined in this module.
    unittest.main()

if __name__ == '__main__':
    main()
|
[
"moran.gabriel.95@gmail.com"
] |
moran.gabriel.95@gmail.com
|
22b1198f010dde12d103a5f65f822ee6e9dd0675
|
e9fade80d627161ace797e6e80c54e328da4cf46
|
/issure_tracker/accounts/migrations/0001_initial.py
|
468da38e15e4ee1e580be4f953070a92552736f3
|
[] |
no_license
|
nurbekov0001/tracer
|
1d48558f4064fd8c8382c16c57b11dbef21554e9
|
3daff8790efd87f99459d9ab9824960a0b8159a8
|
refs/heads/master
| 2023-04-17T16:06:01.446028
| 2021-04-22T10:04:48
| 2021-04-22T10:04:48
| 346,675,253
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,367
|
py
|
# Generated by Django 3.1.7 on 2021-04-16 13:03
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial migration for the accounts app: creates the Profile model,
    # a one-to-one extension of the configured AUTH_USER_MODEL.

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # NOTE(review): "bird_date" looks like a typo for "birth_date";
                # renaming would require a follow-up migration, so left as-is.
                ('bird_date', models.DateField(blank=True, null=True, verbose_name='Дата рождения')),
                ('link', models.URLField(blank=True, null=True, verbose_name='Сылка на GitHub')),
                ('avatar', models.ImageField(blank=True, null=True, upload_to='user_pics', verbose_name='Аватар')),
                ('description', models.TextField(blank=True, max_length=2000, null=True, verbose_name='Полное описание')),
                # Deleting the user cascades to the profile.
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL, verbose_name='Ползователь')),
            ],
            options={
                'verbose_name': 'Профиль',
                'verbose_name_plural': 'Профили',
            },
        ),
    ]
|
[
"nurbekovbekbolot090501@gmail.com"
] |
nurbekovbekbolot090501@gmail.com
|
433b32bb5daca94ed59dd9026ce5d90ddf8b386b
|
a230088db9185a549fc63db351cc3558fbf78fe8
|
/tests/gallery/test_gallery.py
|
147c5bae1edb6779c17447d57a4b498268546f16
|
[
"MIT"
] |
permissive
|
ciphertechsolutions/construct
|
7cebb8ec1634fafcbd1d89af3772fdcbec9509c3
|
e75adbee3cc2bc3fc0326166d623566bf4358424
|
refs/heads/master
| 2020-05-17T06:06:49.291119
| 2019-07-04T01:49:02
| 2019-07-04T01:49:02
| 183,551,585
| 0
| 0
|
NOASSERTION
| 2019-10-19T22:37:53
| 2019-04-26T03:38:32
|
Python
|
UTF-8
|
Python
| false
| false
| 286
|
py
|
from declarativeunittest import *
from construct import *
from construct.lib import *
from gallery import pe32file
def test_pe32():
    """Dump each PE32 gallery sample through the pe32file construct."""
    samples = (
        "python37-win32.exe",
        "python37-win64.exe",
        "SharpZipLib0860-dotnet20.dll",
    )
    for sample in samples:
        commondump(pe32file, sample)
|
[
"arek.bulski@gmail.com"
] |
arek.bulski@gmail.com
|
513f6a85f513250674a0329b3e5b13a71f2ada85
|
77741ac3384cf80ba9f5684f896e4b6500064582
|
/PycharmProjects/继承/09-super().py
|
a9a8a4081e86dbcffdb4377fb61750a70e521906
|
[
"MIT"
] |
permissive
|
jiankangliu/baseOfPython
|
9c02763b6571596844ee3e690c4d505c8b95038d
|
a10e81c79bc6fc3807ca8715fb1be56df527742c
|
refs/heads/master
| 2020-05-09T12:11:02.314281
| 2019-04-13T01:17:24
| 2019-04-13T01:17:24
| 181,104,243
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,658
|
py
|
class Master(object):
    """A master chef who holds the traditional jianbing recipe."""

    def __init__(self):
        # The ancestral recipe that apprentices ultimately inherit.
        self.kongfu = '[古法煎饼果子配方]'

    def make_cake(self):
        # Announce which recipe is being used to make the pancake.
        print(f'师傅运用{self.kongfu}制作煎饼果子')
# Heima school class
class School(Master):
    """Heima school: uses its own recipe, then replays the master's."""

    def __init__(self):
        self.kongfu = '[黑马煎饼果子配方]'

    def make_cake(self):
        print(f'学校运用{self.kongfu}制作煎饼果子')
        # Then use super() to call this class's parent-class methods.
        # NOTE: super().__init__() resets self.kongfu to the master's recipe
        # before the master's make_cake() runs.
        # super(School, self).__init__()
        # super(School, self).make_cake()
        super().__init__()
        super().make_cake()
# If a subclass inherits from multiple parents that define a method or
# attribute with the same name, by default it inherits the one from the
# first parent listed in the parentheses.
class Prentice(School):
    """Apprentice: has an original recipe but can cook every parent's way."""

    def __init__(self):
        self.kongfu = '[原创煎饼果子配方]'

    def make_cake(self):
        # Re-run our own initialisation: the parent-class calls below have
        # already overwritten self.kongfu, so restore our own recipe here.
        self.__init__()
        print(f'大秋运用{self.kongfu}制作煎饼果子')

    def make_super_cake(self):
        # Call the parent-class methods.
        # super(<own class>, self).<method>() -- the simplified form just
        # drops the arguments; Python fills them in automatically.
        # super(Prentice, self).__init__()
        # super(Prentice, self).make_cake()
        super().__init__()
        super().make_cake()
# laowang = Master()
# print(laowang.kongfu)
# laowang.make_cake()
# Demo: the apprentice makes a cake using the parent classes' recipes.
daqiu = Prentice()
daqiu.make_super_cake()
# A customer wants to taste all of them at once.
|
[
"xwp_fullstack@163.com"
] |
xwp_fullstack@163.com
|
79435fffffd54d4dd35c5ea37db3e9fa998944b7
|
99ada05e0088a8e93400b245c02fb0b28ef91a2d
|
/api_v1/containers/shop/views.py
|
90df9332a7860db17f2d5387f1c797548f3e16de
|
[
"MIT"
] |
permissive
|
eric-scott-owens/loopla
|
789fdf128393c29ced808b10e98eb55d5a0ed882
|
1fd5e6e7e9907198ff904111010b362a129d5e39
|
refs/heads/master
| 2022-12-12T17:30:44.373305
| 2019-08-01T06:17:05
| 2019-08-01T06:17:05
| 199,980,906
| 0
| 0
|
MIT
| 2022-12-11T00:23:28
| 2019-08-01T05:08:43
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 6,925
|
py
|
import copy
from datetime import datetime
from django.contrib.auth.models import User
from django.conf import settings
from django.db import transaction
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
import stripe
from shop.models import Order, OrderStatusTransition, OrderItem, Kudos, KudosAvailable, CatalogItem
from api_v1.containers.shop.serializers import CreateNewOrderSerializer, OrderSerializer, ProcessOrderSerializer
from users.models import Person
@api_view(['POST'])
def create_order(request):
    """Create a Stripe order for the authenticated user and mirror it locally.

    Expects ``order_items`` (a list of ``{parent, quantity}`` dicts) in the
    request body. Creates the order on Stripe with the user's shipping
    details, then stores the Order, its initial OrderStatusTransition, and
    one OrderItem per Stripe line item.

    Returns the serialized order with 201 on success, 500 on any failure.
    """
    data = copy.deepcopy(request.data)
    request_serializer = CreateNewOrderSerializer(data=data)
    request_serializer.is_valid(raise_exception=True)
    user = request.user
    person = Person.objects.get(user=user)
    try:
        # Build the Stripe line items from the validated payload.
        items = []
        for order_item in data['order_items']:
            items.append({
                "type": 'sku',
                "parent": order_item['parent'],
                "quantity": order_item['quantity']
            })
        stripe.api_key = settings.STRIPE_PRIVATE_KEY
        stripe_order = stripe.Order.create(
            currency='usd',
            items=items,
            shipping={
                "name": '%s %s' % (user.first_name, user.last_name),
                "address": {
                    "line1": person.address_line_1,
                    "city": person.city,
                    "state": person.state,
                    "country": 'US',
                    "postal_code": person.zipcode
                },
            },
            email=user.email
        )
        # Store the order data in our database.
        # NOTE(review): fromtimestamp() yields naive local-time datetimes —
        # confirm this matches the project's USE_TZ setting.
        order = Order(
            order_id=stripe_order.id,
            user=user,
            amount=stripe_order.amount,
            email=stripe_order.email,
            status=stripe_order.status,
            created=datetime.fromtimestamp(stripe_order.created),
            updated=datetime.fromtimestamp(stripe_order.updated)
        )
        order.currency = stripe_order.currency
        order.save()
        # Record the initial status transition alongside the order.
        order_status = OrderStatusTransition(
            order=order,
            status=stripe_order.status,
            created=datetime.fromtimestamp(stripe_order.updated)
        )
        order_status.save()
        for item in stripe_order['items']:
            order_item = OrderItem(
                order=order,
                amount=item['amount'],
                description=item['description'],
                parent=item['parent'],
                quantity=item['quantity'],
                item_type=item['type']
            )
            # Bug fix: the original statement ended with a trailing comma
            # ("order_item.currency = item.currency,"), which assigned a
            # 1-tuple instead of the currency string.
            order_item.currency = item.currency
            order_item.save()
        updated_order = Order.objects.get(order_id=order.order_id)
        response_serializer = OrderSerializer(updated_order)
        return Response(response_serializer.data, status=status.HTTP_201_CREATED)
    except Exception:
        # Best-effort boundary: any failure maps to a bare 500.
        # NOTE(review): consider logging the exception before returning.
        return Response(status=status.HTTP_500_INTERNAL_SERVER_ERROR)
@api_view(['POST'])
def process_order(request):
    """Pay an existing Stripe order for the requesting user.

    Validates the payload, charges the order via Stripe, records the
    status transition, and grants the purchased kudos editions to the
    user. Returns the serialized updated order with 202 on success,
    403 if the order belongs to another user, 500 on any other failure.
    """
    data = copy.deepcopy(request.data)
    request_serializer = ProcessOrderSerializer(data=data)
    request_serializer.is_valid(raise_exception=True)
    try:
        user = request.user
        order = Order.objects.get(order_id=data['order_id'])
        stripe_token = data['stripe_token']
        # Only the order's owner may pay for it.
        if order.user != user:
            return Response(status=status.HTTP_403_FORBIDDEN)
        # Pay the order
        stripe.api_key = settings.STRIPE_PRIVATE_KEY
        stripe_order = stripe.Order.pay(order.order_id, source=stripe_token)
        # Record the results
        order.status = stripe_order.status
        order.updated = datetime.fromtimestamp(stripe_order.updated)
        order.charge = stripe_order.charge
        order.save()
        order_status = OrderStatusTransition(
            order = order,
            status = stripe_order.status,
            created = datetime.fromtimestamp(stripe_order.updated)
        )
        order_status.save()
        # Update Available Kudos
        kudos_types_ordered = {} # Dictionary keyed by kudos_id and valued by count of that kudos type ordered
        order_items = OrderItem.objects.filter(order_id=order.order_id)
        for item in order_items:
            if item.item_type == 'sku':
                catalog_item = CatalogItem.objects.get(sku=item.parent)
                # collect the kudos we need to make available
                for collection in catalog_item.collection.all():
                    for kudos_id in collection.kudos_ids:
                        ################################################################################
                        ## This section should be identical to the handling for the kudos bellow
                        ################################################################################
                        # if the kudos dictionary entry does not exists, add it with a count of quantity
                        if kudos_id not in kudos_types_ordered:
                            kudos_types_ordered[kudos_id] = item.quantity
                        else:
                            # else, increment the counter
                            kudos_types_ordered[kudos_id] = kudos_types_ordered[kudos_id] + item.quantity
                        ################################################################################
                        ## End section
                for kudos in catalog_item.kudos.all():
                    kudos_id = str(kudos.id)
                    ################################################################################
                    ## This section should be identical to the handling for the collections above
                    ################################################################################
                    # if the kudos dictionary entry does not exists, add it with a count of quantity
                    if kudos_id not in kudos_types_ordered:
                        kudos_types_ordered[kudos_id] = item.quantity
                    else:
                        # else, increment the counter
                        kudos_types_ordered[kudos_id] = kudos_types_ordered[kudos_id] + item.quantity
                    ################################################################################
                    ## End section
        # Update ordering counts and available kudos
        for kudos_id in kudos_types_ordered:
            number_sold_now = kudos_types_ordered[kudos_id]
            kudos_number_sold = None
            updated_kudos = None
            # Updated kudos.number_sold
            with transaction.atomic():
                kudos = Kudos.objects.get(id=kudos_id)
                kudos.number_sold = kudos.number_sold + number_sold_now
                kudos.save()
                updated_kudos = Kudos.objects.get(id=kudos_id)
                kudos_number_sold = updated_kudos.number_sold
            # Start populating available kudos
            # Edition numbers continue from the count sold before this order.
            edition_number = kudos_number_sold - number_sold_now
            while edition_number < kudos_number_sold:
                edition_number = edition_number + 1
                kudos_available = KudosAvailable(
                    user = user,
                    kudos = updated_kudos,
                    edition_number = edition_number,
                    order = order
                )
                kudos_available.save()
        updated_order = Order.objects.get(order_id=order.order_id)
        response_serializer = OrderSerializer(updated_order)
        return Response(response_serializer.data, status=status.HTTP_202_ACCEPTED)
    except Exception as e:
        # NOTE(review): this swallows all errors (including failures after a
        # successful Stripe charge) into a bare 500 — consider logging `e`.
        return Response(status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
[
"eric.owens@loopla.com"
] |
eric.owens@loopla.com
|
67c01605dd097ec955b09e05c3ffb60a960ea937
|
69a415b34e14537b12f1fc4975360a45ea86af39
|
/app/migrations/0059_auto_20210323_1537.py
|
c3494a4b0a778a68824d8194da53a3af538a1d30
|
[] |
no_license
|
imagilex/sosadelbosque
|
36aa0a4fcec737717e58ce4ae6cc195c679c8b19
|
ae52203213cea278690dc13cde60377775c2ef62
|
refs/heads/master
| 2023-05-15T09:26:20.842589
| 2023-03-07T21:33:27
| 2023-03-07T21:33:27
| 170,960,865
| 0
| 1
| null | 2023-04-30T11:23:33
| 2019-02-16T04:05:38
|
Python
|
UTF-8
|
Python
| false
| false
| 2,802
|
py
|
# Generated by Django 3.0.7 on 2021-03-23 15:37
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Adds an `autor` FK (to initsys.Usr) to each temporary report model.

    `default=0` backfills existing rows; `preserve_default=False` removes
    the default once the column exists.
    """

    dependencies = [
        ('initsys', '0003_auto_20190723_1358'),
        ('app', '0058_auto_20210315_1901'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='tmpreportecontrolrecepcion',
            options={'ordering': ['-fecha_de_ultimo_contacto']},
        ),
    ] + [
        # One identical AddField per temporary report model.
        migrations.AddField(
            model_name=model_name,
            name='autor',
            field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='initsys.Usr'),
            preserve_default=False,
        )
        for model_name in [
            'tmpreportecontrolinscritosmod40',
            'tmpreportecontrolinscritosmod40detalle',
            'tmpreportecontrolpatronsustituto',
            'tmpreportecontrolpatronsustitutodetalle',
            'tmpreportecontrolproximopensionmod40',
            'tmpreportpensionesenproceso',
            'tmpreportpensionesenprocesodetalle',
            'tmpreporttramitesycorrecciones',
        ]
    ]
|
[
"maths.unam@gmail.com"
] |
maths.unam@gmail.com
|
3ded5cc16aa18ec2b0aadcdc17047b5871c455f3
|
b8151327e53471c48679908bad4f80e26e4de056
|
/Datasets/us_cropland.py
|
a107245f114533b4d17d70eb4ef833a848cd5143
|
[
"MIT"
] |
permissive
|
edencfc/earthengine-py-notebooks
|
5d91b4e1e3773742890a7498b0e19354b8ed02b5
|
f37adeffc40574f2de82efc9e8103a9c7f918585
|
refs/heads/master
| 2021-04-03T21:29:11.948772
| 2020-03-17T12:41:30
| 2020-03-17T12:41:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,133
|
py
|
'''
<table class="ee-notebook-buttons" align="left">
<td><a target="_blank" href="https://github.com/giswqs/earthengine-py-notebooks/tree/master/Datasets/us_cropland.ipynb"><img width=32px src="https://www.tensorflow.org/images/GitHub-Mark-32px.png" /> View source on GitHub</a></td>
<td><a target="_blank" href="https://nbviewer.jupyter.org/github/giswqs/earthengine-py-notebooks/blob/master/Datasets/us_cropland.ipynb"><img width=26px src="https://upload.wikimedia.org/wikipedia/commons/thumb/3/38/Jupyter_logo.svg/883px-Jupyter_logo.svg.png" />Notebook Viewer</a></td>
<td><a target="_blank" href="https://mybinder.org/v2/gh/giswqs/earthengine-py-notebooks/master?filepath=Datasets/us_cropland.ipynb"><img width=58px src="https://mybinder.org/static/images/logo_social.png" />Run in binder</a></td>
<td><a target="_blank" href="https://colab.research.google.com/github/giswqs/earthengine-py-notebooks/blob/master/Datasets/us_cropland.ipynb"><img src="https://www.tensorflow.org/images/colab_logo_32px.png" /> Run in Google Colab</a></td>
</table>
'''
# %%
'''
## Install Earth Engine API
Install the [Earth Engine Python API](https://developers.google.com/earth-engine/python_install) and [geehydro](https://github.com/giswqs/geehydro). The **geehydro** Python package builds on the [folium](https://github.com/python-visualization/folium) package and implements several methods for displaying Earth Engine data layers, such as `Map.addLayer()`, `Map.setCenter()`, `Map.centerObject()`, and `Map.setOptions()`.
The following script checks if the geehydro package has been installed. If not, it will install geehydro, which automatically install its dependencies, including earthengine-api and folium.
'''
# %%
import subprocess
import sys

try:
    import geehydro
except ImportError:
    print('geehydro package not installed. Installing ...')
    # Fix: invoke pip through the interpreter running this script
    # (sys.executable) instead of whatever "python" is on PATH, which may
    # be missing or point at a different Python installation.
    subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'geehydro'])
# %%
'''
Import libraries
'''
# %%
import ee
import folium
import geehydro
# %%
'''
Authenticate and initialize Earth Engine API. You only need to authenticate the Earth Engine API once.
'''
# %%
# Initialize the Earth Engine client; run the one-time OAuth flow only
# when Initialize() fails (i.e. no cached credentials are available yet).
try:
    ee.Initialize()
except Exception as e:
    ee.Authenticate()
    ee.Initialize()

# %%
'''
## Create an interactive map
This step creates an interactive map using [folium](https://github.com/python-visualization/folium). The default basemap is the OpenStreetMap. Additional basemaps can be added using the `Map.setOptions()` function.
The optional basemaps can be `ROADMAP`, `SATELLITE`, `HYBRID`, `TERRAIN`, or `ESRI`.
'''

# %%
# Folium map roughly centered on the continental US.
Map = folium.Map(location=[40, -100], zoom_start=4)
Map.setOptions('HYBRID')

# %%
'''
## Add Earth Engine Python script
'''

# %%
# USDA NASS Cropland Data Layer: first image within the 2017-2018 window.
dataset = ee.ImageCollection('USDA/NASS/CDL') \
    .filter(ee.Filter.date('2017-01-01', '2018-12-31')) \
    .first()
cropLandcover = dataset.select('cropland')
Map.setCenter(-100.55, 40.71, 4)
Map.addLayer(cropLandcover, {}, 'Crop Landcover')

# %%
'''
## Display Earth Engine data layers
'''

# %%
Map.setControlVisibility(layerControl=True, fullscreenControl=True, latLngPopup=True)
Map
|
[
"giswqs@gmail.com"
] |
giswqs@gmail.com
|
39ceedaebdc2ac1eece9c5118a1b8e68f0d01460
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_352/ch19_2020_03_24_01_38_47_185100.py
|
ac0a448a458fe9fddfb5f3d65c33f54c47e13d4b
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 185
|
py
|
def classifica_triangulo(x, y, z):
    """Classify a triangle by its three side lengths.

    Returns "equilátero" when all sides are equal, "isóseles" when exactly
    two are equal, and "escaleno" when all three differ.

    Bug fixed: the original second branch tested ``x!=y or x!=z or z!=y``,
    which is true for *every* non-equilateral triangle, so "escaleno" was
    unreachable and scalene triangles were reported as isosceles.
    """
    if x == y == z:
        return "equilátero"
    elif x == y or x == z or y == z:
        # Exactly two sides equal (the all-equal case was handled above).
        return "isóseles"
    else:
        return "escaleno"
|
[
"you@example.com"
] |
you@example.com
|
b75dbfb5879fc58f82f014ed3d954cd18d86fdf8
|
66e45a2760db8a1fc580689586806c2e3cce0517
|
/pymontecarlo/options/beam/gaussian.py
|
f899c96d4c539645036e125a335c60688599a596
|
[] |
no_license
|
arooney/pymontecarlo
|
4b5b65c88737de6fac867135bc05a175c8114e48
|
d2abbb3e9d3bb903ffec6dd56472470e15928b46
|
refs/heads/master
| 2020-12-02T18:01:42.525323
| 2017-05-19T16:44:30
| 2017-05-19T16:44:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,600
|
py
|
"""
Gaussian beam.
"""
# Standard library modules.
# Third party modules.
# Local modules.
from pymontecarlo.options.beam.cylindrical import \
CylindricalBeam, CylindricalBeamBuilder
from pymontecarlo.options.particle import Particle
# Globals and constants variables.
class GaussianBeam(CylindricalBeam):
    """Two-dimensional beam whose particles follow a 2D Gaussian distribution.

    Geometry and defaults are inherited unchanged from
    :class:`CylindricalBeam`; only the physical interpretation differs.
    """

    def __init__(self, energy_eV, diameter_m, particle=Particle.ELECTRON,
                 x0_m=0.0, y0_m=0.0):
        """
        Creates a new Gaussian beam.

        A Gaussian beam is a two dimensional beam where the particles are
        distributed following a 2D-Gaussian distribution.

        :arg energy_eV: initial energy of the particle(s)
        :type energy_eV: :class:`float`

        :arg diameter_m: diameter of the beam.
            The diameter corresponds to the full width at half maximum (FWHM) of
            a two dimensional Gaussian distribution.
        :type diameter_m: :class:`float`

        :arg particle: type of particles [default: :data:`.ELECTRON`]
        :type particle: :mod:`.particle`

        :arg x0_m: initial x position where the beam first intersects the sample
        :type x0_m: :class:`float`

        :arg y0_m: initial y position where the beam first intersects the sample
        :type y0_m: :class:`float`
        """
        super().__init__(energy_eV, diameter_m, particle, x0_m, y0_m)
class GaussianBeamBuilder(CylindricalBeamBuilder):
    """Builder that assembles :class:`GaussianBeam` instances."""

    def _create_beam(self, energy_eV, diameter_m, particle, x0_m, y0_m):
        # Factory hook: produce a GaussianBeam instead of a CylindricalBeam.
        return GaussianBeam(energy_eV, diameter_m, particle, x0_m, y0_m)
|
[
"philippe.pinard@gmail.com"
] |
philippe.pinard@gmail.com
|
00da945565f09b900f74aadad32813c82c145c2f
|
6b5431368cb046167d71c1f865506b8175127400
|
/challenges/filtra-positivos/tests.py
|
c5012357f4ede037d68c5541b1104ebaf7a60f7b
|
[] |
no_license
|
Insper/design-de-software-exercicios
|
e142f4824a57c80f063d617ace0caa0be746521e
|
3b77f0fb1bc3d76bb99ea318ac6a5a423df2d310
|
refs/heads/master
| 2023-07-03T12:21:36.088136
| 2021-08-04T16:18:03
| 2021-08-04T16:18:03
| 294,813,936
| 0
| 1
| null | 2021-08-04T16:18:04
| 2020-09-11T21:17:24
|
Python
|
UTF-8
|
Python
| false
| false
| 401
|
py
|
from strtest import str_test
class TestCase(str_test.TestCaseWrapper):
    """Checks that the student's function keeps only the positive numbers."""

    TIMEOUT = 2

    def test_1(self):
        # Pair each input list with its expected filtered output.
        casos = [
            ([-1, -2, -3], []),
            ([-1, -2, -3, 0, 1, 2], [1, 2]),
            ([0, -1, 1, -2, 2], [1, 2]),
        ]
        for entrada, esperado in casos:
            self.assertEqual(esperado, self.function(entrada), 'Não funcionou para a entrada {0}'.format(entrada))
|
[
"andrew.kurauchi@gmail.com"
] |
andrew.kurauchi@gmail.com
|
3f3ea32395628def25fe0fafc5c4996611eb9cf0
|
085488720112922ff3aed15f99f3c93911425c4a
|
/vesper/command/station_name_aliases_preset.py
|
0188c41410e63f2a621096eea37ab59056c0769b
|
[
"MIT"
] |
permissive
|
HaroldMills/Vesper
|
0b61d18bc241af22bfc251088fc87d72add6367b
|
ec92fe5231f54336499db189a3bbc6cb08a19e61
|
refs/heads/master
| 2023-07-05T22:45:27.316498
| 2023-07-04T11:58:14
| 2023-07-04T11:58:14
| 19,112,486
| 49
| 6
|
MIT
| 2023-02-14T16:09:19
| 2014-04-24T14:55:34
|
Python
|
UTF-8
|
Python
| false
| false
| 377
|
py
|
"""Module containing class `StationNameAliasesPreset`."""
from vesper.util.yaml_preset import YamlPreset
class StationNameAliasesPreset(YamlPreset):
    """
    Preset that specifies station name aliases.

    The preset body is YAML that specifies a mapping from station names
    to lists of aliases.
    """

    # Name under which this preset type is registered and displayed.
    extension_name = 'Station Name Aliases'
|
[
"harold.mills@gmail.com"
] |
harold.mills@gmail.com
|
5d11bcbbbb8cfe659783ba765ff02b2cd2ea8b0d
|
6609c26b4ed72c156104ce282c3cf88c6aac59f6
|
/chapter09/example15_formulas.py
|
a0b854c34b5de02f0e522bd4669039b9c7180405
|
[
"MIT"
] |
permissive
|
yordanivh/intro_to_cs_w_python
|
4ab9dbbc2963b285b22cacb6648d1300fded18ce
|
eebbb8efd7ef0d07be9bc45b6b1e8f20737ce01a
|
refs/heads/master
| 2020-09-06T12:25:23.362118
| 2020-02-14T14:07:07
| 2020-02-14T14:07:07
| 220,423,698
| 0
| 0
|
MIT
| 2020-02-14T14:07:08
| 2019-11-08T08:41:25
|
Python
|
UTF-8
|
Python
| false
| false
| 385
|
py
|
# Repetition based on user input: keep prompting until the user quits.
# Known formulas and their names; anything else is "Unknown compound".
compounds = {
    "H2O": "Water",
    "NH3": "Ammonia",
    "CH4": "Methane",
}
while True:
    text = input("Please enter a chemical formula (or 'quit' to exit): ")
    if text == "quit":
        print("...exiting program")
        break
    print(compounds.get(text, "Unknown compound"))
|
[
"yordan@hashicorp.com"
] |
yordan@hashicorp.com
|
10ad81e1ec0dbb45e3b86748b126cb9c94ee3c97
|
043e511436798e9aed96052baddac7a353ac6562
|
/printZigZagMatrix.py
|
88bb65c74eabe5d24283634c30dbb4d1b1d12569
|
[] |
no_license
|
bch6179/Pyn
|
01e19f262cda6f7ee1627d41a829609bde153a93
|
e718fcb6b83664d3d6413cf9b2bb4a875e62de9c
|
refs/heads/master
| 2021-01-22T21:28:13.982722
| 2017-05-05T07:21:19
| 2017-05-05T07:21:19
| 85,434,828
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,623
|
py
|
class Solution(object):
    """Diagonal zig-zag traversals of a 2-D matrix.

    Fixes over the original: the ``class`` keyword was missing (the file
    did not even parse), ``res`` was never initialised in
    ``printZigZagMatrix``, the bounds checks were off by one
    (``x <= m`` / ``(i-x) <= n`` index past the matrix), the odd-diagonal
    walk produced negative column indices, and ``printZMatrix`` used the
    Python-2-only ``xrange``.
    """

    def printZigZagMatrix(self, A):
        """Return the elements of matrix `A` in diagonal zig-zag order.

        Even diagonals (row + col even) are read bottom-left to top-right,
        odd diagonals top-right to bottom-left, matching printZMatrix.
        """
        if not A or not A[0]:
            return []
        n, m = len(A), len(A[0])
        res = []
        # Diagonals are indexed by s = row + col, ranging 0 .. n+m-2.
        for s in range(n + m - 1):
            if s % 2 == 0:
                # Even diagonal: walk row downwards (bottom-left -> top-right).
                row = min(s, n - 1)
                while row >= 0 and s - row < m:
                    res.append(A[row][s - row])
                    row -= 1
            else:
                # Odd diagonal: walk row upwards (top-right -> bottom-left).
                row = max(0, s - m + 1)
                while row < n and s - row >= 0:
                    res.append(A[row][s - row])
                    row += 1
        return res

    def printZMatrix(self, matrix):
        """Return elements of `matrix` in zig-zag order by simulated walking."""
        if len(matrix) == 0:
            return []
        x, y = 0, 0
        n, m = len(matrix), len(matrix[0])
        rows, cols = range(n), range(m)
        # direct == 1 moves up-right, direct == 0 moves down-left.
        dx = [1, -1]
        dy = [-1, 1]
        direct = 1
        result = []
        for i in range(len(matrix) * len(matrix[0])):  # was xrange (Python 2)
            result.append(matrix[x][y])
            nextX = x + dx[direct]
            nextY = y + dy[direct]
            if nextX not in rows or nextY not in cols:
                # Hit an edge: step to the next diagonal and reverse direction.
                if direct == 1:
                    if nextY >= m:
                        nextX, nextY = x + 1, y
                    else:
                        nextX, nextY = x, y + 1
                else:
                    if nextX >= n:
                        nextX, nextY = x, y + 1
                    else:
                        nextX, nextY = x + 1, y
                direct = 1 - direct
            x, y = nextX, nextY
        return result
|
[
"bch6179@gmail.com"
] |
bch6179@gmail.com
|
6dc24e6e9dc2ac5b81fe106aafe9c9efcdd3c231
|
1f98ccf9ef52d3adab704676480c85fe22c9542d
|
/simpledb/tx/TxTest.py
|
2243678f4f85a87127d5ea2fc6e3c2c891fab0d3
|
[] |
no_license
|
61515/simpleDB_Python
|
234c671cbbf57f3e8fc5489ec4c292365085b7a8
|
b6846da4a78369838f5b3c7a704de704e18f7be7
|
refs/heads/master
| 2023-02-22T14:07:52.660633
| 2021-01-24T02:25:40
| 2021-01-24T02:25:40
| 332,343,905
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,773
|
py
|
from simpledb.buffer.BufferMgr import BufferMgr
from simpledb.file.BlockId import BlockId
from simpledb.log.LogMgr import LogMgr
from simpledb.tx.Transaction import Transaction
from simpledb.util.File import File
from simpledb.file.FileMgr import FileMgr
class TxTest(object):
    """Demo of Transaction commit/rollback against a small test file.

    Python port of the SimpleDB TxTest program: three transactions write,
    read back, update, and roll back an int at offset 80 and a string at
    offset 40 of block 1 in "testfile".
    """

    @classmethod
    def main(cls, args):
        # db = SimpleDB("txtest", 400, 8)
        # Wire the storage engine manually: 400-byte blocks, 8 buffers.
        fm = FileMgr(File("txtest"), 400)
        lm = LogMgr(fm, "simpledb.log")
        bm = BufferMgr(fm, lm, 8)

        # tx1 seeds initial values (unlogged) and commits.
        tx1 = Transaction(fm, lm, bm)
        blk = BlockId("testfile", 1)
        tx1.pin(blk)
        # The block initially contains unknown bytes,
        # so don't log those values here.
        tx1.setInt(blk, 80, 1, False)
        tx1.setString(blk, 40, "one", False)
        tx1.commit()

        # tx2 reads tx1's values and updates them, this time logged.
        tx2 = Transaction(fm, lm, bm)
        tx2.pin(blk)
        ival = tx2.getInt(blk, 80)
        sval = tx2.getString(blk, 40)
        print("initial value at location 80 = " + str(ival))
        print("initial value at location 40 = " + str(sval))
        newival = ival + 1
        newsval = sval + "!"
        tx2.setInt(blk, 80, newival, True)
        tx2.setString(blk, 40, newsval, True)
        tx2.commit()

        # tx3 makes an uncommitted change and rolls it back.
        tx3 = Transaction(fm, lm, bm)
        tx3.pin(blk)
        print("new value at location 80 = " + str(tx3.getInt(blk, 80)))
        print("new value at location 40 = " + tx3.getString(blk, 40))
        tx3.setInt(blk, 80, 9999, True)
        print("pre-rollback value at location 80 = " + str(tx3.getInt(blk, 80)))
        tx3.rollback()

        # tx4 verifies the rollback restored tx2's committed value.
        tx4 = Transaction(fm, lm, bm)
        tx4.pin(blk)
        print("post-rollback at location 80 = " + str(tx4.getInt(blk, 80)))
        tx4.commit()
if __name__ == '__main__':
    import sys
    # Java-style entry point: hand argv to the classmethod runner.
    TxTest.main(sys.argv)
|
[
"1632039752@qq.com"
] |
1632039752@qq.com
|
3b852eab16c802fad017d8dbc780791730df3e35
|
8eadd4c7db6872f28592333207b23a6e9309aba7
|
/cities/migrations/0019_hotel_city.py
|
5c67acf480e7b48ac8f8eb8188f58c5733437c25
|
[] |
no_license
|
tripupp/Sep19
|
142255904d186845f0f5cdc5b04064fa081c9e6d
|
4e9ab2077be21c914f2f0207e64268fe6f98224d
|
refs/heads/master
| 2022-11-23T23:46:01.512565
| 2019-09-19T19:46:20
| 2019-09-19T19:46:20
| 205,845,957
| 0
| 1
| null | 2022-11-22T04:13:26
| 2019-09-02T11:51:07
|
CSS
|
UTF-8
|
Python
| false
| false
| 510
|
py
|
# Generated by Django 2.2.4 on 2019-09-17 23:17
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Adds a required `city` foreign key to Hotel. `default=3` backfills the
    # existing rows; `preserve_default=False` drops the default afterwards.

    dependencies = [
        ('cities', '0018_hotel_hotelfacility'),
    ]

    operations = [
        migrations.AddField(
            model_name='hotel',
            name='city',
            field=models.ForeignKey(default=3, on_delete=django.db.models.deletion.CASCADE, to='cities.City'),
            preserve_default=False,
        ),
    ]
|
[
"ahuja.devansh2@gmail.com"
] |
ahuja.devansh2@gmail.com
|
bf41d3301840a07139a8656932fa600b19eeaa9d
|
b1ff576cdde5adf698b98446538e0b56d18f070f
|
/klasses/migrations/0003_auto_20210308_1410.py
|
38ed54499f645f695f7d61108f3d1d74e57c9592
|
[] |
no_license
|
DUMBALINYOLO/gbc_oms
|
e3cfba17a12f3600b6503fc70cc9f3dcab5cc0e2
|
cdea6fd81333088b2db9911140681fec9577132a
|
refs/heads/main
| 2023-08-20T11:48:36.418990
| 2021-10-11T23:25:35
| 2021-10-11T23:25:35
| 322,593,446
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 539
|
py
|
# Generated by Django 3.1.5 on 2021-03-08 12:10
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: relax ``StudentClass.class_teacher``.

    Makes the FK nullable and switches deletion behaviour to SET_NULL, so a
    class survives its teacher's profile being deleted.
    """
    dependencies = [
        ('people', '0001_initial'),
        ('klasses', '0002_auto_20210201_1054'),
    ]
    operations = [
        migrations.AlterField(
            model_name='studentclass',
            name='class_teacher',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='people.teacherprofile'),
        ),
    ]
|
[
"baridzimaximillem@gmail.com"
] |
baridzimaximillem@gmail.com
|
e0606d6d38fc089b63725df350a5cdfd84866c17
|
85a32fc66050b5590f6a54774bbb4b88291894ab
|
/10-days-of-statistics/day-8-least-square-regression-line/python3.py
|
8b9028d791e8278e76ef465b56d78615f88c5516
|
[] |
no_license
|
charlesartbr/hackerrank-python
|
59a01330a3a6c2a3889e725d4a29a45d3483fb01
|
bbe7c6e2bfed38132f511881487cda3d5977c89d
|
refs/heads/master
| 2022-04-29T07:40:20.244416
| 2022-03-19T14:26:33
| 2022-03-19T14:26:33
| 188,117,284
| 46
| 37
| null | 2022-03-19T14:26:34
| 2019-05-22T21:38:18
|
Python
|
UTF-8
|
Python
| false
| false
| 387
|
py
|
# HackerRank "Least Square Regression Line": predict a statistics score (y)
# from a math score (x) using simple linear regression.
x = [95, 85, 80, 70, 60]
y = [85, 95, 70, 65, 70]
n = len(x)
mean_x = sum(x) / n
mean_y = sum(y) / n
# Sums required by the closed-form least-squares formulas.  Iterate the
# lists directly (and in lockstep via zip) instead of indexing range(n).
square_x = sum(v * v for v in x)
product_xy = sum(a * b for a, b in zip(x, y))
# Slope b and intercept a of the regression line y = a + b*x.
b = ((n * product_xy) - (sum(x) * sum(y))) / ((n * square_x) - (sum(x) ** 2))
a = mean_y - (b * mean_x)
score_x = 80
# regression line: predicted y for a math score of 80, rounded to 3 decimals
score_y = a + (b * score_x)
print(round(score_y, 3))
|
[
"e-mail@charles.art.br"
] |
e-mail@charles.art.br
|
5c0b1ed0af605cfca6411fb25f4e72fc5d812943
|
f9fe13fe62ba3fb1fb096da4268d5dc43e435ea4
|
/52)convert_num_to_words.py
|
e2ec69789424a1254d41ee6f1e0853fc530c1494
|
[] |
no_license
|
MANNEMPRATAPVARUN/guvipy
|
7e460da8b9d98c2fcd488757585d5bd207570666
|
4da4fe4f3d4855e14383015da19588ef0aea4f32
|
refs/heads/master
| 2020-06-10T01:22:26.063815
| 2019-06-12T13:44:44
| 2019-06-12T13:44:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 324
|
py
|
n1=int(input())
if(n1==1):
print("one")
elif(n1==2):
print("two")
elif(n1==3):
print("three")
elif(n1==4):
print("four")
elif(n1==5):
print("five")
elif(n1==6):
print("six")
elif(n1==7):
print("seven")
elif(n1==8):
print("eight")
elif(n1==9):
print("nine")
elif(n1==10):
print("ten")
|
[
"noreply@github.com"
] |
MANNEMPRATAPVARUN.noreply@github.com
|
f226d0ef2e31d350f86f78906f0565ae5e9b2432
|
eb40a068cef3cabd7a0df37a0ec2bde3c1e4e5ae
|
/imperative/python/megengine/data/tools/_queue.py
|
9acd8396acb4078c93b51588585ba3c9a4325c34
|
[
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
tpoisonooo/MegEngine
|
ccb5c089a951e848344f136eaf10a5c66ae8eb6f
|
b8f7ad47419ef287a1ca17323fd6362c6c69445c
|
refs/heads/master
| 2022-11-07T04:50:40.987573
| 2021-05-27T08:55:50
| 2021-05-27T08:55:50
| 249,964,363
| 1
| 0
|
NOASSERTION
| 2021-05-27T08:55:50
| 2020-03-25T11:48:35
| null |
UTF-8
|
Python
| false
| false
| 5,241
|
py
|
# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import binascii
import os
import queue
import subprocess
from multiprocessing import Queue
import pyarrow
import pyarrow.plasma as plasma
MGE_PLASMA_MEMORY = int(os.environ.get("MGE_PLASMA_MEMORY", 4000000000)) # 4GB
# Each process only need to start one plasma store, so we set it as a global variable.
# TODO: how to share between different processes?
MGE_PLASMA_STORE_MANAGER = None
def _clear_plasma_store():
    # Tear down the module-level plasma store once no PlasmaShmQueue uses it.
    # `_PlasmaStoreManager.__del__` will not be called automatically in a
    # subprocess, so this function should be called explicitly.
    global MGE_PLASMA_STORE_MANAGER
    if MGE_PLASMA_STORE_MANAGER is not None and MGE_PLASMA_STORE_MANAGER.refcount == 0:
        # Deleting the manager triggers __del__, which kills the store process.
        del MGE_PLASMA_STORE_MANAGER
        MGE_PLASMA_STORE_MANAGER = None
class _PlasmaStoreManager:
    """Owns one `plasma-store-server` subprocess and a refcount of its users."""

    # Guards __del__ against a partially-constructed instance (e.g. Popen raised).
    __initialized = False
    def __init__(self):
        # Random socket path so concurrent processes don't collide.
        self.socket_name = "/tmp/mge_plasma_{}".format(
            binascii.hexlify(os.urandom(8)).decode()
        )
        debug_flag = bool(os.environ.get("MGE_DATALOADER_PLASMA_DEBUG", 0))
        # NOTE: this is a hack. Directly use `plasma_store` may make subprocess
        # difficult to handle the exception happened in `plasma-store-server`.
        # For `plasma_store` is just a wrapper of `plasma-store-server`, which use
        # `os.execv` to call the executable `plasma-store-server`.
        cmd_path = os.path.join(pyarrow.__path__[0], "plasma-store-server")
        # Silence the server unless debugging was requested via the env var.
        self.plasma_store = subprocess.Popen(
            [cmd_path, "-s", self.socket_name, "-m", str(MGE_PLASMA_MEMORY),],
            stdout=None if debug_flag else subprocess.DEVNULL,
            stderr=None if debug_flag else subprocess.DEVNULL,
        )
        self.__initialized = True
        self.refcount = 1
    def __del__(self):
        # Kill the store process only if it was actually started and still runs.
        if self.__initialized and self.plasma_store.returncode is None:
            self.plasma_store.kill()
class PlasmaShmQueue:
    def __init__(self, maxsize: int = 0):
        r"""
        Use pyarrow in-memory plasma store to implement shared memory queue.
        Compared to native `multiprocess.Queue`, `PlasmaShmQueue` avoid pickle/unpickle
        and communication overhead, leading to better performance in multi-process
        application.
        :type maxsize: int
        :param maxsize: maximum size of the queue, `None` means no limit. (default: ``None``)
        """
        # Lazy start the plasma store manager
        global MGE_PLASMA_STORE_MANAGER
        if MGE_PLASMA_STORE_MANAGER is None:
            try:
                MGE_PLASMA_STORE_MANAGER = _PlasmaStoreManager()
            except Exception as e:
                err_info = (
                    "Please make sure pyarrow installed correctly!\n"
                    "You can try reinstall pyarrow and see if you can run "
                    "`plasma_store -s /tmp/mge_plasma_xxx -m 1000` normally."
                )
                raise RuntimeError(
                    "Exception happened in starting plasma_store: {}\n"
                    "Tips: {}".format(str(e), err_info)
                )
        else:
            # Reuse the existing store; just bump its user count.
            MGE_PLASMA_STORE_MANAGER.refcount += 1
        self.socket_name = MGE_PLASMA_STORE_MANAGER.socket_name
        # TODO: how to catch the exception happened in `plasma.connect`?
        # Connection is deferred to first put/get so each process connects itself.
        self.client = None
        # Used to store the header for the data.(ObjectIDs)
        self.queue = Queue(maxsize) # type: Queue
    def put(self, data, block=True, timeout=None):
        # Store the payload in plasma; only the small ObjectID travels through
        # the multiprocessing Queue.
        if self.client is None:
            self.client = plasma.connect(self.socket_name)
        try:
            object_id = self.client.put(data)
        except plasma.PlasmaStoreFull:
            raise RuntimeError("plasma store out of memory!")
        try:
            self.queue.put(object_id, block, timeout)
        except queue.Full:
            # Roll back the plasma allocation so a full queue doesn't leak memory.
            self.client.delete([object_id])
            raise queue.Full
    def get(self, block=True, timeout=None):
        if self.client is None:
            self.client = plasma.connect(self.socket_name)
        object_id = self.queue.get(block, timeout)
        if not self.client.contains(object_id):
            raise RuntimeError(
                "ObjectID: {} not found in plasma store".format(object_id)
            )
        data = self.client.get(object_id)
        # Single-consumer semantics: free the plasma object once fetched.
        self.client.delete([object_id])
        return data
    def qsize(self):
        return self.queue.qsize()
    def empty(self):
        return self.queue.empty()
    def join(self):
        self.queue.join()
    def disconnect_client(self):
        if self.client is not None:
            self.client.disconnect()
    def close(self):
        # Close the header queue, drop our store reference, and shut the store
        # down if we were its last user.
        self.queue.close()
        self.disconnect_client()
        global MGE_PLASMA_STORE_MANAGER
        MGE_PLASMA_STORE_MANAGER.refcount -= 1
        _clear_plasma_store()
    def cancel_join_thread(self):
        self.queue.cancel_join_thread()
|
[
"megengine@megvii.com"
] |
megengine@megvii.com
|
976b4d718dda1ead16bd28d32cc7063864b631dd
|
e10a6d844a286db26ef56469e31dc8488a8c6f0e
|
/norml/train_maml.py
|
1b115e00c874ca240844f93a803a8535781b8616
|
[
"Apache-2.0",
"CC-BY-4.0"
] |
permissive
|
Jimmy-INL/google-research
|
54ad5551f97977f01297abddbfc8a99a7900b791
|
5573d9c5822f4e866b6692769963ae819cb3f10d
|
refs/heads/master
| 2023-04-07T19:43:54.483068
| 2023-03-24T16:27:28
| 2023-03-24T16:32:17
| 282,682,170
| 1
| 0
|
Apache-2.0
| 2020-07-26T15:50:32
| 2020-07-26T15:50:31
| null |
UTF-8
|
Python
| false
| false
| 1,731
|
py
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""A short script for training MAML.
Example to run
python -m norml.train_maml --config MOVE_POINT_ROTATE_MAML
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
from dotmap import DotMap
import tensorflow.compat.v1 as tf
from norml import config_maml
from norml import maml_rl
FLAGS = flags.FLAGS
flags.DEFINE_string('config', 'RL_PENDULUM_GYM_CONFIG_META',
'Configuration for training.')
def main(argv):
  """Train MAML with the configuration named by --config until convergence.

  Builds the algorithm from `config_maml`, then runs `algo.train` in chunks
  of 10 iterations inside a single TF session until it reports completion.
  """
  del argv  # Unused
  config = DotMap(getattr(config_maml, FLAGS.config))
  print('MAML config: %s' % FLAGS.config)
  tf.logging.info('MAML config: %s', FLAGS.config)
  algo = maml_rl.MAMLReinforcementLearning(config)
  # allow_soft_placement lets TF fall back to CPU for ops without a GPU kernel.
  sess_config = tf.ConfigProto(allow_soft_placement=True)
  sess_config.gpu_options.allow_growth = True
  with tf.Session(config=sess_config) as sess:
    # NOTE(review): logging is initialized before variables — presumably
    # init_logging only builds summary writers; confirm it reads no variables.
    algo.init_logging(sess)
    init = tf.global_variables_initializer()
    sess.run(init)
    done = False
    # Each train() call runs 10 iterations and reports whether training is done.
    while not done:
      done, _ = algo.train(sess, 10)
    algo.stop_logging()
if __name__ == '__main__':
tf.app.run()
|
[
"copybara-worker@google.com"
] |
copybara-worker@google.com
|
543fd4be6b22c96bcaa75db052979e4302c6b24d
|
52f0984561895b48f3e6e40658a6e52c97705715
|
/python-folder/serverless2.py
|
ddcd1ce97acb5419e4a251fe3bee710c7535e509
|
[] |
no_license
|
jsanon01/python
|
8da2755e7724850875518455c1760bb9f04dd873
|
edd52214e3578f18b71b0ad944c287411fb23dfb
|
refs/heads/master
| 2022-05-20T00:29:10.550169
| 2022-05-10T01:08:48
| 2022-05-10T01:08:48
| 165,682,490
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,377
|
py
|
"""
I want to import website for Reference
I want a function named 'compute'
I want a function named 'storage'
I want a function named 'data_stores'
I want a function named 'api_proxy'
I want a function named 'api_integration'
I want a function named 'analytics'
I want a function named 'dev_tool'
I want a function named 'orchestration'
I want to print out a menu to display 'main loop function'
I want a while statement inside the main function calling sub-functions
"""
import webbrowser
# One printer per AWS serverless service category, dispatched from main().
# The string contents (including typos such as "Lamda" and "interactice")
# are user-facing runtime output and are left untouched here.
def compute():
    print('Compute:\n- Lambda lets you run code without provisioning servers.\n- Lamda Edges manages CloudFront.\n- Fargate is a built-purpose containers.')
def storage():
    print('Storage:\n- S3 provides secure web interface from any web around the world.\n- EFS provides simple, and elastic file storage.')
def data_stores():
    print('Data Stores:\n- DynamoDB is a fast and flexible No SQL DB.\n- Aurora Serverless is an auto-scaling configuration for Aurora/MySQL.\n- Amazon RDS Proxy is a H.A. DB managing 1000 of connections to relational DBs.')
def api_proxy():
    print('API Proxy:\nAPI Gateway is a fully managed service to create, maintain, publish, monitor, and secure APIs at any scale....')
def api_integration():
    print('Application Integration:\n- Amazon SNS is a fully managed pub/sub messaging service designing to decouple and scale microservices.\n- Amazon SQS is a fully managed message queuing service designing to decouple and scale microservices.\n- AWS AppSync simplifies application development by creating data from one or more data sources.\n- Amazon EventBridge is a serverless event bus service designing to access application from a variety of sources to your AWS environment.\n...')
def analytics():
    print('Analytics:\n- Amazon Kinesis is a platform for streaming data on AWS.\n- Amazon Athena is an interactice query analyzing data into S3 using standard SQL.')
def dev_tool():
    print('Developer Tools:\n- AWS provides tools and services that aid developers in the serverless application development process.')
def orchestration():
    print('AWS Step Functions:\n- AWS Step Functions coordinates the components of distributed applications using visual workflows.')
print("\nThis script prints out a Menu to display 'Main Loop function.'")
def main():
    """Interactive menu loop.

    Prints the service menu once, then repeatedly reads a number and
    dispatches to the matching printer (or opens a browser) until the user
    enters 0, at which point the loop exits.
    """
    # Dispatch table replaces the original ten-branch if/elif ladder.
    actions = {
        1: compute,
        2: storage,
        3: data_stores,
        4: api_proxy,
        5: api_integration,
        6: analytics,
        7: dev_tool,
        8: orchestration,
        9: lambda: webbrowser.open('https://aws.amazon.com/serverless/'),
        10: lambda: webbrowser.open('https://google.com'),
    }
    print("\nHere are the following AWS 'serverless' services:\n[0] Quit\t\t[1] Compute\t\t[2] Storage\t[3] Data Stores\n[4] API Proxy\t\t[5] API Integration\t[6] Analytics\t[7] Dev Tools\n[8] Orchestration\t[9] AWS Website\t\t[10] Google ")
    # NOTE: the first prompt says "0 - 9" although 10 is accepted; kept
    # verbatim to preserve the script's exact output.
    aws = int(input('\nEnter a number from 0 - 9: '))
    while aws:
        action = actions.get(aws)
        if action is not None:
            action()
        else:
            print('Invalid number.')
        aws = int(input('\nEnter a number from 0 - 10: '))
    print('\nExiting the script...!')
main()
print()
|
[
"jeansanon180@gmail.com"
] |
jeansanon180@gmail.com
|
5883b13a008f89976e099bb9238c124fc37ad18d
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03273/s686375641.py
|
da4e84ee5bc242bc4a53671532a88a0e7dd849e7
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 876
|
py
|
H,W = map(int,input().split())
A = [list(input()) for _ in range(H)]
B = [[0 for _ in range(W)] for _ in range(H)]
for i in range(H):
for j in range(W):
if A[i][j]=="#":
B[i][j] = 1
C = []
for i in range(H-1,-1,-1):
if sum(B[i])==0:
C.append(i)
B1 = []
for i in range(H):
if i not in C:
B1.append(B[i])
C = []
for j in range(W):
flag = 0
for i in range(len(B1)):
if B1[i][j]==1:
flag = 1
break
if flag==0:
C.append(j)
B2 = []
for i in range(len(B1)):
row = []
for j in range(W):
if j not in C:
row.append(B1[i][j])
B2.append(row)
B3 = []
for i in range(len(B2)):
x = ""
for j in range(len(B2[i])):
if B2[i][j]==0:
x += "."
else:
x += "#"
B3.append(x)
for i in range(len(B3)):
print(B3[i])
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
a0187e3cf9f492ef6c1bc89361772c035ddcc37f
|
2ed229651c21f552b61c2a2e520450d20795780f
|
/simple_issue_tracker/issue/migrations/0003_auto_20180331_0756.py
|
2e079cb4b6193e0bd6c65a5a589f0d070d56a4b3
|
[] |
no_license
|
SurendraKumar19/LaunchYard
|
360c2fbff58f453e5e1d21716ba033ce0203e279
|
8be7d967f986320cf212799e4a30b19724904cf2
|
refs/heads/master
| 2020-03-07T18:59:31.975463
| 2018-04-02T04:16:24
| 2018-04-02T04:16:24
| 127,658,734
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 628
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-03-31 07:56
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: make ``Issue.assigned_to`` optional.

    Allows blank/null assignment and keeps CASCADE deletion; the reverse
    accessor is renamed to ``assigned_to``.
    """
    dependencies = [
        ('issue', '0002_auto_20180330_1804'),
    ]
    operations = [
        migrations.AlterField(
            model_name='issue',
            name='assigned_to',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='assigned_to', to=settings.AUTH_USER_MODEL),
        ),
    ]
|
[
"="
] |
=
|
c03da917bab449ab5a0857c6e6e2c45956ddfbcd
|
f0e9efbb0b90ff3f6c3796ab5cfcfecd3d937188
|
/travel_buddy/apps/first_app/views.py
|
6565dd2f866b03559a79d10cee1eef1fe5aac969
|
[] |
no_license
|
john-gore/belt_exam_travel
|
9d6a05b6128ab4d8d3ac50c80784e383e0098a44
|
2d32a22aa66f58a0835e7d3daedeab83925a0a8c
|
refs/heads/master
| 2021-07-21T11:54:26.658977
| 2017-10-27T21:42:54
| 2017-10-27T21:42:54
| 108,598,248
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,039
|
py
|
from django.shortcuts import render, HttpResponse, redirect
from django.core.urlresolvers import reverse
from .models import User, Trip
from django.contrib import messages
def index(request):
    """Render the combined login/registration page."""
    return render(request, "login.html")
def register(request):
    """Create a user from POST data; on success log them in via the session.

    The model manager returns either a list of error strings or the new user.
    NOTE(review): `type(result) == list` would normally be `isinstance`.
    """
    result = User.objects.validate_registration(request.POST)
    if type(result) == list:
        for err in result:
            messages.error(request, err)
        return redirect('/')
    request.session['user_id'] = result.id
    messages.success(request, "Logged in!!")
    return redirect("/success")
def login(request):
    """Authenticate from POST data; mirrors register() error handling."""
    result = User.objects.validate_login(request.POST)
    if type(result) == list:
        for err in result:
            messages.error(request, err)
        return redirect('/')
    request.session['user_id'] = result.id
    messages.success(request, "Successfully logged in!")
    return redirect('/success')
def success(request):
    """Gate: bounce to login unless a user id is in the session."""
    try:
        request.session['user_id']
    except KeyError:
        return redirect('/')
    return redirect('/dash')
def dash(request):
    """Dashboard: the user's own trips plus other users' trips.

    NOTE(review): the 'all_users' key actually holds Trip objects (trips not
    created by this user) — presumably consumed as such by dashboard.html.
    """
    this_user = User.objects.get(id = request.session['user_id'])
    context = {
        'users': User.objects.get(id=request.session['user_id']),
        'all_users': Trip.objects.exclude(user = this_user),
        'trips': Trip.objects.filter(user = this_user),
    }
    return render(request, 'dashboard.html', context)
def add_trip(request):
    """Create a trip from POST data and attach the current user to it."""
    this_user = User.objects.get(id = request.session['user_id'])
    this_trip = Trip.objects.create(destination_name = request.POST
    ['destination'], description = request.POST['description'], travel_from = request.POST['travel_from'], travel_to = request.POST['travel_to'])
    this_trip.user.add(this_user)
    return redirect('/dash')
def adddestination(request, id):
    """Join the current user to an existing trip identified by *id*."""
    this_trip = Trip.objects.get(id=id)
    this_user = User.objects.get(id = request.session['user_id'])
    this_trip.user.add(this_user)
    return redirect("/dash")
def destination(request, id):
    """Render the destination detail page (template-driven, *id* unused here)."""
    return render(request, "destination.html")
|
[
"johngore@Johns-MBP.localdomain"
] |
johngore@Johns-MBP.localdomain
|
3565507f8d9a85c2327ed93fe28b09dfd4f644e6
|
f4b5721c6b3f5623e306d0aa9a95ec53461c1f89
|
/backend/src/gloader/xml/xpath/Context.py
|
f82a5fa07e901b4eb84d3947e8023d537613d205
|
[
"MIT"
] |
permissive
|
citelab/gini5
|
b53e306eb5dabf98e9a7ded3802cf2c646f32914
|
d095076113c1e84c33f52ef46a3df1f8bc8ffa43
|
refs/heads/uml-rename
| 2022-12-10T15:58:49.578271
| 2021-12-09T23:58:01
| 2021-12-09T23:58:01
| 134,980,773
| 12
| 11
|
MIT
| 2022-12-08T05:20:58
| 2018-05-26T17:16:50
|
Python
|
UTF-8
|
Python
| false
| false
| 2,193
|
py
|
########################################################################
#
# File Name: Context.py
#
#
"""
The context of an XPath expression.
WWW: http://4suite.org/XPATH e-mail: support@4suite.org
Copyright (c) 2000-2001 Fourthought Inc, USA. All Rights Reserved.
See http://4suite.org/COPYRIGHT for license and copyright information
"""
import xml.dom.ext
import CoreFunctions
class Context:
    """Evaluation context of an XPath expression (Python 2 code).

    Tracks the context node, 1-based position, context size, variable
    bindings, and in-scope namespaces; caches namespace lookups per node.
    """
    functions = CoreFunctions.CoreFunctions
    def __init__(self,
                 node,
                 position=1,
                 size=1,
                 varBindings=None,
                 processorNss=None):
        self.node = node
        self.position = position
        self.size = size
        self.varBindings = varBindings or {}
        self.processorNss = processorNss or {}
        # Namespace cache: valid only while self.node is unchanged.
        self._cachedNss = None
        self._cachedNssNode = None
        self.stringValueCache = {}
        return
    def __repr__(self):
        # NOTE(review): "Postion" is a typo in the repr output; left unchanged
        # here since repr text is runtime behavior.
        return "<Context at %s: Node=%s, Postion=%d, Size=%d>" % (
            id(self),
            self.node,
            self.position,
            self.size
            )
    def nss(self):
        # Recompute in-scope namespaces only when the context node changed.
        if self._cachedNss is None or self.node != self._cachedNssNode:
            nss = xml.dom.ext.GetAllNs(self.node)
            self._cachedNss = nss
            self._cachedNssNode = self.node
        return self._cachedNss
    def next(self):
        pass
    def setNamespaces(self, processorNss):
        self.processorNss = processorNss
    def copyNamespaces(self):
        return self.processorNss.copy()
    def setVarBindings(self, varBindings):
        self.varBindings = varBindings
    def copyVarBindings(self):
        #FIXME: should this be deep copy, because of the possible list entries?
        return self.varBindings.copy()
    def copyNodePosSize(self):
        return (self.node, self.position, self.size)
    # Python 2 tuple-parameter unpacking; invalid syntax under Python 3.
    def setNodePosSize(self,(node,pos,size)):
        self.node = node
        self.position = pos
        self.size = size
    def copy(self):
        # Shallow copy of the state dict, but with its own varBindings dict.
        newdict = self.__dict__.copy()
        newdict["varBindings"] = self.varBindings.copy()
        return newdict
    def set(self,d):
        self.__dict__ = d
|
[
"maheswar@MacBook-Pro.local"
] |
maheswar@MacBook-Pro.local
|
71e2016edbb152b6c30a8d3f50f6cea2e63b1cb2
|
8d56828fb72dcbb502a857c48394ee3684120745
|
/parsers/pe_hdr.py
|
691798f51cd9c757b0c1468369c4f99201b1566b
|
[
"MIT"
] |
permissive
|
ArseniySky/mitra
|
1a26166e39df76d4472ce2a3000fca46e5327034
|
bb6038587300d34072cda1409e3bb72772b11dc9
|
refs/heads/master
| 2023-01-03T05:39:51.486063
| 2020-10-24T15:35:56
| 2020-10-24T15:35:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,695
|
py
|
#!/usr/bin/env python
from parsers import FType
from helpers import *
class parser(FType):
    """PE-header polyglot parser: hosts a parasite between DOS and PE headers."""
    DESC = "Portable Executable (hdr)"
    TYPE = "PE(hdr)"
    MAGIC = b"MZ"
    def __init__(self, data=""):
        FType.__init__(self, data)
        self.data = data
        self.bAppData = True # let's not have duplicates
        self.bParasite = True
        # alignment, but actually first physical offset of a section for big headers
        self.cut = 2
        self.prewrap = 0
        # Parasite window: offset 0x50, up to 0x100 bytes.
        self.parasite_o = 0x50
        self.parasite_s = 0x100
        # NOTE(review): self.cut is assigned 2 twice in this constructor;
        # the second assignment is redundant — confirm and remove upstream.
        self.cut = 2
    def fixparasite(self, parasite):
        # padding: pad parasite to a 4-byte boundary
        ALIG = 4
        if len(parasite) % ALIG > 0:
            parasite += (ALIG-(len(parasite) % ALIG)) * b"\0"
        return parasite
    def parasitize(self, fparasite):
        # strategy: add parasite between DOS and PE headers
        # TODO: turn this into fixformat
        # Returns (merged_bytes, [dos_header_end, new_pe_header_offset]),
        # or (None, []) when the shifted PE header would overflow the
        # reserved header area.
        host = self.data
        parasite = self.fixparasite(fparasite.data)
        delta = len(parasite)
        DOSHdr_s = 0x40
        # move the PE header at the right offset
        PEhdr_o = DOSHdr_s + delta
        PEHeadersMax = 0x200 # could be adjusted for bigger headers - 0x400 for cuphead
        PEoffset = host.find(b"PE\0\0")
        peHDR = host[PEoffset:PEHeadersMax].rstrip(b"\0") # roughly accurate :p
        if PEhdr_o + len(peHDR) > PEHeadersMax:
            return None, []
        # update SizeOfHeaders
        SoH_o = 0x54 # local header in the PE header
        SoH_s = 4
        peHDR = inc4l(peHDR, SoH_o, delta)
        # combine new PE header with rest of the PE
        merged = b"".join([
            b"MZ", # Magic
            b"\0" * (DOSHdr_s-2-4), # DOS header slack space
            int4l(PEhdr_o), # pointer to new PE header offset
            parasite,
            peHDR,
            b"\0" * (PEHeadersMax - PEhdr_o - len(peHDR)),
            host[PEHeadersMax:],
        ])
        return merged, [DOSHdr_s, PEhdr_o]
|
[
"ange.albertini@gmail.com"
] |
ange.albertini@gmail.com
|
c3bf55ecaebb7517aa85a0abef26fdf5363f20dd
|
e730c3367959c0d46a4d776442704fec24b84afd
|
/guoke_spider/pipelines.py
|
0148d313b6cb0bf2b7a2554e0d257e74afdf9841
|
[] |
no_license
|
IanChen6/guoke_spider
|
3baf4f110856b951ccf4b33c9a30d74287a12e0d
|
90c0e99ad752d421c46117a26af448bede8d5e9c
|
refs/heads/master
| 2021-01-23T18:44:36.839711
| 2017-09-08T01:57:43
| 2017-09-08T01:57:43
| 101,175,667
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,274
|
py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import codecs
import json
import MySQLdb
import pymysql as pymysql
from scrapy.pipelines.images import ImagesPipeline
from twisted.enterprise import adbapi
import os
from scrapy.exporters import JsonItemExporter
class GuokeSpiderPipeline(object):
    """Default no-op pipeline: passes every scraped item through unchanged."""
    def process_item(self, item, spider):
        return item
# Custom exporter: write scraped items to a JSON file.
class JsonWithEncodingPipeline(object):
    '''
    Write each item to article.json as one JSON object per line.
    '''
    def __init__(self):
        # Explicit utf-8 so non-ASCII article text is stored readably.
        self.file = codecs.open("article.json",'w',encoding="utf-8")
    def process_item(self, item, spider):
        # ensure_ascii=False keeps CJK characters as-is instead of \uXXXX escapes.
        lines = json.dumps(dict(item),ensure_ascii=False) + "\n"
        self.file.write(lines)
        return item
    def spider_closed(self,spider):
        # Called by scrapy when the spider finishes; flushes and closes the file.
        self.file.close()
class JsonExporterPipeline(object):
    # Export items to a JSON file using scrapy's built-in JsonItemExporter.
    def __init__(self):
        self.file = codecs.open("articlexport.json",'wb')
        self.exporter=JsonItemExporter(self.file,encoding="utf-8",ensure_ascii=False)
        self.exporter.start_exporting()
    def spider_closed(self,spider):
        # Finalize the JSON array and close the file when the spider stops.
        self.exporter.finish_exporting()
        self.file.close()
    def process_item(self,item,spider):
        self.exporter.export_item(item)
        return item
class MysqlPipeline(object):
    '''
    Insert each scraped item synchronously into a MySQL database.
    '''
    def __init__(self):
        # NOTE(review): credentials are hard-coded here; move them to settings
        # or environment variables before sharing/deploying this code.
        self.conn =pymysql.connect(host='172.24.22.178',port=3306,user='root',passwd='1029384756',db='bole_spider',use_unicode=True, charset="utf8")
        self.cursor = self.conn.cursor()
    def process_item(self,item,spider):
        # Parameterized query: pymysql performs the escaping of all values.
        insert_sql = '''
           insert into boleitem(title,create_date,url,url_object_id,front_image_url,front_image_path,comment_nums,fav_nums,praise_nums,tag,content) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
        '''
        # mysql> GRANT ALL PRIVILEGES ON *.* TO 'root'@'%' IDENTIFIED BY '123456' WITH GRANT OPTION;  -- run to fix "access denied" errors
        self.cursor.execute(insert_sql,(item["title"],item["create_date"],item["url"],item["url_object_id"],item["front_image_url"],item["front_image_path"],item["comment_nums"],item["fav_nums"],item["praise_nums"],item["tag"],item["content"]))
        self.conn.commit()  # commit on the connection object
# class MysqlTwistedPipline(object):
# '''
# 采用异步的方式插入数据,防止造成堵塞(上面那个方法会堵塞)
# '''
# def __init__(self,dbpool):
# self.dbpool = dbpool
#
# @classmethod
# def from_settings(cls,settings):
#
# # dbpool = adbapi.ConnectionPool("pymysql",host='172.24.22.178',port=3306,user='root',passwd='1029384756',db='bole_spider',use_unicode=True, charset="utf8")
#
# dbpool = adbapi.ConnectionPool(DB_SERVER,**DB_CONNECT)
# return cls(dbpool)
# def process_item(self,item,spider):
# '''
# 使用twisted将mysql插入变成异步
# :param item:
# :param spider:
# :return:
# '''
# query = self.dbpool.runInteraction(self.do_insert,item)
#
# query.addErrback(self.handle_error)
#
# def handle_error(self,failure):
# #处理异步插入的异常
# print(failure)
#
# def do_insert(self,cursor,item):
# #具体插入数据
# insert_sql = '''
# insert into jobbole_article(title,create_date,url,url_object_id,front_image_url,front_image_path,comment_nums,fav_nums,praise_nums,tag,content) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
# '''
# cursor.execute(insert_sql,(item["title"],item["create_date"],item["url"],item["url_object_id"],item["front_image_url"],item["front_image_path"],item["comment_nums"],item["fav_nums"],item["praise_nums"],item["tag"],item["content"]))
#
class ArticleImagePipeline(ImagesPipeline):
    """Record the stored cover-image path back onto the item.

    For every download result reported by scrapy's ImagesPipeline, copy the
    stored file path into ``item['front_image_path']``; a failed download
    leaves an empty string instead.
    """
    def item_completed(self, results, item, info):
        for succeeded, detail in results:
            item['front_image_path'] = detail["path"] if succeeded else ""
        return item
|
[
"626614767@qq.com"
] |
626614767@qq.com
|
41bfa16dc61c56da11b0ac61055d62943d6efcf4
|
76b1e713a3057e6f08abc116814af00891dbc2ef
|
/store/models/product.py
|
c85acce04472c75ceaee1e079948a3a94d9b3407
|
[] |
no_license
|
Jay28497/Django-Ecommerce-Website
|
ed17f6536fe4be4d6db658c46999bb05ec22d3f8
|
2697d376c8ff2720720183c0e475b188ff7b0e33
|
refs/heads/master
| 2023-03-31T15:20:56.008251
| 2021-04-10T12:21:08
| 2021-04-10T12:21:08
| 355,427,413
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 878
|
py
|
from django.db import models
from .category import Category
class Product(models.Model):
    """Catalogue product belonging to a Category, with image and price."""
    name = models.CharField(max_length=50)
    price = models.IntegerField(default=0)
    # NOTE(review): default=1 presumes a Category with pk=1 exists — confirm.
    category = models.ForeignKey(Category, on_delete=models.CASCADE, default=1)
    description = models.CharField(max_length=200, default='')
    image = models.ImageField(upload_to='uploads/products/')
    @staticmethod
    def get_all_products():
        """Return a queryset of every product."""
        return Product.objects.all()
    @staticmethod
    def get_all_products_by_category_id(category_id):
        """Products in the given category; all products when category_id is falsy."""
        if category_id:
            return Product.objects.filter(category=category_id)
        else:
            return Product.objects.all()
    @staticmethod
    def get_products_by_id(ids):
        """Products whose primary key is in *ids* (iterable of pks)."""
        return Product.objects.filter(id__in=ids)
    def __str__(self):
        return self.name
    class Meta:
        db_table = 'Product'
|
[
"jaykanjariya28@gmail.com"
] |
jaykanjariya28@gmail.com
|
870b89779a326a8ad7f5d0a8ae8f9736e854516a
|
46646aaeecc313471ce5b9fbc3ffe98fa2779079
|
/tests/test_pitch_shift.py
|
0f638710fbd8cddecf6cff6c5bf78c9a7b9ef8f0
|
[
"MIT",
"Python-2.0"
] |
permissive
|
psgainz/audiomentations
|
c57840cc9516564cff00a9db64e8e92807024bfb
|
a5512965bb59995cbad6a3e5a5994dfaba7185d0
|
refs/heads/master
| 2020-08-23T01:30:44.289468
| 2019-09-22T13:48:07
| 2019-09-22T13:48:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 587
|
py
|
import unittest
import numpy as np
from audiomentations.augmentations.transforms import PitchShift
from audiomentations.core.composition import Compose
class TestPitchShift(unittest.TestCase):
    def test_dynamic_length(self):
        """PitchShift (applied with p=1.0) must preserve dtype and length."""
        samples = np.zeros((512,), dtype=np.float32)
        sample_rate = 16000
        augmenter = Compose([
            PitchShift(min_semitones=-2, max_semitones=-1, p=1.0)
        ])
        samples = augmenter(samples=samples, sample_rate=sample_rate)
        # Output stays float32 and keeps all 512 samples.
        self.assertEqual(samples.dtype, np.float32)
        self.assertEqual(len(samples), 512)
|
[
"iver56@hotmail.com"
] |
iver56@hotmail.com
|
7761e5f25a807647a04357adc679ee7e8ac02fa0
|
7c8bd2e26fdabf1555e0150272ecf035f6c21bbd
|
/dp/금광.py
|
8190eebcd06649d3cf24238703d4170ac0996cbf
|
[] |
no_license
|
hyeokjinson/algorithm
|
44090c2895763a0c53d48ff4084a96bdfc77f953
|
46c04e0f583d4c6ec4f51a24f19a373b173b3d5c
|
refs/heads/master
| 2021-07-21T10:18:43.918149
| 2021-03-27T12:27:56
| 2021-03-27T12:27:56
| 245,392,582
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 764
|
py
|
T=int(input())
for _ in range(T):
n,m=map(int,input().split())
arr=list(map(int,input().split()))
dp=[]
index=0
for i in range(n):
dp.append(arr[index:index+m])
index+=m
for j in range(1,m):
for i in range(n):
#왼쪽 위에서 오는 경우
if i==0:
left_up=0
else:
left_up=dp[i-1][j-1]
#왼쪽에서 오는 경우
left=dp[i][j-1]
#왼쪽 아래에서 오는경우
if i==n-1:
left_down=0
else:
left_down=dp[i+1][j-1]
dp[i][j]=max(left_down,left,left_up)+dp[i][j]
res=0
for i in range(n):
res=max(res,dp[i][m-1])
print(res)
|
[
"hjson817@gmail.com"
] |
hjson817@gmail.com
|
663b87517554847f85f756083b55562af5062418
|
24b1258616111c3b585e137ee64f1395e73f18db
|
/torch/testing/_deprecated.py
|
3cf7338bff889e10f349fd6b7aab1548aed2db30
|
[
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0"
] |
permissive
|
neginraoof/pytorch
|
9ca47c2d32e5b71a24f13232329bea1e5744acda
|
ed79eff6d3faf126fec8895e83bf5ac28aa1041c
|
refs/heads/master
| 2021-12-13T16:03:02.260731
| 2021-09-09T20:37:36
| 2021-09-09T20:37:36
| 193,156,038
| 1
| 0
|
NOASSERTION
| 2021-01-15T23:15:32
| 2019-06-21T20:23:32
|
C++
|
UTF-8
|
Python
| false
| false
| 2,590
|
py
|
"""This module exists since the `torch.testing` exposed a lot of stuff that shouldn't have been public. Although this
was never documented anywhere, some other internal FB projects as well as downstream OSS projects might use this. Thus,
we don't internalize without warning, but still go through a deprecation cycle.
"""
import functools
import warnings
from typing import Any, Callable, Optional, Tuple
import torch
__all__ = [
"rand",
"randn",
"assert_allclose",
]
def warn_deprecated(instructions: str) -> Callable:
    """Decorator factory: emit a FutureWarning naming *instructions* on each call.

    The returned decorator wraps a function so that every invocation first
    warns that ``torch.testing.<name>`` is deprecated, then delegates to the
    original function unchanged.
    """
    def decorator(fn: Callable) -> Callable:
        message = (
            "torch.testing.{} is deprecated and will be removed in a future release. "
            "{}".format(fn.__name__, instructions.strip())
        )
        @functools.wraps(fn)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            warnings.warn(message, FutureWarning)
            return fn(*args, **kwargs)
        return wrapper
    return decorator
# Deprecated aliases: calling these emits a FutureWarning and forwards to the
# real torch functions unchanged.
rand = warn_deprecated("Use torch.rand instead.")(torch.rand)
randn = warn_deprecated("Use torch.randn instead.")(torch.randn)
_DTYPE_PRECISIONS = {
torch.float16: (1e-3, 1e-3),
torch.float32: (1e-4, 1e-5),
torch.float64: (1e-5, 1e-8),
}
def _get_default_rtol_and_atol(actual: torch.Tensor, expected: torch.Tensor) -> Tuple[float, float]:
actual_rtol, actual_atol = _DTYPE_PRECISIONS.get(actual.dtype, (0.0, 0.0))
expected_rtol, expected_atol = _DTYPE_PRECISIONS.get(expected.dtype, (0.0, 0.0))
return max(actual_rtol, expected_rtol), max(actual_atol, expected_atol)
# TODO: include the deprecation as soon as torch.testing.assert_close is stable
# @warn_deprecated(
# "Use torch.testing.assert_close instead. "
# "For detailed upgrade instructions see https://github.com/pytorch/pytorch/issues/61844."
# )
def assert_allclose(
    actual: Any,
    expected: Any,
    rtol: Optional[float] = None,
    atol: Optional[float] = None,
    equal_nan: bool = True,
    msg: str = "",
) -> None:
    """Legacy closeness check built on top of ``torch.testing.assert_close``.

    Non-tensor inputs are coerced to tensors, with ``expected`` adopting
    ``actual``'s dtype. When the caller supplies neither tolerance, per-dtype
    defaults are chosen from ``_DTYPE_PRECISIONS``.
    """
    def as_tensor(value: Any, dtype=None) -> torch.Tensor:
        # Leave tensors untouched; wrap anything else.
        if isinstance(value, torch.Tensor):
            return value
        return torch.tensor(value) if dtype is None else torch.tensor(value, dtype=dtype)

    actual = as_tensor(actual)
    expected = as_tensor(expected, dtype=actual.dtype)

    # Defaults apply only when *both* tolerances were omitted.
    if rtol is None and atol is None:
        rtol, atol = _get_default_rtol_and_atol(actual, expected)

    torch.testing.assert_close(
        actual,
        expected,
        rtol=rtol,
        atol=atol,
        equal_nan=equal_nan,
        check_device=True,
        check_dtype=False,
        check_stride=False,
        check_is_coalesced=False,
        msg=msg or None,
    )
|
[
"facebook-github-bot@users.noreply.github.com"
] |
facebook-github-bot@users.noreply.github.com
|
1185038b5add6d6e2bb70633f7fb38a0bd4a4477
|
f324cd2cbebd303fd34cd2e26fe1a51c44202d55
|
/test/integration/vint/linting/policy/test_prohibit_encoding_opt_after_scriptencoding.py
|
867a2e0269f74fa396c1c0c04952e24076736d24
|
[
"MIT"
] |
permissive
|
Vimjas/vint
|
d71579154d177daf458ec68423a66055f90fa308
|
e12091830f0ae7311066b9d1417951182fb32eb5
|
refs/heads/master
| 2023-09-02T07:31:31.299270
| 2022-10-24T13:06:33
| 2022-10-24T13:06:33
| 20,857,415
| 191
| 11
|
MIT
| 2022-10-24T13:10:00
| 2014-06-15T14:38:32
|
Python
|
UTF-8
|
Python
| false
| false
| 2,200
|
py
|
import unittest
from test.asserting.policy import PolicyAssertion, get_fixture_path
from vint.linting.level import Level
from vint.linting.policy.prohibit_encoding_opt_after_scriptencoding import (
ProhibitEncodingOptionAfterScriptEncoding,
)
VALID_ORDER_VIM_SCRIPT = get_fixture_path(
'prohibit_encoding_opt_after_scriptencoding_valid.vim'
)
NO_ENCODING_OPT_VIM_SCRIPT = get_fixture_path(
'prohibit_encoding_opt_after_scriptencoding_valid_no_encoding_opt.vim'
)
NO_SCRIPT_ENCODING_VIM_SCRIPT = get_fixture_path(
'prohibit_encoding_opt_after_scriptencoding_valid_no_scriptencoding.vim'
)
INVALID_ORDER_VIM_SCRIPT = get_fixture_path(
'prohibit_encoding_opt_after_scriptencoding_invalid.vim'
)
class TestProhibitEncodingOptionAfterScriptEncoding(PolicyAssertion, unittest.TestCase):
    """Tests for the ProhibitEncodingOptionAfterScriptEncoding lint policy."""
    def _create_violation_by_line_number(self, line_number):
        # Expected-violation record for the invalid fixture; only the line
        # number varies between cases.
        return {
            'name': 'ProhibitEncodingOptionAfterScriptEncoding',
            'level': Level.WARNING,
            'position': {
                'line': line_number,
                'column': 1,
                'path': INVALID_ORDER_VIM_SCRIPT
            }
        }
    def test_get_violation_if_found_with_valid_file(self):
        # Fixture with the accepted ordering produces no violations.
        self.assertFoundNoViolations(VALID_ORDER_VIM_SCRIPT,
                                     ProhibitEncodingOptionAfterScriptEncoding)
    def test_get_violation_if_found_with_valid_file_no_encoding_option(self):
        # A script that never sets 'encoding' cannot violate the ordering rule.
        self.assertFoundNoViolations(NO_ENCODING_OPT_VIM_SCRIPT,
                                     ProhibitEncodingOptionAfterScriptEncoding)
    def test_get_violation_if_found_with_valid_file_no_scriptencoding(self):
        # Likewise when 'scriptencoding' is absent entirely.
        self.assertFoundNoViolations(NO_SCRIPT_ENCODING_VIM_SCRIPT,
                                     ProhibitEncodingOptionAfterScriptEncoding)
    def test_get_violation_if_found_with_invalid_file(self):
        # The invalid fixture triggers exactly one violation, on line 2.
        expected_violations = [self._create_violation_by_line_number(2)]
        self.assertFoundViolationsEqual(INVALID_ORDER_VIM_SCRIPT,
                                        ProhibitEncodingOptionAfterScriptEncoding,
                                        expected_violations)
if __name__ == '__main__':
unittest.main()
|
[
"yuki.kokubun@mixi.co.jp"
] |
yuki.kokubun@mixi.co.jp
|
e086eda2496c310a06840292336539270a37fa7c
|
82e57ddf893ec8b1d3c19e4eeab758eb2cb4ace4
|
/but/trades/views/send_email.py
|
ded5f2cabc1f01788ff32fda5c6f102a6c146d22
|
[
"MIT"
] |
permissive
|
yevgnenll/but
|
1cf072afdb95492aae7efc9847040d1770a623c7
|
2cb3d7b8fd4b898440f9a74ee4b6b8fbdff32bb1
|
refs/heads/master
| 2021-01-21T04:55:31.408608
| 2016-06-09T02:53:42
| 2016-06-09T02:53:42
| 55,655,016
| 4
| 0
| null | 2016-06-09T03:14:06
| 2016-04-07T01:54:33
|
Python
|
UTF-8
|
Python
| false
| false
| 535
|
py
|
from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse
from trades.models import Contact
from users.models import User
def send_email(request):
    """Create a Contact message from POSTed form data and redirect back.

    Reads ``title``, ``content`` and the recipient's ``user_id`` from the
    POST body, stores a Contact row sent by the logged-in user, then sends
    the browser back to the page the form was submitted from.
    """
    title = request.POST.get('title')
    content = request.POST.get('content')
    accept = request.POST.get('user_id')
    # The created row is not used afterwards, so no local binding is kept
    # (the previous code assigned it to an unused variable).
    Contact.objects.create(
        send=request.user,
        message_text=content,
        message_title=title,
        accept=User.objects.get(id=accept),
    )
    # NOTE(review): HTTP_REFERER may be absent (redirecting to None);
    # confirm all callers arrive via a form page, or add a fallback URL.
    return redirect(request.META.get('HTTP_REFERER'))
|
[
"yevgnenll@gmail.com"
] |
yevgnenll@gmail.com
|
e5013a8634f961854d429a44905a148c553490fc
|
e82b761f53d6a3ae023ee65a219eea38e66946a0
|
/All_In_One/addons/libraryManager.py
|
eb2bfd17f26d7d9e8e79fdfc1d61ba2439c6cd68
|
[] |
no_license
|
2434325680/Learnbgame
|
f3a050c28df588cbb3b14e1067a58221252e2e40
|
7b796d30dfd22b7706a93e4419ed913d18d29a44
|
refs/heads/master
| 2023-08-22T23:59:55.711050
| 2021-10-17T07:26:07
| 2021-10-17T07:26:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,028
|
py
|
# Add-on metadata consumed by Blender's add-on manager.
bl_info = {
    "name": "texture library manager",
    "author": "Antonio Mendoza",
    "version": (0, 0, 1),
    "blender": (2, 72, 0),
    "location": "View3D > Panel Tools > Texture library manager panel (sculpt mode)",
    "warning": "",
    "description": "Load and unload image libraries",
    "category": "Learnbgame",
}
import bpy
import os
import sys
from bpy.props import StringProperty, BoolProperty, IntProperty, CollectionProperty, FloatProperty, EnumProperty
from bpy.types import Menu, Panel
def findImage(p_Name):
    """Return True if a texture named *p_Name* already exists.

    Note: despite the name, this searches ``bpy.data.textures`` — the addon
    creates one IMAGE texture per loaded image, named after the file.
    """
    # any() replaces the previous manual found-flag loop.
    return any(tex.name == p_Name for tex in bpy.data.textures)
def selectImages(p_fileList):
    """Filter *p_fileList* down to supported image files, preserving order.

    Fix: the previous suffix test (``item[-3:] == 'png'`` etc.) matched any
    name merely *ending* in those letters — e.g. ``"foopng"`` — and was
    case-sensitive. This checks the real file extension instead.
    """
    allowed = {'.png', '.jpg', '.jpeg', '.tga'}
    return [item for item in p_fileList
            if os.path.splitext(item)[1].lower() in allowed]
def image_batchImport(p_dir):
    # Load every supported image in p_dir as an image datablock and wrap each
    # one in an IMAGE texture named after the file's basename.
    file_list = sorted(os.listdir(p_dir))
    img_list = selectImages (file_list)
    for img in img_list:
        dirImage = os.path.join(p_dir, img)
        tName = os.path.basename(os.path.splitext(img)[0])
        # Skip files whose texture already exists (findImage checks textures).
        if findImage(tName) == False:
            nImage = bpy.data.images.load(dirImage)
            nT = bpy.data.textures.new(name=tName,type='IMAGE')
            bpy.data.textures[tName].image = nImage
def image_batchRemove(p_dir):
    # Inverse of image_batchImport: remove the image datablocks and IMAGE
    # textures corresponding to the supported image files found in p_dir.
    file_list = sorted(os.listdir(p_dir))
    img_list = selectImages (file_list)
    for img in img_list:
        dirImage = os.path.join(p_dir, img)
        tName = os.path.basename(os.path.splitext(img)[0])
        # NOTE(review): entries are removed from bpy.data.textures while it is
        # being iterated — confirm Blender's collection tolerates this.
        for tex in bpy.data.textures:
            if tex.name == tName:
                if tex.type == 'IMAGE':
                    image = tex.image
                    # Drop users first so the datablocks can actually be freed.
                    tex.image.user_clear()
                    bpy.data.images.remove(image)
                    tex.user_clear()
                    bpy.data.textures.remove(tex)
def findUserSysPath():
    # NOTE(review): dead stub — assigns a local and implicitly returns None;
    # nothing in this module calls it. Candidate for removal.
    userPath = ''
def readLibraryDir():
    """Return the persisted library directory, or '' when no config exists.

    Reads the file written by LBM_OP_LoadLibraries.saveLibraryDir, so the
    path literal must stay identical to the one used there.
    """
    config_path = os.path.join(bpy.utils.resource_path('USER'), "scripts\\presets\\texture_library.conf")
    directory = ''
    if os.path.isfile(config_path):
        # 'with' guarantees the handle is closed, same net effect as the
        # explicit open/read/close sequence.
        with open(config_path, 'r') as config_file:
            directory = config_file.read()
    return directory
class LBM_OP_LibraryUnload(bpy.types.Operator):
    """Operator: remove all textures/images belonging to one sub-library."""
    bl_idname = "operator.library_unload"
    bl_label = ""
    # Name of the library sub-directory whose images should be unloaded.
    library = bpy.props.StringProperty()
    def execute(self, context):
        dir = os.path.join(context.scene.libmng_string_librarydir,self.library)
        image_batchRemove(dir)
        return {'FINISHED'}
class LBM_OP_LibraryLoad(bpy.types.Operator):
    """Operator: load every image of the chosen sub-library as textures."""
    bl_idname = "operator.library_load"
    bl_label = ""
    # Name of the library sub-directory to import from.
    library = bpy.props.StringProperty()
    def execute(self, context):
        # Fix: build the path with os.path.join (as LBM_OP_LibraryUnload
        # does) instead of a hard-coded '\\' separator, so it also works
        # on non-Windows platforms.
        dir = os.path.join(context.scene.libmng_string_librarydir, self.library)
        image_batchImport(dir)
        return {'FINISHED'}
class LBM_OP_LoadLibraries(bpy.types.Operator):
    """Operator: persist the chosen library dir and rescan its sub-libraries."""
    bl_idname = "operator.load_libraries"
    bl_label = "Refresh libraries"
    name = bpy.props.StringProperty()
    dir_library = bpy.props.StringProperty()
    # Class-level cache of sub-library names rendered by the panel.
    libraries = []
    @classmethod
    def loadLibraryDir(self):
        # Read the saved library path from the user preset folder and rescan.
        # NOTE(review): hard-coded Windows separators in the config path.
        dir = ''
        fileDir = os.path.join(bpy.utils.resource_path('USER'), "scripts\\presets\\texture_library.conf")
        if os.path.isfile(fileDir):
            file = open(fileDir, 'r')
            dir = file.read()
            file.close()
        self.dir_library = dir
        self.findLibraries(self.dir_library)
    def saveLibraryDir(self, p_Dir):
        # Persist the chosen library path (same file loadLibraryDir reads).
        fileDir = os.path.join(bpy.utils.resource_path('USER'), "scripts\\presets\\texture_library.conf")
        file = open(fileDir, 'w+')
        file.write(p_Dir)
        file.close()
    def notInLibraries(self,p_item):
        # True when p_item is not yet cached in self.libraries.
        notFound = True
        for item in self.libraries:
            if p_item == item:
                notFound = False
                break
        return notFound
    @classmethod
    def findLibraries(self, p_dir):
        # Rebuild self.libraries in place with every sub-directory of p_dir.
        # NOTE(review): notInLibraries is invoked as self.notInLibraries(self, lib)
        # from this classmethod — unusual, but matches its signature here.
        dir = p_dir
        if os.path.isdir(dir):
            file_list = sorted(os.listdir(dir))
            dir_list = []
            del self.libraries[:]
            for item in file_list:
                lib_dir = os.path.join(dir,item)
                if os.path.isdir(lib_dir):
                    dir_list.append(item)
            for lib in dir_list:
                lib_dir = os.path.join(dir,lib)
                if os.path.isdir(lib_dir) and self.notInLibraries(self,lib):
                    self.libraries.append(lib)
    def execute(self, context):
        # Save the user's chosen directory, then refresh the cached list.
        self.saveLibraryDir(context.scene.libmng_string_librarydir)
        self.dir_library = bpy.context.scene.libmng_string_librarydir
        self.findLibraries(context.scene.libmng_string_librarydir)
        return {'FINISHED'}
class LBM_PN_LibraryManager(bpy.types.Panel):
    """Tool-shelf panel listing libraries with load/unload buttons."""
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'TOOLS'
    bl_label = "Library manager"
    bl_category = 'Tools'
    #bl_context = 'sculptmode'
    bl_options = {'DEFAULT_CLOSED'}
    @classmethod
    def poll(cls, context):
        # Show only in sculpt/paint modes.
        # NOTE(review): 'context.vertex_paint_object' is tested twice; the
        # second occurrence was possibly meant to be weight paint.
        return (context.sculpt_object or context.vertex_paint_object or context.vertex_paint_object or context.image_paint_object)
    def draw(self,context):
        layout = self.layout
        row = layout.row()
        row.prop(context.scene,'libmng_string_librarydir',text='library dir' )
        row = layout.row()
        op = row.operator(LBM_OP_LoadLibraries.bl_idname)
        box = layout.box()
        # One row per cached sub-library: its name plus load (+) / unload (-).
        i = 0
        for item in LBM_OP_LoadLibraries.libraries:
            row = box.row(align=True)
            op_name = item
            op = row.label(text=op_name)
            opl = row.operator(LBM_OP_LibraryLoad.bl_idname, icon='ZOOMIN')
            opl.library = op_name
            opul = row.operator(LBM_OP_LibraryUnload.bl_idname, icon='ZOOMOUT')
            opul.library = op_name
def loadInitData():
    # Restore the persisted library dir and populate the library list.
    LBM_OP_LoadLibraries.loadLibraryDir()
def register():
    """Register classes, create the scene property, and load saved state."""
    default_library = readLibraryDir()
    bpy.types.Scene.libmng_string_librarydir = bpy.props.StringProperty(name="libraryDir", default=default_library, subtype = 'DIR_PATH')
    bpy.utils.register_class(LBM_OP_LoadLibraries)
    bpy.utils.register_class(LBM_OP_LibraryLoad)
    bpy.utils.register_class(LBM_OP_LibraryUnload)
    bpy.utils.register_class(LBM_PN_LibraryManager)
    loadInitData()
def unregister():
    """Remove the scene property and unregister all addon classes."""
    del bpy.types.Scene.libmng_string_librarydir
    bpy.utils.unregister_class(LBM_OP_LoadLibraries)
    bpy.utils.unregister_class(LBM_OP_LibraryLoad)
    bpy.utils.unregister_class(LBM_OP_LibraryUnload)
    bpy.utils.unregister_class(LBM_PN_LibraryManager)
if __name__ == "__main__":
    register()
|
[
"root@localhost.localdomain"
] |
root@localhost.localdomain
|
25d875712d847ff5331efc882b0a32e303df0b65
|
15bfc2b3ba52420d95ed769a332aaa52f402bbd2
|
/api/v2010/incoming_phone_number/create-test-post-example-3/create-test-post-example-3.6.x.py
|
d8100d65b045e9b542be53429173ac37acc793ac
|
[] |
no_license
|
synackme/sample-code
|
013b8f0a6a33bfd327133b09835ee88940d3b1f2
|
5b7981442f63df7cf2d17733b455270cd3fabf78
|
refs/heads/master
| 2020-03-17T04:53:07.337506
| 2018-05-07T16:47:48
| 2018-05-07T16:47:48
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 409
|
py
|
# Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client

# Your Account Sid and Auth Token from twilio.com/console
# Fix: the SID literal previously contained embedded double quotes
# ('"ACXXX..."'), which can never match a real account SID.
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)

# Provision a new incoming phone number and print its SID.
incoming_phone_number = client.incoming_phone_numbers.create(phone_number="33")
print(incoming_phone_number.sid)
|
[
"jose.oliveros.1983@gmail.com"
] |
jose.oliveros.1983@gmail.com
|
076cf2edeb38036316e575bf5611e0df4742847d
|
6cb67c5ae32faf6168d87858b43d3457282a022e
|
/esercizi/exchange2.py
|
082747f748f13162d798425ecbe6a739df126a5b
|
[
"Apache-2.0"
] |
permissive
|
formazione/formazione.github.io
|
4d64320ab524db5a47eb1ee25bcfd8afc5c7a238
|
161e7fdf8b7b8aaeab6e44ad360b3a5dfd90503d
|
refs/heads/master
| 2023-08-10T15:57:22.646132
| 2023-07-25T11:08:04
| 2023-07-25T11:08:04
| 125,328,356
| 0
| 0
|
Apache-2.0
| 2020-02-15T09:02:12
| 2018-03-15T07:25:29
|
Python
|
UTF-8
|
Python
| false
| false
| 4,043
|
py
|
import random
class Exchange:
    """Currency-exchange exercise generator.
    Pass euro=1 to convert dollars into euro, or dollars=1 plus a euro
    amount to convert the other way, e.g.::
        e1 = Exchange(dollars=1000, euro=1, cambio=1.18)  # euro for 1000 $
        d1 = Exchange(dollars=1, euro=500, cambio=1.18)
    """
    def __init__(self, dollars, euro, cambio):
        "If euro = 1 it changes dollars into euro and viceversa"
        # NOTE(review): self.dollars shadows the dollars() method defined
        # below, making that method unreachable on instances.
        self.dollars = dollars
        self.euro = euro
        self.cambio = cambio
    def exchange(self):
        "Calculate euro for dollars and viceversa"
        # Truthy euro => convert dollars -> euro; otherwise euro -> dollars.
        # Returns None if both fields are falsy.
        if self.euro: # get euro for .... dollars
            return round(self.dollars / self.cambio, 2)
        if self.dollars:
            return round(self.euro * self.cambio, 2)
    def print(self):
        "This just adds the € to the euro"
        print(f"{self.euro} €")
    def pprint(self):
        # Build, cache (print_sol) and return the one-line solution string.
        self.print_sol = f"You get {self.dollars} $ for [{self.exchange()} €] "
        self.print_sol += f" at an exchange rate of {self.cambio}"
        #print(self.print_sol)
        return self.print_sol
    def print_ex(self, lang="en"):
        "Call this to print the exercises"
        # Prints a randomly-worded exercise using (and bumping) the
        # module-level counter. Only runs when self.euro is truthy;
        # otherwise it returns None and print_change is not set.
        global cnt
        if self.euro:
            d = self.dollars
            c = self.cambio
            if lang=="en":
                f = [
                    f"How many € you must give to get {d} $ at and exchange rate of {c}?",
                    f"Change {d} $ in euro at an exchange rate of {c}.",
                    f"How many € you must pay to get {d} $ at and exchange rate of {c}?",
                    f"Find the € you need to buy {d} $ at and exchange rate of {c}.",
                    f"If you got {d}$, how many euro you will get at {c} as exchange rate?"
                ]
            if lang=="it":
                f = [
                    f"Quanti € devi pagare per acquistare {d} $ al cambio EUR/USD di {c}?",
                    f"Cambia {d} $ in euro al tasso di {c}.",
                    f"Calcola quanti euro ottieni vendendo {d} $ al cambio pari a {c} EUR/USD.",
                    f"Con {d}$, quanti euro ottieni con un cambio EUR/USD pari a {c}?"
                ]
            self.print_change = random.choice(f)
            print(str(cnt), self.print_change)
            cnt += 1
            return self.print_change
    def dollars(self):
        # NOTE(review): shadowed by the instance attribute set in __init__,
        # and self.euro() would fail since euro is a number — dead code.
        if self.euro != 1:
            return self.euro() * self.cambio
    def random(self):
        # Random amount between 100 and 2950 in steps of 50.
        return random.randrange(100, 3000, 50)
    def random_cambio(self):
        # Random EUR/USD rate in [1.00, 2.00), two decimals.
        ch = 1 + round(random.random(), 2)
        # print(ch)
        return ch
    def generate_ex(self):
        "generates random dollars and exchange"
        self.cambio = round(self.random_cambio(), 2)
        if self.euro == 1:
            self.dollars = self.random()
            self.result = self.exchange()
        else:
            self.euro = self.random()
            self.result = self.exchange()
        # return self.euro, self.dollars, self.result
    def print_solution(self):
        print("Solution:")
        self.pprint()
        print()
# Global exercise counter, incremented by Exchange.print_ex as it prints.
cnt = 1

def main(save=0):
    """Print 10 random exchange exercises, then their solutions.

    With ``save`` truthy, the exercise texts are also passed to save_text.
    Fix: print_ex() is now called *before* reading e1.print_change — the
    previous order read the attribute before the first print_ex() call had
    created it, raising AttributeError on the first loop iteration.
    """
    # Single reusable instance; generate_ex() re-randomizes it each round.
    e1 = Exchange(dollars=1000, euro=1, cambio=1.18)
    sol = []   # numbered solution strings
    text = []  # exercise wordings, for optional saving
    print("Solve these exercizes:\n ---")
    for n in range(10):
        e1.generate_ex()
        # Record the numbered solution before print_ex() bumps the counter,
        # so the solution number matches the printed exercise number.
        sol.append(str(cnt) + " " + e1.pprint())
        e1.print_ex()
        text.append(e1.print_change)
    solutions(sol)
    if save:
        save_text(text)
def save_text(text):
    """Write the exercise texts to 'traccia.txt', one per line.

    Fix: the previous code passed the list itself to ``file.write``, which
    only accepts a string, raising TypeError whenever main(save=1) ran.
    """
    with open("traccia.txt", "w") as file:
        file.write("\n".join(text))
def solutions(sol):
    """Print the 'Solutions' heading followed by every solution line."""
    print("\nSolutions")
    for entry in sol:
        print(entry)
main(save=1)
|
[
"gatto.gio@gmail.com"
] |
gatto.gio@gmail.com
|
6b511d19244e574fcd3136bdc4ce1388d22015d1
|
dc42551b91ccb14541730975f4c377aedb0ad7c4
|
/model/bert_ce.py
|
764390df746ee82ab6f70c7c758eb374c71a677d
|
[] |
no_license
|
liqinzhang/ChineseNER
|
df862eef8d19204c9f9f9e72ac7f716ed6237f7c
|
7d1c6c107b5cc5c57391c0feb1a57357ee519b6a
|
refs/heads/main
| 2023-08-29T18:05:04.404609
| 2021-11-18T01:15:33
| 2021-11-18T01:15:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,419
|
py
|
# -*-coding:utf-8 -*-
from tools.train_utils import *
from tools.layer import *
from tools.loss import cross_entropy_loss
from tools.utils import add_layer_summary
from config import TRAIN_PARAMS
def build_graph(features, labels, params, is_training):
    """
    pretrain Bert model output + cross-entropy loss
    Args:
        features: dict with 'token_ids', 'label_ids', 'mask', 'segment_ids'
            and 'seq_len' tensors (shapes come from the input pipeline —
            presumably [batch, max_seq_len] for the token-level ones; confirm).
        labels: unused here; gold labels are read from features['label_ids'].
        params: config dict — reads 'pretrain_dir', 'embedding_dropout',
            'label_size', 'max_seq_len', 'dtype' and 'idx2tag'.
        is_training: forwarded to the embedding builder; also gates the
            prediction text summary below.
    Returns:
        (loss, pred_ids): training loss and per-token argmax label ids.
    """
    input_ids = features['token_ids']
    label_ids = features['label_ids']
    input_mask = features['mask']
    segment_ids = features['segment_ids']
    seq_len = features['seq_len']
    embedding = pretrain_bert_embedding(input_ids, input_mask, segment_ids, params['pretrain_dir'],
                                        params['embedding_dropout'], is_training)
    load_bert_checkpoint(params['pretrain_dir'])  # load pretrain bert weight from checkpoint
    # Token-level classification head on top of the BERT sequence output.
    logits = tf.layers.dense(embedding, units=params['label_size'], activation=None,
                             use_bias=True, name='logits')
    add_layer_summary(logits.name, logits)
    loss = cross_entropy_loss(logits, label_ids, seq_len,
                              params['label_size'], params['max_seq_len'], params['dtype'])
    pred_ids = tf.argmax(logits, axis=-1)  # batch * max_seq
    if is_training:
        # Log the first sample's decoded prediction for inspection.
        pred2str = map2sequence(params['idx2tag'])
        tf.summary.text('prediction', pred2str(pred_ids[0, :]))
    return loss, pred_ids
TRAIN_PARAMS.update({
'diff_lr_times': {'logit': 500}
})
|
[
"lixiang.2533@bytedance.com"
] |
lixiang.2533@bytedance.com
|
4298c3737468598270839efff3011bedf2e31660
|
8f4c59e69cce2f6e932f55b3c65aae376b206a2c
|
/笨办法学python/projects/setup(1).py
|
332fa7f39039701ec9d8ae239e60aa76c3237f66
|
[] |
no_license
|
zmjm4/python
|
ef7206292f1c3a3a5763b25527024999de5e8e79
|
44cf74c0f16891c351ce214762218ccf2d7353a0
|
refs/heads/master
| 2020-05-27T17:23:48.776167
| 2018-05-24T07:14:16
| 2018-05-24T07:14:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 400
|
py
|
# -*- coding: utf-8 -*-
try:
    from setuptools import setup
except ImportError:
    # Fall back to distutils when setuptools is unavailable.
    # Fix: the previous bare `except:` would also swallow unrelated errors
    # such as KeyboardInterrupt or SystemExit.
    from distutils.core import setup

# Project metadata passed straight to setup(); the placeholder values come
# from the "Learn Python the Hard Way" exercise skeleton.
config = {
    'description': 'My Project',
    'author': 'My Name',
    'url': 'URL to get it at.',
    'download_url': 'Where to download it.',
    'author_email': 'My email.',
    'version': '0.1',
    'install_requires': ['nose'],
    'packages': ['NAME', 'bin'],
    'scripts': [],
    'name': 'projectname'
}

setup(**config)
|
[
"715073608@qq.com"
] |
715073608@qq.com
|
ebda7187c990755ed5402fe5d8d27ea22d320b99
|
a84b99ce26f86a467df95b3ef2b57dd15cb1b49e
|
/boston_housing_v1.py
|
3422493d35a3c0ff7bae1422e2c0fc0720a5c705
|
[] |
no_license
|
yanqinghao/KerasDL
|
3ea6246495bc01bda5792a7146ad4519088375dc
|
853f2872fff600fdd62d882d52d6dd6f82200cd8
|
refs/heads/master
| 2020-03-28T14:07:10.799496
| 2019-05-20T06:17:51
| 2019-05-20T06:17:51
| 148,458,453
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,120
|
py
|
import ptvsd
# Allow other computers to attach to ptvsd at this IP address and port.
ptvsd.enable_attach(address=('10.122.24.46', 3000), redirect_output=True)
# Pause the program until a remote debugger is attached
ptvsd.wait_for_attach()
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
from keras.datasets import boston_housing
from keras import models
from keras import layers
import numpy as np
import matplotlib.pyplot as plt
(train_data, train_targets), (test_data, test_targets) = boston_housing.load_data()
print(train_data.shape, '|', test_data.shape)
print(train_targets)
mean = train_data.mean(axis=0)
train_data -= mean
std = train_data.std(axis=0)
train_data /= std
test_data -= mean
test_data /= std
def build_model():
    """Return a compiled 2x64-unit MLP regressor (MSE loss, MAE metric).
    A fresh model is built per k-fold split so each fold starts untrained;
    the input width comes from the module-level train_data.
    """
    model = models.Sequential()
    model.add(layers.Dense(64, activation='relu', input_shape=(train_data.shape[1],)))
    model.add(layers.Dense(64, activation='relu'))
    model.add(layers.Dense(1))
    model.compile(optimizer='rmsprop', loss='mse', metrics=['mae'])
    return model
## crossvalidation
k = 4
num_val_samples = len(train_data) // k
num_epochs = 500
all_scores = []
all_mae_histories = []
for i in range(k):
print('processing fold #', i)
val_data = train_data[i * num_val_samples: (i + 1) * num_val_samples]
val_targets = train_targets[i * num_val_samples: (i + 1) * num_val_samples]
partial_train_data = np.concatenate([train_data[:i * num_val_samples],
train_data[(i + 1) * num_val_samples:]], axis=0)
partial_train_targets = np.concatenate([train_targets[:i * num_val_samples],
train_targets[(i + 1) * num_val_samples:]], axis=0)
model = build_model()
history = model.fit(partial_train_data, partial_train_targets, epochs=num_epochs, batch_size=1,
verbose=0, validation_data=(val_data, val_targets))
mae_history = history.history['val_mean_absolute_error']
all_mae_histories.append(mae_history)
val_mse, val_mae = model.evaluate(val_data, val_targets, verbose=0)
all_scores.append(val_mae)
print(all_scores, '|', np.mean(all_scores))
average_mae_history = [np.mean([x[i] for x in all_mae_histories]) for i in range(num_epochs)]
plt.plot(range(1, len(average_mae_history) + 1), average_mae_history)
plt.xlabel('Epochs')
plt.ylabel('Validation MAE')
plt.show()
def smooth_curve(points, factor=0.9):
    """Exponentially smooth *points*.

    Each output value mixes the previous smoothed value (weight *factor*)
    with the current point (weight 1 - factor); the first point passes
    through unchanged.
    """
    smoothed = []
    for value in points:
        if not smoothed:
            # Seed the series with the raw first point.
            smoothed.append(value)
        else:
            smoothed.append(smoothed[-1] * factor + value * (1 - factor))
    return smoothed
smooth_mae_history = smooth_curve(average_mae_history[10:])
plt.plot(range(1, len(smooth_mae_history) + 1), smooth_mae_history)
plt.xlabel('Epochs')
plt.ylabel('Validation MAE')
plt.show()
## Final model
model = build_model()
model.fit(train_data, train_targets, epochs=80, batch_size=16, verbose=0)
test_mse_score, test_mae_score = model.evaluate(test_data, test_targets)
print(test_mae_score)
|
[
"187280967@qq.com"
] |
187280967@qq.com
|
1467080cc60312b2af114571aaf42827195feca1
|
0fccee4c738449f5e0a8f52ea5acabf51db0e910
|
/genfragments/EightTeV/LQ/LQToCMu_M_800_TuneZ2star_8TeV_pythia6_cff.py
|
1e0c27817afa82ce4cc3f09aea6c3fd1520590fb
|
[] |
no_license
|
cms-sw/genproductions
|
f308ffaf3586c19b29853db40e6d662e937940ff
|
dd3d3a3826343d4f75ec36b4662b6e9ff1f270f4
|
refs/heads/master
| 2023-08-30T17:26:02.581596
| 2023-08-29T14:53:43
| 2023-08-29T14:53:43
| 11,424,867
| 69
| 987
| null | 2023-09-14T12:41:28
| 2013-07-15T14:18:33
|
Python
|
UTF-8
|
Python
| false
| false
| 1,777
|
py
|
import FWCore.ParameterSet.Config as cms
source = cms.Source("EmptySource")
from Configuration.Generator.PythiaUEZ2starSettings_cfi import *
# Pythia6 pair production of 800 GeV leptoquarks (g+g and q+qbar -> LQ LQbar)
# at sqrt(s) = 8 TeV; decays come from the SLHA file below (c-mu channel,
# per the referenced LQ_cmu decay table).
generator = cms.EDFilter("Pythia6GeneratorFilter",
    pythiaHepMCVerbosity = cms.untracked.bool(False),
    maxEventsToPrint = cms.untracked.int32(0),
    pythiaPylistVerbosity = cms.untracked.int32(0),
    filterEfficiency = cms.untracked.double(1.0),
    comEnergy = cms.double(8000.0),
    # Cross section (pb) recorded for bookkeeping only.
    crossSection = cms.untracked.double(0.001246),
    PythiaParameters = cms.PSet(
        pythiaUESettingsBlock,
        processParameters = cms.vstring(
            'PMAS(42,1)=800.0 ! LQ mass',
            'IMSS(21)=33 ! LUN number for SLHA File (must be 33)',
            'IMSS(22)=33 ! Read-in SLHA decay table',
            'MSEL=0 ! (D=1) to select between full user control (0, then use MSUB) and some preprogrammed alternative',
            'MSUB(163)=1 ! g+g->LQ+LQbar',
            'MSUB(164)=1 ! q+qbar->LQ+LQbar'),
        # This is a vector of ParameterSet names to be read, in this order
        parameterSets = cms.vstring('pythiaUESettings',
            'processParameters',
            'SLHAParameters'),
        SLHAParameters = cms.vstring('SLHAFILE = Configuration/Generator/data/LQ_cmu_beta1.0.out')
    )
)
# NOTE(review): metadata strings still reference the older 250 GeV / 7 TeV
# configuration this file was derived from.
configurationMetadata = cms.untracked.PSet(
    version = cms.untracked.string('$Revision: 1.3 $'),
    name = cms.untracked.string('$Source: /cvs/CMSSW/CMSSW/Configuration/GenProduction/python/PYTHIA6_Exotica_LQ_cmu_250_7TeV_mumujj_cff.py,v $')
    ,
    annotation = cms.untracked.string('default documentation string for PYTHIA6_Exotica_LQ_cmu_250_7TeV_mumujj_cff.py')
)
ProductionFilterSequence = cms.Sequence(generator)
|
[
"sha1-45889d0b422ced4b08fb223eae59c585c331ccec@cern.ch"
] |
sha1-45889d0b422ced4b08fb223eae59c585c331ccec@cern.ch
|
c9d58c17e864ab025f0a150793e137ea2d21d2f7
|
009628e385aca8552dad5c1c5cba018ca6e5954d
|
/scripts/cazalsconnolling
|
63c09a69565fefe3cdc31f78c056fdc17bca2aa8
|
[] |
no_license
|
csrocha/python-mtk
|
565ebcfeb668a6409d48135bf081321d8121b263
|
c3ba520f55c2e204feb6b98251abcb046e51c6cd
|
refs/heads/main
| 2023-01-12T02:46:44.457520
| 2020-11-17T20:20:59
| 2020-11-17T20:20:59
| 313,939,251
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,092
|
#!/usr/bin/python
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
import sys, os
import os.path as path
from math import copysign
from numpy import array, zeros, arange, float, abs, argmax, all
from numpy import ndenumerate,round, dot, outer, arccos, pi, nan
from numpy.linalg import det, norm
from mtk.log import log
from mtk.geometry.vol import BasicVolume
from mtk.geometry.iter import face, line
from mtk.geometry.planecut import cutfaces
from mtk.geometry.triangle import triangle
from mtk.geometry.sphere import sphere
from mtk.geometry.intersection import triangle_sphere
from mtk.geometry.line import segment
from mtk.geometry.arc import arc
from mtk.geometry.polygon import polygon
from mtk.io.ply_ff import load_ply
from mtk.io.vtk_ff import writer
import sys
import bisect as bs
import logging
def solidangles(plyfilename, outfilename, radius):
    # Compute a per-vertex value (via polygon.area with a probe sphere of the
    # given radius) for a PLY mesh and write the values as scalars to a VTK
    # file. Python 2 module.
    log.info("Loading")
    vertexs, faces = load_ply(open(plyfilename))
    # Keep only x,y,z; the PLY loader may return extra per-vertex columns.
    vertexs = vertexs[:,0:3]
    try:
        P = polygon(vertexs, faces)
    except ValueError:
        # NOTE(review): interactive debugger left in the error path —
        # remove before release.
        import pdb; pdb.set_trace()
        raise RuntimeError('Not all faces are triangles in your ply file')
    l = float(len(P.v))
    values = []
    c = 0
    for i in range(len(P.v)):
        try:
            values.append(P.area(i, radius))
        except RuntimeError as m:
            # A failed vertex gets NaN so output length still matches vertices.
            log.warning("Trouble to calculate curvature to vertex %i: '%s'" % (i, m))
            values.append(nan)
    log.info("Storing")
    w = writer(points=vertexs, polygons=faces, scalars=values)
    w.write(outfilename)
_usage_ = """
Calculate curvature to a body using cazals algorithm.
cazalsconnolling [ply filename] [sphere radius] [vtk output filename]
"""
if __name__ == "__main__":
if len(sys.argv) != 4:
print _usage_
log.info("Starting connolling")
log.info("Shell command '%s'" % ' '.join(sys.argv))
log.info("Work path '%s'" % os.getcwd())
plyfile = sys.argv[1]
radius = float(sys.argv[2])
outfile = sys.argv[3]
solidangles(plyfile, outfile, radius)
log.info("Stop conolling execution")
|
[
"cristian.rocha@moldeo.coop"
] |
cristian.rocha@moldeo.coop
|
|
b2514650358f9f9158ce3d6cff20a73195fa8295
|
c18bdb1abf3bb9be1997a568e2260850c11137ab
|
/lib/tinyusb/hw/mcu/microchip/samd/asf4/tools/update_from_atmel_start.py
|
0ef2a04c372b36a6d8f7d128ead7f5776f2a39e1
|
[
"MIT"
] |
permissive
|
diybitcoinhardware/micropython
|
a730026ad7fa4e3cff4cb5ed14dd6b3f1fb54734
|
6bdf1b69162b673d48042ccd021f9efa019091fa
|
refs/heads/master
| 2022-11-23T04:42:47.976301
| 2022-11-07T14:25:09
| 2022-11-07T14:25:28
| 182,322,508
| 4
| 4
|
MIT
| 2022-10-16T11:47:28
| 2019-04-19T20:34:49
|
C
|
UTF-8
|
Python
| false
| false
| 2,514
|
py
|
import requests
import zipfile
import os.path
import shutil
import os
import os.path
import sys
import subprocess
# Refuse to run from anywhere but the repo root: every path below is relative.
if not subprocess.check_output(['git', 'rev-parse', '--show-toplevel'], universal_newlines=True).strip() == os.getcwd():
    print('Please run "{}" from the top directory in the asf4 repo.'.format(sys.argv[0]))
    sys.exit(1)
# Change .gitignore if you change these dir names.
DOWNLOADED_ZIP_DIR = 'downloaded-zip'
DOWNLOADED_DIR = 'downloaded'
for chip in ['samd21', 'samd51']:
    r = None
    os.makedirs(DOWNLOADED_ZIP_DIR, exist_ok=True)
    filename = os.path.join(DOWNLOADED_ZIP_DIR, chip + '.zip')
    if os.path.isfile(filename):
        print('NOTE:', filename, 'already downloaded. Delete it and re-run if you want to re-download')
    else:
        print("Downloading", filename, "...")
        # Ask Atmel START to generate an atzip project from the chip's JSON
        # descriptor in tools/.
        with open('tools/' + chip + '.json', 'r') as project_json:
            headers = {'content-type': 'text/plain'}
            r = requests.post('http://start.atmel.com/api/v1/generate/?format=atzip&compilers=[atmel_studio,make]&file_name_base=My%20Project', headers=headers, data=project_json)
        if not r.ok:
            # Double check that the JSON is minified. If it's not, you'll get a 404.
            print(r.text)
            sys.exit(1)
        with open(filename, 'wb') as out:
            out.write(r.content)
    # Extract to a temporary location and normalize before replacing the existing location.
    z = zipfile.ZipFile(filename)
    downloaded_chip_dir = os.path.join(DOWNLOADED_DIR, chip)
    # Clean up old zip extraction.
    if os.path.isdir(downloaded_chip_dir):
        shutil.rmtree(downloaded_chip_dir)
    print("Unzipping ...")
    z.extractall(downloaded_chip_dir)
    # Remove all carriage returns.
    # (The '\r' below is an actual CR character handed to sed's pattern.)
    for dirpath, dirnames, filenames in os.walk(downloaded_chip_dir):
        for fn in filenames:
            fn = os.path.join(dirpath, fn)
            subprocess.run(['sed', '-i', 's/\r//g', fn])
    # Move files to match SAMD51 structure.
    if chip == 'samd21':
        shutil.move(os.path.join(downloaded_chip_dir, 'samd21a/include'), downloaded_chip_dir)
        shutil.move(os.path.join(downloaded_chip_dir, 'samd21a/gcc/gcc'), os.path.join(downloaded_chip_dir, 'gcc'))
        shutil.move(os.path.join(downloaded_chip_dir, 'samd21a/gcc/system_samd21.c'), os.path.join(downloaded_chip_dir, 'gcc'))
    # rsync mirrors the normalized tree over the checked-in per-chip dir.
    print("Updating",chip,"from",downloaded_chip_dir)
    subprocess.run(['rsync', '-r', '--delete', downloaded_chip_dir + '/', chip], check=True)
|
[
"snigirev.stepan@gmail.com"
] |
snigirev.stepan@gmail.com
|
eb41704997bf530667ea55f44b8a1c784eaeb6b5
|
871454bb6203d26f93c144cb8604e5c6276ab94b
|
/auth/demo/serializers.py
|
eae85b34749d46389279c7317dc7fb45f67302a9
|
[] |
no_license
|
netology-code/DJ_code
|
1b13a51b7f85c1f35dcfb2b2d010fe2ded7eb297
|
a9b657aa6e2d9b3a9f4f4c4c120bb30e8802c042
|
refs/heads/master
| 2023-06-20T11:35:44.791530
| 2021-07-25T14:41:21
| 2021-07-25T14:41:21
| 388,563,577
| 3
| 44
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 254
|
py
|
from rest_framework import serializers
from demo.models import Adv
class AdvSerializer(serializers.ModelSerializer):
    """Serializer for Adv ads; 'user' is set server-side, hence read-only."""
    class Meta:
        model = Adv
        fields = ['id', 'user', 'text', 'created_at', 'open']
        read_only_fields = ['user',]
|
[
"oz.sasha.ivanov@gmail.com"
] |
oz.sasha.ivanov@gmail.com
|
e98cfae6341abe6f37c3f1a0a2427707d096ce33
|
fb54704d4a6f9475f42b85d8c470e3425b37dcae
|
/medium/ex723.py
|
72d622c13c82343615c20230acd3754aceafcd33
|
[] |
no_license
|
ziyuan-shen/leetcode_algorithm_python_solution
|
b2784071a94b04e687fd536b57e8d5a9ec1a4c05
|
920b65db80031fad45d495431eda8d3fb4ef06e5
|
refs/heads/master
| 2021-06-27T05:19:47.774044
| 2021-02-04T09:47:30
| 2021-02-04T09:47:30
| 210,991,299
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,893
|
py
|
class Solution:
    def crush(self, board):
        """Negate every horizontal/vertical run of 3+ candies with equal
        |value|, then zero the marked cells; return True if any was cleared."""
        # Horizontal pass: p1/p2 delimit a run of cells sharing |value|.
        for r in range(self.nrow):
            p1 = 0
            p2 = 1
            while p2 < self.ncol - 1:
                val = abs(board[r][p1])
                while p2 < self.ncol and abs(board[r][p2]) == val:
                    p2 += 1
                if p2 - p1 > 2:
                    # Mark with negation so the vertical pass (which compares
                    # via abs) still detects runs overlapping this one.
                    for c in range(p1, p2):
                        board[r][c] = -val
                p1 = p2
                p2 += 1
        # Vertical pass: identical scan down each column.
        for c in range(self.ncol):
            p1 = 0
            p2 = 1
            while p2 < self.nrow - 1:
                val = abs(board[p1][c])
                while p2 < self.nrow and abs(board[p2][c]) == val:
                    p2 += 1
                if p2 - p1 > 2:
                    for r in range(p1, p2):
                        board[r][c] = -val
                p1 = p2
                p2 += 1
        # Clear all marked (negative) cells and report whether any existed.
        change_flag = False
        for r in range(self.nrow):
            for c in range(self.ncol):
                if board[r][c] < 0:
                    change_flag = True
                    board[r][c] = 0
        return change_flag
    def gravity(self, board):
        """Let candies fall: compact each column's non-zero cells downward."""
        # p1 rests on the lowest empty slot; p2 hunts for the candy above it.
        for c in range(self.ncol):
            p1 = self.nrow - 1
            p2 = p1 - 1
            while p2 >= 0:
                if board[p1][c] != 0:
                    p1 -= 1
                    p2 -= 1
                else:
                    while p2 >= 0 and board[p2][c] == 0:
                        p2 -= 1
                    if p2 >= 0:
                        board[p1][c] = board[p2][c]
                        board[p2][c] = 0
                    p1 -= 1
                    p2 = p1 - 1
    def candyCrush(self, board: List[List[int]]) -> List[List[int]]:
        """Repeatedly crush runs and apply gravity until stable; the board is
        mutated in place and also returned."""
        self.nrow = len(board)
        self.ncol = len(board[0])
        while self.crush(board):
            self.gravity(board)
        return board
|
[
"ziyuan.shen@duke.edu"
] |
ziyuan.shen@duke.edu
|
a0f7d764e965e2a2a6b2307b6cffc09c9e55114e
|
b167407960a3b69b16752590def1a62b297a4b0c
|
/tools/project-creator/Python2.6.6/Lib/test/test_timeout.py
|
48fdc0108adcd87a0723272de8ee9ff835c08394
|
[
"MIT"
] |
permissive
|
xcode1986/nineck.ca
|
543d1be2066e88a7db3745b483f61daedf5f378a
|
637dfec24407d220bb745beacebea4a375bfd78f
|
refs/heads/master
| 2020-04-15T14:48:08.551821
| 2019-01-15T07:36:06
| 2019-01-15T07:36:06
| 164,768,581
| 1
| 1
|
MIT
| 2019-01-15T08:30:27
| 2019-01-09T02:09:21
|
C++
|
UTF-8
|
Python
| false
| false
| 7,061
|
py
|
"""Unit tests for socket timeout feature."""
import unittest
from test import test_support
# This requires the 'network' resource as given on the regrtest command line.
skip_expected = not test_support.is_resource_enabled('network')
import time
import socket
class CreationTestCase(unittest.TestCase):
    """Test case for socket.gettimeout() and socket.settimeout()"""
    # Each test gets a fresh TCP socket; no connection is ever made, so the
    # tests only exercise the timeout bookkeeping.
    def setUp(self):
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    def tearDown(self):
        self.sock.close()
    def testObjectCreation(self):
        # Test Socket creation
        self.assertEqual(self.sock.gettimeout(), None,
                         "timeout not disabled by default")
    def testFloatReturnValue(self):
        # Test return value of gettimeout()
        self.sock.settimeout(7.345)
        self.assertEqual(self.sock.gettimeout(), 7.345)
        self.sock.settimeout(3)
        self.assertEqual(self.sock.gettimeout(), 3)
        self.sock.settimeout(None)
        self.assertEqual(self.sock.gettimeout(), None)
    def testReturnType(self):
        # Test return type of gettimeout()
        # Integer inputs are normalized to float by the socket layer.
        self.sock.settimeout(1)
        self.assertEqual(type(self.sock.gettimeout()), type(1.0))
        self.sock.settimeout(3.9)
        self.assertEqual(type(self.sock.gettimeout()), type(1.0))
    def testTypeCheck(self):
        # Test type checking by settimeout()
        # (Python 2 file: long literal 0L and unicode literal u"" below.)
        self.sock.settimeout(0)
        self.sock.settimeout(0L)
        self.sock.settimeout(0.0)
        self.sock.settimeout(None)
        self.assertRaises(TypeError, self.sock.settimeout, "")
        self.assertRaises(TypeError, self.sock.settimeout, u"")
        self.assertRaises(TypeError, self.sock.settimeout, ())
        self.assertRaises(TypeError, self.sock.settimeout, [])
        self.assertRaises(TypeError, self.sock.settimeout, {})
        self.assertRaises(TypeError, self.sock.settimeout, 0j)
    def testRangeCheck(self):
        # Test range checking by settimeout()
        self.assertRaises(ValueError, self.sock.settimeout, -1)
        self.assertRaises(ValueError, self.sock.settimeout, -1L)
        self.assertRaises(ValueError, self.sock.settimeout, -1.0)
    def testTimeoutThenBlocking(self):
        # Test settimeout() followed by setblocking()
        # setblocking(1) resets the timeout to None; setblocking(0) sets 0.0.
        self.sock.settimeout(10)
        self.sock.setblocking(1)
        self.assertEqual(self.sock.gettimeout(), None)
        self.sock.setblocking(0)
        self.assertEqual(self.sock.gettimeout(), 0.0)
        self.sock.settimeout(10)
        self.sock.setblocking(0)
        self.assertEqual(self.sock.gettimeout(), 0.0)
        self.sock.setblocking(1)
        self.assertEqual(self.sock.gettimeout(), None)
    def testBlockingThenTimeout(self):
        # Test setblocking() followed by settimeout()
        self.sock.setblocking(0)
        self.sock.settimeout(1)
        self.assertEqual(self.sock.gettimeout(), 1)
        self.sock.setblocking(1)
        self.sock.settimeout(1)
        self.assertEqual(self.sock.gettimeout(), 1)
class TimeoutTestCase(unittest.TestCase):
    """Test case for socket.socket() timeout functions"""
    # NOTE: Python 2 source — uses the legacy unittest aliases assert_ and
    # failUnlessRaises; these tests need real network access.
    # There are a number of tests here trying to make sure that an operation
    # doesn't take too much longer than expected. But competing machine
    # activity makes it inevitable that such tests will fail at times.
    # When fuzz was at 1.0, I (tim) routinely saw bogus failures on Win2K
    # and Win98SE. Boosting it to 2.0 helped a lot, but isn't a real
    # solution.
    fuzz = 2.0
    def setUp(self):
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.addr_remote = ('www.python.org.', 80)
        self.localhost = '127.0.0.1'
    def tearDown(self):
        self.sock.close()
    def testConnectTimeout(self):
        # Choose a private address that is unlikely to exist to prevent
        # failures due to the connect succeeding before the timeout.
        # Use a dotted IP address to avoid including the DNS lookup time
        # with the connect time. This avoids failing the assertion that
        # the timeout occurred fast enough.
        addr = ('10.0.0.0', 12345)
        # Test connect() timeout
        _timeout = 0.001
        self.sock.settimeout(_timeout)
        _t1 = time.time()
        self.failUnlessRaises(socket.error, self.sock.connect, addr)
        _t2 = time.time()
        _delta = abs(_t1 - _t2)
        self.assert_(_delta < _timeout + self.fuzz,
                     "timeout (%g) is more than %g seconds more than expected (%g)"
                     %(_delta, self.fuzz, _timeout))
    def testRecvTimeout(self):
        # Test recv() timeout: connect to a real host, then expect recv to
        # time out since nothing is sent.
        _timeout = 0.02
        self.sock.connect(self.addr_remote)
        self.sock.settimeout(_timeout)
        _t1 = time.time()
        self.failUnlessRaises(socket.error, self.sock.recv, 1024)
        _t2 = time.time()
        _delta = abs(_t1 - _t2)
        self.assert_(_delta < _timeout + self.fuzz,
                     "timeout (%g) is %g seconds more than expected (%g)"
                     %(_delta, self.fuzz, _timeout))
    def testAcceptTimeout(self):
        # Test accept() timeout on a listening socket with no incoming client.
        _timeout = 2
        self.sock.settimeout(_timeout)
        # Prevent "Address already in use" socket exceptions
        test_support.bind_port(self.sock, self.localhost)
        self.sock.listen(5)
        _t1 = time.time()
        self.failUnlessRaises(socket.error, self.sock.accept)
        _t2 = time.time()
        _delta = abs(_t1 - _t2)
        self.assert_(_delta < _timeout + self.fuzz,
                     "timeout (%g) is %g seconds more than expected (%g)"
                     %(_delta, self.fuzz, _timeout))
    def testRecvfromTimeout(self):
        # Test recvfrom() timeout on a bound UDP socket (replaces self.sock).
        _timeout = 2
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sock.settimeout(_timeout)
        # Prevent "Address already in use" socket exceptions
        test_support.bind_port(self.sock, self.localhost)
        _t1 = time.time()
        self.failUnlessRaises(socket.error, self.sock.recvfrom, 8192)
        _t2 = time.time()
        _delta = abs(_t1 - _t2)
        self.assert_(_delta < _timeout + self.fuzz,
                     "timeout (%g) is %g seconds more than expected (%g)"
                     %(_delta, self.fuzz, _timeout))
    def testSend(self):
        # Test send() timeout
        # couldn't figure out how to test it
        pass
    def testSendto(self):
        # Test sendto() timeout
        # couldn't figure out how to test it
        pass
    def testSendall(self):
        # Test sendall() timeout
        # couldn't figure out how to test it
        pass
def test_main():
    """Regrtest entry point: requires the 'network' resource, then runs both suites."""
    test_support.requires('network')
    test_support.run_unittest(CreationTestCase, TimeoutTestCase)
if __name__ == "__main__":
    test_main()
|
[
"278688386@qq.com"
] |
278688386@qq.com
|
ea2aa046e4ada2f0e7f4f8eabf7b353f0795682a
|
98c6ea9c884152e8340605a706efefbea6170be5
|
/examples/data/Assignment_3/mtledn001/question1.py
|
d2992358e9f1fd6e7a51fafadabd786f4caafacd
|
[] |
no_license
|
MrHamdulay/csc3-capstone
|
479d659e1dcd28040e83ebd9e3374d0ccc0c6817
|
6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2
|
refs/heads/master
| 2021-03-12T21:55:57.781339
| 2014-09-22T02:22:22
| 2014-09-22T02:22:22
| 22,372,174
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 183
|
py
|
# Draw a rectangle of '*' characters from user-supplied dimensions.
# SECURITY FIX: the original used eval(input(...)), which executes arbitrary
# code typed by the user; int() parses the number safely instead.
h = int(input('Enter the height of the rectangle:\n'))
w = int(input('Enter the width of the rectangle:\n'))
for _ in range(h):
    print("*" * w)
|
[
"jarr2000@gmail.com"
] |
jarr2000@gmail.com
|
a8f1dd9db7c0ce138615ac6844de319aaa5c692f
|
dec29f40788478f73798f23b79ca892b3121150a
|
/apps/product/migrations/0006_product_featured.py
|
239bba971c32d72c05c0f20fc7113bd3b3a59639
|
[] |
no_license
|
RonaldTheodoro/django-ecommerce
|
2c661e6f3ae0154ecb7a8e25183875da8c27d14f
|
9097049107e5a7ab52474aa89fe40f02689fb24a
|
refs/heads/master
| 2021-05-06T02:08:51.166682
| 2017-12-17T00:32:03
| 2017-12-17T00:32:03
| 114,499,259
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 454
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-11-30 11:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the boolean ``featured`` flag (default True) to the Product model."""
    dependencies = [
        ('product', '0005_auto_20171119_1838'),
    ]
    operations = [
        migrations.AddField(
            model_name='product',
            name='featured',
            field=models.BooleanField(default=True),
        ),
    ]
|
[
"ronald.silva4@fatec.sp.gov.br"
] |
ronald.silva4@fatec.sp.gov.br
|
52a14bcf8d67657749c1e9d1a0a544f03ad604c3
|
6644e20a38f22b2b0842981b9b4855fb08bb0113
|
/exer706.py
|
d303559ba7fbd7a9ddd388a86fe41bc3f199b33d
|
[
"MIT"
] |
permissive
|
profnssorg/valmorMantelli1
|
506acb35243aeea24701d70e369280f064a3024e
|
a9a42218b54da208e834f333aa5242d14639b28b
|
refs/heads/master
| 2020-04-11T13:55:03.354177
| 2019-01-18T20:28:33
| 2019-01-18T20:28:33
| 161,834,724
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 941
|
py
|
### Title: String reader
### Function: reads three strings and, in the first one, replaces each character
###           found in the second string with the character at the same
###           position in the third string.
### Author: Valmor Mantelli Jr.
### Date: 08/01/2019
### Version: 0.0.14
# Variable declarations
f = ""
s = ""
t = ""
x = ""
p = ""
y = 0
# Input (prompts kept in Portuguese for the original audience)
f = input("Digite a primeira sequência de letras: ")
s = input("Digite a segunda sequência de letras: ")
t = input("Digit a terceira sequencia de letras: ")
# Processing and output
if len(s) == len(t):  # the substitution map only makes sense if s and t pair up 1:1
    x = ""
    for p in f:
        y = s.find(p)
        if y != -1:
            x += t[y]  # character appears in s: replace with its counterpart in t
        else:
            x += p  # character not mapped: keep it unchanged
    if x == "":
        # NOTE(review): x is empty only when f itself is empty — confirm this
        # message matches the intended condition.
        print("Não sobraram caracteres para serem retirados.")
    else:
        # BUG FIX: the third placeholder is the original string f, not the loop
        # variable p (which only held the last character of f).
        print("Os caracteres %s foram trocados por %s em %s gerando uma nova sequência %s." % (s, t, f, x))
else:
    print ("A segunda e terceira sequência de caracteres precisam ter o mesmo tamanho")
|
[
"noreply@github.com"
] |
profnssorg.noreply@github.com
|
e88c2e150d4d4d9f1fee48c6f67a8495f4ed9ccc
|
d2cb388a385598489e36817afb15502596780dc2
|
/hedgehog/test_bayes_net.py
|
4ac37b9ca124ff2baad63ef64502765f00f3f245
|
[
"MIT"
] |
permissive
|
dormanh/hedgehog
|
2ab5bd0fe58e8e8d4fb6410946a532c4a5d09f7c
|
6ebb6fad04dcae810e16768cfc90e6a12bbc5cd4
|
refs/heads/master
| 2023-03-13T05:45:42.648064
| 2021-02-28T13:49:20
| 2021-02-28T13:49:20
| 343,113,432
| 0
| 0
|
MIT
| 2021-02-28T13:34:20
| 2021-02-28T13:34:19
| null |
UTF-8
|
Python
| false
| false
| 3,524
|
py
|
import copy
import importlib
import inspect
import math
import random
import numpy as np
import pandas as pd
import pytest
import hedgehog as hh
def check_partial_fit(bn):
    """Checks that partial_fit produces the same result as fit."""
    # Deep-copy so both networks share structure but fit independently.
    bn_partial = copy.deepcopy(bn)
    # Fit the parameters of the first BN in one go
    samples = bn.sample(500)
    bn.fit(samples)
    # Fit the parameters of the second BN incrementally, in 5 chunks
    bn_partial.P = {}
    for chunk in np.array_split(samples, 5):
        bn_partial.partial_fit(chunk)
    # Check that the obtained parameters are identical
    for node in bn.P:
        pd.testing.assert_series_equal(bn.P[node], bn_partial.P[node])
def check_sample_many(bn):
    """Sampling n > 1 rows must yield n rows with one column per node."""
    for size in (2, 3, 100):
        drawn = bn.sample(size)
        assert len(drawn) == size
        assert sorted(drawn.columns) == sorted(bn.nodes)
def check_sample_one(bn):
    """Sampling without a size must return a single dict keyed by node name."""
    drawn = bn.sample()
    assert isinstance(drawn, dict)
    assert sorted(bn.nodes) == sorted(drawn.keys())
def check_full_joint_dist(bn):
    """The full joint distribution must cover every node and sum to 1."""
    dist = bn.full_joint_dist()
    assert sorted(dist.index.names) == sorted(bn.nodes)
    assert math.isclose(dist.sum(), 1)
def check_Ps(bn):
    """Every CPT is indexed by (parents..., child) and sums to 1 per parent combo."""
    for child, parents in bn.parents.items():
        P = bn.P[child]
        assert P.index.names[-1] == child
        assert P.index.names[:-1] == parents
        assert P.groupby(parents).sum().eq(1).all()
    # Nodes without parents carry a plain (unconditional) distribution.
    for orphan in set(bn.nodes) - set(bn.parents):
        P = bn.P[orphan]
        assert P.index.name == orphan
        assert P.sum() == 1
def check_query(bn):
    """Checks that the query function works for every algorithm."""
    fjd = bn.full_joint_dist()
    # Take the first row of the joint distribution as a concrete evidence event...
    event = dict(zip(fjd.index.names, fjd.index[0]))
    # ...then pick one variable at random to query and remove it from the evidence.
    query = random.choice(list(event))
    del event[query]
    for algorithm in ('exact', 'gibbs', 'likelihood', 'rejection'):
        bn.query(query, event=event, algorithm=algorithm)
def naive():
    """Build a tiny three-node network with no edges and hand-set priors."""
    bn = hh.BayesNet('A', 'B', 'C')
    priors = {'A': {True: .1, False: .9},
              'B': {True: .3, False: .7},
              'C': {True: .5, False: .5}}
    for node, dist in priors.items():
        bn.P[node] = pd.Series(dist)
    bn.prepare()
    return bn
@pytest.mark.parametrize('bn, check', [
    pytest.param(
        example(),
        check,
        id=f"{example.__name__}:{check.__name__}"
    )
    # Cross every example network shipped in hedgehog.examples (plus the
    # local `naive` network) with every structural check in this module.
    # Note: example() is evaluated at import/collection time.
    for example in (
        *dict(inspect.getmembers(
            importlib.import_module('hedgehog.examples'),
            inspect.isfunction)
        ).values(),
        naive
    )
    for check in (
        check_partial_fit,
        check_sample_many,
        check_sample_one,
        check_full_joint_dist,
        check_Ps,
        check_query
    )
])
def test(bn, check):
    """Run one structural check against one example Bayesian network."""
    check(bn)
def test_indep_vars():
    # Doctest-style test: the expected output below is executable (presumably
    # collected via doctest integration — confirm runner config before editing).
    """This doctest checks that querying with independent variables works as expected.
    >>> bn = hh.BayesNet()
    >>> bn.P['A'] = pd.Series({1: .2, 2: .3, 3: .5})
    >>> bn.P['B'] = pd.Series({1: .4, 2: .2, 3: .4})
    >>> bn.prepare()
    >>> bn.full_joint_dist()
    A  B
    1  1    0.08
       2    0.04
       3    0.08
    2  1    0.12
       2    0.06
       3    0.12
    3  1    0.20
       2    0.10
       3    0.20
    Name: P(A, B), dtype: float64
    >>> bn.query('A', event={'B': 1})
    A
    1    0.2
    2    0.3
    3    0.5
    Name: P(A), dtype: float64
    >>> bn.query('A', event={'B': 2})
    A
    1    0.2
    2    0.3
    3    0.5
    Name: P(A), dtype: float64
    >>> bn.query('A', event={'B': 3})
    A
    1    0.2
    2    0.3
    3    0.5
    Name: P(A), dtype: float64
    """
|
[
"maxhalford25@gmail.com"
] |
maxhalford25@gmail.com
|
065879523e20eb090c83b185d34a52dad01e6602
|
c71799bd4e86b8354588e395396e559df867843b
|
/utility/utility_env.py
|
07867072760d3dfc04b871d842687612217b90a6
|
[] |
no_license
|
jacob-heglund/capture-the-flag
|
ab564c6f271dd2bb2bc07dd4660eb0798f5480f4
|
99423e6606fa4f32358df91f83d69b39c68c6593
|
refs/heads/master
| 2020-09-06T20:03:17.997672
| 2019-11-08T19:37:31
| 2019-11-08T19:37:31
| 220,535,046
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,781
|
py
|
import numpy as np
import gym_cap.envs.const as CONST
# Short aliases for the CtF cell-type codes defined in gym_cap's constants
# module (numeric values shown in the trailing comments).
UNKNOWN = CONST.UNKNOWN # -1
TEAM1_BG = CONST.TEAM1_BACKGROUND # 0
TEAM2_BG = CONST.TEAM2_BACKGROUND # 1
TEAM1_AG = CONST.TEAM1_UGV # 2
TEAM1_UAV= CONST.TEAM1_UAV # 3
TEAM2_AG = CONST.TEAM2_UGV # 4
TEAM2_UAV= CONST.TEAM2_UAV # 5
TEAM1_FL = CONST.TEAM1_FLAG # 6
TEAM2_FL = CONST.TEAM2_FLAG # 7
OBSTACLE = CONST.OBSTACLE # 8
DEAD = CONST.DEAD # 9
SELECTED = CONST.SELECTED # 10
COMPLETED= CONST.COMPLETED # 11
def one_hot_encoder(state, agents, vision_radius=9, reverse=False):
    """Encoding pipeline for CtF state to one-hot representation
    6-channel one-hot representation of state, shape
    (len(agents), 2r+1, 2r+1, 6) where r = vision_radius.
    State is not binary: team2 is represented with -1.
    Channels are not symmetrical.
    :param state: CtF state in raw format (2-D numpy array of cell codes)
    :param agents: Agent list of CtF environment (each must expose get_loc())
    :param vision_radius: Size of the vision range (default=9)
    :param reverse: Reverse the color. Used for red-perspective (default=False)
    :return oh_state: One-hot encoded state
    """
    vision_lx = 2*vision_radius+1
    vision_ly = 2*vision_radius+1
    oh_state = np.zeros((len(agents),vision_lx,vision_ly,6))
    # team 1 : (1), team 2 : (-1), map elements: (0)
    # map_channel groups paired cell types (both teams) into a shared channel.
    map_channel = {UNKNOWN:0, DEAD:0,
                   TEAM1_BG:1, TEAM2_BG:1,
                   TEAM1_AG:2, TEAM2_AG:2,
                   TEAM1_UAV:3, TEAM2_UAV:3,
                   TEAM1_FL:4, TEAM2_FL:4,
                   OBSTACLE:5}
    if not reverse:
        map_color = {UNKNOWN:1, DEAD:0,
                     TEAM1_BG:0, TEAM2_BG:1,
                     TEAM1_AG:1, TEAM2_AG:-1,
                     TEAM1_UAV:1, TEAM2_UAV:-1,
                     TEAM1_FL:1, TEAM2_FL:-1,
                     OBSTACLE:1}
    else: # reverse color
        map_color = {UNKNOWN:1, DEAD:0,
                     TEAM1_BG:1, TEAM2_BG:0,
                     TEAM1_AG:-1, TEAM2_AG:1,
                     TEAM1_UAV:-1, TEAM2_UAV:1,
                     TEAM1_FL:-1, TEAM2_FL:1,
                     OBSTACLE:1}
    # Expand the observation with wall to avoid dealing with the boundary
    sx, sy = state.shape
    _state = np.full((sx+2*vision_radius, sy+2*vision_radius),OBSTACLE)
    _state[vision_radius:vision_radius+sx, vision_radius:vision_radius+sy] = state
    state = _state
    for idx,agent in enumerate(agents):
        # Initialize Variables (shift coordinates into the padded frame)
        x, y = agent.get_loc()
        x += vision_radius
        y += vision_radius
        vision = state[x-vision_radius:x+vision_radius+1,y-vision_radius:y+vision_radius+1] # extract view
        # FULL MATRIX OPERATION: add +1/-1 masks per cell type into its channel
        for channel, val in map_color.items():
            if val == 1:
                oh_state[idx,:,:,map_channel[channel]] += (vision == channel).astype(np.int32)
            elif val == -1:
                oh_state[idx,:,:,map_channel[channel]] -= (vision == channel).astype(np.int32)
    return oh_state
def one_hot_encoder_v2(state, agents, vision_radius=9, reverse=False):
    """ Encoding pipeline for CtF state to one-hot representation
    11-channel one-hot representation of state, shape
    (len(agents), 2r+1, 2r+1, 11). State is binary (boolean masks).
    Some optimization is included.
    :param state: CtF state in raw format
    :param agents: Agent list of CtF environment
    :param vision_radius: Size of the vision range (default=9)
    :param reverse: Reverse the color. Used for red-perspective (default=False)
    :return oh_state: One-hot encoded state
    """
    num_channel = 11
    # NOTE(review): num_agents, vision_lx and vision_ly are computed but never
    # used below — candidates for removal.
    num_agents = len(agents)
    vision_lx = 2*vision_radius+1
    vision_ly = 2*vision_radius+1
    # Map channel for each elements; `reverse` swaps the two teams' channels.
    if not reverse:
        order = [UNKNOWN, OBSTACLE, TEAM1_BG, TEAM2_BG, TEAM1_AG, TEAM2_AG,
                 TEAM1_UAV, TEAM2_UAV, TEAM1_FL, TEAM2_FL, DEAD]
    else:
        order = [UNKNOWN, OBSTACLE, TEAM2_BG, TEAM1_BG, TEAM2_AG, TEAM1_AG,
                 TEAM2_UAV, TEAM1_UAV, TEAM2_FL, TEAM1_FL, DEAD]
    map_channel = dict(zip(order, range(num_channel)))
    # Padding Boundary
    #state = np.pad(state, ((vision_radius,vision_radius),(vision_radius,vision_radius)), 'constant', constant_values=OBSTACLE)
    sx, sy = state.shape
    _state = np.full((sx+2*vision_radius, sy+2*vision_radius),OBSTACLE)
    _state[vision_radius:vision_radius+sx, vision_radius:vision_radius+sy] = state
    state = _state
    each_agent = []
    for idx, agent in enumerate(agents):
        # Initialize Variables (shift coordinates into the padded frame)
        x, y = agent.get_loc()
        x += vision_radius
        y += vision_radius
        vision = state[x-vision_radius:x+vision_radius+1,y-vision_radius:y+vision_radius+1] # extract view
        # operation: one boolean mask per cell type, stacked on the last axis
        each_channel = []
        for element, channel in map_channel.items():
            each_channel.append(vision==element)
        each_agent.append(np.stack(each_channel, axis=-1))
    oh_state = np.stack(each_agent, axis=0)
    return oh_state
# Debug
def debug():
    """Benchmark both encoders against freshly reset CtF environments.

    Execution starts from __main__ and calls this method; prints elapsed
    time for 3000 reset+encode iterations of each encoder.
    """
    import gym
    import time
    env = gym.make("cap-v0")
    s = env.reset(map_size=20)
    print('start running')
    stime = time.time()
    for _ in range(3000):
        s = env.reset(map_size=20)
        one_hot_encoder(s, env.get_team_blue)
    print(f'Finish testing for one-hot-encoder: {time.time()-stime} sec')
    s = env.reset(map_size=20)
    print('start running v2')
    stime = time.time()
    for _ in range(3000):
        s = env.reset(map_size=20)
        one_hot_encoder_v2(s, env.get_team_blue)
    print(f'Finish testing for one-hot-encoder: {time.time()-stime} sec')
if __name__ == '__main__':
    debug()
|
[
"jheglun2@illinois.edu"
] |
jheglun2@illinois.edu
|
6a61a4502a0334d46da5be716ffb38dbcc31975f
|
583fdb9f37dea28ada24e335f1e44ba6cf587770
|
/860 柠檬水找零.py
|
109ef98e4711690d86ba8fbc1ce323e5179e55a1
|
[] |
no_license
|
Ford-z/LeetCode
|
8c4c30eeaa3d8f02b24c8d0058c60f09c3a6debe
|
88eeca3780b4dc77efce4f14d317ed1c872cf650
|
refs/heads/master
| 2021-11-21T00:51:05.314084
| 2021-09-16T15:45:18
| 2021-09-16T15:45:18
| 194,425,542
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,596
|
py
|
#在柠檬水摊上,每一杯柠檬水的售价为 5 美元。
#顾客排队购买你的产品,(按账单 bills 支付的顺序)一次购买一杯。
#每位顾客只买一杯柠檬水,然后向你付 5 美元、10 美元或 20 美元。你必须给每个顾客正确找零,也就是说净交易是每位顾客向你支付 5 美元。
#注意,一开始你手头没有任何零钱。
#如果你能给每位顾客正确找零,返回 true ,否则返回 false 。
#来源:力扣(LeetCode)
#链接:https://leetcode-cn.com/problems/lemonade-change
#著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
class Solution:
    def lemonadeChange(self, bills: List[int]) -> bool:
        """Return True iff every customer can be given correct change.

        Each lemonade costs $5; customers pay with a 5, 10 or 20 bill in the
        given order and we start with no change.  Instead of simulating the
        cash drawer with a list (O(n) count/remove per customer, O(n^2)
        overall), track the number of 5s and 10s — 20s are never given back.
        """
        fives = tens = 0
        for bill in bills:
            if bill == 5:
                fives += 1
            elif bill == 10:
                # Must return one $5.
                if fives == 0:
                    return False
                fives -= 1
                tens += 1
            else:  # bill == 20: return $15, preferring a 10+5 over three 5s
                if tens >= 1 and fives >= 1:
                    tens -= 1
                    fives -= 1
                elif fives >= 3:
                    fives -= 3
                else:
                    return False
        return True
|
[
"noreply@github.com"
] |
Ford-z.noreply@github.com
|
1c6da6f5d8052821f26f42065a6f5aaba410456d
|
dec494542217437afa7f38e8703328b25b183cb8
|
/39.py
|
c80519dded9c7ba9b7dd4286c678e9a42e4168c1
|
[] |
no_license
|
Transi-ent/LeetcodeSolver
|
ee44c9c4d5bce9f11c079c5b27b4b967790cb5cd
|
a89e19753666657a6f15c1be589b9b2dbd4b6c84
|
refs/heads/master
| 2021-04-16T20:37:34.710175
| 2021-02-24T03:51:43
| 2021-02-24T03:51:43
| 249,383,432
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,483
|
py
|
class Solution:
    """
    Backtracking, brute force: the search tree is barely pruned, and duplicate
    combinations are filtered by recording each sorted combination as a
    concatenated string key in the set ``s``.
    """
    def combinationSum(self, candidates: list, target: int) -> list:
        s = set()
        res = self.findCombnt(candidates, target, 0, [],[],s)
        print(res)
        return res
    def findCombnt(self, nums: list, target: int, index: int,
                   tmplist: list, res: list, s: set) ->list:
        # Recursive helper: tmplist is the partial combination, res accumulates
        # answers, s holds the string keys of already-recorded combinations.
        n = len(nums)
        if index>=n:
            return res
        for i in range(index, n):
            copyOftmp = tmplist.copy()
            # NOTE(review): copyOfs is never used — the shared set s is
            # mutated directly below.
            copyOfs = s.copy()
            sumVal = sum(copyOftmp)
            if sumVal+nums[i]==target:
                tmp = copyOftmp+[nums[i]]
                tmp.sort()
                # String key of the sorted combination; note i is reused here,
                # shadowing the loop index.
                ss = ''.join([str(i) for i in tmp])
                if not ss in s:
                    res.append(copyOftmp+[nums[i]])
                    s.add(ss)
            elif sumVal+nums[i]>target:
                self.findCombnt(nums, target, index+1, copyOftmp, res, s)
            else:
                self.findCombnt(nums, target, index, copyOftmp+[nums[i]], res, s)
        return res
class Solution2:
    """
    Backtracking over a sorted candidate list, pruning any branch whose
    running sum would exceed the target.
    """
    def combinationSum(self, candidates: list, target: int) -> list:
        found = []
        candidates.sort()
        count = len(candidates)
        def backtrack(start: int, running: int, picked: list):
            # Backtracking == recursing inside a loop over remaining choices.
            if running > target or start >= count:
                return
            if running == target:
                found.append(picked)
            for pos in range(start, count):
                nxt = running + candidates[pos]
                if nxt > target:
                    # Sorted input: every later candidate overshoots too.
                    break
                backtrack(pos, nxt, picked + [candidates[pos]])
        backtrack(0, 0, [])
        print(found)
        return found
class Solution3:
    """
    Include/exclude recursion: at each position either take the candidate
    again (staying at the same index) or move on to the next one.
    """
    def combinationSum(self, candidates: list, target: int) -> list:
        found = []
        candidates.sort()
        count = len(candidates)
        def explore(pos: int, acc: int, chosen: list):
            if acc > target or pos >= count:
                return
            if acc == target:
                found.append(chosen)
                return
            # Branch 1: take candidates[pos] (repetition allowed).
            explore(pos, acc + candidates[pos], chosen + [candidates[pos]])
            # Branch 2: skip candidates[pos] entirely.
            explore(pos + 1, acc, chosen)
        explore(0, 0, [])
        print(found)
        return found
# Ad-hoc smoke test: runs at import time and prints [[2, 2, 3], [7]].
Solution3().combinationSum([2,3,6,7], 7)
|
[
"1305113016@qq.com"
] |
1305113016@qq.com
|
3a6b487f57c857ad7468ca3079d6f2dcc05ee7d9
|
dd31ec8f3f979b0339cf686ce9094def03ef003a
|
/almoxarifado/admin.py
|
439d557537488c4abfc4866aa38d2ec5b78db9fc
|
[
"MIT"
] |
permissive
|
rvmoura96/projeto-almoxarifado
|
872bb945b4057bdbf108776e2101e9966a23f4de
|
4ca5e5d00f449a940f7c601479bb3fe14c54f012
|
refs/heads/master
| 2022-11-11T07:45:33.475443
| 2017-11-21T21:13:19
| 2017-11-21T21:13:19
| 106,044,249
| 1
| 1
|
MIT
| 2022-10-26T05:02:32
| 2017-10-06T19:48:08
|
Python
|
UTF-8
|
Python
| false
| false
| 322
|
py
|
from django.contrib import admin
from .models import Equipamento, Item, Tipo, TipoItens, Modelo, Fabricante
# Register your models here.
# Expose the warehouse (almoxarifado) models in the Django admin with the
# default ModelAdmin options.
admin.site.register(Tipo)
admin.site.register(TipoItens)
admin.site.register(Modelo)
admin.site.register(Fabricante)
admin.site.register(Equipamento)
admin.site.register(Item)
|
[
"rvmoura.96@gmail.com"
] |
rvmoura.96@gmail.com
|
ff9ef6987c579208033a4e26c738b75647883f52
|
bb81b9c70f204e636560dc969b68c1654e24cb77
|
/tests/i2b2modeltests/metadatatests/test_modifier_dimension.py
|
8b6124f32a4a8cc9d09f52c4faf5c4d17e173218
|
[
"Apache-2.0"
] |
permissive
|
BD2KOnFHIR/i2b2model
|
8f526225498ccfadddeb180e76d644f1098a4041
|
566be446f4b8691f8d82d5d04b7635248aba6041
|
refs/heads/master
| 2021-04-27T20:52:59.357953
| 2020-04-14T17:01:02
| 2020-04-14T17:01:02
| 122,386,895
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,433
|
py
|
import unittest
from collections import OrderedDict
from datetime import datetime
from dynprops import as_dict, clear
from i2b2model.shared.i2b2core import I2B2Core
from i2b2model.testingutils.base_test_case import BaseTestCase
class ModifierDimensionTestCase(BaseTestCase):
    """ModifierDimension should render to the expected i2b2 column dictionary."""
    # Reset the shared I2B2Core dynamic properties around every test so that
    # dates/source-system settings do not leak between test cases.
    def setUp(self):
        clear(I2B2Core)
    def tearDown(self):
        clear(I2B2Core)
    def test_basics(self):
        from i2b2model.metadata.i2b2modifierdimension import ModifierDimension
        I2B2Core.download_date = datetime(2017, 5, 25)
        I2B2Core.sourcesystem_cd = "MOD_TEST"
        I2B2Core.import_date = datetime(2017, 5, 25)
        md = ModifierDimension('MODTEST', 'baboon', 'Wild baboons', ['Earth', 'Africa', 'Zimbabwai'])
        # update_date defaults to "now" until explicitly set.
        self.assertAlmostNow(md.update_date)
        I2B2Core.update_date = datetime(2001, 12, 1)
        expected = OrderedDict([
            ('modifier_path', '\\Earth\\Africa\\Zimbabwai\\baboon\\'),
            ('modifier_cd', 'MODTEST:baboon'),
            ('name_char', 'MODTEST Wild baboons'),
            ('modifier_blob', ''),
            ('update_date', datetime(2001, 12, 1, 0, 0)),
            ('download_date', datetime(2017, 5, 25, 0, 0)),
            ('import_date', datetime(2017, 5, 25, 0, 0)),
            ('sourcesystem_cd', 'MOD_TEST'),
            ('upload_id', None)])
        self.assertEqual(expected, as_dict(md))
if __name__ == '__main__':
    unittest.main()
|
[
"solbrig.harold@mayo.edu"
] |
solbrig.harold@mayo.edu
|
dbc9ed063bb15f5e00d2bafe49c707761c1595fe
|
e58e59ed72562454750c87556bf44d7a4bae5065
|
/pymontecarlo_casino2/expander.py
|
da2650a680f18028b0b14a523f6e6077d26b8430
|
[
"Apache-2.0"
] |
permissive
|
tomyanmmx/pymontecarlo-casino2
|
587c67bc32217a5eb23583f58a6a88511f3c69e2
|
d1c7eb761ccbde745fe858caf9490e87b13347d5
|
refs/heads/master
| 2021-05-07T22:59:52.396739
| 2017-09-10T14:22:59
| 2017-09-10T14:22:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 309
|
py
|
""""""
# Standard library modules.
# Third party modules.
# Local modules.
from pymontecarlo.options.program.expander import Expander, expand_to_single
# Globals and constants variables.
class Casino2Expander(Expander):
    """Expander for the Casino2 program.

    Presumably Casino2 cannot combine analyses in a single run, so each
    analysis is expanded to its own single entry — confirm against the
    pymontecarlo ``expand_to_single`` documentation.
    """
    def expand_analyses(self, analyses):
        return expand_to_single(analyses)
|
[
"philippe.pinard@gmail.com"
] |
philippe.pinard@gmail.com
|
22da037eb35ae20878e8e83e233f08cc4cad5413
|
51a37b7108f2f69a1377d98f714711af3c32d0df
|
/src/leetcode/P430.py
|
2e1a4f1b681e32e310db002cc62a8f5a002e90be
|
[] |
no_license
|
stupidchen/leetcode
|
1dd2683ba4b1c0382e9263547d6c623e4979a806
|
72d172ea25777980a49439042dbc39448fcad73d
|
refs/heads/master
| 2022-03-14T21:15:47.263954
| 2022-02-27T15:33:15
| 2022-02-27T15:33:15
| 55,680,865
| 7
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 729
|
py
|
"""
# Definition for a Node.
class Node:
def __init__(self, val, prev, next, child):
self.val = val
self.prev = prev
self.next = next
self.child = child
"""
def tail(node):
    """Return the last node of a singly linked list, or None for an empty list."""
    cur = node
    while cur is not None and cur.next is not None:
        cur = cur.next
    return cur
class Solution:
    def flatten(self, head):
        """Flatten a multilevel doubly linked list depth-first, in place.

        Each node's child list is spliced between the node and its former
        next pointer; returns the (same) head node, or None for an empty list.
        """
        if head is None:
            return None
        # Recursively flatten the child and next sublists first.
        c = self.flatten(head.child)
        n = self.flatten(head.next)
        if c is not None:
            head.child = None
            # Splice the flattened child list right after head...
            head.next = c
            c.prev = head
            t = tail(c)
            # ...and reattach the flattened next list after the child's tail.
            t.next = n
            if n is not None:
                n.prev = t
        return head
|
[
"stupidchen@foxmail.com"
] |
stupidchen@foxmail.com
|
a7e12fe087d3ae0a715b5abdef52d114f3c2ff4f
|
6452098273ff0555d3edf349ed800958cf89b7d8
|
/Posts/posts/migrations/0003_auto_20190626_1200.py
|
6fb96575c2c8114c2c9aa2295368ad35fb88895a
|
[] |
no_license
|
karthik018/DjangoPractice
|
8a530ec90d6efd7cc9c7122743bc647d6274b736
|
041e1144e8b6153f5f8c0d5a367472f1e68a9dc6
|
refs/heads/master
| 2020-06-11T08:07:59.201541
| 2019-06-27T08:54:42
| 2019-06-27T08:54:42
| 193,900,339
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 503
|
py
|
# Generated by Django 2.2.2 on 2019-06-26 12:00
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('posts', '0002_auto_20190626_1134'),
]
operations = [
migrations.AlterField(
model_name='comments',
name='commented_on_id',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='posts.Comments'),
),
]
|
[
"thinkcreative01karthik@gmail.com"
] |
thinkcreative01karthik@gmail.com
|
db4718b5c11abc46048e756487f6714698f41805
|
20674c17d815214bf66b75be686bb8a45c0f5914
|
/version1/415_Add_Strings.py
|
abfc1fed5d4ddd31c6b7d8e9156cb801b0847556
|
[] |
no_license
|
moontree/leetcode
|
e7b670969fe20785b15aae82996875fd66de1b08
|
f2bf9b13508cd01c8f383789569e55a438f77202
|
refs/heads/master
| 2021-05-20T20:36:45.615420
| 2020-04-02T09:15:26
| 2020-04-02T09:15:26
| 252,408,563
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,058
|
py
|
"""
Given two non-negative integers num1 and num2 represented as string, return the sum of num1 and num2.
Note:
The length of both num1 and num2 is < 5100.
Both num1 and num2 contains only digits 0-9.
Both num1 and num2 does not contain any leading zero.
You must not use any built-in BigInteger library or convert the inputs to integer directly.
"""
def add_strings(num1, num2):
    """
    Schoolbook addition of two non-negative decimal integers given as strings.
    NOTE: Python 2 source — relies on integer `/` (floor) division for the
    carry and uses the `print` statement; not valid Python 3.
    :type num1: str
    :type num2: str
    :rtype: str
    """
    # Reversed digit lists: index 0 is the least significant digit.
    b1, b2 = [int(s) for s in num1][::-1], [int(s) for s in num2][::-1]
    c = 0  # carry
    m, n = len(b1), len(b2)
    i = 0
    res = []
    # Add digit pairs while both numbers still have digits...
    while i < m and i < n:
        val = b1[i] + b2[i] + c
        c = val / 10
        res.append(val % 10)
        i += 1
    # ...then propagate the carry through whichever number is longer.
    while i < m:
        val = b1[i] + c
        c = val / 10
        res.append(val % 10)
        i += 1
    while i < n:
        val = b2[i] + c
        c = val / 10
        res.append(val % 10)
        i += 1
    if c > 0:
        res.append(c)
    print res  # debug output: reversed digit list
    return "".join([str(v) for v in res[::-1]])
# Manual smoke test: prints the reversed digit list and then "100000".
print add_strings("99991", "9")
|
[
"zhangchao@zhangchaodeMacBook-Pro.local"
] |
zhangchao@zhangchaodeMacBook-Pro.local
|
b5e0e9c943a15be5ba40d7ff9c3c755ac2c3a131
|
7748d76ac2557477733c245189a5510e793c965a
|
/5 - Django/Amadon/apps/amadon/views.py
|
9c08620dd6db4778c4a77f3047aef83137c8ccee
|
[] |
no_license
|
ectom/Coding-Dojo-Python-Stack
|
9798fd0c452389d485d343659eed7132c61b9055
|
07d2d48e966f4210627a1a11d561f1d28e8a9982
|
refs/heads/master
| 2021-06-27T22:59:40.794253
| 2017-09-15T09:54:40
| 2017-09-15T09:54:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,034
|
py
|
from django.shortcuts import render, HttpResponse, redirect
def index(request):
    """Render the shop page, (re)initialising the session cart.

    NOTE(review): totals are reset only when 'data' already exists in the
    session; on the very first visit only 'data' is created — confirm whether
    total_price/total_quantity should also be initialised there.
    """
    try:
        request.session['data']
        request.session['total_price'] = 0
        request.session['total_quantity'] = 0
    except:
        request.session['data'] = []
    return render(request, 'amadon/index.html')
def process(request):
    """Handle the order form: look up the unit price and update session totals.

    NOTE: Python 2 source (`print items[number]` statement).
    """
    # Hard-coded catalogue: product id -> unit price.
    items = {
        '001': 19.99,
        '002': 29.99,
        '003': 4.99,
        '004': 49.99
    }
    data = {
        'quantity': int(request.POST['quantity'])
    }
    number = request.POST["product"]
    print items[number]
    request.session['data'].append(data)
    # NOTE(review): this loops over every past purchase but prices them all at
    # the *current* product's price — looks like a bug; confirm intent.
    for item in request.session['data']:
        request.session['price'] = items[number]*item['quantity']
        request.session['total_price'] += (items[number]*item['quantity'])
        request.session['total_quantity'] += item['quantity']
    return redirect('/checkout')
def checkout(request):
    """Render the checkout summary page."""
    return render(request, 'amadon/checkout.html')
def clear(request):
    """Empty the session (cart and totals) and return to the shop page."""
    request.session.clear()
    return redirect('/')
|
[
"etom579@gmail.com"
] |
etom579@gmail.com
|
5e4a349d5502380c981d7f17a2242bfa4f4745a3
|
b7ad21dc8a2bf3459f8f65c7c2df944f168b9086
|
/regression_tests/bugs/78484.py
|
fcfaf180b6266f5d2fdf43038e46c5cdbba6e362
|
[] |
no_license
|
italiangrid/WMS-Test-Suite
|
ee99651cdacbd18ec202ba3d62d6c1aeb02ab405
|
0c72c8a868c671691eae55800e906d55d9b5de0d
|
refs/heads/master
| 2016-09-03T06:28:05.953113
| 2013-11-25T13:29:30
| 2013-11-25T13:29:30
| 3,335,842
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,897
|
py
|
#
# Bug: 78484
# Title: [ YAIM_WMS ] Multiple parameter configuration added in condor_config.local
# Link: https://savannah.cern.ch/bugs/?78484
#
#
import logging
from libutils.Exceptions import *
def run(utils):
    """Regression test for bug 78484.

    Re-running YAIM on the WMS host must not add or remove GRID_MONITOR*
    entries in condor_config.local; raises GeneralError if the set of
    entries changes.
    """
    bug='78484'
    logging.info("Start regression test for bug %s"%(bug))
    logging.warning("To verify this bug you need access to WMS. You have set WMS_USERNAME and WMS_PASSOWRD attributes at configuration file")
    ssh=utils.open_ssh(utils.get_WMS(),utils.get_Username(),utils.get_Password())
    logging.info("Get the list of GRID_MONITOR* variables")
    # [:-1] strips the trailing newline from the hostname command output.
    short_hostname=utils.execute_remote_cmd(ssh,"/bin/hostname -s")[:-1]
    result=utils.execute_remote_cmd(ssh,"cat /opt/condor-c/local.%s/condor_config.local"%(short_hostname))
    result=result.split("\n")
    grid_monitor=[]
    for line in result:
        if line.find("GRID_MONITOR")!=-1:
            grid_monitor.append(line)
    logging.info("Run yaim (site-info.def should be presented in /opt/glite/yaim/examples/siteinfo directory)")
    utils.execute_remote_cmd(ssh,"/opt/glite/yaim/bin/yaim -c -s /opt/glite/yaim/examples/siteinfo/site-info.def -n WMS")
    logging.info("Get the list of GRID_MONITOR* variables after yaim")
    result=utils.execute_remote_cmd(ssh,"cat /opt/condor-c/local.%s/condor_config.local"%(short_hostname))
    result=result.split("\n")
    grid_monitor_after=[]
    for line in result:
        if line.find("GRID_MONITOR")!=-1:
            grid_monitor_after.append(line)
    # Symmetric difference: entries added or removed by the yaim run.
    z=set(grid_monitor)^set(grid_monitor_after)
    if len(z) >0 :
        ssh.close()
        logging.error("Error!!!. After yaim found these new entries: %s"%(z))
        raise GeneralError("Check GRID_MONITOR* variables","After yaim found these new entries: %s"%(z))
    logging.info("Test OK")
    ssh.close()
    logging.info("End of regression test for bug %s"%(bug))
|
[
"akretsis@ceid.upatras.gr"
] |
akretsis@ceid.upatras.gr
|
253068a67dcd85f578e3f01e7400efd2cdf1f67e
|
f2149869f6b2d12ce0fe68cecaf3b7c63dc6d3d8
|
/src/paramCube.py
|
b41e2345df0684e08dc9287533832617e552a305
|
[] |
no_license
|
mclaughlin6464/HODEmulator
|
35946bb55ead0427970dece32110a5932018d561
|
97a86c6c74cc49689478dde3155ab7a4c89f7c91
|
refs/heads/master
| 2020-04-05T14:09:05.870165
| 2016-08-31T00:56:44
| 2016-08-31T00:56:44
| 55,720,418
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,599
|
py
|
# This module is similar to testSeveralSteps, but with an increase in scale.
#@Author Sean McLaughlin
import numpy as np
from time import time
from os import path
from itertools import izip
import argparse
from myCats import cat_dict
from allCorrFunc import loadHaloAndModel, popAndCorr, RBINS, MIN_PTCL
from doBatchCalls import BOUNDS #i Need them in both places but it's smarter to ahve ti there.
# TODO not hardcoding some of these? Depends on my use i guess.
# Will have to see how i end up using this.
SIMNAME = 'chinchilla' # hard-coded for now; see TODO above
REDSHIFT = 0.5 # previously 0.0
#N_PTCL = 200
RBIN_CENTERS = (RBINS[1:] + RBINS[:-1]) / 2 # midpoints of the radial bins
def paramCube(outputdir, fixed_params={}, n_per_dim=4, id_no=None):
if type(n_per_dim) is int:
n_per_dim = {key: n_per_dim for key in BOUNDS.iterkeys()}
assert type(n_per_dim) is dict
values = {}
for param in BOUNDS.iterkeys():
if param in fixed_params:
n_per_dim[param] = 1
values[param] = np.array([fixed_params[param]])
else: # param in varied_params
values[param] = np.linspace(BOUNDS[param][0], BOUNDS[param][1], num=n_per_dim[param])
n_total = np.prod(n_per_dim.values())
if n_total == 1: #only one, we can skip all this stuff.
calc_galaxy_autocorr(SIMNAME, 1 / (1 + REDSHIFT),
path.join(outputdir,'Emulator_lhc_'+ '%03d'%id_no if id_no is not None else 'Emulator'),
params=fixed_params, do_jackknife=True, Lbox=400, npart=2048)
return
points = [{} for i in xrange(n_total)]
fixed_base = '_'.join('%s%.2f' % (key, val) for key, val in fixed_params.iteritems()) + '_'
outbase = [fixed_base for i in xrange(n_total)]
n_segment = n_total # not necessary, but notaionally clearer
for param in sorted(BOUNDS.iterkeys()): # sorted to make deterministic, though it may already be.
n_segment /= n_per_dim[param]
for i, p in enumerate(points):
idx = (i / n_segment) % n_per_dim[param]
p[param] = values[param][idx]
outbase[i] += str(idx) # now each outbase has a unique combination of indexes
# now each dictionary in values carries a unique combination of parameters for the emulator
# if memory was an issue one could just run the model at each step instead of generating them all.
# i don't think 1000 dictionaries is the worst of my memory issues.
# now, send each fo these to my code.
for p, out in izip(points, outbase):
#check if the file already exists
if path.isfile(path.join(outputdir,out+'_corr_%.3f.npy'%(1/(1+REDSHIFT)) )):
continue #file already exists!
print 'Skipping %s'%out
calc_galaxy_autocorr(SIMNAME, 1 / (1 + REDSHIFT), path.join(outputdir, out),
params=p,do_jackknife=True, Lbox=400, npart=2048)
# mostly copied from allCorrFunc. I don't wanan break backwards compatibaility yet
# but I need to make some changes here.
def calc_galaxy_autocorr(simname, scale_factor, outbase, params={},do_jackknife=True, **kwargs):
'Calculate the cross correlation for a single catalog at a single scale factor'
print h.heap()
print '--'*25
t0 = time()
cat = cat_dict[simname](**kwargs)
print str(cat)
halocat, model = loadHaloAndModel(cat, 'redMagic', scale_factor)
if do_jackknife:
data, cov = popAndCorr(halocat, model, cat, params,do_jackknife, MIN_PTCL, RBINS)
else:
data = popAndCorr(halocat, model, cat, params,do_jackknife, MIN_PTCL, RBINS)
header_start = ['Cosmology: %s'%simname, 'Params for HOD:' ]
header_start.extend('%s:%.3f'%(key,val) for key, val in params.iteritems())
header = '\n'.join(header_start)
np.savetxt(outbase + '_corr_%.3f.npy' % (scale_factor), data,
header = header)
if do_jackknife:
np.savetxt(outbase + '_cov_%.3f.npy' % (scale_factor), cov,
header = header)
print '\nTotal Time: %.3f\n' % (time() - t0)
def testCube(outputdir, fixed_params={}, n_per_dim=4):
    '''Create fake data of the same structure as paramCube, for testing.

    Writes *_corr_test_* / *_cov_test_* files filled with random values so
    downstream readers can be exercised without running the real
    correlation calculation.  Output naming mirrors paramCube exactly.
    '''
    if type(n_per_dim) is int:
        n_per_dim = {key: n_per_dim for key in BOUNDS.iterkeys()}

    assert type(n_per_dim) is dict

    values = {}
    for param in BOUNDS.iterkeys():
        if param in fixed_params:
            n_per_dim[param] = 1
            values[param] = np.array([fixed_params[param]])
        else:  # param is varied across its bounds
            values[param] = np.linspace(BOUNDS[param][0], BOUNDS[param][1], num=n_per_dim[param])

    n_total = np.prod(n_per_dim.values())

    points = [{} for i in xrange(n_total)]
    fixed_base = '_'.join('%s%.2f' % (key, val) for key, val in fixed_params.iteritems()) + '_'
    outbase = [fixed_base for i in xrange(n_total)]

    n_segment = n_total  # not necessary, but notationally clearer
    for param in sorted(BOUNDS.iterkeys()):  # sorted to make the ordering deterministic
        n_segment /= n_per_dim[param]
        for i, p in enumerate(points):
            idx = (i / n_segment) % n_per_dim[param]
            p[param] = values[param][idx]
            outbase[i] += str(idx)  # each outbase gets a unique combination of indexes

    # Each dict in points now carries a unique combination of parameters.
    simname = SIMNAME
    scale_factor = 1/(1+REDSHIFT)
    for p, out in izip(points, outbase):
        ob = path.join(outputdir, out)
        # Random stand-ins for the correlation function and its covariance.
        data = np.stack( [(RBINS[1:] + RBINS[:-1]) / 2, np.random.rand(len(RBINS)-1)] )
        cov = np.eye((len(RBINS)-1))*np.random.rand()

        header_start = ['Cosmology: %s'%simname, 'Params for HOD:' ]
        header_start.extend('%s:%.3f'%(key,val) for key, val in p.iteritems())
        header = '\n'.join(header_start)
        np.savetxt(ob + '_corr_test_%.3f.npy' % (scale_factor), data,
                   header = header)
        np.savetxt(ob + '_cov_test_%.3f.npy' % (scale_factor), cov,
                   header = header)
if __name__ == '__main__':
    desc = 'Run my correlation function calculator over a large range of parameters'
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument('outputdir', type=str,
                        help='The directory to store the outputs of the calculations.')
    parser.add_argument('--test', action='store_true', help='Create fake data with a similar structure for testing.')
    parser.add_argument('--id',type=int,default=None, help='The job id for this call.')
    # Expose every HOD parameter in BOUNDS as an optional CLI flag.
    for param in BOUNDS.iterkeys():
        parser.add_argument(''.join(['--', param]))  # no help strings

    args = vars(parser.parse_args())
    # Pop the non-parameter options out of the arg dict...
    test = args['test']
    del args['test']
    outputdir = args['outputdir']
    del args['outputdir']
    id_no = args['id']
    del args['id']
    # ...so that what remains holds only the HOD parameters the user fixed;
    # unset flags are dropped, set ones are coerced to float.
    for key in args.keys():
        if args[key] is not None:
            args[key] = float(args[key])
        else:
            del args[key]
    # Leave the default n_per_dim for now.
    print args
    if not test:
        paramCube(outputdir, fixed_params=args, id_no=id_no)
    else:
        testCube(outputdir, fixed_params=args)
|
[
"mclaughlin6464@gmail.com"
] |
mclaughlin6464@gmail.com
|
565266b855d7039287e4042a28519c553237365d
|
0c5ab5c69ed77ed9a0613e4cefd3fc1b418ceef6
|
/FinalProject/WordSeg/SylMap.py
|
060d497164e78364b3b7d61ee8d8eeef063be86f
|
[] |
no_license
|
Neil-Do/HUS-Python
|
00831eb1121690f76752867ccfdcb8bd98973af6
|
630849717bb27b99a9ddfb33ae479485ebb032d4
|
refs/heads/master
| 2020-09-13T10:54:50.523791
| 2020-01-17T06:53:43
| 2020-01-17T06:53:43
| 222,749,058
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 320
|
py
|
class SylMap():
    """Membership test for Vietnamese syllables, backed by a data file.

    ../data/VNsyl.txt is expected to hold a count on its first line
    followed by one syllable per line.
    """

    def __init__(self):
        # 'with' guarantees the handle is closed even if parsing raises
        # (the original leaked the handle on any error before close()).
        with open("../data/VNsyl.txt", 'r') as syl_map_file:
            syl_map_file.readline()  # first line is the syllable count; skip it
            self.syl_ = set(syllabel.strip() for syllabel in syl_map_file)

    def isVNESE(self, token):
        """Return True if *token* is a known Vietnamese syllable."""
        return token in self.syl_
# da test
|
[
"dotatthanhk60@gmail.com"
] |
dotatthanhk60@gmail.com
|
01c70cd54c7ba983dca24fdedac9503fe11c80ca
|
cc26a1bbae6af3dec61fd27e44484e01da21d36e
|
/Scientific Expedition/Sum by Type/mission.py
|
7577f5f610a89a6c3cd896fe72c83b758d036bf5
|
[] |
no_license
|
ofisser86/py-check-io
|
6bacef0783987e49f3bf28b9bea74e59e4ebb184
|
70469deea240f03199072f2dd28d6819815a2624
|
refs/heads/master
| 2023-02-02T09:32:53.934629
| 2020-12-16T13:44:51
| 2020-12-16T13:44:51
| 309,277,316
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 731
|
py
|
from typing import Tuple
def sum_by_types(items: list) -> Tuple[str, int]:
    """Concatenate the str elements and sum the int elements of *items*."""
    text, total = "", 0
    for item in items:
        if type(item) is str:
            text += item
        elif type(item) is int:  # bools are deliberately excluded
            total += item
    return text, total
if __name__ == '__main__':
    print("Example:")
    print(sum_by_types([]))

    # These asserts are for self-checking only, not for automated testing.
    assert sum_by_types([]) == ('', 0)
    assert sum_by_types([1, 2, 3]) == ('', 6)
    assert sum_by_types(['1', 2, 3]) == ('1', 5)
    assert sum_by_types(['1', '2', 3]) == ('12', 3)
    assert sum_by_types(['1', '2', '3']) == ('123', 0)
    assert sum_by_types(['size', 12, 'in', 45, 0]) == ('sizein', 57)
    print("Coding complete? Click 'Check' to earn cool rewards!")
|
[
"ofisser86@gmail.com"
] |
ofisser86@gmail.com
|
196450c05cebf762582c46904661852744c4960a
|
d200a54adcec3a254a909b9689f925c1614f6fb1
|
/backend/core/urls.py
|
6a071e60621875e2de757a1a6ad7a79bb286993f
|
[] |
no_license
|
shusaku-ishikawa/binance
|
1bbe7f4aaf32c0ade4f67da7a4c1972f414bfa19
|
60bad0848fa4f4666e2476117a79ee8452326ed1
|
refs/heads/master
| 2022-01-27T01:35:24.038917
| 2019-11-30T12:42:36
| 2019-11-30T12:42:36
| 204,909,653
| 0
| 1
| null | 2022-01-15T05:20:54
| 2019-08-28T10:50:13
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 137
|
py
|
from django.contrib import admin
from django.urls import path
# URL namespace for this app.
app_name = 'core'
urlpatterns = [
    # Django admin site.
    path('admin/', admin.site.urls),
]
|
[
"ishikawasyuusaku@gmail.com"
] |
ishikawasyuusaku@gmail.com
|
2685be9bebb71c12c8a3a6a0c4af0206ea9d012b
|
ff7ed554b920bf1871e0e8991e4aa1fb5084f152
|
/src/scoring/scorer.py
|
db30a09dca5aec826b5b5fc8c0f9583ce8c5213c
|
[
"BSD-3-Clause"
] |
permissive
|
uw-bionlp/ards
|
68bfec52c4b2c2a4b4878acf089709cb1ddebddb
|
e9fc27f7034cc6b54f0ccdba4a58377948cf0258
|
refs/heads/main
| 2023-04-04T19:48:41.375668
| 2021-04-15T01:05:25
| 2021-04-15T01:05:25
| 358,041,546
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,695
|
py
|
import logging
import os
from collections import OrderedDict, Counter
import pandas as pd
class Scorer(object):
    """Base class for scoring predictions against truth.

    Subclasses override `compare` (per-run scores) and `combine`
    (aggregation across cross-validation folds); each returns either a
    single DataFrame or a dict of DataFrames keyed by score type.
    """

    def __init__(self):
        pass

    def compare(self, T, P):
        # Override in subclasses: score predictions P against truth T.
        return None

    def combine(self, dfs):
        # Override in subclasses: aggregate per-fold score frames.
        return None

    @staticmethod
    def _save(dfs, path):
        """Write each score frame in *dfs* to CSV under *path*.

        A single frame is written as scores.csv; multiple frames get a
        per-key suffix (scores_<key>.csv).  Extracted here because this
        loop was duplicated verbatim in fit() and combine_cv().
        """
        for k, df in dfs.items():
            if len(dfs) == 1:
                f = os.path.join(path, "scores.csv")
            else:
                f = os.path.join(path, f"scores_{k}.csv")
            df.to_csv(f)

    def fit(self, T, P, params=None, path=None):
        '''
        Score predictions

        Parameters
        ----------
        T = truth
        P = predictions
        params = optional dict of run parameters recorded into the frames
        path = optional directory the score CSVs are written to
        '''
        # Check sentence count
        len_check(T, P)

        # Get counts; normalize a bare frame into a one-entry dict
        dfs = self.compare(T, P)
        if not isinstance(dfs, dict):
            dfs = OrderedDict([('default', dfs)])

        for k, df in dfs.items():
            logging.info('\n\n{}\n{}'.format(k, df))

        # Include each parameter in the data frames
        if params is not None:
            dfs = {k: add_params_to_df(df, params) for k, df in dfs.items()}

        if path is not None:
            self._save(dfs, path)

        return dfs

    def combine_cv(self, dfs, path=None):
        """Aggregate per-fold scores and optionally write them to *path*."""
        dfs = self.combine(dfs)

        if path is not None:
            self._save(dfs, path)

        return dfs
def len_check(x, y):
    """Raise AssertionError if *x* and *y* differ in length.

    Raises explicitly rather than via `assert` so the check still runs
    under `python -O`; the exception type is unchanged for callers.
    """
    if len(x) != len(y):
        raise AssertionError("length mismatch: {} vs {}".format(len(x), len(y)))
def add_params_to_df(df, params):
    """Record each scalar entry of a nested (up to 3 levels) params dict
    as a constant column of *df*.

    Level-1 scalars are stored under their own name; deeper scalars under
    the stringified key tuple.  list/ndarray values are skipped, and a
    dict at the deepest level is stored as its string representation.
    Returns *df* (mutated in place).
    """
    # BUGFIX: `np` was referenced below but never imported in this module,
    # so any non-dict, non-list value raised NameError.
    import numpy as np

    # Loop on Level 1
    for p1, v1 in params.items():

        # Level 1 as dictionary
        if isinstance(v1, dict):

            # Loop on level 2
            for p2, v2 in v1.items():

                # Level 2 as dictionary
                if isinstance(v2, dict):

                    # Loop on level 3
                    for p3, v3 in v2.items():

                        # Level 3 is dictionary: store its string repr
                        if isinstance(v3, dict):
                            df[str((p1, p2, p3))] = str(v3)

                        # Level 3 is not dict, list, or array
                        elif not isinstance(v3, (list, np.ndarray)):
                            df[str((p1, p2, p3))] = v3

                # Level 2 is not dict, list, or array
                elif not isinstance(v2, (list, np.ndarray)):
                    df[str((p1, p2))] = v2

        # Level 1 is not dict, list, or array
        elif not isinstance(v1, (list, np.ndarray)):
            df[p1] = v1

    return df
|
[
"ndobb@uw.edu"
] |
ndobb@uw.edu
|
a48fc7b47b8c888ac0173f2c85b6eee2778b957f
|
eac7f7b96ebce1351dc6b50e45f1fcfa0f930dbb
|
/python/graphscope/interactive/query.py
|
b848c3c4251f27b449d1ffbb815dab5ff5b304e6
|
[
"Apache-2.0"
] |
permissive
|
Jancd/GraphScope
|
82bc43e02717fc3df1811ccfb73f476649b709fa
|
e162f11886dc49d68038836b665aa5381cea8d24
|
refs/heads/main
| 2023-01-31T18:40:06.995923
| 2020-12-15T01:24:12
| 2020-12-15T01:24:12
| 321,543,831
| 1
| 0
|
Apache-2.0
| 2020-12-15T03:39:00
| 2020-12-15T03:38:59
| null |
UTF-8
|
Python
| false
| false
| 5,129
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import logging
import random
from concurrent.futures import ThreadPoolExecutor
from gremlin_python.driver.client import Client
from graphscope.framework.loader import Loader
logger = logging.getLogger("graphscope")
class InteractiveQuery(object):
    """`InteractiveQuery` class, is a simple wrapper around
    `Gremlin-Python <https://pypi.org/project/gremlinpython/>`_,
    which implements Gremlin within the Python language.
    It also can expose gremlin endpoint which can be used by any other standard gremlin console.
    It also has a method called `subgraph` which can extract some fragments
    from origin graph, produce a new, smaller but concise graph stored in vineyard,
    which lifetime is independent from the origin graph.
    """

    def __init__(self, graphscope_session, object_id, front_ip, front_port):
        self._graphscope_session = graphscope_session
        self._object_id = object_id
        self._graph_url = "ws://%s:%d/gremlin" % (front_ip, front_port)
        self._client = Client(self._graph_url, "g")
        self._closed = False

    @property
    def object_id(self):
        """Get the vineyard object id of graph.

        Returns:
            str: object id
        """
        return self._object_id

    @property
    def graph_url(self):
        """The gremlin graph url can be used with any standard gremlin console, e.g., thinkerpop."""
        return self._graph_url

    def closed(self):
        """Return if the current instance is closed."""
        return self._closed

    def subgraph(self, gremlin_script):
        """Create a subgraph, which input is the result of the execution of `gremlin_script`.

        Any gremlin script that will output a set of edges can be used to contruct a subgraph.

        Args:
            gremlin_script (str): gremlin script to be executed

        Raises:
            RuntimeError: If the interactive instance is closed.

        Returns:
            :class:`Graph`: constructed subgraph. which is also stored in vineyard.
        """
        if self.closed():
            raise RuntimeError("Interactive query is closed.")

        # Timestamp + random suffix keeps concurrent subgraph names unique.
        now_time = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
        random_num = random.randint(0, 10000000)
        graph_name = "%s_%s" % (str(now_time), str(random_num))

        # create graph handle by name
        self._client.submit(
            "g.createGraph('%s').with('graphType', 'vineyard')" % graph_name
        ).all().result()

        # Worker that blocks on the vineyard vertex/edge streams produced by
        # the gremlin job submitted below, then loads them into a new graph.
        def load_subgraph(name):
            import vineyard

            host, port = self._graphscope_session.engine_config[
                "vineyard_rpc_endpoint"
            ].split(":")
            client = vineyard.connect(host, int(port))

            # get vertex/edge stream id
            vstream = client.get_name("__%s_vertex_stream" % name, True)
            estream = client.get_name("__%s_edge_stream" % name, True)

            # invoke load_from
            g = self._graphscope_session.load_from(
                edges=[Loader(estream)],
                vertices=[Loader(vstream)],
                generate_eid=False,
            )
            client.put_name(vineyard.ObjectID(g.vineyard_id), graph_name)
            logger.info("subgraph has been loaded")
            return g

        # BUGFIX: previously submitted as pool.submit(load_subgraph,
        # (graph_name,)), which bound `name` to a 1-tuple that only produced
        # the right stream names by accident of %-formatting.  Pass the bare
        # string, and run the executor as a context manager so its worker
        # thread is released once the subgraph is built.
        with ThreadPoolExecutor() as pool:
            subgraph_task = pool.submit(load_subgraph, graph_name)

            # add subgraph vertices and edges
            subgraph_script = "%s.subgraph('%s').outputVineyard('%s')" % (
                gremlin_script,
                graph_name,
                graph_name,
            )
            self._client.submit(subgraph_script).all().result()

            return subgraph_task.result()

    def execute(self, query):
        """Execute gremlin querying scripts.

        Behind the scene, it uses `gremlinpython` to send the query.

        Args:
            query (str): Scripts that written in gremlin quering language.

        Raises:
            RuntimeError: If the interactive script is closed

        Returns:
            execution results
        """
        if self.closed():
            raise RuntimeError("Interactive query is closed.")
        return self._client.submit(query)

    def close(self):
        """Close interactive instance and release resources"""
        if not self.closed():
            self._closed = True
            self._graphscope_session.close_interactive_instance(self)
|
[
"linzhu.ht@alibaba-inc.com"
] |
linzhu.ht@alibaba-inc.com
|
ed4062b3b9dba0e71e56a70a329d543dd325663d
|
4f0ceccea62d3c909af88a7e3f5e1d063ed94b2e
|
/1368_Minimum Cost to Make at Least One Valid Path in a Grid.py
|
8406191983ad2ec32fef45f5613be51ea9b98894
|
[] |
no_license
|
0as1s/leetcode
|
d19c2ec4f96666a03227871b9b21b26adcd6b3b4
|
c82d375f8d9d4feeaba243eb5c990c1ba3ec73d2
|
refs/heads/master
| 2021-05-09T21:30:35.378394
| 2021-02-01T15:37:37
| 2021-02-01T15:37:37
| 118,729,125
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,661
|
py
|
def minCost(self, grid: List[List[int]]) -> int:
    """Minimum number of sign changes to walk from (0,0) to (m-1,n-1).

    grid[i][j] encodes an arrow: 1 right, 2 left, 3 down, 4 up.  Following
    the arrow costs 0; overriding it costs 1.  Solved as a 0-1 BFS: free
    moves go to the front of the deque, paid moves to the back.

    BUGFIX vs. the original: empty input returned True (wrong type) and
    the visited set was only updated when a cell was *popped*, allowing
    duplicate enqueues with stale, possibly non-optimal distances.  A
    distance map with relaxation fixes both.
    """
    if not grid or not grid[0]:
        return 0  # nothing to traverse, nothing to fix
    m, n = len(grid), len(grid[0])
    # directions[d] is the step for arrow value d (index 0 unused).
    directions = [(0, 0), (0, 1), (0, -1), (1, 0), (-1, 0)]
    dist = {(0, 0): 0}
    dq = deque([(0, 0)])
    while dq:
        cx, cy = dq.popleft()
        d = dist[(cx, cy)]
        if cx == m - 1 and cy == n - 1:
            # In a 0-1 BFS the first pop of a node carries its final distance.
            return d
        for arrow in range(1, 5):
            nx, ny = cx + directions[arrow][0], cy + directions[arrow][1]
            if not (0 <= nx < m and 0 <= ny < n):
                continue
            cost = 0 if grid[cx][cy] == arrow else 1
            nd = d + cost
            if nd < dist.get((nx, ny), float('inf')):
                dist[(nx, ny)] = nd
                if cost == 0:
                    dq.appendleft((nx, ny))  # free edge: explore first
                else:
                    dq.append((nx, ny))      # paid edge: explore later
    return -1
# class Solution(object):
# def minCost(self, grid):
# """
# :type grid: List[List[int]]
# :rtype: int
# """
# dd = {
# 1: (0, 1),
# 2: (0, -1),
# 3: (1, 0),
# 4: (-1, 0)
# }
# current = set([(0, 0)])
# cur = (0, 0)
# h = len(grid)
# l = len(grid[0])
# count = 0
# used = set()
# while True:
# x, y = cur
# if x == h-1 and y == l-1:
# return count
# dx, dy = dd[grid[x][y]]
# nx, ny = x + dx, y + dy
# if (nx, ny) not in used and 0 <= nx < h and 0 <= ny < l:
# if nx == h-1 and ny == l-1:
# return count
# current.add((nx, ny))
# used.add((nx, ny))
# cur = (nx, ny)
# else:
# break
# while True:
# count += 1
# new_current = set()
# for cur in current:
# x, y = cur
# for dx, dy in dd.values():
# nx, ny = x + dx, y + dy
# if (nx, ny) not in used and 0 <= nx < h and 0 <= ny < l:
# if nx == h-1 and ny == l-1:
# return count
# used.add((nx, ny))
# new_current.add((nx, ny))
# ddx, ddy = dd[grid[nx][ny]]
# nnx, nny = nx + ddx, ny + ddy
# while (nnx, nny) not in used and 0 <= nnx < h and 0 <= nny < l:
# if nnx == h-1 and nny == l-1:
# return count
# #used.add((nnx, nny))
# new_current.add((nnx, nny))
# ddx, ddy = dd[grid[nnx][nny]]
# nnx, nny = nx + ddx, ny + ddy
# current = new_current
|
[
"still_crazy@qq.com"
] |
still_crazy@qq.com
|
82613acce0ae9f7f5f98b7e788444a5660de8c5c
|
52b5773617a1b972a905de4d692540d26ff74926
|
/.history/goatLatin_20200803094226.py
|
95977408a574cf0cbbe7838b30f3c6531d9dfa24
|
[] |
no_license
|
MaryanneNjeri/pythonModules
|
56f54bf098ae58ea069bf33f11ae94fa8eedcabc
|
f4e56b1e4dda2349267af634a46f6b9df6686020
|
refs/heads/master
| 2022-12-16T02:59:19.896129
| 2020-09-11T12:05:22
| 2020-09-11T12:05:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 353
|
py
|
def goalLatin(S):
    """Convert sentence *S* to Goat Latin.

    Words beginning with a vowel (either case) get "ma" appended; other
    words move their first letter to the end before "ma".  The i-th word
    (1-indexed) then gets i trailing 'a' characters.

    BUGFIX vs. the original draft: it never returned anything, never
    advanced the per-word counter, dropped the consonant rotation, and
    only recognized lowercase vowels.
    """
    vowels = {'a', 'e', 'i', 'o', 'u'}
    words = S.split(" ")
    out = []
    for i, word in enumerate(words, start=1):
        if word[0].lower() in vowels:  # case-insensitive vowel check
            latin = word + "ma"
        else:
            # Rotate the leading consonant to the end.
            latin = word[1:] + word[0] + "ma"
        out.append(latin + "a" * i)
    return " ".join(out)
|
[
"mary.jereh@gmail.com"
] |
mary.jereh@gmail.com
|
1ce1c226b036e66a4128c738fb794ba8a6a298eb
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/domain/UnitedVoucherDigest.py
|
215bbce60af562cae3f1e47b250b8e73ca0210bc
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771
| 2023-08-23T07:12:26
| 2023-08-23T07:12:26
| 133,338,689
| 247
| 70
|
Apache-2.0
| 2023-04-25T04:54:02
| 2018-05-14T09:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 5,157
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class UnitedVoucherDigest(object):
    """Digest of a united voucher: discount type, amount range, ceiling,
    prize id, display order and business code.

    Follows the alipay SDK DTO convention: plain properties over private
    backing attributes, plus to_alipay_dict/from_alipay_dict converters.
    The original hand-rolled every field in __init__ and both converters
    (~150 duplicated lines); they are now driven from a single field list
    with identical behavior.
    """

    # Every serializable field; the single source of truth for __init__
    # and both converters below.
    _FIELDS = (
        'budget_close',
        'ceiling_amount',
        'discount_type',
        'from_amount',
        'prize_id',
        'reduction_ratio',
        'show_order',
        'to_amount',
        'voucher_biz_code',
    )

    def __init__(self):
        # Initialize each private backing attribute to None.
        for name in self._FIELDS:
            setattr(self, '_' + name, None)

    @property
    def budget_close(self):
        return self._budget_close

    @budget_close.setter
    def budget_close(self, value):
        self._budget_close = value

    @property
    def ceiling_amount(self):
        return self._ceiling_amount

    @ceiling_amount.setter
    def ceiling_amount(self, value):
        self._ceiling_amount = value

    @property
    def discount_type(self):
        return self._discount_type

    @discount_type.setter
    def discount_type(self, value):
        self._discount_type = value

    @property
    def from_amount(self):
        return self._from_amount

    @from_amount.setter
    def from_amount(self, value):
        self._from_amount = value

    @property
    def prize_id(self):
        return self._prize_id

    @prize_id.setter
    def prize_id(self, value):
        self._prize_id = value

    @property
    def reduction_ratio(self):
        return self._reduction_ratio

    @reduction_ratio.setter
    def reduction_ratio(self, value):
        self._reduction_ratio = value

    @property
    def show_order(self):
        return self._show_order

    @show_order.setter
    def show_order(self, value):
        self._show_order = value

    @property
    def to_amount(self):
        return self._to_amount

    @to_amount.setter
    def to_amount(self, value):
        self._to_amount = value

    @property
    def voucher_biz_code(self):
        return self._voucher_biz_code

    @voucher_biz_code.setter
    def voucher_biz_code(self, value):
        self._voucher_biz_code = value

    def to_alipay_dict(self):
        """Serialize to a plain dict.

        Falsy fields are omitted and nested DTOs are converted via their
        own to_alipay_dict, matching the original per-field code exactly.
        """
        params = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if not value:
                continue  # SDK convention: skip unset/falsy fields
            if hasattr(value, 'to_alipay_dict'):
                params[name] = value.to_alipay_dict()
            else:
                params[name] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build an instance from a response dict *d*.

        Returns None when *d* is empty or None; only keys present in *d*
        are assigned.
        """
        if not d:
            return None
        o = UnitedVoucherDigest()
        for name in UnitedVoucherDigest._FIELDS:
            if name in d:
                setattr(o, name, d[name])
        return o
|
[
"jishupei.jsp@alibaba-inc.com"
] |
jishupei.jsp@alibaba-inc.com
|
c8be4275c567f1b45a9c30be24e70d5c9f86cb5f
|
bc441bb06b8948288f110af63feda4e798f30225
|
/terraform_sdk/model/inspection/metric_group_pb2.pyi
|
744283cf0fc142859439410d535c139cf855a348
|
[
"Apache-2.0"
] |
permissive
|
easyopsapis/easyops-api-python
|
23204f8846a332c30f5f3ff627bf220940137b6b
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
refs/heads/master
| 2020-06-26T23:38:27.308803
| 2020-06-16T07:25:41
| 2020-06-16T07:25:41
| 199,773,131
| 5
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,951
|
pyi
|
# @generated by generate_proto_mypy_stubs.py. Do not edit!
import sys
from google.protobuf.descriptor import (
Descriptor as google___protobuf___descriptor___Descriptor,
)
from google.protobuf.internal.containers import (
RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer,
)
from google.protobuf.message import (
Message as google___protobuf___message___Message,
)
from terraform_sdk.model.inspection.dim_pb2 import (
InspectionDim as terraform_sdk___model___inspection___dim_pb2___InspectionDim,
)
from terraform_sdk.model.inspection.val_pb2 import (
InspectionVal as terraform_sdk___model___inspection___val_pb2___InspectionVal,
)
from typing import (
Iterable as typing___Iterable,
Optional as typing___Optional,
Text as typing___Text,
Union as typing___Union,
)
from typing_extensions import (
Literal as typing_extensions___Literal,
)
builtin___bool = bool
builtin___bytes = bytes
builtin___float = float
builtin___int = int
if sys.version_info < (3,):
builtin___buffer = buffer
builtin___unicode = unicode
class InspectionMetricGroup(google___protobuf___message___Message):
    # Typed stub for the InspectionMetricGroup protobuf message.
    # This file is generated ("Do not edit!"): regenerate from the .proto
    # rather than hand-editing.
    DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
    # Scalar string fields of the message.
    id = ... # type: typing___Text
    name = ... # type: typing___Text
    category = ... # type: typing___Text
    memo = ... # type: typing___Text

    # Repeated sub-message fields, exposed as read-only containers.
    @property
    def dims(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[terraform_sdk___model___inspection___dim_pb2___InspectionDim]: ...

    @property
    def vals(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[terraform_sdk___model___inspection___val_pb2___InspectionVal]: ...

    def __init__(self,
        *,
        id : typing___Optional[typing___Text] = None,
        name : typing___Optional[typing___Text] = None,
        category : typing___Optional[typing___Text] = None,
        dims : typing___Optional[typing___Iterable[terraform_sdk___model___inspection___dim_pb2___InspectionDim]] = None,
        vals : typing___Optional[typing___Iterable[terraform_sdk___model___inspection___val_pb2___InspectionVal]] = None,
        memo : typing___Optional[typing___Text] = None,
        ) -> None: ...
    # Python-2/3 split for the bytes-accepting deserializer.
    if sys.version_info >= (3,):
        @classmethod
        def FromString(cls, s: builtin___bytes) -> InspectionMetricGroup: ...
    else:
        @classmethod
        def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> InspectionMetricGroup: ...
    def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
    def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
    def ClearField(self, field_name: typing_extensions___Literal[u"category",b"category",u"dims",b"dims",u"id",b"id",u"memo",b"memo",u"name",b"name",u"vals",b"vals"]) -> None: ...
|
[
"service@easyops.cn"
] |
service@easyops.cn
|
811d4b3ee04b7828cbc1c09b26fa4f53da40ae09
|
2b28814e50b036a17afb26cd56accdacb6f38854
|
/src/api/migrations/0001_initial.py
|
c91b14a01d4a3367c1fbda7671980d2c60cc7a76
|
[
"BSD-2-Clause"
] |
permissive
|
Financial-Times/bullet-train-api
|
c6660965ca5e8f956b7666bde35b5f64bf18f773
|
a54e0df1c85ff353c1b5c3056ea3e3e4d1fc7006
|
refs/heads/master
| 2023-08-03T16:31:06.503907
| 2021-02-10T09:29:35
| 2021-02-10T09:29:35
| 170,123,292
| 1
| 1
|
BSD-3-Clause
| 2019-10-01T16:26:51
| 2019-02-11T12:14:35
|
Python
|
UTF-8
|
Python
| false
| false
| 4,536
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-04-20 12:59
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
    """Auto-generated initial schema for the api app.

    Creates Environment, Feature, FeatureState, FFAdminUser, Identity,
    IdentityFeature, Organisation and Project, then wires the Feature and
    Environment foreign keys to Project.  Do not hand-edit an applied
    migration; add a new one for schema changes.

    NOTE(review): Identity's FK to Environment is named 'version' --
    presumably historical naming; confirm with callers before renaming.
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Environment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=2000)),
                ('created_date', models.DateTimeField(auto_now_add=True, verbose_name=b'DateCreated')),
                ('api_key', models.UUIDField(default=uuid.uuid4)),
            ],
        ),
        migrations.CreateModel(
            name='Feature',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=2000)),
                ('created_date', models.DateTimeField(auto_now_add=True, verbose_name=b'DateCreated')),
            ],
        ),
        migrations.CreateModel(
            name='FeatureState',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('enabled', models.BooleanField()),
                ('environment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='featurestates', to='api.Environment')),
                ('feature', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='featurestates', to='api.Feature')),
            ],
        ),
        migrations.CreateModel(
            name='FFAdminUser',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
        migrations.CreateModel(
            name='Identity',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('identifier', models.CharField(max_length=2000)),
                ('created_date', models.DateTimeField(auto_now_add=True, verbose_name=b'DateCreated')),
                ('version', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='identities', to='api.Environment')),
            ],
            options={
                'verbose_name_plural': 'Identities',
            },
        ),
        migrations.CreateModel(
            name='IdentityFeature',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('enabled', models.BooleanField()),
                ('feature', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='identityfeatures', to='api.Feature')),
                ('identity', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='identityfeatures', to='api.Identity')),
            ],
        ),
        migrations.CreateModel(
            name='Organisation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=2000)),
            ],
        ),
        migrations.CreateModel(
            name='Project',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=2000)),
                ('created_date', models.DateTimeField(auto_now_add=True, verbose_name=b'DateCreated')),
                ('organisation', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='projects', to='api.Organisation')),
            ],
        ),
        migrations.AddField(
            model_name='feature',
            name='project',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='features', to='api.Project'),
        ),
        migrations.AddField(
            model_name='environment',
            name='project',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='environments', to='api.Project'),
        ),
    ]
|
[
"matthewe@solidstategroup.com"
] |
matthewe@solidstategroup.com
|
60ea8faede03779356d2fbe48b62b1b293086d76
|
1c0175a77bb5edb1a9d2f9ea795cf15b42e5c5c2
|
/hackbright.py
|
93d596be219ac559ef29f834e8d4ef8136720d13
|
[] |
no_license
|
jengrace/project-tracker-flask2
|
89fb53a03c0e07a4c25c3ff45be424c03b7e78d2
|
49125d272537d2dbb8259e2b9b94f8816a3a7d26
|
refs/heads/master
| 2021-04-30T07:00:24.470001
| 2017-01-25T21:03:10
| 2017-01-25T21:03:10
| 79,973,913
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,368
|
py
|
"""Hackbright Project Tracker.
A front-end for a database that allows users to work with students, class
projects, and the grades students receive in class projects.
"""
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def connect_to_db(app):
"""Connect the database to our Flask app."""
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///hackbright'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db.app = app
db.init_app(app)
def get_student_by_github(github):
    """Given a github account name, print information about the
    matching student.

    Returns the matching (first_name, last_name, github) row.
    NOTE(review): if no student matches, fetchone() returns None and the
    print below raises TypeError — confirm callers validate input first.
    """
    QUERY = """
        SELECT first_name, last_name, github
        FROM Students
        WHERE github = :github
        """
    db_cursor = db.session.execute(QUERY, {'github': github})
    row = db_cursor.fetchone()
    print "Student: %s %s\nGithub account: %s" % (row[0], row[1],
                                                  row[2])
    return row
def make_new_student(first_name, last_name, github):
    """Add a new student and print confirmation.
    Given a first name, last name, and GitHub account, add student to the
    database and print a confirmation message.
    """
    # NOTE(review): the INSERT relies on the Students table's column order
    # being (first_name, last_name, github); an explicit column list would
    # be safer — confirm against the schema.
    QUERY = """
        INSERT INTO Students
        VALUES (:first_name, :last_name, :github)
        """
    db.session.execute(QUERY, {'first_name': first_name,
                               'last_name': last_name,
                               'github': github})
    # Commit immediately so the new row is persisted.
    db.session.commit()
    print "Successfully added student: %s %s" % (first_name, last_name)
def get_project_by_title(title):
    """Given a project title, print information about the project.

    Returns the matching (title, description, max_grade) row.
    NOTE(review): an unknown title makes fetchone() return None and the
    print below raises — confirm callers pre-validate.
    """
    QUERY = """
        SELECT title, description, max_grade
        FROM Projects
        WHERE title = :title
        """
    db_cursor = db.session.execute(QUERY, {'title': title})
    row = db_cursor.fetchone()
    print "Title: %s\nDescription: %s\nMax Grade: %d" % (row[0], row[1],
                                                         row[2])
    return row
def get_grade_by_github_title(github, title):
    """Print grade student received for a project.

    Returns the single-column (grade,) row for the student/project pair.
    """
    QUERY = """
        SELECT grade
        FROM Grades
        WHERE student_github = :github
        AND project_title = :title
        """
    db_cursor = db.session.execute(QUERY, {'github': github, 'title': title})
    row = db_cursor.fetchone()
    print "Student %s in project %s received grade of %s" % (
        github, title, row[0])
    return row
def assign_grade(github, title, grade):
    """Assign a student a grade on an assignment and print a confirmation."""
    QUERY = """INSERT INTO Grades (student_github, project_title, grade)
               VALUES (:github, :title, :grade)"""
    # NOTE(review): db_cursor is never used; the execute is kept for its
    # INSERT side effect only.
    db_cursor = db.session.execute(QUERY, {'github': github, 'title': title, 'grade': grade})
    db.session.commit()
    print "Successfully assigned grade of %s for %s in %s" % (
        grade, github, title)
def get_grades_by_github(github):
"""Get a list of all grades for a student by their github username"""
QUERY = """
SELECT project_title, grade
FROM Grades
WHERE student_github = :github
"""
db_cursor = db.session.execute(QUERY, {'github': github})
rows = db_cursor.fetchall()
for row in rows:
print "Student %s received grade of %s for project %s" % (
github, row[1], row[0])
print '**********************rows: ', rows
return rows
def get_grades_by_title(title):
    """Get a list of all student grades for a project by its title

    Prints one line per (student_github, grade) row and returns the rows.
    """
    QUERY = """
        SELECT student_github, grade
        FROM Grades
        WHERE project_title = :title
        """
    db_cursor = db.session.execute(QUERY, {'title': title})
    rows = db_cursor.fetchall()
    for row in rows:
        print "Student %s received grade of %s for project %s" % (
            row[0], row[1], title)
    return rows
def handle_input():
    """Main loop.
    Repeatedly prompt for commands, performing them, until 'quit' is received as a
    command."""
    command = None
    while command != "quit":
        input_string = raw_input("HBA Database> ")
        tokens = input_string.split()
        # NOTE(review): pressing Enter with no input makes tokens[0] raise
        # IndexError; similarly, wrong argument counts raise below — confirm
        # whether input validation is wanted here.
        command = tokens[0]
        args = tokens[1:]
        # Dispatch each command to the matching query helper.
        if command == "student":
            github = args[0]
            get_student_by_github(github)
        elif command == "new_student":
            first_name, last_name, github = args  # unpack!
            make_new_student(first_name, last_name, github)
        elif command == "project":
            title = args[0]
            get_project_by_title(title)
        elif command == "grade":
            github, title = args
            get_grade_by_github_title(github, title)
        elif command == "assign_grade":
            github, title, grade = args
            assign_grade(github, title, grade)
        elif command == "student_grades":
            github = args[0]
            get_grades_by_github(github)
        elif command == "project_grades":
            title = args[0]
            get_grades_by_title(title)
if __name__ == "__main__":
app = Flask(__name__)
connect_to_db(app)
handle_input()
# To be tidy, we'll close our database connection -- though, since this
# is where our program ends, we'd quit anyway.
db.session.close()
|
[
"no-reply@hackbrightacademy.com"
] |
no-reply@hackbrightacademy.com
|
c9923efa24c881279a13f57397a90a6c1b858295
|
a00ed711e3e08b50ad6e91cc07a2cddc4a1de5ea
|
/scripts/ci/pre_commit/pre_commit_check_2_2_compatibility.py
|
d4fe0ea78d24abc8a7965ec863bd16283ce84c71
|
[
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] |
permissive
|
ishiis/airflow
|
4305794e36b611d01f49e3f2401be3dc49782670
|
292440d54f4db84aaf0c5a98cf5fcf34303f2fa8
|
refs/heads/master
| 2022-07-30T00:51:28.806940
| 2022-07-14T12:07:11
| 2022-07-14T12:07:11
| 209,801,072
| 1
| 0
|
Apache-2.0
| 2019-09-20T13:47:26
| 2019-09-20T13:47:26
| null |
UTF-8
|
Python
| false
| false
| 4,803
|
py
|
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import re
import sys
from pathlib import Path
from typing import List
from rich.console import Console
if __name__ not in ("__main__", "__mp_main__"):
raise SystemExit(
"This file is intended to be executed as an executable program. You cannot use it as a module."
f"To run this script, run the ./{__file__} command [FILE] ..."
)
console = Console(color_system="standard", width=200)
errors: List[str] = []
SKIP_COMP_CHECK = "# ignore airflow compat check"
TRY_NUM_MATCHER = re.compile(r".*context.*\[[\"']try_number[\"']].*")
GET_MANDATORY_MATCHER = re.compile(r".*conf\.get_mandatory_value")
GET_AIRFLOW_APP_MATCHER = re.compile(r".*get_airflow_app\(\)")
HOOK_PARAMS_MATCHER = re.compile(r".*get_hook\(hook_params")
def _check_file(_file: Path):
    """Scan one provider source file for constructs unavailable in Airflow 2.2.

    For every offending line a rich-formatted report is appended to the
    module-level ``errors`` list.  Lines marked with SKIP_COMP_CHECK are
    exempt from all checks.
    """
    lines = _file.read_text().splitlines()
    # Enumerate from 1 so the "{file}:{line}" locations in the reports match
    # the 1-based line numbers editors display (the previous 0-based index
    # made every reported location off by one).
    for lineno, line in enumerate(lines, start=1):
        if SKIP_COMP_CHECK in line:
            continue
        if "XCom.get_value(" in line:
            # XCom.get_value(..., ti_key=...) exists only in Airflow 2.3+, so
            # the call must be guarded by "if ti_key is not None:" on the
            # immediately preceding line.
            if "if ti_key is not None:" not in lines[lineno - 2]:
                errors.append(
                    f"[red]In {_file}:{lineno} there is a forbidden construct "
                    "(Airflow 2.3.0 only):[/]\n\n"
                    f"{lines[lineno - 2]}\n{line}\n\n"
                    "[yellow]When you use XCom.get_value( in providers, it should be in the form:[/]\n\n"
                    "if ti_key is not None:\n"
                    "    value = XCom.get_value(...., ti_key=ti_key)\n\n"
                    "See: https://airflow.apache.org/docs/apache-airflow-providers/"
                    "howto/create-update-providers.html#using-providers-with-dynamic-task-mapping\n"
                )
        if "ti.map_index" in line:
            errors.append(
                f"[red]In {_file}:{lineno} there is a forbidden construct "
                "(Airflow 2.3+ only):[/]\n\n"
                f"{line}\n\n"
                "[yellow]You should not use map_index field in providers "
                "as it is only available in Airflow 2.3+[/]"
            )
        if TRY_NUM_MATCHER.match(line):
            errors.append(
                f"[red]In {_file}:{lineno} there is a forbidden construct "
                "(Airflow 2.3+ only):[/]\n\n"
                f"{line}\n\n"
                "[yellow]You should not expect try_number field for context in providers "
                "as it is only available in Airflow 2.3+[/]"
            )
        if GET_MANDATORY_MATCHER.match(line):
            errors.append(
                f"[red]In {_file}:{lineno} there is a forbidden construct "
                "(Airflow 2.3+ only):[/]\n\n"
                f"{line}\n\n"
                "[yellow]You should not use conf.get_mandatory_value in providers "
                "as it is only available in Airflow 2.3+[/]"
            )
        if HOOK_PARAMS_MATCHER.match(line):
            errors.append(
                f"[red]In {_file}:{lineno} there is a forbidden construct "
                "(Airflow 2.3+ only):[/]\n\n"
                f"{line}\n\n"
                "[yellow]You should not use 'hook_params' in get_hook as it has been added in providers "
                "as it is not available in Airflow 2.3+. Use get_hook() instead.[/]"
            )
        if GET_AIRFLOW_APP_MATCHER.match(line):
            errors.append(
                f"[red]In {_file}:{lineno} there is a forbidden construct "
                "(Airflow 2.4+ only):[/]\n\n"
                f"{line}\n\n"
                "[yellow]You should not use airflow.utils.airflow_flask_app.get_airflow_app() in providers "
                "as it is not available in Airflow 2.4+. Use current_app instead.[/]"
            )
if __name__ == '__main__':
    # Each command-line argument is a file path to scan.
    for file in sys.argv[1:]:
        _check_file(Path(file))
    if errors:
        console.print("[red]Found Airflow 2.2 compatibility problems in providers:[/]\n")
        for error in errors:
            console.print(f"{error}")
        # Non-zero exit makes the pre-commit hook fail.
        sys.exit(1)
|
[
"noreply@github.com"
] |
ishiis.noreply@github.com
|
28643ece960fbdb5ec5bf2ab0ecc38b9f974345c
|
b8fd7e01a7069a0666eb2fe21991753fd5ff7860
|
/Python Language/Stack/3 . top.py
|
89b10fca7acd6e331210a9db5f36ac8f9bed3454
|
[] |
no_license
|
Jafoor/Leet-Code-Solved-Problems
|
0b6be0f3c82b1bc13c0c484782db65601cefa7b8
|
935e5679e04bf6f9c9d8a0bdf8b204923a2bc7a5
|
refs/heads/master
| 2023-07-02T13:38:59.690783
| 2021-07-19T16:20:48
| 2021-07-19T16:20:48
| 256,105,425
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 571
|
py
|
class Stack:
    """A simple LIFO stack backed by a Python list.

    Empty-stack pop/top return a descriptive string instead of raising,
    mirroring the original API (callers compare against these messages).
    """
    def __init__(self):
        # Top of the stack is the end of the list.
        self.stack = []
    def push(self, data):
        """Push a value onto the top of the stack."""
        self.stack.append(data)
    def pop(self):
        """Remove and return the top value, or a message if the stack is empty."""
        if not self.stack:
            # Fixed misspelled message ("No elemennt to pop").
            return ("No element to pop")
        else:
            return self.stack.pop()
    def top(self):
        """Return the top value without removing it, or a message if empty."""
        if not self.stack:
            return ("No elements in the stack")
        else:
            return self.stack[-1]
# Demo: push two values, peek at the top, pop it, peek again.
S = Stack()
S.push("Mon")
S.push("sun")
print(S.top())
print(S.pop())
print(S.top())
|
[
"abujafor.cse11@gmai.com"
] |
abujafor.cse11@gmai.com
|
e5e2f907488bbd1e533f57e90d04fbf9ca1a94e9
|
33ce95a46bad431fb9acde07f10f472c43533824
|
/functions_advanced_exercise/keyword_argument_length.py
|
8521f0728b213aa0197f75f270a97e0e6e220239
|
[] |
no_license
|
ivan-yosifov88/python_advanced
|
91dead1a44771a46e85cecdfc6b02e11c0cb4d91
|
21830aabc87eb28eb32bf3c070bf202b4740f628
|
refs/heads/main
| 2023-06-29T21:31:30.285019
| 2021-06-23T20:31:36
| 2021-06-23T20:31:36
| 342,571,734
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 148
|
py
|
def kwargs_length(**kwargs):
    """Return how many keyword arguments were passed to this call."""
    count = len(kwargs)
    return count
# 'baba' duplicates the value 'Peter' but is a distinct key, so length is 3.
dictionary = {'name': 'Peter', 'age': 25, 'baba': 'Peter'}
print(kwargs_length(**dictionary))
|
[
"ivan.yosifov88gmail.com"
] |
ivan.yosifov88gmail.com
|
e902eec2088a110c55e9d2063a8d882bcd34f49d
|
0c021e881a2d5507f2dd0600a7bc36fa2b8de112
|
/turtlecircle.py
|
31d3b56a4add57cb27afc03411115accd4f46ccf
|
[] |
no_license
|
Techsrijan/girlspolytechnic
|
016299f28d808f7628693dec555642463862a537
|
0eb136632dd37c1e65ac290509ed166144e8d2a0
|
refs/heads/main
| 2023-02-03T20:40:30.474711
| 2020-12-16T10:22:59
| 2020-12-16T10:22:59
| 307,243,788
| 0
| 6
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 385
|
py
|
# Turtle graphics demo: draws circles, undoes/resets, then writes a caption.
from turtle import *
import time
t=Turtle()
s=Screen()
s.title("Circle Example")
s.setup(800,600)
'''t.circle(50) # anticlockwise
t.circle((-100))
t.up()
t.forward(200)
t.down()
t.pencolor("red")
t.circle(300)
'''
# Draw one circle at the start position and one at (-100, -100);
# up()/down() lift the pen so the move leaves no trail.
t.circle(50)
t.up()
t.goto(-100,-100)
t.down()
t.circle(50)
# undo() removes only the last drawing action; reset() then clears the canvas.
t.undo()
t.reset()
time.sleep(2)
t.write("This is my turtle tutorial",font=("Comic Sans Ms",25,"bold"))
done()
|
[
"aswanibtech@gmail.com"
] |
aswanibtech@gmail.com
|
1e75f91cfeb5d9ec5f124aa4adcddf060fb9624d
|
2329ba07662331526c508e16512eb8c194c2d0c8
|
/src/measurement/area/imperial.py
|
6b26952f153010fe609458008baeca02b9c31ad8
|
[] |
no_license
|
patricknevindwyer/Converto
|
9265e2c6a7af88f6d2a767baed78ce8e95b83215
|
8b9a874a63a8fefde3149d8f6c28c338cc20f09f
|
refs/heads/master
| 2020-05-18T19:58:49.335100
| 2010-12-03T13:40:40
| 2010-12-03T13:40:40
| 1,091,854
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 254
|
py
|
'''
Created on Dec 3, 2010
@author: patricknevindwyer
Imperial measurements of Area.
'''
from measurement.bases import Area
# Conversion factors for square feet; the values match sq ft <-> square
# metres (1 sq ft = 0.09290304 m^2, 1 m^2 = 10.7639104 sq ft), so the base
# unit is presumably the square metre — confirm against measurement.bases.
square_feet = Area(
    toBaseUnit = 0.09290304,
    fromBaseUnit = 10.7639104,
    suffixes = ('square foot', 'square feet')
)
|
[
"patricknevindwyer@gmail.com"
] |
patricknevindwyer@gmail.com
|
879cdf55ca3f1f61547f825cbd91ff885eb084e1
|
2734b77a68f6d7e22e8b823418ad1c59fe1a34af
|
/opengever/dossier/content_deleter.py
|
a165f1f965b4eda76e04acabca72a444e73e2129
|
[] |
no_license
|
4teamwork/opengever.core
|
5963660f5f131bc12fd0a5898f1d7c8f24a5e2b1
|
a01bec6c00d203c21a1b0449f8d489d0033c02b7
|
refs/heads/master
| 2023-08-30T23:11:27.914905
| 2023-08-25T14:27:15
| 2023-08-25T14:27:15
| 9,788,097
| 19
| 8
| null | 2023-09-14T13:28:56
| 2013-05-01T08:28:16
|
Python
|
UTF-8
|
Python
| false
| false
| 707
|
py
|
from opengever.base.browser.folder_buttons_availability import FolderButtonsAvailabilityView
from opengever.base.content_deleter import BaseContentDeleter
from opengever.dossier.behaviors.dossier import IDossierMarker
from zExceptions import Forbidden
from zope.component import adapter
@adapter(IDossierMarker)
class DossierDeleter(BaseContentDeleter):
    """Content deleter for dossiers.

    Extends the base deleter so that only empty dossiers without linked
    workspaces can be deleted.
    """
    # Zope permission required to perform the deletion.
    permission = 'opengever.dossier: Delete dossier'
    def verify_may_delete(self, **kwargs):
        """Raise Forbidden unless this dossier may be deleted.

        On top of the base-class checks: the dossier must contain no
        objects, and it must not have linked workspaces (NOTE(review):
        checked via FolderButtonsAvailabilityView with request=None —
        confirm that is intended).
        """
        super(DossierDeleter, self).verify_may_delete()
        if not self.context.objectCount() == 0:
            raise Forbidden()
        if FolderButtonsAvailabilityView(self.context, None)._has_linked_workspaces():
            raise Forbidden()
|
[
"e.schmutz@4teamwork.ch"
] |
e.schmutz@4teamwork.ch
|
a01f9443c22e4e22c3db6a462ca1c40ad91e0bd4
|
d85fbcf9199a46d7ce43537a6b333e381fd1b868
|
/argo/workflows/dsl/__about__.py
|
eb032ad16c682ac31874759e105c26fd5d1f1c5c
|
[
"Python-2.0",
"Apache-2.0"
] |
permissive
|
binarycrayon/argo-python-dsl
|
78af89ab6a4e522a0bd9b1a28124ce687d544cef
|
0eb6fcf1c0901c842dd280d8c052981b5b3378ce
|
refs/heads/master
| 2021-02-13T15:35:52.177954
| 2020-03-08T07:41:46
| 2020-03-08T07:41:46
| 244,710,162
| 0
| 0
|
Apache-2.0
| 2020-03-03T18:22:47
| 2020-03-03T18:22:47
| null |
UTF-8
|
Python
| false
| false
| 500
|
py
|
"""About this package."""
__all__ = [
"__title__",
"__summary__",
"__uri__",
"__version__",
"__author__",
"__email__",
"__license__",
"__copyright__",
]
__title__ = "argo-workflows-dsl"
__summary__ = "DSL for Argo Workflows"
__uri__ = "https://github.com/CermakM/argo-python-dsl"
__version__ = "0.1.0-rc"
__author__ = "Marek Cermak"
__email__ = "macermak@redhat.com"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2019 {0}".format(__author__)
|
[
"macermak@redhat.com"
] |
macermak@redhat.com
|
a13ba3403a954a805e8bc586f2179587ed19b562
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02843/s696796090.py
|
05e70395147aa3317b5c278c07796ed013434575
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 348
|
py
|
# Unbounded-knapsack style check: can the amount X be paid exactly using
# items priced 100..105?  dp[i][j] == 1 iff amount j is reachable using the
# first i+1 prices; the last line prints 1 if X is payable, else 0.
X = int(input())
price = [100,101,102,103,104,105]
dp = [[0]*100001 for _ in range(6)]
dp[0][0] = 1
# With only 100-yen items, exactly the multiples of 100 are reachable.
for i in range(100,100001):
    dp[0][i] = dp[0][i-100]
for i in range(1,6):
    for j in range(price[i]):
        dp[i][j] = dp[i-1][j]
    for j in range(price[i],100001):
        # Either skip price[i], or use one more of it (unbounded supply).
        dp[i][j] = max(dp[i-1][j],dp[i][j-price[i]])
print(dp[-1][X])
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
b0ae8f4bfb739eb24ac9fc0d06617715c46a65d9
|
e84020108a7037d8d4867d95fada1b72cbcbcd25
|
/src/chattisgarh/fto/processFTOStatus.py
|
d660f4fc16006a208a39e4abc60bff1a449ef8b8
|
[] |
no_license
|
rajesh241/libtech
|
8384316051a2e8c2d4a925cd43216b855b82e4d9
|
0105e717357a3626106028adae9bf162a7f93fbf
|
refs/heads/master
| 2022-12-10T03:09:00.048841
| 2020-06-14T09:39:04
| 2020-06-14T09:39:04
| 24,629,538
| 1
| 1
| null | 2022-12-08T02:26:11
| 2014-09-30T07:57:45
|
Python
|
UTF-8
|
Python
| false
| false
| 3,182
|
py
|
# Python 2 batch script: reads downloaded FTO status HTML pages from disk,
# scrapes the status table with BeautifulSoup and writes the events into the
# MySQL ftoStatus table, marking each ftoDetails row processed.
import csv
from bs4 import BeautifulSoup
import requests
import MySQLdb
import time
import re
import os
import sys
fileDir=os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, fileDir+'/../../includes/')
from settings import dbhost,dbuser,dbpasswd,sid,token
from globalSettings import datadir
#Error File Defination
errorfile = open('/tmp/processFTO.log', 'a')
#File Path where all the Downloaded FTOs would be placed
districtName="SURGUJA"
ftofilepath=datadir+districtName+"/"
#Connect to MySQL Database
db = MySQLdb.connect(host=dbhost, user=dbuser, passwd=dbpasswd, db="surguja",charset='utf8')
cur=db.cursor()
db.autocommit(True)
#Query to set up Database to read Hindi Characters
query="SET NAMES utf8"
cur.execute(query)
#Query to get the FTO
query=" select f.id,f.ftoNo,b.name,f.finyear,f.blockCode from ftoDetails f,blocks b where b.blockCode=f.blockCode and b.isActive=1 and f.finyear='16' and f.isProcessed=1 and f.isStatusDownloaded=1 and f.isStatusProcessed=0 and f.incorrectPOFile!=1 "
#query=" select f.id,f.ftoNo,b.name,f.finyear,f.blockCode from ftoDetails f,blocks b where b.blockCode=f.blockCode and b.isActive=1 and f.finyear='16' and f.isStatusDownloaded=1 and f.isStatusProcessed=0 and ftoNo='CH3305003_081015FTO_142597'"
cur.execute(query)
if cur.rowcount:
    results = cur.fetchall()
    for row in results:
        ftoid=str(row[0])
        ftoNo=row[1]
        blockName=row[2]
        finyear=row[3]
        blockCode=row[4]
        print str(ftoid)+" "+finyear+" "+ftoNo+" "+blockName
        # Map the two-digit financial-year code to its full form.
        if finyear=='16':
            fullfinyear='2015-2016'
        elif finyear=='15':
            fullfinyear='2014-2015'
        else:
            fullfinyear='2013-2014'
        ftofilename=ftofilepath+blockName+"/FTO/"+fullfinyear+"/"+ftoNo+"_status.html"
        print ftofilename
        if (os.path.isfile(ftofilename)):
            ftohtml=open(ftofilename,'r').read()
            if "The file name does not appear to be correct" in ftohtml:
                print "This does not seem like a postoffice PO"
                errorflag=1
            else:
                htmlsoup=BeautifulSoup(ftohtml)
                # NOTE(review): bare except hides all parse failures, not just
                # a missing table — consider catching AttributeError only.
                try:
                    table=htmlsoup.find('table',id="ctl00_ContentPlaceHolder1_Table1")
                    rows = table.findAll('tr')
                    errorflag=0
                except:
                    errorflag=1
            print "errorflag is "+str(errorflag)
            if errorflag==0:
                # Insert one ftoStatus row per data row (header row skipped
                # by the 'Date Time' check).
                for tr in rows:
                    cols = tr.findAll('td')
                    tdtext=''
                    eventDate= cols[0].text
                    if eventDate != 'Date Time':
                        print eventDate
                        event = cols[1].text
                        office= cols[2].text
                        fileid=cols[3].text
                        print eventDate+event+office+fileid
                        eventDateFormat='%d %M %Y %H:%i:%s'
                        query="insert into ftoStatus (ftoNo,blockCode,finyear,eventDate,event,office,fileid) values ('%s','%s','%s',STR_TO_DATE('%s','%s'),'%s','%s','%s');" % (ftoNo,blockCode,finyear,eventDate,eventDateFormat,event,office,fileid)
                        print query
                        cur.execute(query)
                query="update ftoDetails set isStatusProcessed=1 where id=%s" %(ftoid)
                cur.execute(query)
            else:
                # Mark the file as bad so it is skipped in future runs.
                query="update ftoDetails set incorrectPOFile=1 where id=%s" %(ftoid)
                cur.execute(query)
|
[
"togoli@gmail.com"
] |
togoli@gmail.com
|
04231d91e5d33e957c15c28892b7716bb60a70c1
|
895dfeb5c6af5e8c66772491d164e9d5b57d4302
|
/Courses/Full_Stack_Foundations/restaurant_raw/webserver.py
|
7fa49de6ca38602236c51866bf2598078cc5b8a0
|
[] |
no_license
|
JPGITHUB1519/FSND-Udacity
|
753747aac6b727ac7a0e98d95059127bf8a3690d
|
a7acb4df6ff3b2216da5749e7087b0e254ed80cd
|
refs/heads/master
| 2020-12-30T12:11:21.051853
| 2017-10-05T02:30:22
| 2017-10-05T02:30:22
| 91,410,311
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,540
|
py
|
import os
import cgi
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from jinja2 import Environment, FileSystemLoader, select_autoescape
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, Restaurant, MenuItem
### jinja config
config = {
    "directory": os.path.dirname(os.path.abspath(__file__)),
    "templates_dir": '/templates'
}
directory = os.path.dirname(os.path.abspath(__file__))
# Jinja2 environment loading templates from ./templates with HTML/XML autoescape.
env = Environment(
    loader = FileSystemLoader(config["directory"] + config['templates_dir']),
    autoescape=select_autoescape('html', 'xml')
)
### database config
engine = create_engine('sqlite:///restaurantmenu.db')
# Bind the engine to the metadata of the Base class so that the
# declaratives can be accessed through a DBSession instance
# BUG FIX: this previously assigned "Base.metadata.bing", which silently
# created a new attribute and never bound the engine.
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
# A DBSession() instance establishes all conversations with the database
# and represents a "staging zone" for all the objects loaded into the
# database session object. Any change made against the objects in the
# session won't be persisted into the database until you call
# session.commit(). If you're not happy about the changes, you can
# revert all of them back to the last commit by calling
# session.rollback()
session = DBSession()
class webserverHandler(BaseHTTPRequestHandler):
def render_str(self, template, **params):
template = env.get_template(template)
return template.render(params)
def render(self, template, **kw):
self.write(self.render_str(template, **kw))
def write(self, element):
self.wfile.write(element)
def send_headers(self, request_type="get"):
if request_type == "get":
self.send_response(200)
if request_type == "post":
self.send_response(301)
self.send_header('Content-Type', 'text/html')
self.end_headers()
def do_GET(self):
try:
if self.path.endswith('/test'):
self.get_sent_headers()
self.render("test.html", name="jean")
return
if self.path.endswith('/restaurants'):
self.send_headers()
restaurants = session.query(Restaurant).all()
self.render('restaurants.html', restaurants=restaurants)
return
if self.path.endswith('/restaurants/create'):
self.send_headers()
self.render("restaurants_create.html")
return
if self.path.endswith('/restaurants/edit/*'):
self.send_headers()
self.write("hey")
print self.path
# restaurant = session.query(Restaurant).filter_by(id=id)
# self.render("restaurants_edit.html")
except IOError:
self.send_error(404, "File not found %s" % self.path)
def do_POST(self):
try:
if self.path.endswith("/restaurants/store"):
self.send_headers("post")
ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
if ctype == 'multipart/form-data':
# collect field from the form
fields = cgi.parse_multipart(self.rfile, pdict)
name = fields.get('name')
restaurant = Restaurant(name=name[0])
session.add(restaurant)
session.commit()
# redirect
self.write("Restaurante Creado %s" % restaurant.name)
if self.path.endswith('/restaurants/update'):
pass
except IOError:
self.send_error(404, "File not found %s" % self.path)
def main():
    """Start the HTTP server on port 8080 and serve until interrupted."""
    try:
        port = 8080
        server = HTTPServer(('', port), webserverHandler)
        print "Web Server Running on port %s" % port
        server.serve_forever()
    except KeyboardInterrupt:
        print "^C entered, stopping web server..."
        # Release the listening socket before the process exits.
        server.socket.close()
if __name__ == "__main__":
    main()
|
[
"juanpedro1519@gmail.com"
] |
juanpedro1519@gmail.com
|
1aa3752c58335a1b8cb63f5ca192c48180bc3889
|
d554b1aa8b70fddf81da8988b4aaa43788fede88
|
/5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/222/users/4062/codes/1845_2220.py
|
3d32d34040dc707ac31d11f0735accdb07b48017
|
[] |
no_license
|
JosephLevinthal/Research-projects
|
a3bc3ca3b09faad16f5cce5949a2279cf14742ba
|
60d5fd6eb864a5181f4321e7a992812f3c2139f9
|
refs/heads/master
| 2022-07-31T06:43:02.686109
| 2020-05-23T00:24:26
| 2020-05-23T00:24:26
| 266,199,309
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 131
|
py
|
from numpy import *
# Reads a 2-D matrix literal (e.g. [[1,2],[3,4]]) from stdin and prints the
# maximum of each row on its own line.
# NOTE(review): eval(input(...)) executes arbitrary user input — unsafe on
# untrusted data; ast.literal_eval would be safer.
m= array(eval(input("Pagamentos: ")))
l = shape(m)[0]
c = shape(m)[1]
for i in range(l):
  print(max(m[i, :]))
|
[
"jvlo@icomp.ufam.edu.br"
] |
jvlo@icomp.ufam.edu.br
|
21a347c147a0776a230dc184af319b1643c6a013
|
4b758ca583d2a58d4d711381405e024109a0f08f
|
/dali/test/python/test_operator_random_resized_crop.py
|
483d481dbf53a927fd48e58f3eff3b31bc9c649c
|
[
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
] |
permissive
|
ConnectionMaster/DALI
|
76ff07b2fa3f62490b059088c88ade7570130ff4
|
6b90519d2c209d705e8912a5f00b71a018aeaa52
|
refs/heads/master
| 2023-04-14T13:04:57.520421
| 2021-01-22T16:34:31
| 2021-01-22T16:34:31
| 187,683,855
| 1
| 1
|
Apache-2.0
| 2023-04-03T23:45:28
| 2019-05-20T17:18:56
|
C++
|
UTF-8
|
Python
| false
| false
| 6,577
|
py
|
import nvidia.dali as dali
import nvidia.dali.fn as fn
import numpy as np
import test_utils
def close(a, b):
    """Return True if a and b are equal within tolerance.

    Accepts either an absolute difference below 1e-5 or a relative
    difference below 1e-6 of the combined magnitudes.
    """
    # BUG FIX: the relative term was "abs(a) + abs(b) * 1e-6" (missing
    # parentheses), which is ~abs(a) and made the check pass for almost any
    # pair with a non-tiny first operand.
    return abs(a - b) < 1e-5 or abs(a - b) < (abs(a) + abs(b)) * 1e-6
def analyze_frame(image, channel_dim):
    """Recover the crop signature encoded in a synthetic frame.

    The test images (see generate_data) encode the x coordinate, y coordinate
    and frame index in the three channels, so sampling the four corner pixels
    reveals which region of the input the crop was taken from.
    Returns (x0, y0, x1, y1, frame_value) with frame_value rounded to int.
    """
    def pixel(x, y):
        # channel-first (channel_dim == 0) vs channel-last indexing
        return image[:, y, x] if channel_dim == 0 else image[y, x, :]
    x0, y0, f0 = pixel(0, 0)
    x1, y1, f1 = pixel(-1, 0)
    x2, y2, f2 = pixel(0, -1)
    x3, y3, f3 = pixel(-1, -1)
    # Corners sharing a column/row must agree on x/y; all must agree on frame.
    assert close(x0, x2), "x0 = {} != x2 = {}".format(x0, x2)
    assert close(x1, x3), "x1 = {} != x3 = {}".format(x1, x3)
    assert close(y0, y1), "y0 = {} != y1 = {}".format(y0, y1)
    assert close(y2, y3), "y2 = {} != y3 = {}".format(y2, y3)
    assert close(f0, f1) and close(f0, f2) and close(f0, f3)
    return x0, y0, x3, y3, int(np.round(f0))
def check_frame(image, frame_index, total_frames, channel_dim, roi, w, h, aspect_ratio_range, area_range, value_range):
    """Validate one cropped frame against the crop constraints.

    For frame 0 the ROI is estimated from the corner-pixel signature and its
    aspect ratio / area are checked (with interpolation slack xeps/yeps);
    the estimated ROI is returned.  Later frames must reproduce exactly the
    ROI estimated from frame 0 (passed in via ``roi``).
    """
    x0, y0, x1, y1, f = analyze_frame(image, channel_dim)
    # The frame channel must encode this frame's index.
    assert f == frame_index * value_range // total_frames
    out_h, out_w = image.shape[:2] if channel_dim != 0 else image.shape[1:3]
    # Slack for interpolation error, proportional to the resize factor.
    xeps = np.ceil(2 + 2 * w / out_w)
    yeps = np.ceil(2 + 2 * h / out_h)
    if frame_index == 0:
        roi_w_max = min((x1 - x0) * w / value_range + xeps, w)
        roi_w_min = max((x1 - x0) * w / value_range - xeps, 1)
        roi_h_max = min((y1 - y0) * h / value_range + yeps, h)
        # NOTE(review): "- xeps" here looks like it should be "- yeps" to
        # mirror the height bound above — confirm.
        roi_h_min = max((y1 - y0) * h / value_range - xeps, 1)
        ratio_min = roi_w_min / roi_h_max
        ratio_max = roi_w_max / roi_h_min
        area_min = roi_w_min * roi_h_min / (w * h)
        area_max = roi_w_max * roi_h_max / (w * h)
        # NOTE(review): the message formats (ratio_min, ratio_min); the second
        # argument is presumably meant to be ratio_max ("valiid" is also a typo).
        assert ratio_max >= aspect_ratio_range[0] and ratio_min <= aspect_ratio_range[1], \
            "aspect ratio estimated at {}..{} outside valiid range [{} .. {}]".format(
                ratio_min, ratio_min, *aspect_ratio_range)
        assert area_max >= area_range[0] and area_min <= area_range[1], \
            "area estimated at {}..{} outside valiid range [{} .. {}]".format(
                area_min, area_max, *area_range)
        return x0, y0, x1, y1
    else:
        # All frames of a sequence must use the same crop window.
        assert (x0, y0, x1, y1) == roi
        return roi
def check_seq(seq, channel_dim, w, h, aspect_ratio_range, area_range, value_range):
    """Validate every frame of a cropped sequence.

    The ROI is estimated from frame 0 and every subsequent frame must report
    the same ROI (the crop window must be constant across a sequence).
    """
    # Locate the frame axis relative to the channel axis.
    frame_dim = 1 if channel_dim == 0 else 0
    frame_channel_dim = -1 if channel_dim == -1 else 0
    roi = None
    total_frames = seq.shape[frame_dim]
    for f in range(total_frames):
        frame = seq[:,f] if frame_dim == 1 else seq[f]
        roi = check_frame(frame, f, total_frames, frame_channel_dim, roi, w, h, aspect_ratio_range, area_range, value_range)
def check_output(output, channel_dim, input_shape, aspect_ratio_range, area_range, value_range):
    """Dispatch validation for a single image (rank 3) or a sequence (rank 4)."""
    if len(input_shape) == 3:
        # Single frame: extract input H/W around the channel axis.
        h, w = input_shape[1:3] if channel_dim == 0 else input_shape[0:2]
        check_frame(output, 0, 1, channel_dim, None, w, h, aspect_ratio_range, area_range, value_range)
    else:
        # Sequence: H/W sit after the frame (and possibly channel) axes.
        hidx = 1 if channel_dim == -1 else 2
        h, w = input_shape[hidx:hidx+2]
        check_seq(output, channel_dim, w, h, aspect_ratio_range, area_range, value_range)
def type_range(type):
    """Return the largest encodable value for an integer dtype; for
    floating-point dtypes return a fixed working range of 100000."""
    if not np.issubdtype(type, np.integer):
        return 100000
    return np.iinfo(type).max
def generate_data(frames, width, height, channel_dim, type):
    """Create a synthetic image or sequence whose channels encode position.

    Channel 0 ramps with the x coordinate, channel 1 with the y coordinate
    and channel 2 with the frame index, each scaled to the dtype's value
    range.  When ``frames`` is None, a single frame is built and the frame
    axis is dropped from the result.
    """
    value_range = type_range(type)
    single_frame = frames is None
    num_frames = 1 if single_frame else frames
    # Per-axis ramps, shaped so they broadcast over (frames, height, width).
    x_ramp = (np.arange(0, width) * value_range // width).astype(type)[np.newaxis, np.newaxis, :]
    y_ramp = (np.arange(0, height) * value_range // height).astype(type)[np.newaxis, :, np.newaxis]
    f_ramp = (np.arange(0, num_frames) * value_range // num_frames).astype(type)[:, np.newaxis, np.newaxis]
    full_shape = (num_frames, height, width)
    planes = [np.broadcast_to(ramp, full_shape) for ramp in (x_ramp, y_ramp, f_ramp)]
    seq = np.stack(planes, axis=channel_dim)
    if single_frame:
        # Drop the frame axis (which follows the channel axis iff channel_dim == 0).
        seq = seq[:, 0] if channel_dim == 0 else seq[0]
    return seq
def generator(batch_size, max_frames, channel_dim, type):
    """Return a callable producing random batches of synthetic inputs.

    Each call yields ``batch_size`` arrays with random sizes (and, when
    ``max_frames`` is set, a random frame count); ``type`` is a DALI dtype
    converted to its numpy equivalent.
    """
    type = test_utils.dali_type_to_np(type)
    # "FCHW"-style layouts (channel at index 1) only make sense for sequences.
    assert max_frames is not None or channel_dim != 1
    def generate():
        batch = []
        for _ in range(batch_size):
            frames = None if max_frames is None else np.random.randint(1, max_frames+1)
            # Cap sizes so long sequences don't blow up memory.
            sz = np.random.randint(100, 2000 / (max_frames or 1))
            w, h = np.random.randint(sz, 2*sz, [2])
            batch.append(generate_data(frames, w, h, channel_dim, type))
        return batch
    return generate
def _test_rrc(device, max_frames, layout, aspect_ratio_range, area_range, output_size, input_type, output_type):
    """Build a DALI pipeline with random_resized_crop and validate its outputs.

    Runs three iterations over random synthetic inputs and checks every
    sample's crop window against the aspect-ratio and area constraints.
    """
    batch_size = 4
    pipe = dali.pipeline.Pipeline(batch_size, 4, 0)
    channel_dim = layout.find('C')
    value_range = type_range(test_utils.dali_type_to_np(input_type))
    # Normalize "channels last" to -1 so downstream indexing is uniform.
    if channel_dim == len(layout)-1:
        channel_dim = -1
    input = fn.external_source(source=generator(batch_size, max_frames, channel_dim, input_type), layout=layout)
    # Also output the input shapes so the checker knows the source dimensions.
    shape = fn.shapes(input)
    if device == "gpu":
        input = input.gpu()
    out = fn.random_resized_crop(input, random_aspect_ratio=aspect_ratio_range, random_area=area_range,
                                 size=output_size, interp_type=dali.types.INTERP_LINEAR, seed=12321, dtype=output_type)
    pipe.set_outputs(out, shape)
    pipe.build()
    for iter in range(3):
        outputs, input_shapes = pipe.run()
        if device == "gpu":
            outputs = outputs.as_cpu()
        # The operator must preserve the input layout.
        assert outputs.layout() == layout
        for i in range(batch_size):
            out = outputs.at(i)
            input_shape = input_shapes.at(i).tolist()
            check_output(out, channel_dim, input_shape, aspect_ratio_range, area_range, value_range)
def test_random_resized_crop():
    """Nose-style generator test: yields one _test_rrc case per combination
    of device, frame count, layout, crop constraints and output size; input
    and output dtypes are sampled randomly (seeded for reproducibility)."""
    np.random.seed(12345)
    types = [dali.types.UINT8, dali.types.INT16, dali.types.FLOAT]
    for device in ["cpu", "gpu"]:
        for max_frames in [None, 1, 8]:
            # Sequence layouts only apply when there is a frame dimension.
            for layout in ["FHWC", "FCHW", "CFHW"] if max_frames is not None else ["HWC", "CHW"]:
                for aspect, area in [
                        ((0.5, 2), (0.1, 0.8)),
                        ((1, 2), (0.4, 1.0)),
                        ((0.5, 1), (0.1, 0.5))]:
                    for size in [(100,100), (640,480)]:
                        input_type = types[np.random.randint(0, len(types))]
                        output_type = dali.types.FLOAT if np.random.randint(0, 2) else None
                        yield _test_rrc, device, max_frames, layout, aspect, area, size, input_type, output_type
|
[
"noreply@github.com"
] |
ConnectionMaster.noreply@github.com
|
2e71d123768a4baaceb7cf78751643f3bc645473
|
eb8855df9efcaafe7be3c33ac6a9eba09190a802
|
/article/migrations/0002_articlepost_total_views.py
|
1afe206df23165b96a38b7402b009dcd70fbdaca
|
[] |
no_license
|
demo112/Bloging_new
|
4441b32cde519b8aa6e5fd04a30af09a2e2cbe46
|
ab8fbf0e33dd91413a707491bfce3e46d4d021c9
|
refs/heads/master
| 2020-04-30T00:52:41.741959
| 2019-03-28T00:08:28
| 2019-03-28T00:08:28
| 176,513,462
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 389
|
py
|
# Generated by Django 2.1.7 on 2019-03-22 19:55
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a ``total_views`` page-view counter (default 0) to ArticlePost."""
    dependencies = [
        ('article', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='articlepost',
            name='total_views',
            field=models.PositiveIntegerField(default=0),
        ),
    ]
|
[
"huafengdongji@hotmail.com"
] |
huafengdongji@hotmail.com
|
44c5dbbc3246735c0cdeeccde1162967ba8c658e
|
f768ddba19c7bc7664ae581daeef5fe2f650a539
|
/post/post.py
|
ddb481a99e2e6e443b0f580bbc720d0f68fac4c2
|
[] |
no_license
|
douwings/pythonWork
|
36ac2ba32a721ed6d9af62a9168eee12a61891af
|
70f84eb179e12cc36b521fdb9a2573a14a300d23
|
refs/heads/master
| 2022-12-23T18:41:50.560391
| 2020-09-28T06:20:36
| 2020-09-28T06:20:36
| 273,136,705
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,047
|
py
|
# -*- coding=utf-8 -*-
import urllib.request
import urllib.parse
import ssl
import json
import time
def postsendsms():
    """POST a WeChat-login request to the test server and return the decoded JSON reply.

    Side effect: globally disables HTTPS certificate verification for urllib
    (the test environment presumably uses self-signed certificates).
    Fix vs. original: removed the unused local ``t = time.time()``.
    """
    ssl._create_default_https_context = ssl._create_unverified_context
    login_url = 'http://172.16.2.111:3000/v1/login_by_wx'
    headers = {
        # 'Content-Type': 'application/json',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36',
    }
    # Hard-coded WeChat login payload captured from a real session.
    form_fields = {
        'userwxcode': '043Ow9h228nyXU0IPFi22wbMg22Ow9h2',
        'encryptedData': '76uQW81eWiIo8JCsRTUWf4QuR3PkgctOfF3rvAcTx2jXHRfauzia0XggmMANUYK0WylpOp6Ms3YXkOTqNWvQJ4ualQkpZiowJzrL71p1AXCCdpGlDo7YiB9Qt0K5In/h0mme0Jzw99LCyPTac2dwepNle0LRe1HCNmKlYyyyF5kpgB/ZD8BSBSGw8VFpufXWagZb+iQ88T25iPnPrTMhpJRDun7F/JROcNERmjCHlSzYADYMxaOqaj17tk1mKRe5lt2tphxARR++ZfWaTqqSESJn/ywYZr+3XATsZZ/Ve7rgieoQshb6drQTEhCUDoUB2bz3XVHNsm6X8wiv7rCBBe4Gm6MEguKCJxeXcS41kYaKS+caZarcLJ3JD5sstrQ6zx6pKaGhIja5AvBWWyBtyMgn3tvXeh28Glt5qrOK2GuvWK5BoKC71tQSi6iz8OQhCS3P3hdGfNf/tSH6Eb3krzZOaV19jQGAgVI+wNpVvDY=',
        'iv': 'kDJKkS9g6mFXputhbNouMw==',
    }
    # Build the POST request with a urlencoded body.
    request = urllib.request.Request(url=login_url, headers=headers)
    form_data = urllib.parse.urlencode(form_fields).encode()
    # Send the POST request and decode the JSON reply.
    response = urllib.request.urlopen(request, data=form_data)
    dict_json = json.loads(response.read().decode())
    print(dict_json)
    print("####################")
    return dict_json
def testtoken(data):
    """POST a 'foreign login' request using tokens from *data* and return the JSON reply.

    :param data: dict with at least 'errcode', 'openid' and 'access_token'
                 keys (shape assumed from usage — e.g. the reply of a login
                 call; TODO confirm against the server API).
    Fix vs. original: removed the unused local ``t = time.time()``.
    """
    print(data['errcode'])
    ssl._create_default_https_context = ssl._create_unverified_context
    login_url = 'http://172.16.2.44:3000/v1/sforeignlogin'
    headers = {
        # 'Content-Type': 'application/json',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36',
    }
    # Third-party auth credentials are sent as a JSON string inside the form.
    auth_data = {
        'openid': data['openid'],
        'access_token': data['access_token'],
    }
    form_fields = {
        'platform': 'weixin',
        'authData': json.dumps(auth_data),
    }
    # Build and send the POST request (urlencoded body).
    request = urllib.request.Request(url=login_url, headers=headers)
    form_data = urllib.parse.urlencode(form_fields).encode()
    response = urllib.request.urlopen(request, data=form_data)
    dict_json = json.loads(response.read().decode())
    print(dict_json)
    print("####################")
    return dict_json
def get_wx_userinfo():
    """GET the WeChat user-info endpoint with a hard-coded login code and print the reply."""
    base_url = 'http://172.16.2.44:3000/v1/app/get_wx_userinfo?'
    params = {
        'code': '0231512s1uGMOk0kWY0s1bDV1s11512o',
    }
    full_url = base_url + urllib.parse.urlencode(params)
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36',
    }
    # Plain GET request; the reply body is printed, not returned.
    request = urllib.request.Request(url=full_url, headers=headers)
    response = urllib.request.urlopen(request)
    print("####################")
    print(response.read().decode())
# Manual test driver: run a single WeChat login POST; other experiment flows
# (and captured access tokens) are kept commented out below for reference.
if __name__ == '__main__':
    # get_wx_userinfo()
    # testtoken(postsendsms())
    postsendsms()
    # data = postsendsms()
    # testtoken(data)
    # testtoken(data)
    # testtoken(data)
    # 23_BgsyJbTUfekU4pynn0EbQpC1T0jCOiwUEtSJhF6FghVFFLcgXyXflYQx5wAv1-qqBfGcYLZE6KfBUk3oPt9ELrDkA6lB_tgiVJG6hRI7V0_20jaPvkHbL_eG9PgPIKbADAZSU
    # 23_BgsyJbTUfekU4pynn0EbQpC1T0jCOiwUEtSJhF6FghVFFLcgXyXflYQx5wAv1-qqBfGcYLZE6KfBUk3oPt9ELrDkA6lB_tgiVJG6hRI7V0_20jaPvkHbL_eG9PgPIKbADAZSU
    # 23_bej0j1-r1VKCVYNB7ZDi05kRhrN1J3jXlVXjrsmClv7s_azAgShoybtG99VoCh3791R4Yp3i4d7o2FcmmQ5kLuLehdfSfKPMnKw79x3f0ke_-XE7hkG5irujGjEoKI9ZNeGhaOaadKDNeqhVGRBdAFAJUE
    # 23_bej0j1-r1VKCVYNB7ZDi05kRhrN1J3jXlVXjrsmClv7s_azAgShoybtG99VoCh3791R4Yp3i4d7o2FcmmQ5kLuLehdfSfKPMnKw79x3f0ke_-XE7hkG5irujGjEoKI9ZNeGhaOaadKDNeqhVGRBdAFAJUE
    # { params: { userwxcode: '011oLiaZ1Vnt5U0aF98Z17ZDaZ1oLiah' } }
    # simpleForeignLogin
|
[
"1020763068@qq.com"
] |
1020763068@qq.com
|
736aecf523f7137ffba1df7104167bee096698f0
|
035ec6f79bb70374a54d6b491b34114fcc9f0e24
|
/wtDigiTwin/fast/fastlinfiles.py
|
0b796235f81f0435db9e1ae181eee4b64a327b49
|
[
"MIT"
] |
permissive
|
deyh2020/wtDigiTwin
|
b8b8d8f0a0ca73a6e7dddba0e3b6457a7551a9da
|
2c1e965ab5fdca10e67b0db9ef87837f5abebc02
|
refs/heads/master
| 2023-07-14T07:23:28.628625
| 2021-07-26T15:12:22
| 2021-07-26T15:12:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,700
|
py
|
import numpy as np
import pickle
import glob
import os
import weio
class FASTPeriodicOP(object):
    """A set of OpenFAST *.lin files assumed to belong to one periodic operating point.

    Reads ``<prefix>.1.lin .. <prefix>.N.lin`` and collects wind speed, rotor
    speed, azimuth and blade pitch for each linearization time.
    Fix vs. original: the two bare ``except:`` clauses were narrowed to
    ``except Exception`` so KeyboardInterrupt/SystemExit are no longer swallowed.
    """
    def __init__(self, prefix, nLin=None):
        """
        :param prefix: common path prefix of the .lin files (without '.<i>.lin')
        :param nLin: number of linearization times; autodetected via glob if None
        """
        if nLin is None:
            linfiles = glob.glob(prefix + '*.*.lin')
            self.nLinTimes = len(linfiles)
        else:
            self.nLinTimes = nLin
        print(prefix, self.nLinTimes)
        self.prefix = prefix
        self.Data = []       # one weio file object per linearization time
        self.vAzim = []      # azimuth per linearization time
        self.vWS = []        # wind speed per linearization time
        self.vPitch = []     # blade-1 pitch [deg] per linearization time
        self.vRotSpeed = []  # rotor speed per linearization time
        self.vBu = []
        for i in np.arange(self.nLinTimes):
            linfilename = prefix + '.' + str(i + 1) + '.lin'
            print(linfilename)
            if not os.path.exists(linfilename):
                # NOTE(review): execution still proceeds to weio.read below,
                # which will raise on the missing file.
                print('Linearization file missing: ', linfilename)
            linfile = weio.read(linfilename)
            df = linfile.toDataFrame()
            self.Data.append(linfile)
            try:
                self.vWS.append(df['u']['WS_[m/s]'][0])
            except Exception:  # was a bare except
                print('Wind speed not found in input, assuming 0m/s')
                self.vWS.append(0)
            self.vRotSpeed.append(linfile.RotSpeed)
            self.vAzim.append(linfile.Azimuth)
            self.vPitch.append(df['u']['B1pitch_[rad]'][0] * 180 / np.pi)
        self.WS = np.mean(self.vWS)
        self.Pitch = np.mean(self.vPitch)
        self.RotSpeed = np.mean(self.vRotSpeed)
        # NOTE(review): df/linfile refer to the LAST file read; this raises
        # NameError when nLinTimes == 0 — presumably never the case upstream.
        self.x = df['x']
        self.y = df['y']
        self.u = df['u']
        try:
            self.EDdescr = linfile['EDDOF']
        except Exception:  # was a bare except
            print('EDDOF not available. A special version of OpenFAST is required.')
class FASTLin(object):
    """Linearization data for a set of operating points (typically a Campbell diagram).

    Reads every ``<prefix>*.*.lin`` family found in *folder*, wraps each
    family in a FASTPeriodicOP, and sorts the operating points by wind speed.
    Fixes vs. original: removed an identical duplicate definition of the
    ``uop_mean`` property (dead code) and the unused ``M_mean=[]`` local in
    ``stats``.
    """
    def __init__(self, folder='./', prefix='', nLin=None):
        """
        :param folder: directory containing the .lin files
        :param prefix: common file-name prefix used to filter simulations
        :param nLin: fixed number of linearization times per OP (None = autodetect)
        """
        fstfiles = glob.glob(folder + prefix + '*.*.lin')
        # One simulation prefix per operating point: strip the '.<i>.lin' tail.
        Sim_Prefix = np.unique(['.'.join(f.split('.')[:-2]) for f in fstfiles])
        nSim = len(Sim_Prefix)
        # --- Read periodic operating points
        print('Reading linearizations for {} operating points'.format(nSim))
        self.OP_Data = [FASTPeriodicOP(pref, nLin=nLin) for pref in Sim_Prefix]
        # --- Sort by wind speed
        Isort = np.argsort(self.WS)
        self.OP_Data = [self.OP_Data[i] for i in Isort]
        # Drop OPs that have fewer azimuthal linearization times than the rest
        # (zero-wind cases are allowed to have fewer).
        if self.MaxNLinTimes > 1:
            IBad = [i for i in np.arange(nSim) if self.nLinTimes[i] < self.MaxNLinTimes and self.OP_Data[i].WS > 0]
            if len(IBad) > 0:
                print('>>> The following simulations have insufficient number of data points:')
                for i in IBad:
                    print(self.OP_Data[i].prefix, self.OP_Data[i].nLinTimes)
            self.OP_Data = [self.OP_Data[i] for i in np.arange(nSim) if i not in IBad]

    @property
    def WS(self):
        """Mean wind speed of each operating point."""
        return np.array([sim.WS for sim in self.OP_Data])

    @property
    def nLinTimes(self):
        """Number of linearization times of each operating point."""
        return np.array([sim.nLinTimes for sim in self.OP_Data])

    @property
    def MaxNLinTimes(self):
        """Largest number of linearization times across all operating points."""
        return np.max(self.nLinTimes)

    @property
    def nOP(self):
        """Number of operating points."""
        return len(self.OP_Data)

    @property
    def xdescr(self):
        """State descriptions (taken from the first operating point)."""
        return self.OP_Data[0].x.columns.values

    @property
    def ydescr(self):
        """Output descriptions (taken from the first operating point)."""
        return self.OP_Data[0].y.columns.values

    @property
    def EDdescr(self):
        """ElastoDyn DOF descriptions (taken from the first operating point)."""
        return self.OP_Data[0].EDdescr

    @property
    def udescr(self):
        """Input descriptions (taken from the first operating point)."""
        return self.OP_Data[0].u.columns.values

    @property
    def xop_mean(self):
        """Mean absolute state operating values across operating points."""
        return np.mean(np.abs(np.array([op.x.values for op in self.OP_Data])), axis=0)

    @property
    def uop_mean(self):
        """Mean absolute input operating values across operating points."""
        # The original file defined this property twice, identically;
        # the duplicate (dead) definition was removed.
        return np.mean(np.abs(np.array([op.u.values for op in self.OP_Data])), axis=0)

    @property
    def yop_mean(self):
        """Mean absolute output operating values across operating points."""
        return np.mean(np.abs(np.array([op.y.values for op in self.OP_Data])), axis=0)

    def stats(self, matName, WS=None):
        """Return statistics of state-space matrix *matName* across OPs and azimuths.

        :param matName: key into each .lin file's data (e.g. 'A', 'B', 'C', 'D')
        :param WS: optional subset of wind speeds to include (default: all)
        :returns: tuple (M_mean, M_mean_perWS, M_stdAzim, M_stdWS, M_all)
        """
        if WS is None:
            WS = self.WS
            nOP = self.nOP
        else:
            nOP = len(WS)
            print('Returning stats for WS:', WS)
        shape = self.OP_Data[0].Data[0][matName].shape
        M_all = np.zeros((nOP, self.MaxNLinTimes, shape[0], shape[1]))
        M_mean_perWS = np.zeros((nOP, shape[0], shape[1]))
        M_std_perWS = np.zeros((nOP, shape[0], shape[1]))
        # Loop on operating points (e.g. WS)
        ii = 0
        for iop, op in enumerate(self.OP_Data):
            if op.WS in WS:
                # Loop on linearization times (e.g. azimuth); OPs with a
                # single time reuse that matrix for every azimuth slot.
                for iTimes in np.arange(self.MaxNLinTimes):
                    if op.nLinTimes == 1:
                        M_all[ii, iTimes, :, :] = op.Data[0][matName]
                    else:
                        M_all[ii, iTimes, :, :] = op.Data[iTimes][matName]
                M_mean_perWS[ii, :, :] = np.mean(M_all[ii, :, :, :], axis=0)
                M_std_perWS[ii, :, :] = np.std(M_all[ii, :, :, :], axis=0)
                ii += 1
        M_mean = np.mean(M_mean_perWS, axis=0)
        M_stdWS = np.std(M_mean_perWS, axis=0)    # variation of elements with wind speed
        M_stdAzim = np.mean(M_std_perWS, axis=0)  # variation of elements with azimuth
        return M_mean, M_mean_perWS, M_stdAzim, M_stdWS, M_all

    def save(self, filename):
        """Pickle this object to *filename*."""
        with open(filename, 'wb') as f:
            pickle.dump(self, f)
    # def full_linear_model
|
[
"elmanuelito.github@gmail.com"
] |
elmanuelito.github@gmail.com
|
b6076b5e720e510264d359984df649983226d155
|
bac5ecb5eef06dfe76b9b7bff80faee7485c67dd
|
/.history/django_vuejs_tutorial/django_vuejs/dataiku/admin_20200829045700.py
|
95c85e8343b24f314b19ffb3176a56a4567e185b
|
[] |
no_license
|
MChrys/dataiku
|
fb1e48401d544cbcc5a80a0a27668dc9d2d196e5
|
6091b24f565224260a89246e29c0a1cbb72f58ed
|
refs/heads/master
| 2022-12-16T11:06:13.896643
| 2020-09-12T19:03:51
| 2020-09-12T19:03:51
| 293,287,620
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 430
|
py
|
from django.contrib import admin
# Register your models here.
from .models import QCM, Answer, Task, Dataiku_account, Operation, Session, Question, Run, Posibility
# Register the quiz/session models with the Django admin site so they can be
# browsed and edited in the admin UI.
admin.site.register(Task)
admin.site.register(Dataiku_account)
admin.site.register(Operation)
admin.site.register(Session)
admin.site.register(Question)
admin.site.register(Run)
admin.site.register(Posibility)
admin.site.register(QCM)
# Answer is imported but deliberately left unregistered (kept for reference).
#admin.site.register(Answer)
|
[
"cbeltran@umanis.com"
] |
cbeltran@umanis.com
|
7b7c17e361f20416c6b1ac8296418c9a8ec75d00
|
cba8f623e613cfb0cdba73fb373bec68f7bbfdcb
|
/ABC085D.py
|
9802bdf40462578e6247674b1b8a473147a6a75d
|
[] |
no_license
|
bonoron/Atcoder
|
7d0af77a12b40ce2bdebf5ab5a76462629a03ea5
|
e8c0d1ed7d113a0ea23e30d20e8d9993ba1430fa
|
refs/heads/master
| 2022-12-24T20:00:32.702741
| 2020-09-24T07:03:48
| 2020-09-24T07:03:48
| 271,685,972
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 293
|
py
|
# AtCoder ABC085 D: minimise attacks to bring monster health h to <= 0.
# Each katana can be swung repeatedly (damage a_i) or thrown once (damage b_i).
n, h = map(int, input().split())
pairs = [tuple(map(int, input().split())) for _ in range(n)]
swings, throws = zip(*pairs)
best_swing = max(swings)
attacks = 0
# Use throws stronger than the best swing first, biggest first.
for damage in sorted(throws, reverse=True):
    if h <= 0:
        break
    if damage >= best_swing:
        attacks += 1
        h -= damage
# Finish the remaining health with repeated best swings (ceil division).
if h > 0:
    attacks += -(-h // best_swing)
print(attacks)
|
[
"noreply@github.com"
] |
bonoron.noreply@github.com
|
76078cb90de2d17011da5b083945e6a4c92670c6
|
a0b27e1a3a17ce9ec21bea460ba75b9d72564e33
|
/ublog/routes.py
|
3975864270044f0b4cf884b72d3cc09e93b704b2
|
[] |
no_license
|
florije1988/ublog
|
f662bc0dadf0fb244af054db2ca1a491b0454827
|
a94fd3a52a9898b8e89020a7b8754e52ea00d664
|
refs/heads/master
| 2020-12-24T20:25:02.679572
| 2016-05-09T09:39:50
| 2016-05-09T09:39:50
| 58,360,735
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,758
|
py
|
# -*- coding: utf-8 -*-
__author__ = 'florije'
from ublog import app
from flask import render_template, request, flash, session, url_for, redirect
from forms import ContactForm, SignupForm, SigninForm
from flask_mail import Message, Mail
from models import db, User
# Module-level Flask-Mail instance. NOTE(review): it is never bound to `app`
# via mail.init_app(app), so sending presumably relies on default/global
# configuration — verify before relying on contact-form e-mail.
mail = Mail()
@app.route('/')
def home():
    """Render the landing page."""
    return render_template('home.html')
@app.route('/about')
def about():
    """Render the static about page."""
    return render_template('about.html')
@app.route('/contact', methods=['GET', 'POST'])
def contact():
    """Contact page: show the form on GET; on a valid POST, e-mail the message."""
    form = ContactForm()
    if request.method == 'GET':
        return render_template('contact.html', form=form)
    if request.method == 'POST':
        if not form.validate():
            flash('All fields are required.')
            return render_template('contact.html', form=form)
        msg = Message(form.subject.data, sender='contact@example.com', recipients=['your_email@example.com'])
        msg.body = """
      From: %s <%s>
      %s
      """ % (form.name.data, form.email.data, form.message.data)
        mail.send(msg)
        return render_template('contact.html', success=True)
@app.route('/signup', methods=['GET', 'POST'])
def signup():
    """Registration page: create the account and log the new user in."""
    form = SignupForm()
    # Already-authenticated visitors go straight to their profile.
    if 'email' in session:
        return redirect(url_for('profile'))
    if request.method == 'GET':
        return render_template('signup.html', form=form)
    if request.method == 'POST':
        if not form.validate():
            return render_template('signup.html', form=form)
        account = User(form.firstname.data, form.lastname.data, form.email.data, form.password.data)
        db.session.add(account)
        db.session.commit()
        session['email'] = account.email
        return redirect(url_for('profile'))
@app.route('/profile')
def profile():
    """Show the profile page; bounce anonymous or unknown users to sign-in."""
    if 'email' not in session:
        return redirect(url_for('signin'))
    if User.query.filter_by(email=session['email']).first() is None:
        return redirect(url_for('signin'))
    return render_template('profile.html')
@app.route('/signin', methods=['GET', 'POST'])
def signin():
    """Sign-in page: on a valid POST, store the e-mail in the session."""
    form = SigninForm()
    if 'email' in session:
        return redirect(url_for('profile'))
    if request.method == 'GET':
        return render_template('signin.html', form=form)
    if request.method == 'POST':
        if not form.validate():
            return render_template('signin.html', form=form)
        session['email'] = form.email.data
        return redirect(url_for('profile'))
@app.route('/signout')
def signout():
    """Clear the session e-mail and return home; anonymous users go to sign-in."""
    if 'email' in session:
        session.pop('email', None)
        return redirect(url_for('home'))
    return redirect(url_for('signin'))
|
[
"florije1988@gmail.com"
] |
florije1988@gmail.com
|
d2d869abcf663c008f157d716357b792152c0431
|
3a891a79be468621aae43defd9a5516f9763f36e
|
/desktop/core/ext-py/tablib-0.10.0/tablib/packages/openpyxl3/shared/date_time.py
|
8a58cd098dcb8ffa0bdc3773c76a3d882c73d5c1
|
[
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"BSD-Advertising-Acknowledgement",
"MIT"
] |
permissive
|
oyorooms/hue
|
b53eb87f805063a90f957fd2e1733f21406269aa
|
4082346ef8d5e6a8365b05752be41186840dc868
|
refs/heads/master
| 2020-04-15T20:31:56.931218
| 2019-01-09T19:02:21
| 2019-01-09T19:05:36
| 164,998,117
| 4
| 2
|
Apache-2.0
| 2019-01-10T05:47:36
| 2019-01-10T05:47:36
| null |
UTF-8
|
Python
| false
| false
| 5,931
|
py
|
# file openpyxl/shared/date_time.py
# Copyright (c) 2010 openpyxl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# @license: http://www.opensource.org/licenses/mit-license.php
# @author: Eric Gazoni
"""Manage Excel date weirdness."""
# Python stdlib imports
from math import floor
import calendar
import datetime
import time
import re
# constants
W3CDTF_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
# Raw string so the \d escapes are literal regex tokens (the original non-raw
# string triggers invalid-escape-sequence warnings on modern Python).
RE_W3CDTF = r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(.(\d{2}))?Z'
# Unix epoch as a naive datetime; same value as utcfromtimestamp(0) without
# the deprecated call.
EPOCH = datetime.datetime(1970, 1, 1)
def datetime_to_W3CDTF(dt):
    """Format *dt* as a W3C timestamp string (e.g. ``2010-01-01T00:00:00Z``)."""
    return dt.strftime(W3CDTF_FORMAT)
def W3CDTF_to_datetime(formatted_string):
    """Parse a W3C timestamp string back into a naive ``datetime`` (seconds resolution)."""
    groups = re.match(RE_W3CDTF, formatted_string).groups()[:6]
    return datetime.datetime(*[int(g) for g in groups])
class SharedDate(object):
    """Date formatting utilities for Excel with shared state.

    Excel has two primary date tracking schemes:

     * Windows - Day 1 == 1900-01-01
     * Mac     - Day 1 == 1904-01-01

    SharedDate stores which system we are using and converts dates between
    Python and Excel accordingly.

    Fixes vs. original: the century/decade split now uses ``divmod`` instead
    of string slicing (which silently mis-parsed the edge year 10000), and
    ``from_julian`` no longer depends on a module global derived from the
    deprecated ``utcfromtimestamp``.
    """
    CALENDAR_WINDOWS_1900 = 1900
    CALENDAR_MAC_1904 = 1904
    datetime_object_type = 'DateTime'

    def __init__(self):
        # Default to the Windows 1900 system; Mac 1904 is not implemented.
        self.excel_base_date = self.CALENDAR_WINDOWS_1900

    def datetime_to_julian(self, date):
        """Convert a ``datetime.datetime`` or ``datetime.date`` to the Excel serial number.

        Returns None for any other input type (matching historic behavior).
        """
        # Check datetime first: datetime is a subclass of date.
        if isinstance(date, datetime.datetime):
            return self.to_julian(date.year, date.month, date.day, \
                    hours=date.hour, minutes=date.minute, seconds=date.second)
        elif isinstance(date, datetime.date):
            return self.to_julian(date.year, date.month, date.day)

    def to_julian(self, year, month, day, hours=0, minutes=0, seconds=0):
        """Convert a calendar date/time to the Excel serial ("JD") number.

        :raises ValueError: for years outside Excel's range, or for the
            non-existent 1900-02-29 that Excel erroneously accepts.
        :raises NotImplementedError: when the Mac 1904 system is selected.
        """
        # explicitly disallow bad years
        # Excel 2000 treats JD=0 as 1/0/1900 (buggy, disallow)
        # Excel 2000 treats JD=2958466 as a bad date (Y10K bug!)
        if year < 1900 or year > 10000:
            raise ValueError('Year not supported by Excel: %s' % year)
        if self.excel_base_date != self.CALENDAR_WINDOWS_1900:
            raise NotImplementedError('Mac dates are not yet supported.')
        # Fudge factor for the erroneous fact that the year 1900 is
        # treated as a Leap Year in MS Excel. This affects every date
        # following 28th February 1900.
        excel_1900_leap_year = not (year == 1900 and month <= 2)
        excel_base_date = 2415020
        # Julian base-date adjustment: count the year from March so leap days
        # fall at the end of the counting year.
        if month > 2:
            month = month - 3
        else:
            month = month + 9
            year -= 1
        # Calculate the Julian Date, then subtract the Excel base date
        # JD 2415020 = 31 - Dec - 1899 -> Excel Date of 0
        century, decade = divmod(year, 100)
        excel_date = floor(146097 * century / 4) + \
                floor((1461 * decade) / 4) + floor((153 * month + 2) / 5) + \
                day + 1721119 - excel_base_date
        if excel_1900_leap_year:
            excel_date += 1
        # check to ensure that we exclude 2/29/1900 as a possible value
        if self.excel_base_date == self.CALENDAR_WINDOWS_1900 \
                and excel_date == 60:
            raise ValueError('Error: Excel believes 1900 was a leap year')
        excel_time = ((hours * 3600) + (minutes * 60) + seconds) / 86400
        return excel_date + excel_time

    def from_julian(self, value=0):
        """Convert an Excel serial number back to a date.

        Values >= 1 yield a ``datetime``; fractions in [0, 1) yield a ``time``.
        :raises ValueError: for negative values or the phantom 1900-02-29 (60).
        """
        if self.excel_base_date != self.CALENDAR_WINDOWS_1900:
            raise NotImplementedError('Mac dates are not yet supported.')
        # Offset between the Excel 1900 system and the Unix epoch, in days.
        excel_base_date = 25569
        if value < 60:
            # Dates before Excel's phantom 1900-02-29 are shifted by one day.
            excel_base_date -= 1
        elif value == 60:
            raise ValueError('Error: Excel believes 1900 was a leap year')
        if value >= 1:
            utc_days = value - excel_base_date
            # 1970-01-01 is the Unix epoch (naive), spelled directly instead
            # of via the deprecated utcfromtimestamp(0).
            return datetime.datetime(1970, 1, 1) + datetime.timedelta(days=utc_days)
        elif value >= 0:
            hours = floor(value * 24)
            mins = floor(value * 24 * 60) - floor(hours * 60)
            secs = floor(value * 24 * 60 * 60) - floor(hours * 60 * 60) - \
                    floor(mins * 60)
            return datetime.time(int(hours), int(mins), int(secs))
        else:
            raise ValueError('Negative dates (%s) are not supported' % value)
|
[
"abraham@elmahrek.com"
] |
abraham@elmahrek.com
|
f5d5c94e264e9b416f9e321c6f92c2a627772d05
|
9b265894d94a46f91ca20aab4fb5ece5d635dd16
|
/LPAv/Trabalho 9 - Python/questao1.py
|
5fe445972f602478bf68c2a4d9cdbae5b266bf31
|
[] |
no_license
|
JosephLevinthal/LPAv
|
fad4499ec294b3bbec82f73c9989fcf602f65d79
|
73ecf1cb39b550b13be558b80cbd97701ea34fa0
|
refs/heads/master
| 2021-09-16T11:08:07.828030
| 2018-06-20T02:56:42
| 2018-06-20T02:56:42
| 130,502,754
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 214
|
py
|
import re
# Read lines until the "####" sentinel; print SIM when the line matches the
# registration-number format (7 digits, dash, 1 digit), otherwise NAO.
pattern = re.compile("^([0-9]{7,7}-[0-9])$")
line = input()
while line != "####":
    print("SIM" if pattern.match(line) else "NAO")
    line = input()
|
[
"jvlo@icomp.ufam.edu.br"
] |
jvlo@icomp.ufam.edu.br
|
faa7bd06c204447f3804c32070026adf782a82f3
|
45243d7b7412814b8cb43fefdf088099e0c93419
|
/src/user/migrations/0007_user_uid.py
|
090e72b9c2445055597e04d4b1cb18f3a2b3feab
|
[] |
no_license
|
AlexanderNevarko/drec_stud_site
|
e08e621c54bdd145709913d4a69c8f089475fcad
|
4e176a77bf5b4a20e57e1379581713a6fa6d2d21
|
refs/heads/master
| 2020-09-21T07:55:46.820999
| 2019-01-14T20:22:50
| 2019-01-14T20:22:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 496
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-09-04 12:42
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a nullable ``uid`` card-identifier field to the user model."""

    dependencies = [
        ('user', '0006_auto_20170809_1308'),
    ]
    operations = [
        migrations.AddField(
            model_name='user',
            name='uid',
            # Optional free-form card UID, up to 128 characters.
            field=models.CharField(blank=True, max_length=128, null=True, verbose_name='UID карты'),
        ),
    ]
|
[
"makaleks@live.ru"
] |
makaleks@live.ru
|
54e37c4b81fec916c8fc8cd7108b3e190dcd4ace
|
f00699824a8c5def54421ee3cf836ec2cd15d957
|
/3/django_1703_day3/app01/urls.py
|
e7561509abebc6e85ef5e6b1b8c19ae0201bd4be
|
[] |
no_license
|
ZhiqiWu/student_manage
|
9171d78c32d6900b08de9034b9a2f50c9e24d0b8
|
da12ebaf4e9d6357cd9f832144ed756a55510433
|
refs/heads/master
| 2023-03-29T04:56:09.486061
| 2020-01-24T08:11:50
| 2020-01-24T08:11:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 177
|
py
|
from django.conf.urls import include, url
import views
# URL routes for app01: the site index plus the /zhale/ and /tpl/ demo views.
urlpatterns = [
    url(r'^$', views.index),
    url(r'^zhale/$', views.zhale),
    url(r'^tpl/$', views.tpl),
]
|
[
"1049939190@qq.com"
] |
1049939190@qq.com
|
8b50c33b2267b1a35cc4a73a591c173d6eea2280
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/otherforms/_revues.py
|
f49493dcdf30e1cce36d2723a65a559d1a24960d
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379
| 2017-01-28T02:00:50
| 2017-01-28T02:00:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 218
|
py
|
#calss header
class _REVUES():
def __init__(self,):
self.name = "REVUES"
self.definitions = revue
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['revue']
|
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
60847a2a95c5d716ddfcd9d539af72449d1f0591
|
aa49120740b051eed9b7199340b371a9831c3050
|
/greaterTree.py
|
3e539eb3cf0e2a93dd98c58dad666501da74daf5
|
[] |
no_license
|
ashutosh-narkar/LeetCode
|
cd8d75389e1ab730b34ecd860b317b331b1dfa97
|
b62862b90886f85c33271b881ac1365871731dcc
|
refs/heads/master
| 2021-05-07T08:37:42.536436
| 2017-11-22T05:18:23
| 2017-11-22T05:18:23
| 109,366,819
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,593
|
py
|
#!/usr/bin/env python
'''
Given a Binary Search Tree (BST), convert it to a Greater Tree such that every key of the original BST is changed
to the original key plus sum of all keys greater than the original key in BST.
Example:
Input: The root of a Binary Search Tree like this:
5
/ \
2 13
Output: The root of a Greater Tree like this:
18
/ \
20 13
Solution:
By leveraging the fact that the tree is a BST, we can find an O(n) solution.
The idea is to traverse BST in reverse inorder.
Reverse inorder traversal of a BST gives us keys in decreasing order.
Before visiting a node, we visit all greater nodes of that node.
While traversing we keep track of sum of keys which is the sum of all the keys greater than the key of current node.
##### Code flow is similar to diameterOfBinaryTree.py ######
'''
# Definition for a binary tree node.
class TreeNode(object):
    """Binary tree node: a value plus left/right child links (initially None)."""
    def __init__(self, x):
        self.val = x
        self.left = self.right = None
def convert_bst(root):
    """Convert a BST into a "Greater Tree" in place.

    Every node's key becomes the original key plus the sum of all strictly
    greater keys in the tree. Implemented as a reverse in-order traversal,
    which visits keys in decreasing order; the traversal helper is nested
    here (instead of a separate module-level function) so the function is
    self-contained and the running-sum accumulator stays private.

    :type root: TreeNode
    :rtype: TreeNode -- the same root, mutated in place; None for an empty tree
    """
    if not root:
        return None
    acc = [0]  # single-cell accumulator: sum of all keys visited so far

    def _reverse_inorder(node):
        # Right subtree first, so greater keys are accumulated before `node`.
        if not node:
            return
        _reverse_inorder(node.right)
        node.val += acc[0]
        acc[0] = node.val
        _reverse_inorder(node.left)

    _reverse_inorder(root)
    return root
def generate_greater_tree(node, result):
    """Reverse in-order walk that adds the running greater-key sum to each node.

    ``result`` is a single-element list used as a mutable accumulator: it
    holds the sum of all keys visited so far (i.e. all keys greater than the
    current node's original key).
    """
    if node is None:
        return None
    # Visit greater keys first, then fold their sum into this node.
    generate_greater_tree(node.right, result)
    node.val += result[0]
    result[0] = node.val
    generate_greater_tree(node.left, result)
|
[
"ashutosh.narkar@one.verizon.com"
] |
ashutosh.narkar@one.verizon.com
|
7f18193d0a006abf5e133bd1826ca925772415d9
|
a4ea525e226d6c401fdb87a6e9adfdc5d07e6020
|
/src/azure-cli/azure/cli/command_modules/marketplaceordering/manual/custom.py
|
c3d4d03fbbc01f38de3e9be67de777584acd4f5b
|
[
"MIT",
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MPL-2.0",
"LGPL-2.1-only",
"Apache-2.0",
"LGPL-2.1-or-later",
"BSD-2-Clause"
] |
permissive
|
Azure/azure-cli
|
13340eeca2e288e66e84d393fa1c8a93d46c8686
|
a40fd14ad0b6e89720a2e58d4d9be3a6ce1535ca
|
refs/heads/dev
| 2023-08-17T06:25:37.431463
| 2023-08-17T06:00:10
| 2023-08-17T06:00:10
| 51,040,886
| 4,018
| 3,310
|
MIT
| 2023-09-14T11:11:05
| 2016-02-04T00:21:51
|
Python
|
UTF-8
|
Python
| false
| false
| 1,754
|
py
|
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-lines
def term_accept(client,
                publisher,
                product,
                plan):
    """Accept marketplace terms for the given virtual-machine offer plan.

    Fetches the current term details via ``client.get`` and re-submits them
    through ``client.create`` with ``accepted`` set to True.
    """
    offer = client.get(offer_type="virtualmachine",
                       publisher_id=publisher,
                       offer_id=product,
                       plan_id=plan)
    if offer is None:
        from azure.cli.core.azclierror import ValidationError
        raise ValidationError(
            'cannot find offer with publisher {}, product {} and plan {}.'.format(publisher, product, plan))
    payload = {
        'publisher': publisher,
        'product': product,
        'plan': plan,
        'license_text_link': offer.license_text_link,
        'privacy_policy_link': offer.privacy_policy_link,
        'marketplace_terms_link': offer.marketplace_terms_link,
        'retrieve_datetime': offer.retrieve_datetime,
        'signature': offer.signature,
        # Re-submitting the fetched terms with this flag set records the acceptance.
        'accepted': True,
    }
    return client.create(offer_type="virtualmachine",
                         publisher_id=publisher,
                         offer_id=product,
                         plan_id=plan,
                         parameters=payload)
|
[
"noreply@github.com"
] |
Azure.noreply@github.com
|
36678e9ffcaa6862ab6d1a86c3294802fe005f96
|
1a1e4f0f525ebe080dcd74b7e5e8c33477eab6b8
|
/backend/dating/api/v1/viewsets.py
|
86fd6380f5c9628e9208d847b9a0a804ff8898db
|
[] |
no_license
|
crowdbotics-apps/homedesign-20669
|
0b1e65aa4f169dd9f54cbbb9d6c4af38057b7efe
|
0623f0d613cdc057079a799fccfe05db9c51ca73
|
refs/heads/master
| 2022-12-31T18:34:29.656171
| 2020-09-25T07:55:18
| 2020-09-25T07:55:18
| 298,505,595
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,095
|
py
|
from rest_framework import authentication
from dating.models import Setting, Profile, Inbox, Dislike, Match, UserPhoto, Like
from .serializers import (
SettingSerializer,
ProfileSerializer,
InboxSerializer,
DislikeSerializer,
MatchSerializer,
UserPhotoSerializer,
LikeSerializer,
)
from rest_framework import viewsets
class DislikeViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for Dislike records; session- or token-authenticated."""
    serializer_class = DislikeSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Dislike.objects.all()
class UserPhotoViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for UserPhoto records; session- or token-authenticated."""
    serializer_class = UserPhotoSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = UserPhoto.objects.all()
class MatchViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for Match records; session- or token-authenticated."""
    serializer_class = MatchSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Match.objects.all()
class ProfileViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for Profile records; session- or token-authenticated."""
    serializer_class = ProfileSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Profile.objects.all()
class LikeViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for Like records; session- or token-authenticated."""
    serializer_class = LikeSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Like.objects.all()
class SettingViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for Setting records; session- or token-authenticated."""
    serializer_class = SettingSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Setting.objects.all()
class InboxViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for Inbox records; session- or token-authenticated."""
    serializer_class = InboxSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Inbox.objects.all()
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
fde9ceaca4cd01c3520936f567cc4c0b9da7ea2f
|
6a7058009587e78b5c758ff783410325ad7c2a4b
|
/leet/stack/removeDuplicates.py
|
7b454b38480b649f7012a1ba584510296d51d6df
|
[
"Apache-2.0"
] |
permissive
|
stacykutyepov/python-cp-cheatsheet
|
8b96b76403c501f5579befd07b3c4a4c69fe914e
|
a00a57e1b36433648d1cace331e15ff276cef189
|
refs/heads/master
| 2023-07-16T13:26:35.130763
| 2021-08-30T11:23:39
| 2021-08-30T11:23:39
| 401,442,535
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 499
|
py
|
"""
time: n
space: n
Remove All Adjacent Duplicates in String II
"""
class Solution:
    """LeetCode 1209: repeatedly delete runs of k identical adjacent characters.

    O(n) time and space: each character is pushed and popped at most once.
    """
    def removeDuplicates(self, s: str, k: int) -> str:
        # Stack of [character, current run length]; reaching k pops the run.
        runs = []
        for ch in s:
            if runs and runs[-1][0] == ch:
                runs[-1][1] += 1
                if runs[-1][1] >= k:
                    runs.pop()
            else:
                runs.append([ch, 1])
        return "".join(ch * count for ch, count in runs)
|
[
"peterrlamar@gmail.com"
] |
peterrlamar@gmail.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.