Dataset schema (one record per source file; ranges are min–max from the dataset viewer):

- blob_id: string (length 40)
- directory_id: string (length 40)
- path: string (length 2–616)
- content_id: string (length 40)
- detected_licenses: list (length 0–69)
- license_type: string (2 classes)
- repo_name: string (length 5–118)
- snapshot_id: string (length 40)
- revision_id: string (length 40)
- branch_name: string (length 4–63)
- visit_date: timestamp[us]
- revision_date: timestamp[us]
- committer_date: timestamp[us]
- github_id: int64 (2.91k–686M, nullable)
- star_events_count: int64 (0–209k)
- fork_events_count: int64 (0–110k)
- gha_license_id: string (23 classes)
- gha_event_created_at: timestamp[us]
- gha_created_at: timestamp[us]
- gha_language: string (213 classes)
- src_encoding: string (30 classes)
- language: string (1 class)
- is_vendor: bool (2 classes)
- is_generated: bool (2 classes)
- length_bytes: int64 (2–10.3M)
- extension: string (246 classes)
- content: string (length 2–10.3M)
- authors: list (length 1)
- author_id: string (length 0–212)
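A minimal sketch of how a consumer might inspect one record of this table, assuming it is distributed as Parquet (the file name below is hypothetical):

import pandas as pd
# 'files.parquet' is a stand-in for the real shard name, not from the source.
df = pd.read_parquet('files.parquet')
row = df.iloc[0]
print(row['repo_name'], row['path'], row['license_type'])
print(row['content'][:200])  # first 200 characters of the stored source file

The raw records follow.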
d8e898c797f3ea91916d23af426d66afe175ce8f
|
6952e830c95a766cbcd92aa263cfebb94d0296cd
|
/FeelTheGraph/Test6.py
|
9f484d71b0a9cbe212cd8633bd35c38cd22c61ff
|
[] |
no_license
|
skianzad/MagicPen
|
b0b3bdb2a68f70ce53aac31feb1b86f5e82177e6
|
360cd956b972f9257851e89fafc0c7183f131c55
|
refs/heads/master
| 2021-10-20T10:53:30.118353
| 2021-10-13T21:17:37
| 2021-10-13T21:17:37
| 95,934,955
| 2
| 2
| null | 2021-06-16T22:33:03
| 2017-07-01T01:33:34
|
Java
|
UTF-8
|
Python
| false
| false
| 4,520
|
py
|
import numpy as np
import parser
import serial
import time
from threading import Thread,Event
from time import sleep
from bokeh.io import curdoc
from bokeh.layouts import row, widgetbox
from bokeh.models import ColumnDataSource,Div,CustomJS, Circle
from bokeh.models.widgets import Slider, TextInput
from bokeh.plotting import figure,ColumnDataSource
from bokeh.models.widgets import Button
from bokeh.models.widgets import Dropdown
from bokeh import events
# Set up data
N = 400
Del=10
Sp=250
x = np.linspace(0, 4*np.pi, N)
y = np.sin(x)
source = ColumnDataSource(data=dict(x=x, y=y))
ser=serial.Serial('COM4',19200,timeout=0.2)
# Set up plot
plot = figure(plot_height=600, plot_width=800, title="my Graph",
tools="crosshair,pan,reset,save,wheel_zoom",
x_range=[-4*np.pi, 4*np.pi], y_range=[-2.5, 2.5])
plot.line('x', 'y', source=source, line_width=3, line_alpha=0.6)
# Set up widgets
text = TextInput(title="Custom function", value='Enter f(x)')
offset = Slider(title="Offset", value=0.0, start=-5.0, end=5.0, step=0.1)
amplitude = Slider(title="Amplitude", value=1.0, start=-3.0, end=3.0, step=0.01)
Speed= Slider(title="Speed", value=250, start=100, end=250)
Delay = Slider(title="Delay", value=1, start=1, end=100)
CurveList=[("Sin","C1"),("Poly","C2"),("Abs","C3"),("inv/inf","C5"),("Frq","C6"),("Custom","C4")]
dropdown=Dropdown(label="Curve Lists",button_type="warning",menu=CurveList)
button = Button(label="Run ", button_type="success")
def update_title(attrname, old, new):
plot.title.text = text.value
x=np.linspace(-4*np.pi,4*np.pi,N)
text.on_change('value', update_title)
div = Div(width=1000)
def change_output(attr, old, new):
global Del
global Sp
Del=Delay.value
Sp=Speed.value
def display_event(div):
return CustomJS(args=dict(div=div), code="""
console.log("run")
""" )
def update_data(attrname, old, new):
# Get the current slider values
a = amplitude.value
b = offset.value
global x
global y
# Generate the new curve
if dropdown.value=='C1':
x = np.linspace(-4*np.pi, 4*np.pi, N)
y = a*np.sin(x) + b
elif dropdown.value=='C2':
x = np.linspace(-1*np.pi, 1*np.pi, N//2)  # integer sample count; N/2 is a float and breaks np.linspace
y = a*0.2*(x*x) + b
elif dropdown.value=='C3':
x = np.linspace(-2, 2, N//2)  # integer sample count
y = a*np.abs(x) + b
elif dropdown.value=='C4':
x=np.linspace(-4*np.pi,4*np.pi,N)
eq=parser.expr(text.value).compile()
y = a*eval(eq)+ b
elif dropdown.value=='C5':
x=np.linspace(0.1,13, N)
y = 1/(a*x) + b
elif dropdown.value=='C6':
x = np.linspace(-4*np.pi, 4*np.pi, N)
y = np.sin(a*x) + b
else:
x = np.linspace(-4*np.pi, 4*np.pi, N)
y = a*np.sin(x) + b
source.data = dict(x=x, y=y)
for w in [offset, amplitude]:
w.on_change('value', update_data)
for Z in [Delay,Speed]:
Z.on_change('value',change_output)
def stop():
global flag
flag=False
print("stop the thread")
def function_to_call(attr, old, new):
print(dropdown.value)
update_data(attr, old, new)
def run():
if not ser.isOpen():
ser.open()
dx=np.gradient(x)
dy=np.gradient(y)
scale=np.divide(dy,dx)
source.data = dict(x=[], y=[])
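# Convert each segment's slope into X/Y motor step counts, send them as a framed ASCII command over serial, and stream the segment to the plot.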
for i in range(len(x)-1):
scale=np.divide(dy[i],dx[i])
mx_int=int((250/(np.sqrt(1+np.power(scale,2)))))
my_int=int((mx_int*scale))
new_str = ''.join(['a',str('{:03d}'.format(my_int+256)),'b',str('{:03d}'.format(mx_int+256)),'/r','\n'])
ser.write(new_str.encode('ascii','replace'))
print(ser.readline())
sleep(Del/1000)
new_data={'x':[x[i]*(1+Del/25),x[i+1]*(1+Del/25)],'y':[y[i]*(1+Del/25),y[i+1]*(1+Del/25)]}
source.stream(new_data)
new_str = ''.join(['a',str('{:03d}'.format(256)),'b',str('{:03d}'.format(256)),'r','\n'])
ser.write(new_str.encode('ascii','replace'))
sleep(0.01)
print(ser.readline())
sleep(0.01)
ser.close()
print ("Process finished...exiting1")
dropdown.on_change('value', function_to_call)
button.on_click(run)
inputs = widgetbox(dropdown,text, offset, amplitude, Speed, Delay,button)
# Set up Server
curdoc().add_root(row(inputs,plot, width=1200))
curdoc().title = "Sliders"
|
[
"skianzad@cs.ubc.ca"
] |
skianzad@cs.ubc.ca
|
8dfb340638288546bff11413b78f1e901f158e1d
|
496e05014492b4bbecf9f15c40ae416c21e27a46
|
/src/outpost/django/campusonline/migrations/0013_course_group_term.py
|
f48c4ba82d8ab1981cb8ca052bd05218d9ba72cf
|
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
medunigraz/outpost_deprecated
|
b1ff802054c04cf989b3b660e132fa6a1c2a078c
|
bc88eaa3bb504d394fdf13f1131e40db27759c89
|
refs/heads/master
| 2022-01-23T15:46:34.859095
| 2019-05-21T08:38:11
| 2019-05-21T08:38:11
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,374
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-03-15 15:50
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
forward = [
'''
DROP INDEX IF EXISTS campusonline_coursegroupterm_selection_idx;
''',
'''
DROP INDEX IF EXISTS campusonline_coursegroupterm_person_idx;
''',
'''
DROP INDEX IF EXISTS campusonline_coursegroupterm_room_idx;
''',
'''
DROP INDEX IF EXISTS campusonline_coursegroupterm_timerange_idx;
''',
'''
DROP INDEX IF EXISTS campusonline_coursegroupterm_id_idx;
''',
'''
DROP MATERIALIZED VIEW IF EXISTS "public"."campusonline_coursegroupterm";
''',
'''
DROP FOREIGN TABLE IF EXISTS "campusonline"."lv_grp_term";
''',
'''
CREATE FOREIGN TABLE "campusonline"."lv_grp_term" (
LV_GRP_NR numeric,
PERS_NR numeric,
TERMIN_NR numeric,
LV_BEGINN timestamp,
LV_ENDE timestamp,
RAUM_NR numeric,
LERNEINHEIT varchar
)
SERVER sqlalchemy OPTIONS (
tablename 'LV_GRP_TERM_V',
db_url '{}'
);
'''.format(settings.MULTICORN.get('campusonline')),
'''
CREATE MATERIALIZED VIEW "public"."campusonline_coursegroupterm" AS SELECT
format('%s-%s-%s', termin_nr::integer, lv_grp_nr::integer, pers_nr::integer) AS id,
termin_nr::integer AS term,
lv_grp_nr::integer AS coursegroup_id,
pers_nr::integer AS person_id,
(lv_beginn AT TIME ZONE 'Europe/Vienna') ::timestamptz AS start,
(lv_ende AT TIME ZONE 'Europe/Vienna') ::timestamptz AS end,
raum_nr::integer AS room_id,
lerneinheit as title
FROM "campusonline"."lv_grp_term"
WITH DATA;
''',
'''
CREATE INDEX campusonline_coursegroupterm_id_idx ON "public"."campusonline_coursegroupterm" ("id");
''',
'''
CREATE INDEX campusonline_coursegroupterm_timerange_idx ON "public"."campusonline_coursegroupterm" ("start", "end");
''',
'''
CREATE INDEX campusonline_coursegroupterm_room_idx ON "public"."campusonline_coursegroupterm" ("room_id");
''',
'''
CREATE INDEX campusonline_coursegroupterm_person_idx ON "public"."campusonline_coursegroupterm" ("person_id");
''',
'''
CREATE INDEX campusonline_coursegroupterm_selection_idx ON "public"."campusonline_coursegroupterm" ("person_id", "room_id", "start", "end");
''',
]
reverse = [
'''
DROP INDEX IF EXISTS campusonline_coursegroupterm_selection_idx;
''',
'''
DROP INDEX IF EXISTS campusonline_coursegroupterm_person_idx;
''',
'''
DROP INDEX IF EXISTS campusonline_coursegroupterm_room_idx;
''',
'''
DROP INDEX IF EXISTS campusonline_coursegroupterm_timerange_idx;
''',
'''
DROP INDEX IF EXISTS campusonline_coursegroupterm_id_idx;
''',
'''
DROP MATERIALIZED VIEW IF EXISTS "public"."campusonline_coursegroupterm";
''',
'''
DROP FOREIGN TABLE IF EXISTS "campusonline"."lv_grp_term";
''',
'''
CREATE FOREIGN TABLE "campusonline"."lv_grp_term" (
LV_GRP_NR numeric,
PERS_NR numeric,
TERMIN_NR numeric,
LV_BEGINN timestamptz,
LV_ENDE timestamptz,
RAUM_NR numeric
)
SERVER sqlalchemy OPTIONS (
tablename 'LV_GRP_TERM_V',
db_url '{}'
);
'''.format(settings.MULTICORN.get('campusonline')),
'''
CREATE MATERIALIZED VIEW "public"."campusonline_coursegroupterm" AS SELECT
format('%s-%s-%s', termin_nr, lv_grp_nr, pers_nr) AS id,
termin_nr::integer AS termroom_id,
lv_grp_nr::integer AS coursegroup_id,
pers_nr::integer AS person_id,
lv_beginn AS start,
lv_ende AS end,
raum_nr::integer AS room_id
FROM "campusonline"."lv_grp_term"
WITH DATA;
''',
'''
CREATE INDEX campusonline_coursegroupterm_id_idx ON "public"."campusonline_coursegroupterm" ("id");
''',
'''
CREATE INDEX campusonline_coursegroupterm_timerange_idx ON "public"."campusonline_coursegroupterm" ("start", "end");
''',
'''
CREATE INDEX campusonline_coursegroupterm_room_idx ON "public"."campusonline_coursegroupterm" ("room_id");
''',
'''
CREATE INDEX campusonline_coursegroupterm_person_idx ON "public"."campusonline_coursegroupterm" ("person_id");
''',
'''
CREATE INDEX
campusonline_coursegroupterm_selection_idx
ON
"public"."campusonline_coursegroupterm"
(
"person_id",
"room_id",
"start",
"end"
);
''',
]
dependencies = [
('campusonline', '0012_event'),
]
operations = [
migrations.RunSQL(
forward,
reverse
)
]
|
[
"michael@fladi.at"
] |
michael@fladi.at
|
93e61bee22e5b5f112cdee09a24953ca0a9a1b57
|
cf3f8ed240aac2076cac2940f9821775e3ee09c0
|
/env.py
|
bdbaacf799cbd20b6df893866b2b4fa3b33ea9ee
|
[] |
no_license
|
IceFlameWorm/home_credit_default
|
37f4959aee8ff01606dfab8d3ca6a1faee4d6ef1
|
a84631efa643aa48113d8875d76023b104448a42
|
refs/heads/master
| 2020-03-23T05:00:12.254364
| 2018-07-25T01:53:25
| 2018-07-25T01:53:25
| 141,118,674
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,161
|
py
|
import os
PROJECT_PATH = os.path.dirname(__file__)
DATA_PATH = os.path.join(PROJECT_PATH, 'data')
ORIGINAL_DATA_PATH = os.path.join(DATA_PATH, 'original')
APPLICATION_TRAIN_CSV = os.path.join(ORIGINAL_DATA_PATH, 'application_train.csv')
APPLICATION_TEST_CSV = os.path.join(ORIGINAL_DATA_PATH, 'application_test.csv')
BUREAU_CSV = os.path.join(ORIGINAL_DATA_PATH, 'bureau.csv')
BUREAU_BALANCE_CSV = os.path.join(ORIGINAL_DATA_PATH, 'bureau_balance.csv')
CREDIT_CARD_BALANCE_CSV = os.path.join(ORIGINAL_DATA_PATH, 'credit_card_balance.csv')
HOMECREDIT_COLUMNS_DESCRIPTION_CSV = os.path.join(ORIGINAL_DATA_PATH, 'HomeCredit_columns_description.csv')
INSTALLMENTS_PAYMENTS_CSV = os.path.join(ORIGINAL_DATA_PATH, 'installments_payments.csv')
POS_CASH_BALANCE_CSV = os.path.join(ORIGINAL_DATA_PATH, 'POS_CASH_balance.csv')
PREVIOUS_APPLICATION_CSV = os.path.join(ORIGINAL_DATA_PATH, 'previous_application.csv')
SAMPLE_SUBMISSION_CSV = os.path.join(ORIGINAL_DATA_PATH, 'sample_submission.csv')
EDA_DATA_PATH = os.path.join(DATA_PATH, 'eda')
PREPROCESSING_DATA_PATH = os.path.join(DATA_PATH, 'preprocessing')
FEATURES_DATA_PATH = os.path.join(DATA_PATH, 'features')
|
[
"empty@email.com"
] |
empty@email.com
|
0669027bc64f9d1711ba58806db266d9ff983c81
|
7ec4ace3e19307329291bff17b190769d15ff4f6
|
/habitica/migrations/0002_auto_20200617_1521.py
|
3b2273df4c28d4cca9f1a8ac6d4fe6f8f7785dfb
|
[] |
no_license
|
suhaibakram/task-application
|
33aa0afd9bfcff733f47242c11cea77c6ada086a
|
b82e7d4175ef164fa96f281de8fff8e716b28715
|
refs/heads/master
| 2022-11-05T20:17:02.988726
| 2020-06-19T12:42:07
| 2020-06-19T12:42:07
| 273,490,860
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 393
|
py
|
# Generated by Django 3.0.7 on 2020-06-17 09:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('habitica', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='task',
name='datecompleted',
field=models.DateTimeField(blank=True, null=True),
),
]
|
[
"suhaibakram@gmail.com"
] |
suhaibakram@gmail.com
|
bacc3db01b5989422ee6634f4042279b4e3cf782
|
8b829db4c81e032ce488924302cf40a6028dd3c5
|
/Kernal/wsgi.py
|
6bd87853c385125f6d8e1f72054dd3f2ed9dfc1a
|
[] |
no_license
|
diamondlx/Ruaaaaaa
|
65081ea1f61fbe0c411299fffad695b8c78b7b3d
|
b89f1fcbcccf5ae2857d668436a748bb443556b2
|
refs/heads/master
| 2021-01-11T00:44:50.270601
| 2016-10-10T21:53:51
| 2016-10-10T21:53:51
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 392
|
py
|
"""
WSGI config for SoftEngi project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Kernal.settings")
application = get_wsgi_application()
|
[
"l-mh@qq.com"
] |
l-mh@qq.com
|
54665c086c321548e228c9b8028aae095b3792ff
|
36f820a085179dca152bdd27a2e6ec2b5b5da9e8
|
/f-string practice 1.py
|
49158bf14e04d0c76acb447eda168e4de5103e73
|
[] |
no_license
|
ThePergament/kyh-practice
|
43a53882511f42468555462b594195f84dfcfa08
|
679f6af5337b88d86eff823086edb565f3cc6fbb
|
refs/heads/master
| 2022-12-30T15:36:15.997292
| 2020-10-22T08:58:33
| 2020-10-22T08:58:33
| 291,672,259
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 17
|
py
|
print(f"{5 * 5}")
|
[
"david.pergament@student.kyh.se"
] |
david.pergament@student.kyh.se
|
dff0f20dcd47ac1830b20ac7218335a2cdb76fa0
|
c73657ff5b091b46f06584a578323c9fc93caf07
|
/src/harrastuspassi/harrastuspassi/migrations/0011_create_promotion_model.py
|
e5c459eb754a9ce0fc2a915ae990d0cfe0ea7dda
|
[
"MIT"
] |
permissive
|
City-of-Helsinki/harrastuspassi-backend
|
d91e291e9e153e900d5893c527a03e7c18bc1cb5
|
b9812f1b3169e6d0c7da20224fd66d0d74ff5255
|
refs/heads/master
| 2023-04-30T11:57:51.067772
| 2022-06-06T12:17:42
| 2022-06-06T12:17:42
| 198,590,780
| 2
| 6
|
MIT
| 2023-03-31T14:35:17
| 2019-07-24T08:20:44
|
Python
|
UTF-8
|
Python
| false
| false
| 1,411
|
py
|
# Generated by Django 2.2.4 on 2019-11-08 06:33
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('harrastuspassi', '0010_category_name_not_unique'),
]
operations = [
migrations.CreateModel(
name='Promotion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('updated_at', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=1024)),
('description', models.TextField()),
('start_date', models.DateField(verbose_name='Start date')),
('start_time', models.TimeField(verbose_name='Start time')),
('end_date', models.DateField(verbose_name='End date')),
('end_time', models.TimeField(verbose_name='End time')),
('cover_image', models.ImageField(blank=True, null=True, upload_to='promo_images')),
('hobby', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='harrastuspassi.Hobby')),
],
options={
'abstract': False,
},
),
]
|
[
"oskari.tiala@haltu.fi"
] |
oskari.tiala@haltu.fi
|
7fae0b39fa9d2a48694f8401cc02bf46a12360c2
|
c1bd12405d244c5924a4b069286cd9baf2c63895
|
/azure-cognitiveservices-vision-face/azure/cognitiveservices/vision/face/models/group_result.py
|
5902d1d3e8c52dadc9ec67bfb1c4f19174a5209c
|
[
"MIT"
] |
permissive
|
lmazuel/azure-sdk-for-python
|
972708ad5902778004680b142874582a284a8a7c
|
b40e0e36cc00a82b7f8ca2fa599b1928240c98b5
|
refs/heads/master
| 2022-08-16T02:32:14.070707
| 2018-03-29T17:16:15
| 2018-03-29T17:16:15
| 21,287,134
| 1
| 3
|
MIT
| 2019-10-25T15:56:00
| 2014-06-27T19:40:56
|
Python
|
UTF-8
|
Python
| false
| false
| 1,281
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class GroupResult(Model):
"""An array of face groups based on face similarity.
:param groups: A partition of the original faces based on face similarity.
Groups are ranked by number of faces
:type groups: list[list[str]]
:param messy_group: Face ids array of faces that cannot find any similar
faces from original faces.
:type messy_group: list[str]
"""
_validation = {
'groups': {'required': True},
}
_attribute_map = {
'groups': {'key': 'groups', 'type': '[[str]]'},
'messy_group': {'key': 'messyGroup', 'type': '[str]'},
}
def __init__(self, groups, messy_group=None):
super(GroupResult, self).__init__()
self.groups = groups
self.messy_group = messy_group
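A minimal usage sketch (the face IDs below are made-up placeholders, not from the source):
result = GroupResult(groups=[['face_id_1', 'face_id_2'], ['face_id_3']], messy_group=['face_id_4'])
print(result.groups, result.messy_group)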
|
[
"laurent.mazuel@gmail.com"
] |
laurent.mazuel@gmail.com
|
e59e1e28d923ae25dfe4cffdc7fce23e4c048e0a
|
2e07f6c0545164b350b39cb0c0aca12c66794c0f
|
/brain_games/scripts/brain_progression.py
|
937eee7e38dd88d161668eb9d603fb770ca57b15
|
[] |
no_license
|
nightdentist/python-project-lvl1
|
d9575c274a8c5afd17a7e6687fcdd832da5ae5c4
|
af77e68acdb670d9499a094b8534d92c5d84d89d
|
refs/heads/master
| 2023-09-03T21:36:35.325294
| 2023-09-03T13:13:11
| 2023-09-03T13:13:11
| 274,198,698
| 0
| 1
| null | 2021-07-11T14:22:12
| 2020-06-22T17:14:51
| null |
UTF-8
|
Python
| false
| false
| 149
|
py
|
#!/usr/bin/env python3
from brain_games.games.brain_progression import run_game
def main():
run_game()
if __name__ == "__main__":
main()
|
[
"sasha_2014@mail.ru"
] |
sasha_2014@mail.ru
|
b68fdf5ed28f9776e107e9fb139a9a61db938a29
|
9e4b28953346211906663006e43508ee86cfcf84
|
/hello_flask.py
|
ed90fcf50b2436243fefbed4339581b7daf8ceed
|
[
"MIT"
] |
permissive
|
Kimblebrook/hello_bears
|
3d8825d59d1db15344b337d4ef80e9b1fa888542
|
15a1ddc3a67885689ed23507a930266b4e0ae666
|
refs/heads/master
| 2021-05-04T10:11:47.468446
| 2018-02-06T21:28:02
| 2018-02-06T21:28:02
| 52,266,059
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,110
|
py
|
from flask import Flask, render_template, request, redirect
from snowplow_tracker import Emitter, Tracker
from snowplow_tracker import SelfDescribingJson
app = Flask(__name__)
email_addresses = []
e = Emitter("localhost:8080")
t = Tracker(e, namespace="python", app_id="hello_bears")
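# Each route below fires a Snowplow tracking event through this tracker in addition to serving its response.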
@app.route('/emails', methods=['GET'])
def emails():
t.track_self_describing_event(SelfDescribingJson(
"iglu:com.hellobears/email_addresses_viewed/jsonschema/1-0-0",
{
"test": "stewart"
}
))
return render_template('emails.html', email_addresses=email_addresses)
@app.route('/signup', methods=['POST'])
def signup():
email = request.form['email']
email_addresses.append(email)
t.track_self_describing_event(SelfDescribingJson(
"iglu:com.hellobears/email_address_submitted/jsonschema/1-0-0",
{
"email_address": email
}
))
return redirect('/')
@app.route('/')
def hello_world():
t.track_page_view("www.hellobears.com", "Index")
return render_template('index.html')
if __name__ == '__main__':
app.run()
|
[
"stewart.duncan@simplybusiness.co.uk"
] |
stewart.duncan@simplybusiness.co.uk
|
583004daaa7b7a7ef88a665bbefb063dfe271824
|
94677e1d12cf07307c59eba4a82c9deebbbcb2f6
|
/currencyAtlas.py
|
09a27417f30795b363b5c8cb20d87a1de9a89644
|
[] |
no_license
|
jonnyf89/AirlineManagementSystem
|
aa244f9257f69e0fcd7be5c6c877b9068680685f
|
1c54d0b833bc05008e8d6aa784e7967cfd901584
|
refs/heads/master
| 2021-05-15T03:14:07.178179
| 2017-10-04T16:37:10
| 2017-10-04T16:37:10
| 105,790,351
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,782
|
py
|
'''CurrencyAtlas holds information on currencies. It takes a CSV file as input and
uses it to populate a dictionary object with relevant data about each currency.'''
import csv
from currency import Currency
class CurrencyAtlas:
__currencyDict = {} #this is the dictionary object that we will populate with currency data
def __init__(self, csvFile): #the constructor method has the loadData method as an attribute
self.loadData(csvFile)
def loadData(self, fileName): #the loadData method loads the dictionary object with currency data. It does not need to be called
try: #as it is invoked automatically by the constructor.
with open(fileName, errors = 'ignore') as csvfile: #I use 'errors = 'ignore'' here because it is more effective than a try catch
reader = csv.DictReader(csvfile) #Please see note 1 in supporting document
for row in reader:
self.__currencyDict[row['code']] = Currency(row['exchangeRate'], row['name'])
except FileNotFoundError:
print("No CSV file found.")
def getCurrency(self, code): #this method returns the dictionary entry associated with the provided code
return self.__currencyDict[code]
def showAll(self): #returns the entire contents of the __currencyDict
return self.__currencyDict
def getExch(self, code): #returns the exchange rate for a currency based on a given currency code.
currency1 = self.getCurrency(code)
return currency1.exchangeRate
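A minimal usage sketch, assuming a CSV with 'code', 'exchangeRate' and 'name' columns (the file name is hypothetical):
atlas = CurrencyAtlas('currencies.csv')
print(atlas.getExch('EUR'))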
|
[
"jonathan.gorselodge@gmail.com"
] |
jonathan.gorselodge@gmail.com
|
df623888c31304f5867fa8b6f810794da0acaf65
|
00f51ffb9cfe1785871269aee5b12be0b692763a
|
/neuralnetwork/models.py
|
f992c6f56b22c1cd7381a9bbd421f97fee61943b
|
[] |
no_license
|
ailiasi/Drafter
|
c46445ea250f8d8c21a85b8ea0afdf13e53a3e0c
|
821d2c8b4b8896d61c745faf1c2b998254ddc300
|
refs/heads/master
| 2020-04-22T21:09:23.943553
| 2019-03-04T12:33:15
| 2019-03-04T12:33:15
| 170,664,308
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,391
|
py
|
import keras
from keras.layers import Input, Dense, Dropout
from keras.models import Model
from keras import regularizers
def siamese_model(input1_shape, input2_shape,
output_nodes,
num_siam_layers, num_siam_nodes,
num_hid_layers, num_hid_nodes,
dropout=0, regularization =0):
# TODO: add dropout
input_a = Input(shape = input1_shape, name = "input_a")
input_b = Input(shape = input2_shape, name = "input_b")
if num_siam_layers > 0:
siamese_layer = Dense(num_siam_nodes, activation = 'relu', kernel_regularizer = regularizers.l1(regularization), name = "siamese_0")
hidden_a = siamese_layer(input_a)
hidden_b = siamese_layer(input_b)
for i in range(1, num_siam_layers):
siamese_layer = Dense(num_siam_nodes, activation = 'relu', kernel_regularizer = regularizers.l1(regularization), name = "siamese_" + str(i))
hidden_a = siamese_layer(hidden_a)
hidden_b = siamese_layer(hidden_b)
merged_layer = keras.layers.concatenate([hidden_a, hidden_b], axis = -1, name = "merged") # Why is the default axis -1?
for i in range(num_hid_layers):
merged_layer = Dense(num_hid_nodes, activation = 'relu', kernel_regularizer = regularizers.l1(regularization), name = "hidden_" + str(i))(merged_layer)
output = Dense(output_nodes, activation = 'softmax', name = "output")(merged_layer)
model = Model(inputs=[input_a, input_b], outputs=output)
model.compile(optimizer = 'adam', loss='binary_crossentropy', metrics = ['accuracy'])
return model
def simple_model(input_shape, output_nodes, num_hid_layers, num_hid_nodes, dropout = 0, regularization = 0):
input_layer = Input(shape = input_shape, name = "input")
hidden_layer = input_layer
for i in range(0,num_hid_layers):
hidden_layer = Dense(num_hid_nodes, activation = 'relu', kernel_regularizer = regularizers.l1(regularization), name = "hidden_" + str(i))(hidden_layer)
hidden_layer = Dropout(dropout, name = "dropout_" + str(i))(hidden_layer)
output = Dense(output_nodes, activation = 'softmax', name = "output")(hidden_layer)
model = Model(inputs=input_layer, outputs = output)
model.compile(optimizer = 'adam', loss='binary_crossentropy', metrics = ['accuracy'])
return model
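A minimal construction sketch (every shape and size below is illustrative, not from the source):
model = siamese_model(input1_shape=(10,), input2_shape=(10,), output_nodes=2, num_siam_layers=2, num_siam_nodes=32, num_hid_layers=1, num_hid_nodes=64, dropout=0.2, regularization=0.001)
model.summary()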
|
[
"aili.asikainen92@gmail.com"
] |
aili.asikainen92@gmail.com
|
87575ea8f47f2baef66edf756f50578d54f9d9b1
|
03969015ab882f4751dc0e91beeda1212babca48
|
/robot_code/Nimbus_ws/build/kinova-ros/kinova_moveit/robot_configs/j2s7s300_moveit_config/catkin_generated/pkg.develspace.context.pc.py
|
6e682d56813b11fdfaeba0a3081955bd2437a094
|
[] |
no_license
|
lnairGT/Thesis_code
|
f3ad57f4344691227dcd128a741eb9c0e937738e
|
6f5dbfc2510272f294a0e9bb4273beceeacbff2a
|
refs/heads/master
| 2023-03-17T21:43:56.320553
| 2020-09-26T16:05:31
| 2020-09-26T16:05:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 384
|
py
|
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "j2s7s300_moveit_config"
PROJECT_SPACE_DIR = "/home/lnair3/Nimbus_ws/devel"
PROJECT_VERSION = "0.2.0"
|
[
"lnair3@gatech.edu"
] |
lnair3@gatech.edu
|
b36e5ba6704b5848181977ee4ee4422bac400451
|
fb377465aaf1fa4e90bd659e58fdc71ad19a5b45
|
/scripts/ROS Simulation/test/input.py
|
6c988fab059445b415ae36a38815d30a4e5251bd
|
[] |
no_license
|
MohamedAlaaEldinFarghaly/DeepRL-Autonomous-Vehicle-ROS
|
1a469b3086804a7d8dbf50856fa32d5fb7a24315
|
ab9baf0454122b8058340a291fe2c030b02e159f
|
refs/heads/master
| 2021-05-21T22:44:03.434289
| 2020-04-03T20:56:28
| 2020-04-03T20:56:28
| 252,839,624
| 0
| 0
| null | 2020-04-03T20:54:31
| 2020-04-03T20:54:31
| null |
UTF-8
|
Python
| false
| false
| 370
|
py
|
import numpy as np
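# Load comma-separated weight values from text files and reshape them to the tensor shapes the network expects.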
with open('W9.txt', "r") as ins:
for line in ins:
items = line.split(",")
tmp=np.asarray(items)
ttmp = tmp.astype(float)  # np.float was removed in NumPy 1.24; the builtin float is equivalent
W7_txt = ttmp.reshape(1, 10)
with open('W1.txt', "r") as ins:
for line in ins:
items = line.split(",")
tmp=np.asarray(items)
ttmp = tmp.astype(float)
W1_txt = ttmp.reshape(5,5,1,24)
print(type(W1_txt[0][0][0][0]))
|
[
"noreply@github.com"
] |
MohamedAlaaEldinFarghaly.noreply@github.com
|
dc9b9f5a1b09050d49ef8368e52933489708309b
|
0c575380b862ef3b7e06b57bd923cae589c042a6
|
/python/problem11.py
|
e6ed18f07536b0cdfc8c936506195e581e0a81a6
|
[] |
no_license
|
mawaldne/projecteuler
|
9e6bbef2b49fd7a37b1f33c6e4a9868f37606fe5
|
b9f158abd9dec4d54e6c0f17ca5e554a0a31c6c4
|
refs/heads/master
| 2020-08-05T00:58:27.950931
| 2015-05-23T21:03:26
| 2015-05-23T21:03:26
| 1,083,702
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,328
|
py
|
from operator import mul
from functools import reduce
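# Project Euler problem 11: find the greatest product of four adjacent numbers (horizontal, vertical or diagonal) in the 20x20 grid read from problem11.input.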
data = open('problem11.input').readlines()
maxnums = [];
#horizontal
for n in data:
n = n.split(" ");
nos = [int(c) for c in n];
maxnum = max([reduce(mul, nos[i:i+4]) for i in range(len(nos)+1-4)])
maxnums.append(maxnum)
#vertical
rows = 20
a = [[] for j in range(rows)]
for n in data:
n = n.split(" ");
nos = [int(c) for c in n];
for i in range(len(nos)):
a[i].append(nos[i]);
for nos in a:
if (len(nos) >= 4):
maxnum = max([reduce(mul, nos[i:i+4]) for i in range(len(nos)+1-4)])
maxnums.append(maxnum)
#diagonally right
rows = 19
a = [[] for j in range(rows)]
for n in data:
n = n.split(" ");
nos = [int(c) for c in n];
for i in range(len(nos)):
if (i == 0):
a.insert(0, []);
a[i].append(nos[i]);
for nos in a:
if (len(nos) >= 4):
maxnum = max([reduce(mul, nos[i:i+4]) for i in range(len(nos)+1-4)])
maxnums.append(maxnum)
#diagonally left
rows = 19
a = [[] for j in range(rows)]
for n in data:
n = n.split(" ");
nos = [int(c) for c in n];
nos = nos[::-1]; #reverse each line and do the same
for i in range(len(nos)):
if (i == 0):
a.insert(0, []);
a[i].append(nos[i]);
for nos in a:
if (len(nos) >= 4):
maxnum = max([reduce(mul, nos[i:i+4]) for i in range(len(nos)+1-4)])
maxnums.append(maxnum)
print(max(maxnums))
|
[
"mike.waldner@gmail.com"
] |
mike.waldner@gmail.com
|
d7d7fdfe8c4897c3966327426b5d757e48d83d63
|
a39a06bd11832e1d8fda45e3905042a2a230f17c
|
/user/migrations/0007_publishersettings.py
|
10b054dfd1a45322750bbf20aab59c6298784821
|
[] |
no_license
|
yugeshk/event-notifications
|
a33abf3d643857b02a8d5d932a693843a58c90d6
|
6879d796b9e1865fdd45a823f2f732aaa0789816
|
refs/heads/master
| 2021-07-24T14:01:19.253174
| 2017-11-06T18:32:02
| 2017-11-06T18:32:02
| 105,417,889
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 866
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-09 15:31
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('user', '0006_usersettings'),
]
operations = [
migrations.CreateModel(
name='publisherSettings',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('displayName', models.CharField(max_length=100)),
('website', models.CharField(max_length=100)),
('contentType', models.CharField(max_length=100)),
('profileId', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='user.baseUser')),
],
),
]
|
[
"yugesh.kothari@gmail.com"
] |
yugesh.kothari@gmail.com
|
d3d6a6dd4e5a0e01011c19d53eb94ff46405d565
|
2b7122d25339155f33efb2f742c1ed8c0f324e4d
|
/fifo_return_and_transfer/models/transfer_over_locations_lot_lines.py
|
79dba89835332ecc3b0cf44e021089fc7b134ba8
|
[] |
no_license
|
mohamed-helmy/business_land
|
4534678883aae0b4ba713e0a3253264310f76585
|
0dfad1783183fe52e603dc1cd168241f62d97dd2
|
refs/heads/master
| 2022-04-15T17:25:16.661766
| 2020-04-15T12:29:50
| 2020-04-15T12:29:50
| 255,908,150
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,445
|
py
|
# -*- coding: utf-8 -*-
import os
import base64
import logging
from odoo import models, fields, api, _
from odoo.exceptions import ValidationError, UserError
_logger = logging.getLogger(__name__)
class TransferOverLocationsLines(models.Model):
_name = 'transfer.over.locations.lines'
product_id = fields.Many2one('product.product', string="Product", required=True)
product_tracking = fields.Selection(related="product_id.tracking")
quantity = fields.Float(default=1.0)
transfer_id = fields.Many2one(comodel_name="transfer.over.locations", required=True, ondelete='cascade')
lot_ids = fields.One2many(comodel_name="transfer.over.locations.lot.lines", inverse_name="line_id",
string="Lots/Serials")
file_import = fields.Binary("Import 'csv' File",
help="*Import a list of lot/serial numbers from a csv file \n *Only csv files is allowed"
"\n *The csv file must contain a row header namely 'Serial Number'")
file_name = fields.Char("file name")
# importing "csv" file and appending the datas from file to transfer lines
@api.multi
def input_file(self):
if self.file_import:
file_value = self.file_import.decode("utf-8")
filename, FileExtension = os.path.splitext(self.file_name)
if FileExtension != '.csv':
raise UserError("Invalid File! Please import the 'csv' file")
data_list = []
input_file = base64.b64decode(file_value)
lst = []
for loop in input_file.decode("utf-8").split("\n"):
lst.append(loop)
if 'Serial Number' not in lst[0]:
raise UserError('Row header name "Serial Number" is not found in CSV file')
lst_index = lst[0].replace('\r', '').split(',').index("Serial Number")
lst.pop(0)
for vals in lst:
lst_r = []
for value in vals.split(','):
lst_r.append(value)
if vals and lst_r:
data = self.env['stock.production.lot'].search(
[('product_id', '=', self.product_id.id), ('name', '=', lst_r[lst_index].replace('\r', ''))])
# conditions based on unique serial number
if not data:
raise UserError(
_('Serial Number %s does not belong to product - "%s".') % (
str(vals), self.product_id.name))
data_list.append((0, 0, {'lot_id': data.id,
'quantity': 1,
'line_id': self.id,
'product_id': self.product_id.id,
}))
if self.lot_ids:
self.lot_ids.unlink()
self.lot_ids = data_list
else:
raise UserError("Invalid File! Please import the 'csv' file")
# view reference for lot_ids
return {
'name': _('Serials/Lots'),
'type': 'ir.actions.act_window',
'view_type': 'form',
'view_mode': 'form',
'res_model': 'transfer.over.locations.lines',
'views': [(self.env.ref('fifo_return_and_transfer.transfer_over_locations_lines_form').id, 'form')],
'target': 'new',
'res_id': self.id,
}
def action_show_lots(self):
return {
'name': _('Serials/Lots'),
'type': 'ir.actions.act_window',
'view_type': 'tree',
'view_mode': 'tree, form',
'views': [(self.env.ref('fifo_return_and_transfer.transfer_over_locations_lines_form').id, 'form')],
'res_model': 'transfer.over.locations.lines',
'target': 'new',
'res_id': self.id,
}
class TransferOverLocationsLotLines(models.Model):
_name = 'transfer.over.locations.lot.lines'
lot_id = fields.Many2one(comodel_name="stock.production.lot", string="Lot/Serial", required=True)
line_id = fields.Many2one(comodel_name="transfer.over.locations.lines", required=True, ondelete='cascade')
product_id = fields.Many2one(comodel_name="product.product", related="line_id.product_id", readonly=True)
quantity = fields.Float(default=1.0)
|
[
"helmy419@gmail.com"
] |
helmy419@gmail.com
|
6875ed7f1a4016c5bfb9400aa55a125b509c46ec
|
692eceac2533150b86aa173b451698b7a12ff735
|
/PycharmProjects/AutoPackageForiOS/venv/bin/pyrcc5
|
5c54f39fda0bbb0eac69a225525e8372d8500495
|
[] |
no_license
|
linlufeng/LufengLearnPython
|
cb74f34926663dc9b7d4d6789e6e7e044dd73db3
|
bedcbf4fea6d048a3903a623a4386ac5d484a70d
|
refs/heads/master
| 2022-09-12T22:14:19.243757
| 2022-08-25T02:54:13
| 2022-08-25T02:54:13
| 200,183,327
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 301
|
#!/Users/lufenglin/Documents/git/LufengLearnPython/PycharmProjects/AutoPackageForiOS/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from PyQt5.pyrcc_main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"yuzhibo_8486@qq.com"
] |
yuzhibo_8486@qq.com
|
|
5b0ac47bd8ea13361467fd9f3835b9eaa5426b9a
|
6547262baa89554b8e98a697302c74e84852f88a
|
/server/openslides/assignments/migrations/0023_assignmentpoll_change_fields_2.py
|
492ac18ce6907eea05b544a75f769a90a0dec5c3
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
FinnStutzenstein/OpenSlides
|
f957ec63fd5224e9fce1a3ea0ea24a0cf5cf7454
|
7dc35dce404339b41c7729eb3de29010ca63f9a0
|
refs/heads/master
| 2021-11-26T21:19:42.408206
| 2021-07-15T08:38:26
| 2021-07-15T08:38:26
| 64,663,189
| 0
| 0
|
MIT
| 2020-08-18T10:52:58
| 2016-08-01T11:58:39
|
TypeScript
|
UTF-8
|
Python
| false
| false
| 529
|
py
|
# Generated by jsangmeister on 2021-03-22 12:44
from django.db import migrations
from ...poll.migrations.poll_migration_helper import (
calculate_vote_fields,
set_is_pseudoanonymized,
)
class Migration(migrations.Migration):
dependencies = [
("assignments", "0022_assignmentpoll_change_fields_1"),
]
operations = [
migrations.RunPython(set_is_pseudoanonymized("assignments", "AssignmentPoll")),
migrations.RunPython(calculate_vote_fields("assignments", "AssignmentPoll")),
]
|
[
"joshua.sangmeister@gmail.com"
] |
joshua.sangmeister@gmail.com
|
06bd436df6b2f75483f574c29f1c39df8fbfe3c1
|
9d07c1343412a6e426724e92b451c4313a6f729b
|
/ranker/ESLinearRanker.py
|
a3d350b0db5c51b91a802b1f151b83a3c16d8dcb
|
[] |
no_license
|
hscells/OLTR
|
793f3879fbecd5072e157ad875346c0a4e4fccec
|
1a305ab7a4219737b673aca597a70fd4f634e8d8
|
refs/heads/master
| 2020-11-25T17:22:12.503247
| 2019-12-18T07:17:16
| 2019-12-18T07:17:16
| 228,771,699
| 0
| 0
| null | 2019-12-18T06:17:46
| 2019-12-18T06:17:45
| null |
UTF-8
|
Python
| false
| false
| 1,702
|
py
|
from ranker.COLTRLinearRanker import COLTRLinearRanker
import numpy as np
class ESLinearRanker(COLTRLinearRanker):
def __init__(self, num_features, learning_rate, sigma, tau, gamma,
learning_rate_decay=1, random_initial=True):
super().__init__(num_features, learning_rate, 1, tau, gamma, learning_rate_decay, random_initial)
self.sigma = sigma
def sample_random_vectors(self, n):
random_vectors = np.random.randn(n, self.num_features) * self.sigma
return random_vectors
def sample_canditate_rankers(self, unit_vectors):
new_weights = self.weights + unit_vectors
return new_weights
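# Estimate each candidate ranker's quality from logged clicks via SNIPS (self-normalized inverse propensity scoring), using softmax scores as propensities.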
def get_SNIPS(self, canditate_rankers, record):
current_ranker = self.weights
all_ranker = np.vstack((current_ranker, canditate_rankers)) # all rankers weights
query = record[0]
result_list = record[1]
click_label = record[2]
log_weight = np.array(record[3])
doc_indexes = [np.where(self.docid_list == i)[0][0] for i in result_list]
scores = np.dot(self.feature_matrix, all_ranker.T)
log_score = np.dot(self.feature_matrix, log_weight.T)
propensities = self.softmax(scores)[doc_indexes]
log_propensity = self.softmax(log_score)[doc_indexes]
log_propensity = log_propensity.reshape(len(result_list), 1)
SNIPS = self.compute_SNIPS(log_propensity, propensities, click_label)
winners = np.where(SNIPS < SNIPS[0])[0]
# IPS = self.compute_IPS(log_propensity, propensities, click_label)
# winners = np.where(IPS < IPS[0])[0]
if len(winners) == 0:
return None
return SNIPS * -1
|
[
"s4416495@gs640-7539.eait.uq.edu.au"
] |
s4416495@gs640-7539.eait.uq.edu.au
|
f315d6ed32378aad77511b5cccf9a2e5ba9196d9
|
2b494a59a440ffd9254ce5123a19c6b530e05046
|
/FileGen.py
|
ef000e40399c39fcdb8a8cf056a722830fe0ba63
|
[] |
no_license
|
doofusdavid/CSC507_PortfolioProject
|
23ced3ed7a749183f0c3c1ef7d59ae9bc1ed41e2
|
96e4b32f741224cc1188f2e68acc5783264c5477
|
refs/heads/main
| 2023-06-11T12:35:07.792653
| 2021-07-04T05:47:04
| 2021-07-04T05:47:04
| 381,175,136
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 401
|
py
|
import random
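# Write 'filesize' random integers in [0, 32767] to file_name, one per line, with no trailing newline after the last value.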
def createFile(file_name, filesize):
with open(file_name, 'w') as file:
for i in range(filesize):
if i == filesize - 1:
file.write(str(random.randint(0, 32767)))
else:
file.write(str(random.randint(0, 32767)) + '\n')
filesize = 1000000000
createFile("hugefile1.txt", filesize)
createFile("hugefile2.txt", filesize)
|
[
"david.edwards@gmail.com"
] |
david.edwards@gmail.com
|
e4696b685242ca20a4308f4347f3e862d92b0812
|
83092db43b4c9d814d7b7703fcf4ff8118197eef
|
/Extracao_de_pico.py
|
f469b9cc4776bc36eb0b2b3b45f94edb917332ad
|
[] |
no_license
|
AnaFrozza/TCC
|
4d3237feb35bdb5c67e1c18e6c3027c057ffde3e
|
e014addf65737b580fa00e59429b017ecb7246d6
|
refs/heads/master
| 2020-03-17T12:09:04.811103
| 2019-05-15T06:18:53
| 2019-05-15T06:18:53
| 133,576,362
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,817
|
py
|
# -*- coding: utf-8 -*-
import matplotlib
import seaborn
import numpy, scipy, IPython.display as ipd, matplotlib.pyplot as plt
import librosa, librosa.display
plt.rcParams['figure.figsize'] = (14, 8)
# Display the CQT signal
def exibe_cqt(x, sr):
cqt = librosa.cqt(x, sr=sr, n_bins=(n_octaves * bins_per_octave), bins_per_octave=bins_per_octave)
log_cqt = librosa.amplitude_to_db(cqt)
# librosa.display.specshow(abs(cqt), sr=sr, x_axis='time', y_axis='cqt_note')
print(cqt.shape)
return log_cqt
# Spectrogram
def expectrograma(x, log_cqt, sr, bins_per_octave):
librosa.display.specshow(log_cqt, sr=sr, x_axis='time', y_axis='cqt_note', bins_per_octave=bins_per_octave)
print("\n")
plt.title('Tom de cada nota')
plt.show()
# plt.colorbar(format='%2.0f dB')
# Onset strength envelope
def envelope(x, sr, hop_length):
print("Envelope da força inical")
onset_env = librosa.onset.onset_strength(x, sr=sr, hop_length=hop_length)
print(onset_env)
# plt.xlim(0, len(onset_env)) -> (0, 11210)
# Estimated onset positions
def posicao_onsets(x, sr, hop_length):
print("Posicao estimada")
onset_samples = librosa.onset.onset_detect(x, sr=sr, units='samples', hop_length=hop_length, backtrack=False, pre_max=20, post_max=20, pre_avg=100, post_avg=100, delta=0.2, wait=0)
print(onset_samples)
return onset_samples
# Concatenate onset boundaries
def concatena(x, sr, onset_samples):
print("Concatena onsets")
onset_boundaries = numpy.concatenate([[0], onset_samples, [len(x)]])
# print(onset_boundaries)
return onset_boundaries
# Onset times in seconds
def tempo_onsets(sr, onset_boundaries):
print("Tempo dos onsets")
onset_times = librosa.samples_to_time(onset_boundaries, sr=sr)
print(onset_times)
return onset_times
# Waveform
def forma_onda(x, sr, onset_times):
librosa.display.waveplot(x, sr=sr)
plt.vlines(onset_times, -1, 1, color='r')
plt.title('Amplitude da forma da onda')
def estimate_pitch(segment, sr, fmin=50.0, fmax=2000.0):
# Compute the autocorrelation of the input segment.
r = librosa.autocorrelate(segment)
# Set the lower and upper bounds for the autocorrelation argmax.
i_min = sr/fmax
i_max = sr/fmin
r[:int(i_min)] = 0
r[int(i_max):] = 0
# Find the location of the maximum autocorrelation.
i = r.argmax()
f0 = float(sr)/i
return f0
def generate_sine(f0, sr, n_duration):
n = numpy.arange(n_duration)
return 0.2*numpy.sin(2*numpy.pi*f0*n/float(sr))
def estimate_pitch_and_generate_sine(x, onset_samples, i, sr):
n0 = onset_samples[i]
n1 = onset_samples[i+1]
f0 = estimate_pitch(x[n0:n1], sr)
return generate_sine(f0, sr, n1-n0)
def concatena_sintetizado(x, sr, onset_boundaries):
print("Tom de cada nota sintetizado")
y = numpy.concatenate([
estimate_pitch_and_generate_sine(x, onset_boundaries, i, sr=sr)
for i in range(len(onset_boundaries)-1)
])
# ipd.Audio(y, rate=sr)
cqt = librosa.cqt(y, sr=sr)
librosa.display.specshow(abs(cqt), sr=sr, x_axis='time', y_axis='cqt_note')
plt.title('Notas sintetizadas')
plt.show()
if __name__ == '__main__':
from sys import argv
filename = argv[1]
x, sr = librosa.load(filename)
hop_length = 100
bins_per_octave = 12 * 3
n_octaves = 7
# ipd.Audio(x, rate=sr)
log_cqt = exibe_cqt(x, sr)
expectrograma(x, log_cqt, sr, bins_per_octave)
envelope(x, sr, hop_length)
onset_samples = posicao_onsets(x, sr, hop_length)
onset_boundaries = concatena(x, sr, onset_samples)
onset_times = tempo_onsets(sr, onset_boundaries)
forma_onda(x, sr, onset_times)
concatena_sintetizado(x, sr, onset_boundaries)
print("\nConcluido!")
|
[
"annafrozza@gmail.com"
] |
annafrozza@gmail.com
|
da40d2299ca6a29dba2c1bd7aa94ff0fe0103f3b
|
824e4f31870c3efb7620dba94889dd80f84dbd5d
|
/personalizacion/__manifest__.py
|
cc9cf10ee9008e5385a6246faca395e694f9bd8d
|
[] |
no_license
|
sistemasedy/test-odoo
|
a7f018da34c00fa34454fcb33506f88cd3c99ff4
|
1d04a405b446b64e3bdbf2bdb18e1cd48ebde450
|
refs/heads/master
| 2020-03-28T03:50:48.453973
| 2018-11-21T18:18:28
| 2018-11-21T18:18:28
| 147,675,500
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 833
|
py
|
# -*- coding: utf-8 -*-
{
'name': "Personalizacion",
'summary': """Personalizacion a medida para sistemas""",
'description': """
Desarrollo a medida
""",
'author': "Em Software",
'website': "http://www.emsoftware.com",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/odoo/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base','account','sale','product'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'views/views.xml',
'views/templates.xml',
],
# only loaded in demonstration mode
'demo': [
'demo/demo.xml',
],
}
|
[
"serviciosedy@gmail.com"
] |
serviciosedy@gmail.com
|
5880c0827ea66934508a726be8872889a8a495b6
|
64bf39b96a014b5d3f69b3311430185c64a7ff0e
|
/intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/f5networks/f5_modules/plugins/modules/bigip_ipsec_policy.py
|
6465a35577db1b7ad970a373ef3d7dce243d5781
|
[
"MIT",
"GPL-3.0-only"
] |
permissive
|
SimonFangCisco/dne-dna-code
|
7072eba7da0389e37507b7a2aa5f7d0c0735a220
|
2ea7d4f00212f502bc684ac257371ada73da1ca9
|
refs/heads/master
| 2023-03-10T23:10:31.392558
| 2021-02-25T15:04:36
| 2021-02-25T15:04:36
| 342,274,373
| 0
| 0
|
MIT
| 2021-02-25T14:39:22
| 2021-02-25T14:39:22
| null |
UTF-8
|
Python
| false
| false
| 24,128
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: bigip_ipsec_policy
short_description: Manage IPSec policies on a BIG-IP
description:
- Manage IPSec policies on a BIG-IP device.
version_added: "1.0.0"
options:
name:
description:
- Specifies the name of the IPSec policy.
type: str
required: True
description:
description:
- Description of the policy.
type: str
protocol:
description:
- Specifies the IPsec protocol.
- Options include ESP (Encapsulating Security Protocol) or AH (Authentication Header).
type: str
choices:
- esp
- ah
mode:
description:
- Specifies the processing mode.
- When C(transport), specifies a mode that encapsulates only the payload (adding
an ESP header, trailer, and authentication tag).
- When C(tunnel), specifies a mode that includes encapsulation of the header as
well as the payload (adding a new IP header, in addition to adding an ESP header,
trailer, and authentication tag). If you select this option, you must also
provide IP addresses for the local and remote endpoints of the IPsec tunnel.
- When C(isession), specifies the use of iSession over an IPsec tunnel. To use
this option, you must also configure the iSession endpoints with IPsec in the
Acceleration section of the user interface.
- When C(interface), specifies the IPsec policy can be used in the tunnel
profile for network interfaces.
type: str
choices:
- transport
- interface
- isession
- tunnel
tunnel_local_address:
description:
- Specifies the local endpoint IP address of the IPsec tunnel.
- This parameter is only valid when C(mode) is C(tunnel).
type: str
tunnel_remote_address:
description:
- Specifies the remote endpoint IP address of the IPsec tunnel.
- This parameter is only valid when C(mode) is C(tunnel).
type: str
encrypt_algorithm:
description:
- Specifies the algorithm to use for IKE encryption.
type: str
choices:
- none
- 3des
- aes128
- aes192
- aes256
- aes-gmac256
- aes-gmac192
- aes-gmac128
- aes-gcm256
- aes-gcm192
- aes-gcm128
route_domain:
description:
- Specifies the route domain, when C(interface) is selected for the C(mode) setting.
type: int
auth_algorithm:
description:
- Specifies the algorithm to use for IKE authentication.
type: str
choices:
- sha1
- sha256
- sha384
- sha512
- aes-gcm128
- aes-gcm192
- aes-gcm256
- aes-gmac128
- aes-gmac192
- aes-gmac256
ipcomp:
description:
- Specifies whether to use IPComp encapsulation.
- When C(none), specifies IPComp is disabled.
- When C(deflate), specifies IPComp is enabled and uses the Deflate
compression algorithm.
type: str
choices:
- none
- "null"
- deflate
lifetime:
description:
- Specifies the length of time before the IKE security association expires,
in minutes.
type: int
kb_lifetime:
description:
- Specifies the length of time, in kilobytes, before the IKE security association expires.
type: int
perfect_forward_secrecy:
description:
- Specifies the Diffie-Hellman group to use for IKE Phase 2 negotiation.
type: str
choices:
- none
- modp768
- modp1024
- modp1536
- modp2048
- modp3072
- modp4096
- modp6144
- modp8192
ipv4_interface:
description:
- When C(mode) is C(interface), indicates if the IPv4 C(any) address should be used.
By default C(BIG-IP) assumes C(any6) address for tunnel addresses when C(mode) is C(interface).
- This option takes effect only when C(mode) is set to C(interface).
type: bool
partition:
description:
- Device partition to manage resources on.
type: str
default: Common
state:
description:
- When C(present), ensures the resource exists.
- When C(absent), ensures the resource is removed.
type: str
choices:
- present
- absent
default: present
extends_documentation_fragment: f5networks.f5_modules.f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create a IPSec policy
bigip_ipsec_policy:
name: policy1
mode: tunnel
tunnel_local_address: 1.1.1.1
tunnel_remote_address: 2.2.2.2
auth_algorithm: sha1
encrypt_algorithm: 3des
protocol: esp
perfect_forward_secrecy: modp1024
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
auth_algorithm:
description: The new IKE Phase 2 Authentication Algorithm value.
returned: changed
type: str
sample: sha512
encrypt_algorithm:
description: The new IKE Phase 2 Encryption Algorithm value.
returned: changed
type: str
sample: aes256
lifetime:
description: The new IKE Phase 2 Lifetime value.
returned: changed
type: int
sample: 1440
kb_lifetime:
description: The new IKE Phase 2 KB Lifetime value.
returned: changed
type: int
sample: 0
perfect_forward_secrecy:
description: The new IKE Phase 2 Perfect Forward Secrecy value.
returned: changed
type: str
sample: modp2048
tunnel_local_address:
description: The new Tunnel Local Address value.
returned: changed
type: str
sample: 1.2.2.1
tunnel_remote_address:
description: The new Tunnel Remote Address value.
returned: changed
type: str
sample: 2.1.1.2
mode:
description: The new Mode value.
returned: changed
type: str
sample: tunnel
protocol:
description: The new IPsec Protocol value.
returned: changed
type: str
sample: ah
ipcomp:
description: The new IKE Phase 2 IPComp value.
returned: changed
type: str
sample: deflate
description:
description: The new description value.
returned: changed
type: str
sample: My policy
route_domain:
description: The new Route Domain value when in Tunnel mode.
returned: changed
type: int
sample: 2
'''
from datetime import datetime
from ansible.module_utils.basic import (
AnsibleModule, env_fallback
)
from ..module_utils.bigip import F5RestClient
from ..module_utils.common import (
F5ModuleError, AnsibleF5Parameters, transform_name, f5_argument_spec, flatten_boolean
)
from ..module_utils.compare import cmp_str_with_none
from ..module_utils.icontrol import tmos_version
from ..module_utils.teem import send_teem
class Parameters(AnsibleF5Parameters):
api_map = {
'ikePhase2AuthAlgorithm': 'auth_algorithm',
'ikePhase2EncryptAlgorithm': 'encrypt_algorithm',
'ikePhase2Lifetime': 'lifetime',
'ikePhase2LifetimeKilobytes': 'kb_lifetime',
'ikePhase2PerfectForwardSecrecy': 'perfect_forward_secrecy',
'tunnelLocalAddress': 'tunnel_local_address',
'tunnelRemoteAddress': 'tunnel_remote_address',
}
api_attributes = [
'ikePhase2AuthAlgorithm',
'ikePhase2EncryptAlgorithm',
'ikePhase2Lifetime',
'ikePhase2LifetimeKilobytes',
'ikePhase2PerfectForwardSecrecy',
'tunnelLocalAddress',
'tunnelRemoteAddress',
'mode',
'protocol',
'ipcomp',
'description',
]
returnables = [
'auth_algorithm',
'encrypt_algorithm',
'lifetime',
'kb_lifetime',
'perfect_forward_secrecy',
'tunnel_local_address',
'tunnel_remote_address',
'mode',
'protocol',
'ipcomp',
'description',
'route_domain',
]
updatables = [
'auth_algorithm',
'encrypt_algorithm',
'lifetime',
'kb_lifetime',
'perfect_forward_secrecy',
'tunnel_local_address',
'tunnel_remote_address',
'mode',
'protocol',
'ipcomp',
'description',
'route_domain',
]
@property
def tunnel_local_address(self):
if self._values['tunnel_local_address'] is None:
return None
result = self._values['tunnel_local_address'].split('%')[0]
return result
@property
def tunnel_remote_address(self):
if self._values['tunnel_remote_address'] is None:
return None
result = self._values['tunnel_remote_address'].split('%')[0]
return result
class ApiParameters(Parameters):
@property
def description(self):
if self._values['description'] in [None, 'none']:
return None
return self._values['description']
@property
def encrypt_algorithm(self):
if self._values['encrypt_algorithm'] is None:
return None
elif self._values['encrypt_algorithm'] == 'null':
return 'none'
return self._values['encrypt_algorithm']
@property
def route_domain(self):
if self._values['tunnel_local_address'] is None and self._values['tunnel_remote_address'] is None:
return None
elif self._values['tunnel_local_address'] is None and self._values['tunnel_remote_address'] is not None:
if self._values['tunnel_remote_address'] == 'any6':
result = 'any6'
elif self._values['tunnel_remote_address'] == 'any':
result = 'any'
else:
result = int(self._values['tunnel_remote_address'].split('%')[1])
elif self._values['tunnel_remote_address'] is None and self._values['tunnel_local_address'] is not None:
if self._values['tunnel_local_address'] == 'any6':
result = 'any6'
elif self._values['tunnel_local_address'] == 'any':
result = 'any'
else:
result = int(self._values['tunnel_local_address'].split('%')[1])
else:
try:
result = int(self._values['tunnel_local_address'].split('%')[1])
except Exception:
if self._values['tunnel_local_address'] in ['any6', 'any']:
return 0
return None
try:
if result in ['any6', 'any']:
return 0
return int(self._values['tunnel_local_address'].split('%')[1])
except Exception:
return None
class ModuleParameters(Parameters):
@property
def ipv4_interface(self):
result = flatten_boolean(self._values['ipv4_interface'])
if result == 'yes':
return True
return False
@property
def description(self):
if self._values['description'] is None:
return None
elif self._values['description'] in ['none', '']:
return ''
return self._values['description']
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
raise
return result
class UsableChanges(Changes):
@property
def encrypt_algorithm(self):
if self._values['encrypt_algorithm'] is None:
return None
elif self._values['encrypt_algorithm'] == 'none':
return 'null'
return self._values['encrypt_algorithm']
@property
def tunnel_local_address(self):
if self._values['tunnel_local_address'] is None:
return None
if self._values['route_domain'] and len(self._values['tunnel_local_address'].split('%')) == 1:
result = '{0}%{1}'.format(self._values['tunnel_local_address'], self._values['route_domain'])
return result
return self._values['tunnel_local_address']
@property
def tunnel_remote_address(self):
if self._values['tunnel_remote_address'] is None:
return None
if self._values['route_domain'] and len(self._values['tunnel_remote_address'].split('%')) == 1:
result = '{0}%{1}'.format(self._values['tunnel_remote_address'], self._values['route_domain'])
return result
return self._values['tunnel_remote_address']
class ReportableChanges(Changes):
@property
def encrypt_algorithm(self):
if self._values['encrypt_algorithm'] is None:
return None
elif self._values['encrypt_algorithm'] == 'null':
return 'none'
return self._values['encrypt_algorithm']
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def description(self):
return cmp_str_with_none(self.want.description, self.have.description)
@property
def route_domain(self):
if self.want.route_domain is None:
return None
if self.have.route_domain != self.want.route_domain:
if self.want.route_domain == 0 and self.want.ipv4_interface:
return dict(
tunnel_local_address='any',
tunnel_remote_address='any',
route_domain=self.want.route_domain,
)
elif self.want.route_domain == 0 and not self.want.ipv4_interface:
return dict(
tunnel_local_address='any6',
tunnel_remote_address='any6',
route_domain=self.want.route_domain,
)
else:
return dict(
tunnel_local_address='any%{0}'.format(self.want.route_domain),
tunnel_remote_address='any%{0}'.format(self.want.route_domain),
route_domain=self.want.route_domain,
)
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def exec_module(self):
start = datetime.now().isoformat()
version = tmos_version(self.client)
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
send_teem(start, self.module, version)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/net/ipsec/ipsec-policy/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return True
errors = [401, 403, 409, 500, 501, 502, 503, 504]
if resp.status in errors or 'code' in response and response['code'] in errors:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
if self.want.mode == 'interface':
if self.want.ipv4_interface:
self._set_any_on_interface(ip='ipv4')
else:
self._set_any_on_interface()
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
return True
def _set_any_on_interface(self, ip='ipv6'):
if ip == 'ipv4':
self.want.update({'tunnel_local_address': 'any'})
self.want.update({'tunnel_remote_address': 'any'})
else:
self.want.update({'tunnel_local_address': 'any6'})
self.want.update({'tunnel_remote_address': 'any6'})
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/net/ipsec/ipsec-policy/".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return True
raise F5ModuleError(resp.content)
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/net/ipsec/ipsec-policy/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return True
raise F5ModuleError(resp.content)
def absent(self):
if self.exists():
return self.remove()
return False
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/net/ipsec/ipsec-policy/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
response = self.client.api.delete(uri)
if response.status in [200, 201]:
return True
raise F5ModuleError(response.content)
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/net/ipsec/ipsec-policy/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return ApiParameters(params=response)
raise F5ModuleError(resp.content)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
description=dict(),
protocol=dict(
choices=['esp', 'ah']
),
mode=dict(
choices=['transport', 'interface', 'isession', 'tunnel']
),
ipv4_interface=dict(type='bool'),
tunnel_local_address=dict(),
tunnel_remote_address=dict(),
encrypt_algorithm=dict(
choices=[
'none', '3des', 'aes128', 'aes192', 'aes256', 'aes-gmac256',
'aes-gmac192', 'aes-gmac128', 'aes-gcm256', 'aes-gcm192',
'aes-gcm128'
]
),
route_domain=dict(type='int'),
auth_algorithm=dict(
choices=[
'sha1', 'sha256', 'sha384', 'sha512', 'aes-gcm128',
'aes-gcm192', 'aes-gcm256', 'aes-gmac128', 'aes-gmac192',
'aes-gmac256',
]
),
ipcomp=dict(
choices=['none', 'null', 'deflate']
),
lifetime=dict(type='int'),
kb_lifetime=dict(type='int'),
perfect_forward_secrecy=dict(
choices=[
'none', 'modp768', 'modp1024', 'modp1536', 'modp2048', 'modp3072',
'modp4096', 'modp6144', 'modp8192'
]
),
state=dict(default='present', choices=['absent', 'present']),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
self.required_if = [
['mode', 'tunnel', ['tunnel_local_address', 'tunnel_remote_address']],
['mode', 'interface', ['route_domain']]
]
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
required_if=spec.required_if
)
try:
mm = ModuleManager(module=module)
results = mm.exec_module()
module.exit_json(**results)
except F5ModuleError as ex:
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
|
[
"sifang@cisco.com"
] |
sifang@cisco.com
|
872ffcf5cc8c83fe7aeff360d306dbbe97d015b3
|
7661c9f14a749f7ffd4e78299ded5dd72b46553e
|
/keras_DQNdrone.py
|
3a3ba2734df60059e96d1ffb8dd46e082fa3499f
|
[
"MIT"
] |
permissive
|
ysbsb/dqn
|
8ae4edff0f900fd1efcda6d0378d1f313d88b673
|
e1b9175a5c9dc2fb7b633b4df54c33fbba7e1e15
|
refs/heads/master
| 2020-05-22T14:35:43.135505
| 2019-05-14T04:48:59
| 2019-05-14T04:48:59
| 186,390,590
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,499
|
py
|
from keras.layers import Input, Dense, Reshape, Flatten, Dropout, Activation
from keras import regularizers
from keras.models import Model
from keras.optimizers import Adam
import numpy as np
# from collections import deque
import random
import gym
# from typing import List
import argparse
import math # used by compute_reward() below
import time # used by the AirSim control loop below
import airsim # used by the AirSim client setup below
class DQN():
def __init__(self, discount=0.99, batch_size = 64, max_episodes = 300):
# TODO: Import new environment
# self.env = gym.make('CartPole-v0')
# self.env.wrappers.Monitor(env, directory="results/", force=True)
# TODO: Get observation space of multirotor environment
# self.input_size= self.env.observation_space.shape[0]
# TODO: Get action space of multirotor environment
# self.output_size= self.env.action_space.n
self.DISCOUNT_RATE=discount
self.BATCH_SIZE = batch_size
self.TARGET_UPDATE_FREQUENCY = 5
self.MAX_EPISODES = max_episodes
self.main_dqn = self.build()
self.target_dqn = self.build()
self.main_dqn.compile(optimizer = Adam(), loss ="mean_squared_error")
self.target_dqn.set_weights(self.main_dqn.get_weights())
def build(self, h_size = 16, lr = 0.001):
state = Input(shape=(self.input_size,))
dense1 = Dense(h_size, activation = "relu")(state)
action = Dense(self.output_size, kernel_regularizer=regularizers.l2(0.01))(dense1)
model = Model(state, action)
return model
def train(self):
buffer = []
last_100_game_reward = []
for episode in range(self.MAX_EPISODES):
e = 1. / ((episode / 10) + 1)
done = False
step_count = 0
state = self.env.reset()
while not done:
state = np.reshape(state, (1,self.input_size))
if np.random.rand() < e:
action = self.env.action_space.sample()
else:
action = np.argmax(self.main_dqn.predict(state))
# print("predict", self.main_dqn.predict(state))
next_state, reward, done, info = self.env.step(action)
if done:
reward = -1
# print(action)
buffer.append((state, action, reward, next_state, done))
if len(buffer) > self.BATCH_SIZE:
minibatch = random.sample(buffer, self.BATCH_SIZE)
states = np.vstack([x[0] for x in minibatch])
actions = np.array([x[1] for x in minibatch])
rewards = np.array([x[2] for x in minibatch])
next_states = np.vstack([x[3] for x in minibatch])
done_array = np.array([x[4] for x in minibatch])
# print(actions, actions.shape)
Q_target = rewards + self.DISCOUNT_RATE*np.max(self.target_dqn.predict(next_states), axis=1) * ~done_array
y = self.main_dqn.predict(states)
y[np.arange(len(states)), actions] = Q_target
# print(y,y.shape)
# print(states.shape, actions.shape)
self.main_dqn.train_on_batch(states, y)
if step_count % self.TARGET_UPDATE_FREQUENCY == 0:
self.target_dqn.set_weights(self.main_dqn.get_weights())
state = next_state
step_count += 1
print("Episode: {} steps: {}".format(episode, step_count))
def play(self):
# TODO: Initialize observation by calling the environment's reset
# observation = self.env.reset()
reward_sum = 0
while True:
# TODO: Get multirotor environment render
# self.env.render()
s = np.reshape(observation, (1,self.input_size))
Qs = self.main_dqn.predict(s)
a = np.argmax(Qs)
# TODO: Get step function of multirotor environment
# observation, reward, done, info = self.env.step(a)
reward_sum += reward
if done:
print("total score: {}".format(reward_sum))
# TODO: Initialize observation by calling the environment's reset
# observation = self.env.reset()
reward_sum = 0
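# Editor's note: a hedged sketch of the environment wiring that the TODOs
# in DQN.__init__ leave open, using the CartPole-v0 stand-in that the
# commented-out lines mention; the multirotor environment is not shown here.
def _example_env_wiring():
    import gym
    env = gym.make('CartPole-v0')
    input_size = env.observation_space.shape[0]   # length of the state vector
    output_size = env.action_space.n              # number of discrete actions
    return env, input_size, output_size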
def transform_input(responses):
img1d = np.array(responses[0].image_data_float, dtype=np.float64) # np.float alias was removed in newer NumPy
img1d = 255/np.maximum(np.ones(img1d.size), img1d)
img2d = np.reshape(img1d, (responses[0].height, responses[0].width))
from PIL import Image
image = Image.fromarray(img2d)
im_final = np.array(image.resize((84, 84)).convert('L'))
return im_final
def interpret_action(action):
scaling_factor = 0.25
if action == 0:
quad_offset = (0, 0, 0)
elif action == 1:
quad_offset = (scaling_factor, 0, 0)
elif action == 2:
quad_offset = (0, scaling_factor, 0)
elif action == 3:
quad_offset = (0, 0, scaling_factor)
elif action == 4:
quad_offset = (-scaling_factor, 0, 0)
elif action == 5:
quad_offset = (0, -scaling_factor, 0)
elif action == 6:
quad_offset = (0, 0, -scaling_factor)
return quad_offset
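# Editor's note: an equivalent table-driven form of interpret_action(),
# shown as a hedged design-choice sketch; behavior matches the chain of
# elif branches above for actions 0..6.
_OFFSETS = {
    0: (0, 0, 0),
    1: (1, 0, 0), 2: (0, 1, 0), 3: (0, 0, 1),
    4: (-1, 0, 0), 5: (0, -1, 0), 6: (0, 0, -1),
}
def interpret_action_table(action, scaling_factor=0.25):
    dx, dy, dz = _OFFSETS[action]
    return (dx * scaling_factor, dy * scaling_factor, dz * scaling_factor)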
def compute_reward(quad_state, quad_vel, collision_info):
thresh_dist = 7
beta = 1
z = -10
pts = [np.array([-.55265, -31.9786, -19.0225]), np.array([48.59735, -63.3286, -60.07256]), np.array([193.5974, -55.0786, -46.32256]), np.array([369.2474, 35.32137, -62.5725]), np.array([541.3474, 143.6714, -32.07256])]
quad_pt = np.array(list((quad_state.x_val, quad_state.y_val, quad_state.z_val)))
if collision_info.has_collided:
reward = -100
else:
dist = 10000000
for i in range(0, len(pts)-1):
dist = min(dist, np.linalg.norm(np.cross((quad_pt - pts[i]), (quad_pt - pts[i+1])))/np.linalg.norm(pts[i]-pts[i+1]))
#print(dist)
if dist > thresh_dist:
reward = -10
else:
reward_dist = (math.exp(-beta*dist) - 0.5)
reward_speed = (np.linalg.norm([quad_vel.x_val, quad_vel.y_val, quad_vel.z_val]) - 0.5)
reward = reward_dist + reward_speed
return reward
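# Editor's note: a small hedged self-check of the distance formula used in
# compute_reward(): |(p-a) x (p-b)| / |a-b| is the distance from point p to
# the infinite line through a and b.
_a = np.array([0.0, 0.0, 0.0])
_b = np.array([1.0, 0.0, 0.0])
_p = np.array([0.5, 2.0, 0.0])
_d = np.linalg.norm(np.cross(_p - _a, _p - _b)) / np.linalg.norm(_a - _b)
assert abs(_d - 2.0) < 1e-9  # p sits 2 units from the x-axis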
def isDone(reward):
done = 0
if reward <= -10:
done = 1
return done
initX = -.55265
initY = -31.9786
initZ = -19.0225
# connect to the AirSim simulator
client = airsim.MultirotorClient()
client.confirmConnection()
client.enableApiControl(True)
client.armDisarm(True)
client.takeoffAsync().join()
client.moveToPositionAsync(initX, initY, initZ, 5).join()
client.moveByVelocityAsync(1, -0.67, -0.8, 5).join()
time.sleep(0.5)
# Make RL agent
NumBufferFrames = 4
SizeRows = 84
SizeCols = 84
NumActions = 7
agent = DeepQAgent((NumBufferFrames, SizeRows, SizeCols), NumActions, monitor=True)
# Train
epoch = 100
current_step = 0
max_steps = epoch * 250000
responses = client.simGetImages([airsim.ImageRequest(3, airsim.ImageType.DepthPerspective, True, False)])
current_state = transform_input(responses)
while True:
action = agent.act(current_state)
quad_offset = interpret_action(action)
quad_vel = client.getMultirotorState().kinematics_estimated.linear_velocity
client.moveByVelocityAsync(quad_vel.x_val+quad_offset[0], quad_vel.y_val+quad_offset[1], quad_vel.z_val+quad_offset[2], 5).join()
time.sleep(0.5)
quad_state = client.getMultirotorState().kinematics_estimated.position
quad_vel = client.getMultirotorState().kinematics_estimated.linear_velocity
collision_info = client.simGetCollisionInfo()
reward = compute_reward(quad_state, quad_vel, collision_info)
done = isDone(reward)
print('Action, Reward, Done:', action, reward, done)
agent.observe(current_state, action, reward, done)
agent.train()
if done:
client.moveToPositionAsync(initX, initY, initZ, 5).join()
client.moveByVelocityAsync(1, -0.67, -0.8, 5).join()
time.sleep(0.5)
current_step +=1
responses = client.simGetImages([airsim.ImageRequest(3, airsim.ImageType.DepthPerspective, True, False)])
current_state = transform_input(responses)
|
[
"subinlab.yang@gmail.com"
] |
subinlab.yang@gmail.com
|
c731a0422743b260fba8416847e5a1c79428c958
|
bc21702676592039ec1adda986a1dabe8f26ac7f
|
/lemonade_stand.py
|
7a6d5d1053af06ed267602289384111bfb618c47
|
[] |
no_license
|
rhvonlehe/KidsPrograms-LemonadeStand
|
8c96f0653f0ce567f636493aad85c981eeb949f5
|
134465ce25f15956dd30a0abac2e4fa24e7fd9ea
|
refs/heads/master
| 2021-01-19T00:25:03.980929
| 2016-05-31T22:49:40
| 2016-05-31T22:49:40
| 60,126,239
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,709
|
py
|
#!/usr/bin/env python
""" Simple version of classic lemonade stand game. Written in Python 2.7
"""
__author__ = 'Rich von Lehe'
import random
class Day:
""" Day responsible for things that change daily
"""
def __init__(self):
self.day = 0
self.weather = 0
self.lemonade_price = 0
def new_day(self):
self.day += 1
self._update_lemonade_price()
self._update_weather()
print("Today's weather: %d" % self.weather)
print("Cost per cup of lemonade: " + str(float(self.lemonade_price) / 100))
print
def demand(self, price):
cups = random.randrange(1, 101)
price_factor = float(100 - price) / 100
heat_factor = 1 - (((100 - self.weather) * 2) / float(100))
demand = 0
if 0 == price:
demand += cups + random.randrange(1, 100) # Additional demand when free
demand += int(round(cups * price_factor * heat_factor))
return demand
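# Editor's worked example (hedged; 'cups' is a random draw from 1..100):
# with price=50, weather=75 and a cups draw of 80, price_factor=0.5 and
# heat_factor = 1 - ((100-75)*2)/100 = 0.5, so demand = int(round(80*0.5*0.5)) = 20.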
def _update_lemonade_price(self):
self.lemonade_price = random.randrange(1, 10)
def _update_weather(self):
self.weather = random.randrange(50, 100)
class Game:
""" Game class enables multi-player lemonade
"""
def __init__(self):
""" setup initial parameters
"""
self.players = 0
self.day = Day()
self.active = True
self.lemonade_stands = []
def _run_day(self):
self.day.new_day()
for stand in self.lemonade_stands:
stand.run(self.day)
print('Day: ' + str(self.day.day))
print('Weather: ' + str(self.day.weather))
for stand in self.lemonade_stands:
stand.print_summary()
print
def _get_players(self):
while True:
try:
self.players = int(raw_input('How many players are there (1-3)?'))
if self.players not in range(1, 4):
raise ValueError
else:
break
except ValueError:
print('Please choose a number from 1 to 3')
for i in range(1, self.players + 1):
print('Enter a name for player %d \n' % i)
name = raw_input()
self.lemonade_stands += [LemonadeStand(name)]
def _prompt_continue(self):
while True:
print('Press 1 to continue, 2 to quit')
try:
choice = int(raw_input())
if choice in range(1, 3):
break
except ValueError:
pass # TODO
if 2 == choice:
print('Are you sure you want to quit? (y/n)')
choice = raw_input()
if 'y' == choice:
self.active = False
def run(self):
self._get_players()
while self.active:
self._run_day()
self._prompt_continue()
class LemonadeStand:
""" LemonadeStand keeps track of player-specific data
"""
def __init__(self, name):
self.name = name
self.lemonade = 0
self.cash = 100.00
self.cups_sold = 0
self.cost_to_make = 0
self.cups_made = 0
self.earnings = 0.0
self.cups_demanded = 0
print('Lemonade stand owned by %s\n' % self.name)
def run(self, day):
""" Get decision and update lemonade, cash, """
self.cups_made = 0
self.cups_sold = 0
self.cups_demanded = 0
self.earnings = 0
while True:
print('%s: 1 to make lemonade, 2 to sell lemonade' % self.name)
try:
choice = int(raw_input())
if choice in range(1, 3):
break
else:
raise ValueError
except ValueError:
print('Please try again')
if 1 == choice:
self._make_lemonade(day)
elif 2 == choice:
self._sell_lemonade(day)
def _make_lemonade(self, day):
while True:
try:
self.cups_made = int(raw_input('How many cups will you make (1-10)?'))
if self.cups_made in range(1, 11):
break
else:
raise ValueError
except ValueError:
print('You must choose an integer from 1 to 10')
self.lemonade += self.cups_made
self.cost_to_make = self.cups_made * (float(day.lemonade_price) / 100)
self.cash -= self.cost_to_make
def _sell_lemonade(self, day):
while True:
try:
price = int(raw_input('How many cents will you charge per cup of lemonade? (0-100)?'))
if price in range(0, 101):
break
else:
raise ValueError
except ValueError:
pass
self.cups_demanded = day.demand(price)
if self.cups_demanded <= self.lemonade:
self.lemonade -= self.cups_demanded
self.cups_sold = self.cups_demanded
else:
self.cups_sold = self.lemonade
self.lemonade = 0
self.earnings = self.cups_sold * (float(price) / 100)
self.cash += self.earnings
def print_summary(self):
print('Player: %s' % self.name)
print('---------------')
print('Total lemonade: %d' % self.lemonade)
print('Demand: %d' % self.cups_demanded)
print('Cups sold: %d' % self.cups_sold)
print('Earnings: %.02f' % (round(self.earnings, 2)))
print('Cash: %.02f' % (round(self.cash, 2)))
def main():
game = Game()
game.run()
main()
|
[
"rhvonlehe@gmail.com"
] |
rhvonlehe@gmail.com
|
db7dec2273d99297a4fd13152de5bb4dd61f6195
|
d390393d52bb3b73a94bfa0094c089d94ce17b40
|
/10_multiprocessing/class_.py
|
e0c7c98126165c434637299fd9d51a19be320513
|
[] |
no_license
|
bunbunjin/exercise
|
b975eff5ecd0c778a24f02bcf7c675d3920ca2db
|
101b112ce27691edda5c73179498f4da81681188
|
refs/heads/main
| 2023-06-06T07:17:58.719902
| 2021-06-23T12:59:34
| 2021-06-23T12:59:34
| 345,110,456
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 277
|
py
|
from concurrent.futures import (ThreadPoolExecutor, Future)
def func():
return 1
future = ThreadPoolExecutor().submit(func)
a = isinstance(future, Future)
print(a)
aa = future.done()
print(aa)
aaa = future.running()
print(aaa)
aaaa = future.cancelled()
print(aaaa)
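# Editor's note: a hedged follow-up; result() blocks until func() finishes
# and returns its value, which the status prints above never retrieve.
value = future.result()
print(value) # 1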
|
[
"bunbunjin.aisu@gmail.com"
] |
bunbunjin.aisu@gmail.com
|
14d0bb0192ee02c572986075e83149d437e63705
|
f69fbe895fe05725a5a146d69963d47d3b512514
|
/Python/Tutorial/raw_input.py
|
f01bd676b326f8041320bc7c5f9c40f7770a43ad
|
[] |
no_license
|
mcmullan0/scripts
|
f17e333d72161728c9bd05732a9cedc570fbb0b4
|
30ed75b6523c6b1506d38590e01f7f51e3a943b9
|
refs/heads/master
| 2021-01-17T01:25:49.251510
| 2020-12-04T21:51:43
| 2020-12-04T21:51:43
| 16,819,813
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 170
|
py
|
# This line makes 'a' equal to whatever you type in
a=raw_input("Type in something, and it will be repeated on screen:")
# This line prints what 'a' is now worth
print a
|
[
"mcmullan0@gmail.com"
] |
mcmullan0@gmail.com
|
e2832281c1aa8ca77772d77f40f3b977505f4801
|
1b5598ff8ebe75af6abded38e38072fb7592ef5a
|
/Homework2Prob2FeysFunction.py
|
c94da14d19e0cc09e65975528f98e8770048877f
|
[] |
no_license
|
TheTony/CompMeth_Tony
|
34ac2b1e7f552c32c6f14a15ca4aa54e35321d84
|
8a6097ce2ca5ec9f5c68e94b6ec1d099222fbbbe
|
refs/heads/master
| 2021-01-23T17:05:37.044560
| 2017-10-02T22:10:19
| 2017-10-02T22:10:19
| 102,759,159
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 638
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 18 10:57:13 2017
@author: Le
"""
import matplotlib.pyplot as plt
import numpy as np
theta=np.arange(0,24*np.pi,0.01)
r=np.e**(np.cos(theta))-2*np.cos(4*theta)+np.sin(theta/12)**5
x=r*np.cos(theta)
y=r*np.sin(theta)
ax = plt.subplot(projection='polar')
ax.plot(theta,r)
ax.set_rticks([1,2, 3, 4, 5,6]) # less radial ticks
ax.set_rlabel_position(90) # get radial labels away from plotted line
ax.grid(True)
plt.show()
bx = plt.subplot()
bx.plot(x,y)
plt.show()
|
[
"noreply@github.com"
] |
TheTony.noreply@github.com
|
0ac8818db9722f646577afd3742934b3f89c1e33
|
caa6dced49f2d03d4fed4c0e9c6f09d0d374e7ce
|
/Python/2631.py
|
ee325c0fec1547f03f350ba8ee03a5739e889670
|
[] |
no_license
|
hooong/baekjoon
|
72e70cea5769467609150416be6f075f9d4c136a
|
541f67859de0871ecfa134f8e5c6a8f399d17a1e
|
refs/heads/master
| 2022-04-06T09:21:26.761766
| 2022-03-02T12:57:21
| 2022-03-02T12:57:21
| 180,535,683
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 305
|
py
|
# Baekjoon 2631: Lining up
# main
n = int(input())
children = []
for _ in range(n):
children.append(int(input()))
# LIS
dp = [0 for _ in range(n)]
for i in range(n):
dp[i] = 1
for j in range(i):
if children[j] < children[i]:
dp[i] = max(dp[i], dp[j] + 1)
print(n - max(dp))
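# Editor's note: a hedged alternative sketch, not the submitted solution:
# the same LIS length via bisect in O(n log n) instead of the O(n^2) dp.
import bisect
tails = []
for c in children:
    pos = bisect.bisect_left(tails, c)
    if pos == len(tails):
        tails.append(c)
    else:
        tails[pos] = c
print(n - len(tails))  # same answer as the dp above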
|
[
"tjrwns0529@gmail.com"
] |
tjrwns0529@gmail.com
|
978c325e392f017cfe2d09bf32c63c7858502254
|
4501366d68af6a083f3c97bc97ceed0491b9412a
|
/Karaoke/tests/songs_test.py
|
4f7d949ef9a18a0132f1889df4db8669bc213f9e
|
[] |
no_license
|
klamb95/Karaoke_weekend_homework
|
ea0dc0dad4e56edeb2b34f2c6b55c717f3044055
|
6666dacae729ead64c4c477b12490168a883691b
|
refs/heads/main
| 2023-04-09T17:00:42.640367
| 2021-04-11T15:30:05
| 2021-04-11T15:30:05
| 356,241,333
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 336
|
py
|
import unittest
from src.songs import Songs
class TestSongs(unittest.TestCase):
def setUp(self):
self.songs = Songs("Dive", "Ed Sheeran")
def test_songs_has_title(self):
self.assertEqual("Dive", self.songs.title)
def test_songs_has_artist(self):
self.assertEqual("Ed Sheeran", self.songs.artist)
|
[
"klamb1995@gmail.com"
] |
klamb1995@gmail.com
|
1f9f53bb94e996fa539cdb01e1239003135843b5
|
7ca55428503fc915fcffb8059d30654b625a6b26
|
/144-move_x_recursion.py
|
e1e65a31d6f019b14ca4a2d00c47464002378b39
|
[] |
no_license
|
Akshata2704/APS-2020
|
453e9eafb511e3e5fc73d939180c3402eb93134e
|
8f095ae1af9653499f1dedcdfe12b60b1ad1f65c
|
refs/heads/master
| 2020-12-21T03:10:52.043400
| 2020-05-15T18:51:48
| 2020-05-15T18:51:48
| 236,286,545
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 305
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Fri May 15 21:53:42 2020
@author: AKSHATA
"""
# Print s with every 'x' moved to the end, keeping the order of the other
# characters (recursion prints non-'x' on the way down, 'x' on the way up).
def mov(s,l,h):
if(l>=h):
return
sw=s[l]
if(sw!='x'):
print(s[l],end="")
mov(s,l+1,h)
if(sw=='x'):
print('x',end="")
return
s=input()
mov(s,0,len(s))
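# Editor's note: a hedged non-recursive equivalent of mov() for comparison;
# the relative order of non-'x' characters is kept and the 'x's are appended.
def mov_iterative(t):
    return ''.join(ch for ch in t if ch != 'x') + 'x' * t.count('x')
# mov_iterative('axbxc') -> 'abcxx'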
|
[
"noreply@github.com"
] |
Akshata2704.noreply@github.com
|
0c0c9aaffa49dbc21bd275216e833c1d48c321ed
|
b187b3775c8eeda62a868be93ea67defc0f27187
|
/raycom/migrations/0002_auto_20190426_1142.py
|
2769740baf9ea799697222e02d707c954eb391ac
|
[] |
no_license
|
azizcruz/reviews_api
|
598f040c276273a8e918fc215e087faeddb46158
|
73966e6be7138d885fa8378db284e0aa26226a5b
|
refs/heads/master
| 2020-05-16T12:28:49.032994
| 2019-05-14T11:36:20
| 2019-05-14T11:36:20
| 183,046,370
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,516
|
py
|
# Generated by Django 2.2 on 2019-04-26 11:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('raycom', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.AddField(
model_name='post',
name='writer',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='comment',
name='dislikes',
field=models.ManyToManyField(blank=True, related_name='mydislikes', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='comment',
name='likes',
field=models.ManyToManyField(blank=True, related_name='mylikes', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='comment',
name='post',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='post_comments', to='raycom.Post'),
),
migrations.AddField(
model_name='comment',
name='user',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='user_comments', to=settings.AUTH_USER_MODEL),
),
]
|
[
"edu@localhost.localdomain"
] |
edu@localhost.localdomain
|
0f6b9a29f695d6a12f1e4b962dc13b9b1785e7da
|
b06e79c15b9d635e61de5e48dcb820e4a45033bb
|
/python/Django学习模板/HelloWorld/HelloWorld/settings.py
|
f2e0a84f5415c9af338372c420e9e28ba3094550
|
[] |
no_license
|
pangdoh/code
|
370fc5dd6d0ab4726f2740a6149ee61b7ed6ccff
|
4f7713fa509b4dd677dcef8fccef09e260c8713a
|
refs/heads/master
| 2021-06-30T12:25:31.314372
| 2020-12-24T14:46:07
| 2020-12-24T14:46:07
| 205,757,203
| 2
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,169
|
py
|
"""
Django settings for HelloWorld project.
Generated by 'django-admin startproject' using Django 2.2.6.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'uukv547n0661lh22)tpzlz=!vxp=0*w#o&hff1v=nd$-v%61p!'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# ALLOWED_HOSTS = []
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'HelloWorld.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# 'DIRS': [],
'DIRS': [BASE_DIR+"/templates", ],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'HelloWorld.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
|
[
"pang_doh@163.com"
] |
pang_doh@163.com
|
c5ec7693c4d1ad50eb2488ac5e263064011ac2d7
|
70b6d3cedcc698cd6aae5b4885db2c1b6c71e77b
|
/source/maizi_website/wsgi.py
|
320998cfcadd42f22b0b11821e6d63e31134737f
|
[] |
no_license
|
maizi-stu-000/maizi-stu-000-project
|
f1ed81b8f72cc23f83855a9893f5b177fcc850da
|
ce2d1ff792bf72d83567067ec95660d38bdff6bb
|
refs/heads/master
| 2021-01-10T07:48:43.673528
| 2015-11-05T02:45:33
| 2015-11-05T02:45:33
| 45,023,078
| 0
| 5
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 403
|
py
|
"""
WSGI config for maizi_website project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "maizi_website.settings")
application = get_wsgi_application()
|
[
"yopoing@maiziedu.com"
] |
yopoing@maiziedu.com
|
3a5b6bb4c201130db6a6a0af3e6eb8c0618ff697
|
23b9fab50737e95c36ede555d9fefa8a680b128e
|
/LCS.py
|
26051b28878328caa0d862eeea408093a913e25c
|
[] |
no_license
|
jagjeetsian/LCS
|
10facf5d879ee29dcc975f59c1cdad6c5d052065
|
4b193e82254c305fda1e76b4a8a32c3775566e47
|
refs/heads/main
| 2023-03-26T14:08:34.639864
| 2021-03-29T18:15:01
| 2021-03-29T18:15:01
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,254
|
py
|
s1=input('enter a string:')
s2=input('enter another string:')
if len(s2)>len(s1):
s2,s1=s1,s2
# Creating the LCS matrix with one input string along the top row and the other along the first column, filling the rest with zeros
lcs=[]
for i in range(len(s2)+2):
l=[]
for j in range(len(s1)+2):
if i>1 and j==0:
l.append(s2[i-2])
elif i==0 and j>1:
l.append(s1[j-2])
else:
l.append(0)
lcs.append(l)
# main LCS code
# add 1 if the characters match, otherwise take the max of the previous row or column
for i in range(2,len(lcs)):
for j in range(2,len(lcs[0])):
if lcs[i][0]==lcs[0][j]:
lcs[i][j]=1+lcs[i-1][j-1]
else:
lcs[i][j]=max(lcs[i-1][j],lcs[i][j-1])
# reading values back from the table
# bottom-up approach
i=len(lcs)-1
s=''
while i>=0:
j=len(lcs[0])-1
while j>=0:
if lcs[i][j]==0:
i=0
j=0
elif lcs[i][j]>lcs[i][j-1]:
s=lcs[0][j]+s
i=i-1
j=j-1
continue
j=j-1
i=i-1
print('Longest Common Subsequence is:',s)
# to print the lcs matrix you could simply use print(lcs); below it is printed cell by cell
for i in range(len(lcs)):
for j in range(len(lcs[0])):
print(lcs[i][j],end=' ')
print()
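# Editor's note: a compact reference implementation for comparison; the
# table-based code above computes the longest common *subsequence* (despite
# the original "Substring" wording), and this sketch reproduces its length.
def lcs_len(a, b):
    prev = [0] * (len(b) + 1)
    for x in a:
        cur = [0]
        for j, y in enumerate(b, 1):
            cur.append(prev[j - 1] + 1 if x == y else max(prev[j], cur[-1]))
        prev = cur
    return prev[-1]
# Example: lcs_len('abcde', 'ace') == 3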
|
[
"noreply@github.com"
] |
jagjeetsian.noreply@github.com
|
1489885199041c12c3297d9bafcb5cc695676b17
|
fe2b59b2e7129b8b0a1ed57f889f21aa096c19f5
|
/usr/lib/enigma2/python/Components/Converter/mFlashInfo.py
|
52c6550c5e32a6e6691a2fc68b921f873de18b58
|
[] |
no_license
|
linuxbox10/Pingu-NavyBlueFHD
|
07ebca1526327e29da457710abafb5d07226943d
|
0c16228e1a585a78967d4162f390132450a2d9b8
|
refs/heads/master
| 2020-06-10T12:43:15.468096
| 2019-06-25T06:20:00
| 2019-06-25T06:20:00
| 193,646,776
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,391
|
py
|
#
# FlashInfo - Converter
#
# Coded by Dr.Best (c) 2012
# Support: www.dreambox-tools.info
#
# This plugin is licensed under the Creative Commons
# Attribution-NonCommercial-ShareAlike 3.0 Unported
# License. To view a copy of this license, visit
# http://creativecommons.org/licenses/by-nc-sa/3.0/ or send a letter to Creative
# Commons, 559 Nathan Abbott Way, Stanford, California 94305, USA.
#
# Alternatively, this plugin may be distributed and executed on hardware which
# is licensed by Dream Multimedia GmbH.
# This plugin is NOT free software. It is open source, you are allowed to
# modify it (if you keep the license), but it may not be commercially
# distributed other than under the conditions noted above.
#
from Components.Converter.Converter import Converter
from Components.Element import cached
from os import statvfs
from Poll import Poll
from Components.config import config
class mFlashInfo(Poll, Converter, object):
def __init__(self, type):
Converter.__init__(self, type)
Poll.__init__(self)
self.path = "/"
if type == "DefaultRecordingPath":
self.path = config.usage.default_path.value
self.long_display = 1
elif type == "Long":
self.long_display = 1
else:
self.long_display = 0
self.poll_interval = 5000
self.poll_enabled = True
@cached
def getText(self):
try:
# Note: the standalone 'statvfs' module was removed in Python 3; os.statvfs (imported above) is still available.
st = statvfs(self.path)
except OSError:
st = None
if st is not None:
size = st.f_bsize * st.f_blocks
available = st.f_bsize * st.f_bavail
used = size - available
if (size > 0):
usedpercent = "%d %%" % int(used * 100 / size)
else:
usedpercent = "n/a"
if self.long_display == 1:
return _("Size: %s Used: %s Available: %s Use%%: %s") % (self.formatFileSize(size),self.formatFileSize(used),self.formatFileSize(available),usedpercent)
else:
return _("%s free, %s in Use") % (self.formatFileSize(available),usedpercent)
else:
return "No infos available"
text = property(getText)
def formatFileSize(self, size):
filesize = size
suffix = ('bytes', 'KB', 'MB', 'GB', 'TB')
index = 0
while filesize > 1024:
filesize = float(filesize) / 1024.0
index += 1
filesize_string = "%.2f" % filesize
if not filesize_string:
filesize_string = '0'
return "%s %s" % (filesize_string, suffix[min(index, 4)])
|
[
"jaysmith940@hotmail.co.uk"
] |
jaysmith940@hotmail.co.uk
|
4db551610f08a55f29d87a232830e544a60278d3
|
6a25ea6835d8c3d9e0411449afc5a1af6fc2cba6
|
/server/webapp/modules/home/__init__.py
|
69f43e5d4da6e00d6d91a15551b27fddb007a676
|
[] |
no_license
|
igorgoncalves/NutriData
|
8887be76c84e92de2a21bab28cbd7aeb61eeba2f
|
a77e6645f2ae3cba8aabdea3bb41b4e26218abc4
|
refs/heads/master
| 2022-12-25T14:24:17.302174
| 2020-06-03T00:31:54
| 2020-06-03T00:31:54
| 161,583,550
| 1
| 1
| null | 2022-12-10T16:46:43
| 2018-12-13T04:23:58
|
Vue
|
UTF-8
|
Python
| false
| false
| 282
|
py
|
from webapp import app
from flask import Blueprint, render_template
home = Blueprint('home', __name__)
@home.route('/', defaults={'path': ''})
@home.route('/<path:path>')
def catch_all(path):
return render_template("index.html")
app.register_blueprint(home, url_prefix='/')
|
[
"igor_goncalves@live.com"
] |
igor_goncalves@live.com
|
63af706648e3f2b92f0a1eb0b10519cd555073f1
|
344e2956b4e2a30a8ef7532d951f96d995d1dd1e
|
/20_solo/lib/configs/solo/decoupled_solo_light_dcn_r50_fpn_8gpu_3x.py
|
e72d112ae51c3f5d2d8cf88daa74e455f89cabdd
|
[
"BSD-2-Clause",
"LicenseRef-scancode-proprietary-license",
"Apache-2.0",
"LGPL-3.0-only",
"MIT",
"BSD-3-Clause",
"GPL-3.0-only"
] |
permissive
|
karndeepsingh/Monk_Object_Detection
|
e64199705326e4cd65e4b29946cae210a4ef9649
|
425fa50a3236cb9097389646275da06bf9185f6b
|
refs/heads/master
| 2022-12-22T18:26:53.933397
| 2020-09-28T12:49:50
| 2020-09-28T12:49:50
| 299,307,843
| 1
| 1
|
Apache-2.0
| 2020-09-28T12:52:18
| 2020-09-28T12:52:17
| null |
UTF-8
|
Python
| false
| false
| 4,074
|
py
|
# model settings
model = dict(
type='SOLO',
pretrained='torchvision://resnet50',
backbone=dict(
type='ResNet',
depth=50,
num_stages=4,
out_indices=(0, 1, 2, 3), # C2, C3, C4, C5
frozen_stages=1,
style='pytorch',
dcn=dict(
type='DCN',
deformable_groups=1,
fallback_on_stride=False),
stage_with_dcn=(False, True, True, True)),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
start_level=0,
num_outs=5),
bbox_head=dict(
type='DecoupledSOLOLightHead',
num_classes=81,
in_channels=256,
stacked_convs=4,
use_dcn_in_tower=True,
type_dcn='DCN',
seg_feat_channels=256,
strides=[8, 8, 16, 32, 32],
scale_ranges=((1, 64), (32, 128), (64, 256), (128, 512), (256, 2048)),
sigma=0.2,
num_grids=[40, 36, 24, 16, 12],
cate_down_pos=0,
loss_ins=dict(
type='DiceLoss',
use_sigmoid=True,
loss_weight=3.0),
loss_cate=dict(
type='FocalLoss',
use_sigmoid=True,
gamma=2.0,
alpha=0.25,
loss_weight=1.0),
))
# training and testing settings
train_cfg = dict()
test_cfg = dict(
nms_pre=500,
score_thr=0.1,
mask_thr=0.5,
update_thr=0.05,
kernel='gaussian', # gaussian/linear
sigma=2.0,
max_per_img=100)
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
dict(type='Resize',
img_scale=[(852, 512), (852, 480), (852, 448),
(852, 416), (852, 384), (852, 352)],
multiscale_mode='value',
keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(852, 512),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
imgs_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline))
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=1.0 / 3,
step=[27, 33])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
# runtime settings
total_epochs = 36
device_ids = range(8)
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/decoupled_solo_light_dcn_release_r50_fpn_8gpu_3x'
load_from = None
resume_from = None
workflow = [('train', 1)]
|
[
"abhishek4273@gmail.com"
] |
abhishek4273@gmail.com
|
726ad21fb2c8792f6d02e80696dde219bd3e0c22
|
5444932fdae48c5ff82f78eaa42052d7e86e90df
|
/PyOIO/OIOCommon/virkning.py
|
ac90ee63202c01700d2aebc77d255e43a054ae69
|
[] |
no_license
|
JosefAssad/SeMaWi-LoRA-early-sketch
|
3755849080bf3dfc91b489fe7d05b4906c521f15
|
8f92f5a8cdcc9e10c6653d0aeed944569687800f
|
refs/heads/master
| 2016-08-12T19:58:18.461900
| 2016-03-02T12:11:32
| 2016-03-02T12:11:32
| 52,959,386
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,361
|
py
|
#!/usr/bin/env python
class Virkning(object):
""" Virkning is a fairly broadly used class. Its purpose when attached to
metadata is to lend the metadata bitemporality.
"""
def __init__(self, json):
"""Args:
json: (dictionary) data containing the attributes of the Virkning object
"""
# TODO below might need to live with missing elements?
self.aktoerref = json['aktoerref']
if 'aktoertypekode' in json:
self.aktoertypekode = json['aktoertypekode']
else:
self.aktoertypekode = None
self.virkning_from = json['from']
if 'from_included' in json:
self.virkning_from_included = json['from_included']
else:
self.virkning_from_included = None
self.virkning_to = json['to']
if 'to_included' in json:
self.virkning_to_included = json['to_included']
else:
self.virkning_to_included = None
if 'notetekst' in json:
self.notetekst = json['notetekst']
else:
self.notetekst = None
# TODO timestamps for virkning_from and virkning_to
def __repr__(self):
return 'Virkning(%s, %s)' % (self.virkning_from, self.virkning_to)
def __str__(self):
return 'Virkning: %s - %s' % (self.virkning_from, self.virkning_to)
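# Editor's note: a hedged usage example with made-up field values,
# exercising only the keys that __init__ above reads.
if __name__ == '__main__':
    v = Virkning({'aktoerref': 'urn:example', 'from': '2016-01-01', 'to': 'infinity'})
    print(v) # Virkning: 2016-01-01 - infinity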
|
[
"josef@josefassad.com"
] |
josef@josefassad.com
|
2decb4dea21e871bd38fac9f23ba7ba24c87a289
|
19cec240505e27546cb9b10104ecb16cc2454702
|
/moin/lib/python2.4/site-packages/MoinMoin/parser/java.py
|
be48f35eda7775b57a712423514394173cbb7be3
|
[] |
no_license
|
imosts/flume
|
1a9b746c5f080c826c1f316a8008d8ea1b145a89
|
a17b987c5adaa13befb0fd74ac400c8edbe62ef5
|
refs/heads/master
| 2021-01-10T09:43:03.931167
| 2016-03-09T12:09:53
| 2016-03-09T12:09:53
| 53,101,798
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,451
|
py
|
# -*- coding: iso-8859-1 -*-
"""
MoinMoin - Java Source Parser
@copyright: 2002 by Taesu Pyo <bigflood@hitel.net>
@license: GNU GPL, see COPYING for details.
"""
from MoinMoin.util.ParserBase import ParserBase
Dependencies = []
class Parser(ParserBase):
parsername = "ColorizedJava"
extensions = ['.java']
Dependencies = []
def setupRules(self):
ParserBase.setupRules(self)
self.addRulePair("Comment","/[*]","[*]/")
self.addRule("Comment","//.*$")
self.addRulePair("String",'"',r'$|[^\\](\\\\)*"')
self.addRule("Char",r"'\\.'|'[^\\]'")
self.addRule("Number",r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?")
self.addRule("ID","[a-zA-Z_][0-9a-zA-Z_]*")
self.addRule("SPChar",r"[~!%^&*()+=|\[\]:;,.<>/?{}-]")
reserved_words = ['class','interface','enum','import','package',
'byte','int','long','float','double','char','short','void','boolean',
'static','final','const','private','public','protected',
'new','this','super','abstract','native','synchronized','transient','volatile','strictfp',
'extends','implements','if','else','while','for','do','switch','case','default','instanceof',
'try','catch','finally','throw','throws','return','continue','break']
self.addReserved(reserved_words)
constant_words = ['true','false','null']
self.addConstant(constant_words)
|
[
"imosts"
] |
imosts
|
b28df3062054851c0a69f512b1e9f1efc9e316a7
|
8a24da4ee92c64e0f18af2aa42486d4666b4f0d4
|
/django_login_registration/urls.py
|
7d8a8be8b5b2bf67f4b07a106874516384824ec3
|
[] |
no_license
|
gnsaddy/django_login_registration
|
515b8b1532ca90aa416313726c5766ae374a35a2
|
746ce7230496680ad1ca2912a7b50ee2b7f7df15
|
refs/heads/master
| 2022-11-07T02:58:14.811221
| 2020-06-23T10:58:02
| 2020-06-23T10:58:02
| 270,728,556
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 810
|
py
|
"""django_login_registration URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('src.urls'))
]
|
[
"aditya.x510@gmail.com"
] |
aditya.x510@gmail.com
|
76de45ae4a4b157e710bf791c0d171343a64eb72
|
56c85ae9a4e43c6c67ade51c117b1f774d941d17
|
/blog/migrations/0001_initial.py
|
1105267166f832a5daf3f4c4d725026f6df006d9
|
[
"MIT"
] |
permissive
|
v1k45/blogghar
|
d367b41ad4ff30378826d454df398cb43c0f971b
|
ec0d2f633880af7f78cfb3412325301a5976b90c
|
refs/heads/master
| 2020-12-24T20:52:20.585477
| 2016-05-15T05:29:00
| 2016-05-15T05:29:00
| 58,632,622
| 17
| 11
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,319
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-07 11:06
from __future__ import unicode_literals
import autoslug.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Blog',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('tag_line', models.CharField(max_length=50)),
('short_description', models.TextField(max_length=255)),
('created', models.DateTimeField(auto_now_add=True)),
('last_modified', models.DateTimeField(auto_now=True)),
('is_public', models.BooleanField(default=True)),
('is_deleted', models.BooleanField(default=False)),
('author', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='blogs', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=150)),
('summary', models.CharField(blank=True, max_length=255)),
('content', models.TextField()),
('slug', autoslug.fields.AutoSlugField(editable=True, populate_from='title', unique_with=['blog'])),
('created', models.DateTimeField(auto_now_add=True)),
('last_modified', models.DateTimeField(auto_now=True)),
('status', models.BooleanField(choices=[('d', 'Draft'), ('p', 'Published')], default='d', max_length=1)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='posts', to=settings.AUTH_USER_MODEL)),
('blog', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='blogs', to='blog.Blog')),
],
),
]
|
[
"v1k45@users.noreply.github.com"
] |
v1k45@users.noreply.github.com
|
13051baba28a9db18301bf2d6fc526c6dd2a6f3c
|
f02a312e15006672fdf82edc8970ed310f40b9e2
|
/app.py
|
968cb414ed802599852837e79451b5a1efb76e48
|
[] |
no_license
|
gattimiswarnaravali/CNN_Cat_vs_Dog
|
c6f6a6887f51d53436747a3296e36b52b1a6bcc0
|
7a1f6bac4f7af5de9723c4d676106eac5305dd11
|
refs/heads/main
| 2023-07-11T15:37:14.831809
| 2021-08-31T16:23:59
| 2021-08-31T16:23:59
| 401,061,826
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,479
|
py
|
# application file
from flask import Flask, render_template, request
from werkzeug.utils import secure_filename
import tensorflow
from tensorflow.keras.preprocessing import image
import os
# initialize the app
app = Flask(__name__)
#load the model
model = None
#tensorflow.keras.models.load_model("Cat_Dog_Classifier_97.h5")
@app.route('/')
def upload_file():
return render_template('form.html')
@app.route('/uploader', methods = ['GET', 'POST'])
def uploader():
if request.method == 'POST':
f = request.files['file']
filepath = secure_filename(f.filename) # sanitize the uploaded filename
f.save(filepath) # save file
predictive_path = os.getcwd()
#predictive_path = "C:\\Learnbay\\DeploymentLearnbay\\CNN_Cat_vs_Dog\\Flask_app"
image_path1 = f'{predictive_path}\\{filepath}'
print(image_path1)
test_image = image.load_img(f'{predictive_path}\\{filepath}', target_size=(64,64))
test_image = image.img_to_array(test_image)
test_image = test_image.reshape(1,64,64,3)
load_model = tensorflow.keras.models.load_model('Cat_Dog_Classifier_97.h5')
result = load_model.predict(test_image)
if result[0][0] == 1:
out = "DOG"
else:
out = "CAT"
return render_template('predict.html', data=f"It's a {out}", image_path = filepath)
if __name__ == '__main__':
app.run(debug = True)
|
[
"noreply@github.com"
] |
gattimiswarnaravali.noreply@github.com
|
d2199dbdc489870654093364b7aa874db921cf2c
|
614187662fa96bc1ea41cfec46ec15fb346dae4e
|
/PSF_TP/psf_python_tp/py_mod3.py
|
f42e2e2691b7ed926094e3a657338aa433f46b04
|
[] |
no_license
|
damf618/PSF_TF
|
6b8e21eb891f4ddcc34a8d4ced7e8eda36f1ed58
|
42cf820e60227db2af5e6d00bd46cfb3990a89b4
|
refs/heads/master
| 2023-07-17T05:16:53.244341
| 2021-09-03T12:06:57
| 2021-09-03T12:06:57
| 390,943,809
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 20,367
|
py
|
import numpy as np
import scipy.signal as sc
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation
# Signal configuration
#--------------------------------------
fs = 8000
N = 8000
M = 550
cutFrec = [200,400,800,1600,3200,fs]
f1 = 100
f2 = 300
f3 = 700
f4 = 1200
f5 = 2000
f6 = 3000
f7 = 3600
f8 = 100
f9 = 100
f10 = 100
#--------------------------------------
# Time vectors
#--------------------------------------
nData = np.arange(0,N,1) # start from integers to avoid float rounding errors
fData = nData*(fs/(N))
tData = nData/fs
#--------------------------------------
# Generated signals
#--------------------------------------
signal_1 = np.sin(2*np.pi*f1*tData)
signal_2 = np.sin(2*np.pi*f2*tData)
signal_3 = np.sin(2*np.pi*f3*tData)
signal_4 = np.sin(2*np.pi*f4*tData)
signal_5 = np.sin(2*np.pi*f5*tData)
signal_6 = np.sin(2*np.pi*f6*tData)
signal_7 = np.sin(2*np.pi*f7*tData)
signal_8 = 0.01*np.sin(2*np.pi*f8*tData)
signal_9 = 0.01*np.sin(2*np.pi*f9*tData)
signal_10 = 0.01*np.sin(2*np.pi*f10*tData)
signal = signal_1 + signal_2 + signal_3 + signal_4 + signal_5 +signal_6 + signal_7 + signal_8 + signal_9 + signal_10
segmented_signals1 = np.zeros(N).astype("complex")
segmented_signals2 = np.zeros(N).astype("complex")
segmented_signals3 = np.zeros(N).astype("complex")
segmented_signals4 = np.zeros(N).astype("complex")
segmented_signals5 = np.zeros(N).astype("complex")
segmented_signals6 = np.zeros(N).astype("complex")
time_signals1 = np.zeros(N).astype("complex")
time_signals2 = np.zeros(N).astype("complex")
time_signals3 = np.zeros(N).astype("complex")
time_signals4 = np.zeros(N).astype("complex")
time_signals5 = np.zeros(N).astype("complex")
time_signals6 = np.zeros(N).astype("complex")
env_signals1 = np.zeros(N).astype("complex")
env_signals2 = np.zeros(N).astype("complex")
env_signals3 = np.zeros(N).astype("complex")
env_signals4 = np.zeros(N).astype("complex")
env_signals5 = np.zeros(N).astype("complex")
env_signals6 = np.zeros(N).astype("complex")
diff_rect_signal = np.zeros(9600).astype("complex")
diff_rect_signals1 = np.zeros(9600).astype("complex")
diff_rect_signals2 = np.zeros(9600).astype("complex")
diff_rect_signals3 = np.zeros(9600).astype("complex")
diff_rect_signals4 = np.zeros(9600).astype("complex")
diff_rect_signals5 = np.zeros(9600).astype("complex")
diff_rect_signals6 = np.zeros(9600).astype("complex")
#--------------------------------------
#------------Spectrum--------------------------
#--------------------------------------
#NO GRAPH
fftData = np.fft.fft(signal)
print(len(fftData))
#GRAPH
fftDataGraph = np.abs(1/N*fftData)**2
print(len(fftData))
for i in range(len(fData)):
if(np.abs(fData[i])>cutFrec[0]):
segmented_signals1[i] = 0
else:
segmented_signals1[i] = fftData[i]
if((np.abs(fData[i])>cutFrec[1])or(np.abs(fData[i])<cutFrec[0])):
segmented_signals2[i] = 0
else:
segmented_signals2[i] = fftData[i]
if((np.abs(fData[i])>cutFrec[2])or(np.abs(fData[i])<cutFrec[1])):
segmented_signals3[i] = 0
else:
segmented_signals3[i] = fftData[i]
if((np.abs(fData[i])>cutFrec[3])or(np.abs(fData[i])<cutFrec[2])):
segmented_signals4[i] = 0
else:
segmented_signals4[i] = fftData[i]
if((np.abs(fData[i])>cutFrec[4])or(np.abs(fData[i])<cutFrec[3])):
segmented_signals5[i] = 0
else:
segmented_signals5[i] = fftData[i]
if((np.abs(fData[i])>cutFrec[5])or(np.abs(fData[i])<cutFrec[4])):
segmented_signals6[i] = 0
else:
segmented_signals6[i] = fftData[i]
"""
MODULE 2: ENVELOPE
"""
# INVERSE FOURIER TRANSFORM
time_signal = np.fft.ifft(fftData)
time_signals1 = np.fft.ifft(segmented_signals1)
time_signals2 = np.fft.ifft(segmented_signals2)
time_signals3 = np.fft.ifft(segmented_signals3)
time_signals4 = np.fft.ifft(segmented_signals4)
time_signals5 = np.fft.ifft(segmented_signals5)
time_signals6 = np.fft.ifft(segmented_signals6)
# LOW-PASS FILTER: HALF OF A HANNING WINDOW
# The filter is specified as half of a Hanning window spanning 0.4 seconds...
# fs = 8000 Hz, so the filter must be 0.4 * 8000 = 3200 samples...
M = 3200
filter_h = np.hanning(M)
filter_h = filter_h[int(M/2):]
M=int(M/2)
# Convolution theorem...
# Zero padding
# New length: N+M-1 -> 8000+1600-1 = 9599
time_signal_ext = np.concatenate((time_signal,np.zeros(M-1)))
time_signals1_ext = np.concatenate((time_signals1,np.zeros(M-1)))
time_signals2_ext = np.concatenate((time_signals2,np.zeros(M-1)))
time_signals3_ext = np.concatenate((time_signals3,np.zeros(M-1)))
time_signals4_ext = np.concatenate((time_signals4,np.zeros(M-1)))
time_signals5_ext = np.concatenate((time_signals5,np.zeros(M-1)))
time_signals6_ext = np.concatenate((time_signals6,np.zeros(M-1)))
filter_h = np.concatenate((filter_h,np.zeros(N-1)))
#extra_t = np.arange(N,N+M-1,1)
#extra_t = extra_t / fs
#copyData = np.concatenate((tData,extra_t))
#figw = plt.figure ( 10 )
#figwt = figw.add_subplot ( 1,1,1 )
#origfftLn, = plt.plot( copyData,filter_h,'b-o',linewidth=4,alpha=0.5,label="original")
#figwt.grid( True )
#
# FULL-WAVE RECTIFICATION
for i in range (N+M-1):
if(time_signal_ext[i]<0):
time_signal_ext[i] = -1*time_signal_ext[i]
if(time_signals1_ext[i]<0):
time_signals1_ext[i] = -1*time_signals1_ext[i]
if(time_signals2_ext[i]<0):
time_signals2_ext[i] = -1*time_signals2_ext[i]
if(time_signals3_ext[i]<0):
time_signals3_ext[i] = -1*time_signals3_ext[i]
if(time_signals4_ext[i]<0):
time_signals4_ext[i] = -1*time_signals4_ext[i]
if(time_signals5_ext[i]<0):
time_signals5_ext[i] = -1*time_signals5_ext[i]
if(time_signals6_ext[i]<0):
time_signals6_ext[i] = -1*time_signals6_ext[i]
# FFT
env_signal = np.fft.fft(time_signal_ext)
env_signals1 = np.fft.fft(time_signals1_ext)
env_signals2 = np.fft.fft(time_signals2_ext)
env_signals3 = np.fft.fft(time_signals3_ext)
env_signals4 = np.fft.fft(time_signals4_ext)
env_signals5 = np.fft.fft(time_signals5_ext)
env_signals6 = np.fft.fft(time_signals6_ext)
filter_h = np.fft.fft(filter_h)
# Convolution theorem
K=1#/480*(0.515/0.55)
env_signal = np.fft.ifft(K*env_signal*filter_h)
env_signals1 = np.fft.ifft(K*env_signals1*filter_h)
env_signals2 = np.fft.ifft(K*env_signals2*filter_h)
env_signals3 = np.fft.ifft(K*env_signals3*filter_h)
env_signals4 = np.fft.ifft(K*env_signals4*filter_h)
env_signals5 = np.fft.ifft(K*env_signals5*filter_h)
env_signals6 = np.fft.ifft(K*env_signals6*filter_h)
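# Editor's note (hedged self-check of the convolution-theorem step above):
# zero-padding both sequences to N+M-1 and multiplying their FFTs must
# reproduce the direct linear convolution.
_x = np.array([1.0, 2.0, 3.0])
_h = np.array([1.0, -1.0])
_L = len(_x) + len(_h) - 1
_fast = np.real(np.fft.ifft(np.fft.fft(_x, _L) * np.fft.fft(_h, _L)))
assert np.allclose(_fast, np.convolve(_x, _h))  # [1, 1, 1, -3]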
# Differentiation
diferenciador = np.array(([1,-1]))
diff_signal = np.convolve(env_signal,diferenciador)
diff_signals1 = np.convolve(env_signals1,diferenciador)
diff_signals2 = np.convolve(env_signals2,diferenciador)
diff_signals3 = np.convolve(env_signals3,diferenciador)
diff_signals4 = np.convolve(env_signals4,diferenciador)
diff_signals5 = np.convolve(env_signals5,diferenciador)
diff_signals6 = np.convolve(env_signals6,diferenciador)
print(len(diff_signal))
# Half-wave rectification
for i in range(len(diff_signal)):
if(diff_signal[i]<0):
diff_rect_signal[i] = 0
else:
diff_rect_signal[i] = diff_signal[i]
if(diff_signals1[i]<0):
diff_rect_signals1[i] = 0
else:
diff_rect_signals1[i] = diff_signals1[i]
if(diff_signals2[i]<0):
diff_rect_signals2[i] = 0
else:
diff_rect_signals2[i] = diff_signals2[i]
if(diff_signals3[i]<0):
diff_rect_signals3[i] = 0
else:
diff_rect_signals3[i] = diff_signals3[i]
if(diff_rect_signals3[i]<0):
diff_rect_signals3[i] = 0
if(diff_signals4[i]<0):
diff_rect_signals4[i] = 0
else:
diff_rect_signals4[i] = diff_signals4[i]
if(diff_signals5[i]<0):
diff_rect_signals5[i] = 0
else:
diff_rect_signals5[i] = diff_signals5[i]
if(diff_signals6[i]<0):
diff_rect_signals6[i] = 0
else:
diff_rect_signals6[i] = diff_signals6[i]
# M-1 extra samples come from the first convolution; the derivative adds a 2-tap convolution, so one more sample is appended (M in total)
extra_t = np.arange(N,N+M,1)
extra_t = extra_t/fs
tData = np.concatenate((tData,extra_t))
# FIGURE 4
fig4 = plt.figure ( 3 )
fig4.suptitle('Step 5: Derivative of the signal envelope', fontsize=16)
# IFFT OF THE ORIGINAL SIGNAL
dif = fig4.add_subplot ( 3,1,1 )
dif.set_title("Original signal: envelope derivative",rotation=0,fontsize=10,va="center")
difLn, = plt.plot( tData,diff_signal,'b-o',linewidth=4,alpha=0.5,label="time inverse")
dif.set_xlim ( ((M)/2)/fs ,(N+(M)/2)/fs )
dif.grid( True )
# IFFT OF THE SEGMENTED SIGNAL
dif1 = fig4.add_subplot ( 3,3,4 )
dif1.set_title("Segmented signal 1: envelope derivative",rotation=0,fontsize=10,va="center")
dif1Ln, = plt.plot( tData,diff_signals1,'r',linewidth = 5 , label="time_s 1" )
dif1.set_xlim ( 0.5 ,0.52 )
dif1.grid( True )
dif2 = fig4.add_subplot ( 3,3,5 )
dif2.set_title("Segmented signal 2: envelope derivative",rotation=0,fontsize=10,va="center")
dif2Ln, = plt.plot( tData,diff_signals2,'g',linewidth = 5 , label="time_s 2" )
dif2.set_xlim ( ((M)/2)/fs ,(N+(M)/2)/fs )
dif2.grid( True )
dif3 = fig4.add_subplot ( 3,3,6 )
dif3.set_title("Segmented signal 3: envelope derivative",rotation=0,fontsize=10,va="center")
dif3Ln, = plt.plot( tData,diff_signals3,'b',linewidth = 5 , label="time_s 3" )
dif3.set_xlim ( ((M)/2)/fs ,(N+(M)/2)/fs )
dif3.grid( True )
dif4 = fig4.add_subplot ( 3,3,7 )
dif4.set_title("Segmented Signal 4 Envelope Derivative",rotation=0,fontsize=10,va="center")
dif4Ln, = plt.plot( tData,diff_signals4,'c',linewidth = 5 , label="time_s 4" )
dif4.set_xlim ( ((M)/2)/fs ,(N+(M)/2)/fs )
dif4.grid( True )
dif5 = fig4.add_subplot ( 3,3,8 )
dif5.set_title("Segmented Signal 5 Envelope Derivative",rotation=0,fontsize=10,va="center")
dif5Ln, = plt.plot( tData,diff_signals5,'m',linewidth = 5 , label="time_s 5" )
dif5.set_xlim ( ((M)/2)/fs ,(N+(M)/2)/fs )
dif5.grid( True )
dif6 = fig4.add_subplot ( 3,3,9 )
dif6.set_title("Segmented Signal 6 Envelope Derivative",rotation=0,fontsize=10,va="center")
dif6Ln, = plt.plot( tData,diff_signals6,'k',linewidth = 5 , label="time_s 6" )
dif6.set_xlim ( ((M)/2)/fs ,(N+(M)/2)/fs )
dif6.grid( True )
#FIGURE 3
fig3 = plt.figure ( 4 )
fig3.suptitle('Step 6: Rectification of the signal derivative', fontsize=16)
# RECTIFIED DERIVATIVE OF THE ORIGINAL SIGNAL
origenv = fig3.add_subplot ( 3,1,1 )
origenv.set_title("Diff-Rect Original Signal",rotation=0,fontsize=10,va="center")
origenvLn, = plt.plot( tData,diff_rect_signal,'b-o',linewidth=4,alpha=0.5,label="time inverse")
origenv.set_xlim ( ((M)/2)/fs ,(N+(M)/2)/fs )
origenv.grid( True )
# RECTIFIED DERIVATIVE OF THE SEGMENTED SIGNALS
envAxe1 = fig3.add_subplot ( 3,3,4 )
envAxe1.set_title("Diff-Rect Senial Segmentada1",rotation=0,fontsize=10,va="center")
envs1Ln, = plt.plot( tData,diff_rect_signals1,'r',linewidth = 5 , label="time_s 1" )
envAxe1.set_xlim ( 0.5 ,0.52 )
envAxe1.grid( True )
envAxe2 = fig3.add_subplot ( 3,3,5 )
envAxe2.set_title("Diff-Rect Segmented Signal 2",rotation=0,fontsize=10,va="center")
envs2Ln, = plt.plot( tData,diff_rect_signals2,'g',linewidth = 5 , label="time_s 2" )
envAxe2.set_xlim ( ((M)/2)/fs ,(N+(M)/2)/fs )
envAxe2.grid( True )
envAxe3 = fig3.add_subplot ( 3,3,6 )
envAxe3.set_title("Diff-Rect Segmented Signal 3",rotation=0,fontsize=10,va="center")
envs3Ln, = plt.plot( tData,diff_rect_signals3,'b',linewidth = 5 , label="time_s 3" )
envAxe3.set_xlim ( ((M)/2)/fs ,(N+(M)/2)/fs )
envAxe3.grid( True )
envAxe4 = fig3.add_subplot ( 3,3,7 )
origenv.set_title("Diff-Rect Senial Segmentada4",rotation=0,fontsize=10,va="center")
envs4Ln, = plt.plot( tData,diff_rect_signals4,'c',linewidth = 5 , label="time_s 4" )
envAxe4.set_xlim ( ((M)/2)/fs ,(N+(M)/2)/fs )
envAxe4.grid( True )
envAxe5 = fig3.add_subplot ( 3,3,8 )
origenv.set_title("Diff-Rect Senial Segmentada5",rotation=0,fontsize=10,va="center")
envs5Ln, = plt.plot( tData,diff_rect_signals5,'m',linewidth = 5 , label="time_s 5" )
envAxe5.set_xlim ( ((M)/2)/fs ,(N+(M)/2)/fs )
envAxe5.grid( True )
envAxe6 = fig3.add_subplot ( 3,3,9 )
origenv.set_title("Diff-Rect Senial Segmentada6",rotation=0,fontsize=10,va="center")
envs6Ln, = plt.plot( tData,diff_rect_signals6,'k',linewidth = 5 , label="time_s 6" )
envAxe6.set_xlim ( ((M)/2)/fs ,(N+(M)/2)/fs )
envAxe6.grid( True )
plt.show()
"""
#PLOTS
fig = plt.figure ( 1 )
fig.suptitle('Step 1: Signal segmentation', fontsize=16)
# FFT OF THE ORIGINAL SIGNAL
origfft = fig.add_subplot ( 2,1,1 )
origfftLn, = plt.plot( fData,fftDataGraph,'b-o',linewidth=4,alpha=0.5,label="original")
origfft.grid( True )
n_data = int(N/2)
print(n_data)
print(fData[:n_data])
# FFT OF THE SEGMENTED SIGNALS
fftAxe = fig.add_subplot ( 2,1,2 )
ffts1Ln, = plt.plot( fData[:n_data],np.abs(1/N*segmented_signals1[:n_data])**2,'r',linewidth = 5 , label="cutoff 1" )
ffts2Ln, = plt.plot( fData[:n_data],np.abs(1/N*segmented_signals2[:n_data])**2,'g',linewidth = 5 , label="cutoff 2" )
ffts3Ln, = plt.plot( fData[:n_data],np.abs(1/N*segmented_signals3[:n_data])**2,'b',linewidth = 5 , label="cutoff 3" )
ffts4Ln, = plt.plot( fData[:n_data],np.abs(1/N*segmented_signals4[:n_data])**2,'c',linewidth = 5 , label="cutoff 4" )
ffts5Ln, = plt.plot( fData[:n_data],np.abs(1/N*segmented_signals5[:n_data])**2,'m',linewidth = 5 , label="cutoff 5" )
ffts6Ln, = plt.plot( fData[:n_data],np.abs(1/N*segmented_signals6[:n_data])**2,'k',linewidth = 5 , label="cutoff 6" )
fftAxe.grid( True )
#FIGURE 2
fig2 = plt.figure ( 2 )
fig2.suptitle('Step 2: Signal decomposition', fontsize=16)
# IFFT OF THE ORIGINAL SIGNAL
origifft = fig2.add_subplot ( 3,1,1 )
origifft.set_title("Original Signal IFFT",rotation=0,fontsize=10,va="center")
origifftLn, = plt.plot( tData,time_signal,'b-o',linewidth=4,alpha=0.5,label="time inverse")
origifftLn1, = plt.plot( tData,time_signals6,'k',linewidth=1,label="time_s 6" )
origifft.set_xlim ( 0 ,0.06 )
origifft.grid( True )
# IFFT OF THE SEGMENTED SIGNALS
ifftAxe1 = fig2.add_subplot ( 3,3,4 )
ifftAxe1.set_title("Segmented Signal 1 IFFT",rotation=0,fontsize=10,va="center")
iffts1Ln, = plt.plot( tData,time_signals1,'r',linewidth = 5 , label="time_s 1" )
ifftAxe1.set_xlim ( 0 ,0.02 )
ifftAxe1.grid( True )
ifftAxe2 = fig2.add_subplot ( 3,3,5 )
ifftAxe2.set_title("Segmented Signal 2 IFFT",rotation=0,fontsize=10,va="center")
iffts2Ln, = plt.plot( tData,time_signals2,'g',linewidth = 5 , label="time_s 2" )
ifftAxe2.set_xlim ( 0 ,0.02 )
ifftAxe2.grid( True )
ifftAxe3 = fig2.add_subplot ( 3,3,6 )
ifftAxe3.set_title("Segmented Signal 3 IFFT",rotation=0,fontsize=10,va="center")
iffts3Ln, = plt.plot( tData,time_signals3,'b',linewidth = 5 , label="time_s 3" )
ifftAxe3.set_xlim ( 0 ,0.02 )
ifftAxe3.grid( True )
ifftAxe4 = fig2.add_subplot ( 3,3,7 )
ifftAxe4.set_title("Segmented Signal 4 IFFT",rotation=0,fontsize=10,va="center")
iffts4Ln, = plt.plot( tData,time_signals4,'c',linewidth = 5 , label="time_s 4" )
ifftAxe4.set_xlim ( 0 ,0.02 )
ifftAxe4.grid( True )
ifftAxe5 = fig2.add_subplot ( 3,3,8 )
ifftAxe5.set_title("Segmented Signal 5 IFFT",rotation=0,fontsize=10,va="center")
iffts5Ln, = plt.plot( tData,time_signals5,'m',linewidth = 5 , label="time_s 5" )
ifftAxe5.set_xlim ( 0 ,0.02 )
ifftAxe5.grid( True )
ifftAxe6 = fig2.add_subplot ( 3,3,9 )
ifftAxe6.set_title("Segmented Signal 6 IFFT",rotation=0,fontsize=10,va="center")
iffts6Ln, = plt.plot( tData,time_signals6,'k',linewidth = 5 , label="time_s 6" )
ifftAxe6.set_xlim ( 0 ,0.02 )
ifftAxe6.grid( True )
extra_t = np.arange(N,N+M-1,1)
extra_t = extra_t/fs
tData = np.concatenate((tData,extra_t))
#FIGURE 3
fig3 = plt.figure ( 3 )
fig3.suptitle('Step 2: Signal envelope', fontsize=16)
# ENVELOPE OF THE ORIGINAL SIGNAL
origenv = fig3.add_subplot ( 3,1,1 )
origenv.set_title("Original Signal Envelope",rotation=0,fontsize=10,va="center")
difLn, = plt.plot( tData,time_signal_ext,'y',linewidth=4,alpha=0.2,label="time inverse")
origenvLn, = plt.plot( tData,np.real(env_signal),'b-o',linewidth=4,alpha=0.5,label="time inverse")
origenv.set_xlim ( ((M-1)/2)/fs ,(N+(M-1)/2)/fs )
origenv.grid( True )
# ENVELOPE OF THE SEGMENTED SIGNALS
envAxe1 = fig3.add_subplot ( 3,3,4 )
envAxe1.set_title("Envolvente Senial Segmentada1",rotation=0,fontsize=10,va="center")
envs1Ln, = plt.plot( tData,np.real(env_signals1),'r',linewidth = 5 , label="time_s 1" )
dif1Ln, = plt.plot( tData,time_signals1_ext,'b',linewidth = 1 ,alpha=0.2, label="time_s 1" )
envAxe1.set_xlim ( ((M-1)/2)/fs ,(N+(M-1)/2)/fs )
#envAxe1.set_xlim ( 0 ,0.02 )
envAxe1.grid( True )
envAxe2 = fig3.add_subplot ( 3,3,5 )
envAxe2.set_title("Segmented Signal 2 Envelope",rotation=0,fontsize=10,va="center")
envs2Ln, = plt.plot( tData,np.real(env_signals2),'g',linewidth = 5 , label="time_s 2" )
#envAxe2.set_xlim ( 0 ,0.02 )
envAxe2.grid( True )
envAxe3 = fig3.add_subplot ( 3,3,6 )
envAxe3.set_title("Segmented Signal 3 Envelope",rotation=0,fontsize=10,va="center")
envs3Ln, = plt.plot( tData,np.real(env_signals3),'b',linewidth = 5 , label="time_s 3" )
#envAxe3.set_xlim ( 0 ,0.02 )
envAxe3.grid( True )
envAxe4 = fig3.add_subplot ( 3,3,7 )
origenv.set_title("Envolvente Senial Segmentada4",rotation=0,fontsize=10,va="center")
envs4Ln, = plt.plot( tData,np.real(env_signals4),'c',linewidth = 5 , label="time_s 4" )
#envAxe4.set_xlim ( 0 ,0.02 )
envAxe4.grid( True )
envAxe5 = fig3.add_subplot ( 3,3,8 )
origenv.set_title("Envolvente Senial Segmentada5",rotation=0,fontsize=10,va="center")
envs5Ln, = plt.plot( tData,np.real(env_signals5),'m',linewidth = 5 , label="time_s 5" )
#envAxe5.set_xlim ( 0 ,0.02 )
envAxe5.grid( True )
envAxe6 = fig3.add_subplot ( 3,3,9 )
origenv.set_title("Envolvente Senial Segmentada6",rotation=0,fontsize=10,va="center")
envs6Ln, = plt.plot( tData,np.real(env_signals6),'k',linewidth = 5 , label="time_s 6" )
#envAxe6.set_xlim ( 0 ,0.02 )
envAxe6.grid( True )
#FIGURE 4
fig4 = plt.figure ( 4 )
fig4.suptitle('Step 2: Full-wave rectification of the signal', fontsize=16)
# IFFT OF THE ORIGINAL SIGNAL
dif = fig4.add_subplot ( 3,1,1 )
dif.set_title("Original Signal IFFT",rotation=0,fontsize=10,va="center")
difLn, = plt.plot( tData,time_signal_ext,'b-o',linewidth=4,alpha=0.5,label="time inverse")
dif.set_xlim ( 0 ,0.06 )
dif.grid( True )
# IFFT OF THE SEGMENTED SIGNALS
dif1 = fig4.add_subplot ( 3,3,4 )
dif1.set_title("Senial Segmentada1 IFFT",rotation=0,fontsize=10,va="center")
dif1Ln, = plt.plot( tData,time_signals1_ext,'r',linewidth = 5 , label="time_s 1" )
dif1.set_xlim ( 0 ,0.02 )
dif1.grid( True )
dif2 = fig4.add_subplot ( 3,3,5 )
dif2.set_title("Segmented Signal 2 IFFT",rotation=0,fontsize=10,va="center")
dif2Ln, = plt.plot( tData,time_signals2_ext,'g',linewidth = 5 , label="time_s 2" )
dif2.set_xlim ( 0 ,0.02 )
dif2.grid( True )
dif3 = fig4.add_subplot ( 3,3,6 )
dif3.set_title("Segmented Signal 3 IFFT",rotation=0,fontsize=10,va="center")
dif3Ln, = plt.plot( tData,time_signals3_ext,'b',linewidth = 5 , label="time_s 3" )
dif3.set_xlim ( 0 ,0.02 )
dif3.grid( True )
dif4 = fig4.add_subplot ( 3,3,7 )
dif4.set_title("Segmented Signal 4 IFFT",rotation=0,fontsize=10,va="center")
dif4Ln, = plt.plot( tData,time_signals4_ext,'c',linewidth = 5 , label="time_s 4" )
dif4.set_xlim ( 0 ,0.02 )
dif4.grid( True )
dif5 = fig4.add_subplot ( 3,3,8 )
dif5.set_title("Segmented Signal 5 IFFT",rotation=0,fontsize=10,va="center")
dif5Ln, = plt.plot( tData,time_signals5_ext,'m',linewidth = 5 , label="time_s 5" )
dif5.set_xlim ( 0 ,0.02 )
dif5.grid( True )
dif6 = fig4.add_subplot ( 3,3,9 )
dif6.set_title("Segmented Signal 6 IFFT",rotation=0,fontsize=10,va="center")
dif6Ln, = plt.plot( tData,time_signals6_ext,'k',linewidth = 5 , label="time_s 6" )
dif6.set_xlim ( 0 ,0.02 )
dif6.grid( True )
plt.show()
"""
|
[
"damf618@gmail.com"
] |
damf618@gmail.com
|
d48f15e50dc1a339fcf4479a824f50b05d54cc8e
|
1415fa90c4d86e76d76ead544206d73dd2617f8b
|
/venv/Lib/site-packages/direct/leveleditor/LayerEditorUI.py
|
d77f8779bb6d51e938dbd433de3cace14d3e9688
|
[
"MIT"
] |
permissive
|
Darpra27/Juego-senales
|
84ea55aea7c61308ec1821dac9f5a29d2e0d75de
|
e94bc819e05eff1e0126c094d21ae1ec2a1ef46d
|
refs/heads/main
| 2023-04-04T07:27:53.878785
| 2021-04-09T00:00:44
| 2021-04-09T00:00:44
| 353,472,016
| 0
| 1
|
MIT
| 2021-04-09T00:04:31
| 2021-03-31T19:46:14
|
Python
|
UTF-8
|
Python
| false
| false
| 9,507
|
py
|
"""
Defines Layer UI
"""
import wx
from pandac.PandaModules import *
from . import ObjectGlobals as OG
class LayerEditorUI(wx.Panel):
def __init__(self, parent, editor):
wx.Panel.__init__(self, parent)
self.editor = editor
self.editorTxt = "Layer Editor"
self.saveData = []
self.layersDataDict = dict()
self.layersDataDictNextKey = 0
self.systemLayerKeys = []
self.llist = wx.ListCtrl(self, -1, style=wx.LC_REPORT|wx.LC_EDIT_LABELS|wx.LC_NO_HEADER)
self.llist.InsertColumn(0, "Layers")
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(self.llist, 1, wx.EXPAND, 0)
self.SetSizer(sizer); self.Layout()
parentSizer = wx.BoxSizer(wx.VERTICAL)
parentSizer.Add(self, 1, wx.EXPAND, 0)
parent.SetSizer(parentSizer); parent.Layout()
self.opAdd = "Add Layer"
self.opDelete = "Delete Layer"
self.opRename = "Rename Layer"
self.opAddObj = "Add Selected Object"
self.opRemoveObj = "Remove Selected Object"
self.opShowObj = "Show Layer Objects"
self.opHideObj = "Hide Layer Objects"
self.menuItemsGen = list()
self.menuItemsGen.append(self.opAdd)
#self.menuItems.append(self.opRename)
self.menuItemsObj = list()
self.menuItemsObj.append(self.opAddObj)
self.menuItemsObj.append(self.opRemoveObj)
self.menuItemsObj.append(self.opShowObj)
self.menuItemsObj.append(self.opHideObj)
self.menuItemsObj.append(self.opDelete)
self.popupmenu = wx.Menu()
for item in self.menuItemsGen:
menuItem = self.popupmenu.Append(-1, item)
self.Bind(wx.EVT_MENU, self.onPopupItemSelected, menuItem)
self.Bind(wx.EVT_CONTEXT_MENU, self.onShowPopup)
self.llist.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.onShowMembers)
def menuAppendGenItems(self):
for item in self.menuItemsGen:
menuItem = self.popupmenu.Append(-1, item)
self.Bind(wx.EVT_MENU, self.onPopupItemSelected, menuItem)
def menuAppendObjItems(self, hitItem):
for item in self.menuItemsObj:
if hitItem in self.systemLayerKeys:
if item in [self.opRemoveObj, self.opDelete, self.opAddObj]:
continue
menuItem = self.popupmenu.Append(-1, item)
self.Bind(wx.EVT_MENU, self.onPopupItemSelected, menuItem)
def onShowPopup(self, event):
pos = event.GetPosition()
pos = self.ScreenToClient(pos)
for menuItem in self.popupmenu.GetMenuItems():
self.popupmenu.RemoveItem(menuItem)
#import pdb;set_trace()
hitItem, flags = self.llist.HitTest(pos)
if hitItem == -1:
self.menuAppendGenItems()
else:
self.menuAppendObjItems(hitItem)
self.PopupMenu(self.popupmenu, pos)
def onPopupItemSelected(self, event):
menuItem = self.popupmenu.FindItemById(event.GetId())
text = menuItem.GetText()
if text == self.opAddObj:
self.addObj()
elif text == self.opRemoveObj:
self.removeObj()
elif text == self.opShowObj:
self.HideObj(False)
elif text == self.opHideObj:
self.HideObj(True)
elif text == self.opAdd:
self.addLayer()
elif text == self.opDelete:
self.deleteLayer()
elif text == self.opRename:
self.renameLayer()
else:
wx.MessageBox("You selected item '%s'" % text)
def reset(self):
#import pdb;set_trace()
self.layersDataDict.clear()
self.layersDataDictNextKey = 0
self.llist.DeleteAllItems()
self.systemLayerKeys = []
def findLabel(self, text):
found = False
for index in range(self.llist.GetItemCount()):
itemtext = self.llist.GetItemText(index)
if itemtext == text:
return True
return found
def addLayerData(self, idx, objUID):
self.removeObjData(objUID)
layerData = self.layersDataDict[idx]
layerData.append(objUID)
def addLayerEntry(self, name, idx):
index = self.llist.InsertStringItem(self.llist.GetItemCount(), name)
self.llist.SetItemData(index, idx)
layersData = list()
self.layersDataDict[idx] = layersData
if idx > self.layersDataDictNextKey:
self.layersDataDictNextKey = idx
def addLayer(self):
#import pdb;set_trace()
count = self.llist.GetItemCount()
i = 1
text = "Layer%s"%(count + i)
found = self.findLabel(text)
while found:
i = i + 1
text = "Layer%s"%(count + i)
found = self.findLabel(text)
self.layersDataDictNextKey = self.layersDataDictNextKey + 1
self.addLayerEntry(text, self.layersDataDictNextKey)
def deleteLayer(self):
index = self.llist.GetFirstSelected()
if index != -1:
key = self.llist.GetItemData(index)
del(self.layersDataDict[key])
item = self.llist.DeleteItem(index)
def renameLayer(self):
index = self.llist.GetFirstSelected()
if index != -1:
self.llist.SetItemState(index, wx.LIST_STATE_SELECTED, wx.LIST_STATE_SELECTED)
self.llist.SetItemState(index, wx.LIST_STATE_FOCUSED, wx.LIST_STATE_FOCUSED)
    def removeObjData(self, objUID):
        # Delete in reverse index order; deleting while iterating forward would
        # skip elements or raise IndexError once the list shrinks.
        for layersData in self.layersDataDict.values():
            for j in reversed(range(len(layersData))):
                if layersData[j] == objUID:
                    del(layersData[j])
def removeObj(self):
objNodePath = base.direct.selected.last
if objNodePath is None:
wx.MessageBox("No object was selected.", self.editorTxt, wx.OK|wx.ICON_EXCLAMATION)
return
obj = self.editor.objectMgr.findObjectByNodePath(objNodePath)
if obj is not None:
self.removeObjData(obj[OG.OBJ_UID])
def addObj(self):
index = self.llist.GetFirstSelected()
if index == -1:
wx.MessageBox("No layer was selected.", self.editorTxt, wx.OK|wx.ICON_EXCLAMATION)
return
objNodePath = base.direct.selected.last
if objNodePath is None:
wx.MessageBox("No object was selected.", self.editorTxt, wx.OK|wx.ICON_EXCLAMATION)
return
        # Check whether the object was already added to the layer
obj = self.editor.objectMgr.findObjectByNodePath(objNodePath)
if obj is not None:
i = self.llist.GetItemData(index)
layersData = self.layersDataDict[i]
for j in range(len(layersData)):
if layersData[j] == obj[OG.OBJ_UID]:
wx.MessageBox("Selected object already is this layer", self.editorTxt, wx.OK|wx.ICON_EXCLAMATION)
return
# Looking for the object in the other layers
# If the object is found - delete it.
self.removeObj()
layersData.append(obj[OG.OBJ_UID])
def onShowMembers(self, event):
item = event.GetItem()
layerMembers = list()
layerName = item.GetText()
key = item.GetData()
layerData = self.layersDataDict[key]
for i in range(len(layerData)):
obj = self.editor.objectMgr.findObjectById(layerData[i])
namestr = "%s_%s"%(obj[OG.OBJ_DEF].name, obj[OG.OBJ_UID])
layerMembers.append(namestr)
dialog = wx.SingleChoiceDialog(None, layerName, self.editorTxt, layerMembers)
if dialog.ShowModal() == wx.ID_OK:
#do something here
dialog.GetStringSelection()
dialog.Destroy()
def HideObj(self, hide):
index = self.llist.GetFirstSelected()
if index == -1:
wx.MessageBox("No layer was selected.", self.editorTxt, wx.OK|wx.ICON_EXCLAMATION)
return
key = self.llist.GetItemData(index)
layerData = self.layersDataDict[key]
if len(layerData) == 0:
return
for i in range(len(layerData)):
obj = self.editor.objectMgr.findObjectById(layerData[i])
if hide:
obj[OG.OBJ_NP].hide()
else:
obj[OG.OBJ_NP].show()
        font = self.llist.GetItemFont(index)
if hide:
font.SetWeight(wx.FONTWEIGHT_BOLD)
else:
font.SetWeight(wx.FONTWEIGHT_NORMAL)
self.llist.SetItemFont(index, font)
def traverse(self):
self.saveData.append("\nif hasattr(base, 'le'):")
self.saveData.append(" ui.layerEditorUI.reset()")
for index in range(self.llist.GetItemCount()):
self.saveData.append(" ui.layerEditorUI.addLayerEntry('%s', %s )"%(self.llist.GetItemText(index), self.llist.GetItemData(index)))
layersDataDictKeys = list(self.layersDataDict.keys())
for i in range(len(layersDataDictKeys)):
layerData = self.layersDataDict[layersDataDictKeys[i]]
for j in range(len(layerData)):
self.saveData.append(" ui.layerEditorUI.addLayerData(%s, '%s')"%(layersDataDictKeys[i], layerData[j]))
def getSaveData(self):
self.saveData = []
self.traverse()
return self.saveData
|
[
"daviricado08@gmail.com"
] |
daviricado08@gmail.com
|
d2e128c4caf17377cb2158dc6a4f3971f2be5d87
|
0bcdf50443ce11216b2ca7651e18bba1fb27542d
|
/algorithm/BAEKJOON/14889스타트와링크.py
|
582433a58ac3dbf2f37ed4dfd1bf752b9464e5fb
|
[] |
no_license
|
qqyurr/TIL
|
d4bdf75bbec8c4140d8d898dfba4823de3bf3ce0
|
efb689c2dcaaf1cfc1ff429e29fdb7294a770d2a
|
refs/heads/master
| 2023-07-19T11:48:56.732651
| 2021-09-07T09:10:29
| 2021-09-07T09:10:29
| 280,100,189
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 830
|
py
|
from itertools import combinations
import sys
sys.stdin = open('14889.txt')
N = int(input())
BRD = [list(map(int, input().split())) for _ in range(N)]
members = [i for i in range(N)]
team = []
# Split the n people into two teams -> combinations needed
for i in list(combinations(members, N//2)):
team.append(i)
# print(team)
min_num = 10000000
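# combinations() yields the teams in lexicographic order, so the complement of
# team[i] is team[-1-i]; looping over the first half of the list therefore
# visits each split into two teams exactly once.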
for i in range(len(team)//2):
start = team[i]
# print(start)
add_start = 0
for j in range(N//2):
one = start[j]
for m in start:
add_start += BRD[one][m]
# print(add_start)
link = team[-1-i]
add_link = 0
for j in range(N//2):
one = link[j]
for m in link:
add_link += BRD[one][m]
# print(add_link)
if min_num > (abs(add_start - add_link)):
min_num = abs(add_start-add_link)
print(min_num)
|
[
"byyulli16@gmail.com"
] |
byyulli16@gmail.com
|
b2d9331b42f37ba0e758f1c0f3d3289f23cad210
|
6cd1ee8d95ad5b99d3c41c31235bd1a2b5fa6741
|
/REST/views.py
|
cce0a822eb6bea1ab7f024289bf2ee23ab6f7973
|
[] |
no_license
|
ZohanHo/bloodhound
|
bd21ae1d167385d67234bc161d169e5920b5a72e
|
475d17bc3d86cc9a730038300b9133d743288298
|
refs/heads/master
| 2022-12-14T00:10:20.151756
| 2019-04-21T21:58:45
| 2019-04-21T21:58:45
| 176,072,576
| 0
| 0
| null | 2022-12-08T01:44:30
| 2019-03-17T08:02:04
|
Python
|
UTF-8
|
Python
| false
| false
| 1,008
|
py
|
from django.contrib.auth.models import User
from base.models import Contact
from .serializers import UserSerializer, ContactSerializer
from rest_framework.response import Response
from rest_framework import status, viewsets
from rest_framework.decorators import action
# For access permissions and authorization
from rest_framework.permissions import IsAdminUser, IsAuthenticated, AllowAny
# A ViewSet describing our API
class ContactViewSet(viewsets.ModelViewSet):
#lookup_field = "pk"
queryset = Contact.objects.all() #.order_by('phone')
serializer_class = ContactSerializer
#permission_classes = (IsAdminUser,)
# A ViewSet describing our API
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
#permission_classes = (AllowAny,)
#lookup_field = "pk"
def get_queryset(self):
return User.objects.order_by("-pk")
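# A typical way to expose these ViewSets (a sketch; the URL prefixes and the
# project-level urls.py location are assumptions, not part of this repo's code):
#
#   from rest_framework.routers import DefaultRouter
#   router = DefaultRouter()
#   router.register(r'contacts', ContactViewSet)
#   router.register(r'users', UserViewSet)
#   urlpatterns = router.urls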
|
[
"serdik.evgeniy@gmail.com"
] |
serdik.evgeniy@gmail.com
|
9d06f72217ec2d55e28afe9e7463b706f9083af0
|
d96890324d0b8f78cea54e473b9916d2f1eb7265
|
/setup.py
|
1a31a7b9c208ad57b7b1fc2048bb070f2e2b7bae
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
jonasrk/spock
|
55720311ca557ae8b25d38cf29c7af8ab46c117a
|
b50a5843fe8b91baf0fe34f944c9a7c478207532
|
refs/heads/master
| 2021-01-18T08:34:26.267485
| 2013-08-03T13:36:37
| 2013-08-03T13:36:37
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 136
|
py
|
from distutils.core import setup
setup (
name='spock',
version='0.20',
packages=['spock', 'spock.mcp', 'spock.net',],
)
|
[
"nickelpro@gmail.com"
] |
nickelpro@gmail.com
|
a86211de4ee94a929e61f93a9d1f3b2c9942315c
|
22161b04bbae0336f472513bbe0954d0f00ecc47
|
/setup.py
|
ffe8f6fef18d688b72d4d4f47359ffe700c486ea
|
[
"MIT"
] |
permissive
|
aleosd/confj
|
fc642e454be78d5fa19acae4ef5774b98c7e3b9b
|
d4d5bdc4a0c616fcebf5280503e74d8fdbd4b859
|
refs/heads/develop
| 2020-05-16T01:26:04.128098
| 2020-04-12T12:12:24
| 2020-04-12T12:12:24
| 182,602,068
| 1
| 1
|
MIT
| 2019-08-03T03:27:35
| 2019-04-22T01:27:09
|
Python
|
UTF-8
|
Python
| false
| false
| 1,583
|
py
|
from os import path
from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='confj',
version='0.2.4',
description='Load configs from json files',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/aleosd/confj',
author='Alekey Osadchuk',
author_email='osdalex@gmail.com',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
keywords='json configs loader',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
python_requires='>=3.5, <4',
extras_require={
'validation': ['jsonschema'],
'aws': ['boto3'],
'test': ['pytest'],
},
project_urls={
'Bug Reports': 'https://github.com/aleosd/confj/issues',
'Say Thanks!': 'https://saythanks.io/to/aleosd',
'Source': 'https://github.com/aleosd/confj/',
},
)
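# The optional extras declared above install in the usual way, e.g.:
#   pip install confj[validation]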
|
[
"osdalex@gmail.com"
] |
osdalex@gmail.com
|
79a05ceea872187af239174ca0489743e3de39ff
|
9b1446b26e81a79c303f9799fb6a91785c7adb03
|
/.history/Code/histogram_20200120000147.py
|
72a6faadf900ceb40b16a8138e79bdf8aa3c6d00
|
[] |
no_license
|
SamirIngley/CS1.2-Tweet-Gen
|
017ea15b1113881a156ff24682828bc654eb6c81
|
bcd95fa63e05849cbf8e36230d8e31032b99daaa
|
refs/heads/master
| 2020-12-14T20:19:57.733290
| 2020-08-04T23:19:23
| 2020-08-04T23:19:23
| 234,856,234
| 0
| 0
| null | 2020-06-05T21:13:04
| 2020-01-19T07:05:55
|
Python
|
UTF-8
|
Python
| false
| false
| 311
|
py
|
def list_histo(source):
    histo = []
    text = source.split()
    for word in text:
        # Each entry is a [count, word] pair, so compare against entry[1].
        for entry in histo:
            if entry[1] == word:
                entry[0] += 1
                break
        else:
            histo.append([1, word])
    return histo

if __name__ == '__main__':
    source = 'one fish two fish red fish blue fish'
    print(list_histo(source))
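# A dictionary-based histogram is usually more idiomatic and O(1) per lookup;
# a sketch using the standard library:
#   from collections import Counter
#   histo = Counter(source.split())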
|
[
"samir.ingle7@gmail.com"
] |
samir.ingle7@gmail.com
|
1a95fedf8e6f2af300f9dad269b4c9acdf8206d5
|
9be6b980d3d1c3cddcef3a16c196c2d868f83434
|
/dummy/mysite/tutor4u/apps.py
|
738608ac4c711c8b80588f017a8352b9762d95ed
|
[] |
no_license
|
davidhzheng1996/Tutor4u
|
8e55a3418e68ba53c48de21cd2a9aaeae7d04b27
|
797d8dccc5fbfd922b20198d51a27f8626d61435
|
refs/heads/master
| 2021-05-03T11:03:32.242287
| 2018-02-07T00:49:18
| 2018-02-07T00:49:18
| 120,542,270
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 130
|
py
|
from __future__ import unicode_literals
from django.apps import AppConfig
class Tutor4UConfig(AppConfig):
name = 'tutor4u'
|
[
"31869047+davidhzheng1996@users.noreply.github.com"
] |
31869047+davidhzheng1996@users.noreply.github.com
|
28db67f9597fe79f0b28b3f9da2ec34ad2a65175
|
123cff743e613412524e43d1207c6689a8b0a6d4
|
/tests/test_user_facing.py
|
2ca2b5d9b1e3ed6645b7f815adc4604e2ad65e7f
|
[
"Apache-2.0"
] |
permissive
|
FrailLeon/CTFd
|
fbed8e028609abe98fb655d33ecc64731a31bace
|
967129a00982b9a983559c63188f8ccca0f54ff3
|
refs/heads/master
| 2020-05-20T22:28:17.079017
| 2017-03-10T06:17:17
| 2017-03-10T06:17:17
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,111
|
py
|
from helpers import create_ctfd, register_user, login_as_user
from CTFd.models import Teams
def test_index():
"""Does the index page return a 200 by default"""
app = create_ctfd()
with app.app_context():
with app.test_client() as client:
r = client.get('/')
assert r.status_code == 200
def test_register_user():
"""Can a user can be registered"""
app = create_ctfd()
with app.app_context():
register_user(app)
team_count = app.db.session.query(app.db.func.count(Teams.id)).first()[0]
assert team_count == 2 # There's the admin user and the created user
def test_register_duplicate_teamname():
"""A user shouldn't be able to use and already registered team name"""
app = create_ctfd()
with app.app_context():
register_user(app, name="user1", email="user1@ctfd.io", password="password")
register_user(app, name="user1", email="user2@ctfd.io", password="password")
team_count = app.db.session.query(app.db.func.count(Teams.id)).first()[0]
assert team_count == 2 # There's the admin user and the first created user
def test_register_duplicate_email():
"""A user shouldn't be able to use an already registered email address"""
app = create_ctfd()
with app.app_context():
register_user(app, name="user1", email="user1@ctfd.io", password="password")
register_user(app, name="user2", email="user1@ctfd.io", password="password")
team_count = app.db.session.query(app.db.func.count(Teams.id)).first()[0]
assert team_count == 2 # There's the admin user and the first created user
def test_user_bad_login():
"""A user should not be able to login with an incorrect password"""
app = create_ctfd()
with app.app_context():
register_user(app)
client = login_as_user(app, name="user", password="wrong_password")
r = client.get('/profile')
assert r.location.startswith("http://localhost/login") # We got redirected to login
def test_user_login():
"""Can a registered user can login"""
app = create_ctfd()
with app.app_context():
register_user(app)
client = login_as_user(app)
r = client.get('/profile')
assert r.location != "http://localhost/login" # We didn't get redirected to login
assert r.status_code == 200
def test_user_isnt_admin():
"""A registered user cannot access admin pages"""
app = create_ctfd()
with app.app_context():
register_user(app)
client = login_as_user(app)
r = client.get('/admin/graphs')
assert r.location == "http://localhost/login"
assert r.status_code == 302
def test_user_get_teams():
"""Can a registered user can load /teams"""
app = create_ctfd()
with app.app_context():
register_user(app)
client = login_as_user(app)
r = client.get('/teams')
assert r.status_code == 200
def test_user_get_scoreboard():
"""Can a registered user can load /scoreboard"""
app = create_ctfd()
with app.app_context():
register_user(app)
client = login_as_user(app)
r = client.get('/scoreboard')
assert r.status_code == 200
def test_user_get_scores():
"""Can a registered user can load /scores"""
app = create_ctfd()
with app.app_context():
register_user(app)
client = login_as_user(app)
r = client.get('/scores')
assert r.status_code == 200
def test_user_get_topteams():
"""Can a registered user can load /top/10"""
app = create_ctfd()
with app.app_context():
register_user(app)
client = login_as_user(app)
r = client.get('/top/10')
assert r.status_code == 200
def test_user_get_challenges():
"""Can a registered user can load /challenges"""
app = create_ctfd()
with app.app_context():
register_user(app)
client = login_as_user(app)
r = client.get('/challenges')
assert r.status_code == 200
def test_user_get_chals():
"""Can a registered user can load /chals"""
app = create_ctfd()
with app.app_context():
register_user(app)
client = login_as_user(app)
r = client.get('/chals')
assert r.status_code == 200
def test_user_get_solves_per_chal():
"""Can a registered user can load /chals/solves"""
app = create_ctfd()
with app.app_context():
register_user(app)
client = login_as_user(app)
r = client.get('/chals/solves')
assert r.status_code == 200
def test_user_get_solves():
"""Can a registered user can load /solves"""
app = create_ctfd()
with app.app_context():
register_user(app)
client = login_as_user(app)
r = client.get('/solves')
assert r.status_code == 200
def test_user_get_team_page():
"""Can a registered user can load their public profile (/team/2)"""
app = create_ctfd()
with app.app_context():
register_user(app)
client = login_as_user(app)
r = client.get('/team/2')
assert r.status_code == 200
def test_user_get_profile():
"""Can a registered user can load their private profile (/profile)"""
app = create_ctfd()
with app.app_context():
register_user(app)
client = login_as_user(app)
r = client.get('/profile')
assert r.status_code == 200
def test_user_get_logout():
"""Can a registered user load /logout"""
app = create_ctfd()
with app.app_context():
register_user(app)
client = login_as_user(app)
client.get('/logout', follow_redirects=True)
r = client.get('/challenges')
assert r.location == "http://localhost/login?next=challenges"
assert r.status_code == 302
def test_user_get_reset_password():
"""Can an unregistered user load /reset_password"""
app = create_ctfd()
with app.app_context():
register_user(app)
client = app.test_client()
r = client.get('/reset_password')
assert r.status_code == 200
|
[
"noreply@github.com"
] |
FrailLeon.noreply@github.com
|
e761790d3403a3377f68042fe6454043160d8708
|
c3e1b190e59f4939a8effc10f10e46db3b945050
|
/scripts/get-store-json.py
|
02aeaf09f08b7b970cca81e502ecdf75aeae7c4e
|
[] |
no_license
|
fwindpeak/oculus-quest-store-query
|
6411ce76f3c897c8de2e6bcab2a2f255da185e20
|
97d68538b54617d765f1492227654baee2edb34c
|
refs/heads/master
| 2023-03-03T03:53:40.868192
| 2020-12-24T02:04:26
| 2020-12-24T02:04:26
| 324,071,302
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 601
|
py
|
import requests
import json
JSON_FILE_PATH="../public/oculus-quest-store.json"
url = "https://graph.oculus.com/graphql?forced_locale=zh_CN"
formData={
"access_token":"OC|1317831034909742|",
'variables':'{"sectionId":"1888816384764129","sortOrder":null,"sectionItemCount":999,"sectionCursor":null,"hmdType":"MONTEREY"}',
"doc_id":"4565044133567732"
}
print('get doc')
doc = requests.post(url,data=formData)
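# NOTE: requests does not raise on HTTP error status codes by default; calling
# doc.raise_for_status() here would surface a failed request before JSON parsing.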
print('save result')
# json.dump(doc.json(),open('oculus.json','w'))
with open(JSON_FILE_PATH,'w') as json_file:
json.dump(doc.json(),json_file,ensure_ascii=False)
print('ok')
|
[
"fwindpeak@gmail.com"
] |
fwindpeak@gmail.com
|
4b56d99ab078dc5174cc5cdaa165aa30466c71d2
|
bd2ddf9e3ed52bdc3e2f21a6d84efc05019cc6b0
|
/Python/chapter2/2-3/hello-str.py
|
7a56f95da7b9a9f42161950cfc0386a5e607b5f4
|
[] |
no_license
|
YosukeKira/main
|
c59bf564e8414ed2c33c914d4df8fb8ff4906c85
|
f4331a18b2b7fc3f987dc8409e73485d2915e550
|
refs/heads/master
| 2023-01-09T19:16:57.641270
| 2020-11-09T03:56:23
| 2020-11-09T03:56:23
| 274,428,553
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 145
|
py
|
# A string written with double quotes
print("hello, Python")
# A string written with single quotes
print('Hello, Python')
|
[
"yosuke.kira.12.21@gmail.com"
] |
yosuke.kira.12.21@gmail.com
|
4f5c895c1d3811c30173849dadfd82fac9b9fbfb
|
70c100bb0caf8c97cc18b37cee2e5fa118675c90
|
/sparx/bin/sxgui_cter.py
|
b32feeb7984c2b8a9a882947dd65736f6fe05472
|
[] |
no_license
|
cpsemmens/eman2
|
f45af91a99843eecc1cd8cd098bde4dda62353e1
|
2ea29af857fec774c27ab88b1c325ce7f9d3e491
|
refs/heads/master
| 2021-01-14T11:23:09.838327
| 2016-03-06T19:49:56
| 2016-03-06T19:49:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 95,077
|
py
|
#!/usr/bin/env python
#
# Author: Toshio Moriya, 12/21/2015 (toshio.moriya@mpi-dortmund.mpg.de)
#
# This software is issued under a joint BSD/GNU license. You may use the
# source code in this file under either license. However, note that the
# complete EMAN2 and SPARX software packages have some GPL dependencies,
# so you are responsible for compliance with the licenses of these packages
# if you opt to use BSD licensing. The warranty disclaimer below holds
# in either instance.
#
# This complete copyright notice must be included in any revised version of the
# source code. Additional authorship citations may be added, but existing
# author citations must be preserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
#
from EMAN2 import *
from EMAN2db import db_open_dict, db_close_dict, db_check_dict, db_list_dicts
from OpenGL import GL,GLUT
from math import *
import os
import sys
# from numpy import array,arange
import traceback
try:
from PyQt4 import QtCore, QtGui, QtOpenGL
from PyQt4.QtCore import Qt
from PyQt4.QtCore import QTimer
from emshape import *
from valslider import *
from emplot2d import EMPlot2DWidget
except:
print "Warning: PyQt4 must be installed"
sys.exit(1)
from sparx import *
from optparse import OptionParser
def main():
progname = os.path.basename(sys.argv[0])
usage = progname + """ cter_ctf_file
This GUI application is designed for the evaluation of micrographs using the parameters output by CTER.
"""
parser = OptionParser(usage, version=SPARXVERSION)
# No options!!! Does not need to call parser.add_option()
(options, args) = parser.parse_args(sys.argv[1:])
	if len(args) > 1:
print "see usage " + usage
sys.exit()
from emapplication import EMApp
app=EMApp()
cter_ctf_file = None
if len(args) == 1:
cter_ctf_file = args[0]
	# else: # Do nothing
gui=SXGuiCter(cter_ctf_file)
gui.show()
try:
# gui.wimage.raise_()
# gui.wfft.raise_()
gui.wplotparam.raise_()
gui.whistparam.raise_()
gui.wplotrotavgcoarse.raise_()
gui.wplotrotavgfine.raise_()
gui.raise_()
except:
print "Recieved unexpected exception in main(): ", sys.exc_info()[0]
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(exc_type, exc_value, exc_traceback)
# MRK_NOTE: 2015/12/17 Toshio Moriya
# Another way to print out exception info...
# lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
# print ''.join('!! ' + line for line in lines)
pass
app.execute()
class SXListWidget(QtGui.QListWidget):
"""Exactly like a normal list widget but intercepts a few keyboard events"""
def keyPressEvent(self,event):
if event.key() in (Qt.Key_Up,Qt.Key_Down) :
QtGui.QListWidget.keyPressEvent(self,event)
return
self.emit(QtCore.SIGNAL("keypress"),event)
class SXPlot2DWidget(EMPlot2DWidget):
def full_refresh(self):
'''
This function is called from resizeGL and from the inspector when somebody toggles the display of a line
'''
self.needupd=1
self.del_shapes(("xcross","ycross","lcross","Circle")) # for EMPlot2DInspector
self.del_shapes(("error_astig","error_def","error_ctf")) # for SXGuiCter.wplotrotavgcoarse & SXGuiCter.wplotrotavgfine
# self.del_shapes(("error_astig","error_def")) # for SXGuiCter.wplotrotavgcoarse & SXGuiCter.wplotrotavgfine
# self.del_shapes(("hist_param_shape_value","hist_param_shape_unapply_threshold_lower","hist_param_shape_apply_threshold_lower","hist_param_shape_unapply_threshold_upper", "hist_param_shape_apply_threshold_upper", "hist_param_shape_label")) # for SXGuiCter.whistparam
# self.del_shapes(("hist_param_shape_label")) # for SXGuiCter.whistparam
# self.del_shapes(("plot_param_shape_label")) # for SXGuiCter.wplotparam
def mouseReleaseEvent(self, event):
EMPlot2DWidget.mouseReleaseEvent(self,event)
if event.button()==Qt.LeftButton:
self.emit(QtCore.SIGNAL("mouseup"),event)
class SXGuiCter(QtGui.QWidget):
def __init__(self, cter_ctf_file = None):
"""Implements the CTF fitting dialog using various EMImage and EMPlot2D widgets
'data' is a list of (filename,ctf,im_1d,bg_1d,quality)
'parms' is [box size,ctf,box coord,set of excluded boxnums,quality,oversampling]
"""
try:
from emimage2d import EMImage2DWidget
except:
print "Cannot import EMAN image GUI objects (EMImage2DWidget)"
sys.exit(1)
QtGui.QWidget.__init__(self,None)
self.setWindowIcon(QtGui.QIcon(get_image_directory() + "ctf.png"))
i_enum = -1
i_enum += 1; self.idx_cter_id = i_enum # <extra> entry id
		i_enum += 1; self.idx_cter_def = i_enum # defocus (um)
i_enum += 1; self.idx_cter_cs = i_enum # Cs (mm)
i_enum += 1; self.idx_cter_vol = i_enum # voltage(kV)
i_enum += 1; self.idx_cter_apix = i_enum # pixel size (A)
i_enum += 1; self.idx_cter_bfactor = i_enum # B-factor (A^2)
i_enum += 1; self.idx_cter_ac = i_enum # amp contrast (%)
i_enum += 1; self.idx_cter_astig_amp = i_enum # astigmatism amplitude (um)
i_enum += 1; self.idx_cter_astig_ang = i_enum # astigmatism angle
i_enum += 1; self.idx_cter_sd_def = i_enum # std dev of defocus (um)
i_enum += 1; self.idx_cter_sd_astig_amp = i_enum # std dev of ast amp (A)
i_enum += 1; self.idx_cter_sd_astig_ang = i_enum # std dev of ast angle
i_enum += 1; self.idx_cter_error_def = i_enum # frequency at which signal drops by 50% due to estimated error of defocus alone (1/A)
i_enum += 1; self.idx_cter_error_astig = i_enum # frequency at which signal drops by 50% due to estimated error of defocus and astigmatism (1/A)
i_enum += 1; self.idx_cter_mic_name = i_enum # Micrograph name
i_enum += 1; self.idx_cter_pwrot_name = i_enum # <extra> CTER power spectrum rotational average file name
i_enum += 1; self.idx_cter_box_size = i_enum # <extra> window size used in cter estimation
i_enum += 1; self.idx_cter_error_ctf = i_enum # <extra> limit frequency by CTF error
i_enum += 1; self.idx_cter_max_power = i_enum # <extra> maximum power in experimental rotational average (with astigmatism)
i_enum += 1; self.idx_cter_select = i_enum # <extra> selected state
i_enum += 1; self.n_idx_cter = i_enum
self.n_idx_cter_extra = 6
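		# The repeated "i_enum += 1; self.idx_... = i_enum" pattern above assigns
		# consecutive integer indices, emulating an enum for indexing the map lists below.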
i_enum = -1
i_enum += 1; self.idx_cter_item_label = i_enum
i_enum += 1; self.idx_cter_item_widget = i_enum
i_enum += 1; self.n_idx_cter_item = i_enum
# Mapping for parameter value items (line edit widgets)
self.value_map_list = [None] * self.n_idx_cter
self.value_map_list[self.idx_cter_id] = ["CTER ID", None]
self.value_map_list[self.idx_cter_def] = ["Defocus", None]
self.value_map_list[self.idx_cter_cs] = ["Cs (mm)", None]
self.value_map_list[self.idx_cter_vol] = ["Voltage (kV)", None]
self.value_map_list[self.idx_cter_apix] = ["A/pix", None]
self.value_map_list[self.idx_cter_bfactor] = ["B factor", None]
self.value_map_list[self.idx_cter_ac] = ["Amp. Contrast", None]
self.value_map_list[self.idx_cter_astig_amp] = ["Astig. Amp.", None]
self.value_map_list[self.idx_cter_astig_ang] = ["Astig. Ang.", None]
self.value_map_list[self.idx_cter_sd_def] = ["Defocus SD", None]
self.value_map_list[self.idx_cter_sd_astig_amp] = ["Astig. Amp. SD", None]
self.value_map_list[self.idx_cter_sd_astig_ang] = ["Astig. Ang. SD", None]
self.value_map_list[self.idx_cter_error_def] = ["Defocus Error", None]
self.value_map_list[self.idx_cter_error_astig] = ["Astig. Error", None]
self.value_map_list[self.idx_cter_mic_name] = ["Micrograph", None]
self.value_map_list[self.idx_cter_pwrot_name] = ["PW. Rot. File", None]
self.value_map_list[self.idx_cter_box_size] = ["CTF Box Size", None]
self.value_map_list[self.idx_cter_error_ctf] = ["CTF Error", None]
self.value_map_list[self.idx_cter_max_power] = ["Max Power", None]
self.value_map_list[self.idx_cter_select] = ["Select", None]
i_enum = -1
i_enum += 1; self.idx_rotinf_cter_id = i_enum # line number == cter id
i_enum += 1; self.idx_rotinf_freq = i_enum # spatial frequency (1/A)
i_enum += 1; self.idx_rotinf_exp_no_astig = i_enum # experimental rotational average (no astigmatism)
i_enum += 1; self.idx_rotinf_fit_no_astig = i_enum # fitted rotational average (no astigmatism)
i_enum += 1; self.idx_rotinf_exp_with_astig = i_enum # experimental rotational average (with astigmatism)
i_enum += 1; self.idx_rotinf_fit_with_astig = i_enum # fitted rotational average (with astigmatism)
i_enum += 1; self.n_idx_rotinf = i_enum
i_enum = -1
i_enum += 1; self.idx_sort_id = i_enum
i_enum += 1; self.idx_sort_def = i_enum
i_enum += 1; self.idx_sort_astig_amp = i_enum
i_enum += 1; self.idx_sort_astig_ang = i_enum
i_enum += 1; self.idx_sort_sd_def = i_enum
i_enum += 1; self.idx_sort_sd_astig_amp = i_enum
i_enum += 1; self.idx_sort_sd_astig_ang = i_enum
i_enum += 1; self.idx_sort_error_def = i_enum
i_enum += 1; self.idx_sort_error_astig = i_enum
i_enum += 1; self.idx_sort_error_ctf = i_enum
i_enum += 1; self.idx_sort_max_power = i_enum
i_enum += 1; self.n_idx_sort = i_enum
i_enum = -1
i_enum += 1; self.idx_sort_item_idx_cter = i_enum
i_enum += 1; self.n_idx_sort_item = i_enum
# Mapping for sorting item (combo box widget)
# Includes mapping from idx_sort to idx_cter
self.sort_map_list = [None] * self.n_idx_sort
self.sort_map_list[self.idx_sort_id] = [self.idx_cter_id]
self.sort_map_list[self.idx_sort_def] = [self.idx_cter_def]
self.sort_map_list[self.idx_sort_astig_amp] = [self.idx_cter_astig_amp]
self.sort_map_list[self.idx_sort_astig_ang] = [self.idx_cter_astig_ang]
self.sort_map_list[self.idx_sort_sd_def] = [self.idx_cter_sd_def]
self.sort_map_list[self.idx_sort_sd_astig_amp] = [self.idx_cter_sd_astig_amp]
self.sort_map_list[self.idx_sort_sd_astig_ang] = [self.idx_cter_sd_astig_ang]
self.sort_map_list[self.idx_sort_error_def] = [self.idx_cter_error_def]
self.sort_map_list[self.idx_sort_error_astig] = [self.idx_cter_error_astig]
self.sort_map_list[self.idx_sort_error_ctf] = [self.idx_cter_error_ctf]
self.sort_map_list[self.idx_sort_max_power] = [self.idx_cter_max_power]
i_enum = -1
i_enum += 1; self.idx_hist_def = i_enum
i_enum += 1; self.idx_hist_astig_amp = i_enum
i_enum += 1; self.idx_hist_astig_ang = i_enum
i_enum += 1; self.idx_hist_sd_def = i_enum
i_enum += 1; self.idx_hist_sd_astig_amp = i_enum
i_enum += 1; self.idx_hist_sd_astig_ang = i_enum
i_enum += 1; self.idx_hist_error_def = i_enum
i_enum += 1; self.idx_hist_error_astig = i_enum
i_enum += 1; self.idx_hist_error_ctf = i_enum
i_enum += 1; self.idx_hist_max_power = i_enum
i_enum += 1; self.n_idx_hist = i_enum
i_enum = -1
i_enum += 1; self.idx_hist_item_idx_cter = i_enum
i_enum += 1; self.idx_hist_item_idx_sort = i_enum
i_enum += 1; self.idx_hist_item_val_min = i_enum
i_enum += 1; self.idx_hist_item_val_max = i_enum
i_enum += 1; self.idx_hist_item_unapply_threshold_lower = i_enum
i_enum += 1; self.idx_hist_item_unapply_threshold_upper = i_enum
i_enum += 1; self.idx_hist_item_unapply_widget_lower = i_enum
i_enum += 1; self.idx_hist_item_unapply_widget_upper = i_enum
i_enum += 1; self.idx_hist_item_apply_threshold_lower = i_enum
i_enum += 1; self.idx_hist_item_apply_threshold_upper = i_enum
i_enum += 1; self.idx_hist_item_apply_widget_lower = i_enum
i_enum += 1; self.idx_hist_item_apply_widget_upper = i_enum
i_enum += 1; self.n_idx_hist_item = i_enum
# Mapping for histogram items (combo box widget) and threshold setting (line edit widgets)
# Includes mapping from idx_hist to idx_cter and idx_sort
self.hist_map_list = [None] * self.n_idx_hist
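		# Each entry below follows the idx_hist_item_* layout declared above:
		# [idx_cter, idx_sort, val_min, val_max,
		#  unapply_threshold_lower, unapply_threshold_upper, unapply_widget_lower, unapply_widget_upper,
		#  apply_threshold_lower, apply_threshold_upper, apply_widget_lower, apply_widget_upper]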
self.hist_map_list[self.idx_hist_def] = [self.idx_cter_def, self.idx_sort_def, 0, 5, 0, 5, None, None, 0, 5, None, None]
self.hist_map_list[self.idx_hist_astig_amp] = [self.idx_cter_astig_amp, self.idx_sort_astig_amp, 0, 1, 0, 1, None, None, 0, 1, None, None]
self.hist_map_list[self.idx_hist_astig_ang] = [self.idx_cter_astig_ang, self.idx_sort_astig_ang, 0, 180, 0, 180, None, None, 0, 180, None, None]
self.hist_map_list[self.idx_hist_sd_def] = [self.idx_cter_sd_def, self.idx_sort_sd_def, 0, 5, 0, 5, None, None, 0, 5, None, None]
self.hist_map_list[self.idx_hist_sd_astig_amp] = [self.idx_cter_sd_astig_amp, self.idx_sort_sd_astig_amp, 0, 1, 0, 1, None, None, 0, 1, None, None]
self.hist_map_list[self.idx_hist_sd_astig_ang] = [self.idx_cter_sd_astig_ang, self.idx_sort_sd_astig_ang, 0, 180, 0, 180, None, None, 0, 180, None, None]
self.hist_map_list[self.idx_hist_error_def] = [self.idx_cter_error_def, self.idx_sort_error_def, 0, 10, 0, 10, None, None, 0, 10, None, None]
self.hist_map_list[self.idx_hist_error_astig] = [self.idx_cter_error_astig, self.idx_sort_error_astig, 0, 10, 0, 10, None, None, 0, 10, None, None]
self.hist_map_list[self.idx_hist_error_ctf] = [self.idx_cter_error_ctf, self.idx_sort_error_ctf, 0, 10, 0, 10, None, None, 0, 10, None, None]
self.hist_map_list[self.idx_hist_max_power] = [self.idx_cter_max_power, self.idx_sort_max_power, 0, 99999, 0, 99999, None, None, 0, 99999, None, None]
i_enum = -1
i_enum += 1; self.idx_threshold_control_lower = i_enum
i_enum += 1; self.idx_threshold_control_upper = i_enum
i_enum += 1; self.idx_threshold_control_edit_only = i_enum
i_enum += 1; self.n_idx_threshold_control = i_enum
i_enum = -1
i_enum += 1; self.idx_threshold_control_item_label = i_enum
i_enum += 1; self.idx_threshold_control_item_color = i_enum
i_enum += 1; self.n_idx_threshold_control_item = i_enum
# Mapping for threshold control (combo box widget)
self.threshold_control_map_list = [None] * self.n_idx_threshold_control
self.threshold_control_map_list[self.idx_threshold_control_lower] = ["Lower (blue)", "blue"]
self.threshold_control_map_list[self.idx_threshold_control_upper] = ["Upper (red)", "red"]
self.threshold_control_map_list[self.idx_threshold_control_edit_only] = ["Edit Only", "black"]
i_enum = -1
i_enum += 1; self.idx_graph_exp_no_astig = i_enum
i_enum += 1; self.idx_graph_fit_no_astig = i_enum
i_enum += 1; self.idx_graph_exp_with_astig = i_enum
i_enum += 1; self.idx_graph_fit_with_astig = i_enum
i_enum += 1; self.n_idx_graph = i_enum
i_enum = -1
i_enum += 1; self.idx_graph_item_name = i_enum
i_enum += 1; self.idx_graph_item_label = i_enum
i_enum += 1; self.idx_graph_idx_rotinf = i_enum
i_enum += 1; self.idx_graph_item_widget = i_enum
i_enum += 1; self.n_idx_graph_item = i_enum
# Mapping for graph display setting (check box widgets)
self.graph_map_list = [None] * self.n_idx_graph
self.graph_map_list[self.idx_graph_exp_no_astig] = ["exp_no_astig", "Exp. No Astig (Black)", self.idx_rotinf_exp_no_astig, None]
self.graph_map_list[self.idx_graph_fit_no_astig] = ["fit_no_astig", "Fit. No Astig (Blue)", self.idx_rotinf_fit_no_astig, None]
self.graph_map_list[self.idx_graph_exp_with_astig] = ["exp_with_astig", "Exp. with Astig (Red)", self.idx_rotinf_exp_with_astig, None]
		self.graph_map_list[self.idx_graph_fit_with_astig] = ["fit_with_astig", "Fit. with Astig (Green)", self.idx_rotinf_fit_with_astig, None]
i_enum = -1
i_enum += 1; self.idx_thresholdset_unapplied = i_enum
i_enum += 1; self.idx_thresholdset_applied = i_enum
i_enum += 1; self.n_idx_thresholdset = i_enum
i_enum = -1
i_enum += 1; self.idx_thresholdset_item_label = i_enum
i_enum += 1; self.n_idx_thresholdset_item = i_enum
# Mapping for threshold set (combo box widget)
self.thresholdset_map_list = [None] * self.n_idx_thresholdset
self.thresholdset_map_list[self.idx_thresholdset_unapplied] = ["Unapplied"]
self.thresholdset_map_list[self.idx_thresholdset_applied] = ["Applied"]
self.cter_box_size = 512 # Currently, this information was not available (2016/01/29 Toshio Moriya)
self.cter_partres_file_path = None
self.cter_entry_list = None
self.cter_mic_file_path = None
self.cter_pwrot_file_path = None
self.curmicdisplay=False
self.curplotfixscale=5
self.curentry=None
self.cursort=0
self.cursortoder=False
self.cursortselect = False
self.curhist=0
self.curhistdisable=False
self.curthresholdcontrol = 0
self.curentryperbin=10
self.cursyncsort = False
self.curthresholdset=0
self.wimage=EMImage2DWidget()
self.wimage.setWindowTitle("sxgui_cter - Micrograph")
# self.wfft=EMImage2DWidget()
# self.wfft.setWindowTitle("sxgui_cter - 2D FFT")
self.wplotparam=SXPlot2DWidget()
self.wplotparam.setWindowTitle("sxgui_cter - Sort Plot")
self.whistparam=SXPlot2DWidget()
self.whistparam.setWindowTitle("sxgui_cter - Histogram")
self.wplotrotavgcoarse=SXPlot2DWidget()
self.wplotrotavgcoarse.setWindowTitle("sxgui_cter - Plot")
self.wplotrotavgfine=SXPlot2DWidget()
self.wplotrotavgfine.setWindowTitle("sxgui_cter - Plot Zoom")
self.wplotparam.connect(self.wplotparam,QtCore.SIGNAL("mouseup"),self.plotparammouseup)
self.whistparam.connect(self.whistparam,QtCore.SIGNAL("mouseup"),self.histparammouseup)
# self.wimage.connect(self.wimage,QtCore.SIGNAL("mousedown"),self.imgmousedown)
# self.wimage.connect(self.wimage,QtCore.SIGNAL("mousedrag"),self.imgmousedrag)
# self.wimage.connect(self.wimage,QtCore.SIGNAL("mouseup") ,self.imgmouseup)
# self.wfft.connect(self.wfft,QtCore.SIGNAL("mousedown"),self.fftmousedown)
# self.wfft.connect(self.wfft,QtCore.SIGNAL("mousedrag"),self.fftmousedrag)
# self.wfft.connect(self.wfft,QtCore.SIGNAL("mouseup") ,self.fftmouseup)
# self.wplotrotavgcoarse.connect(self.wplotrotavgcoarse,QtCore.SIGNAL("mousedown"),self.plotmousedown)
self.wimage.mmode="app"
# self.wfft.mmode="app"
# This object is itself a widget we need to set up
self.gbl = QtGui.QGridLayout(self)
self.gbl.setMargin(8)
self.gbl.setSpacing(6)
# --------------------------------------------------------------------------------
# Columns 1-2
# --------------------------------------------------------------------------------
grid_col = 0; col_span = 2
grid_row = 0
checkbox_label_width=160
self.pbopencter=QtGui.QPushButton("Open CTER CTF file")
self.gbl.addWidget(self.pbopencter,grid_row,grid_col)
grid_row += 1
# temp_label=QtGui.QLabel("Selection Status:",self)
# self.gbl.addWidget(temp_label,grid_row,grid_col,1,2)
grid_row += 1
self.vbnentry=ValBox(self,(0,10000),"Num. of entries",0,90)
self.vbnentry.setEnabled(False)
self.vbnentry.intonly=True
self.gbl.addWidget(self.vbnentry,grid_row,grid_col)
grid_row+=1
self.vbuncheckcounts=ValBox(self,(0,1000000),"Unchecked",0,90)
self.vbuncheckcounts.setEnabled(False)
self.vbuncheckcounts.intonly=True
self.gbl.addWidget(self.vbuncheckcounts,grid_row,grid_col)
grid_row+=1
self.vbuncheckratio=ValBox(self,(0,1.0),"Ratio",0,90)
self.vbuncheckratio.setEnabled(False)
self.gbl.addWidget(self.vbuncheckratio,grid_row,grid_col)
grid_row+=1
# temp_label=QtGui.QLabel("Electron Microscopy:",self)
# self.gbl.addWidget(temp_label,grid_row,grid_col,1,2)
grid_row+=1
self.add_value_widget(self.idx_cter_vol, 0, 500, grid_row, grid_col)
grid_row+=1
self.add_value_widget(self.idx_cter_cs, 0, 5, grid_row, grid_col)
grid_row+=1
self.add_value_widget(self.idx_cter_ac, 0, 100, grid_row, grid_col)
grid_row+=1
self.add_value_widget(self.idx_cter_apix, 0, 500, grid_row, grid_col)
grid_row+=1
self.add_value_widget(self.idx_cter_box_size, 0, 4096, grid_row, grid_col, intonly=True)
grid_row+=1
# Make space
grid_row+=1
self.cbmicdisplay=CheckBox(None,"Display Micrograph",self.curmicdisplay)
self.cbmicdisplay.setEnabled(False)
self.gbl.addWidget(self.cbmicdisplay,grid_row,grid_col)
grid_row+=1
# Make space
grid_row+=1
temp_label=QtGui.QLabel("Display Curves:",self)
self.gbl.addWidget(temp_label,grid_row,grid_col)
grid_row += 1
for idx_graph in xrange(self.n_idx_graph):
self.graph_map_list[idx_graph][self.idx_graph_item_widget]=CheckBox(None,self.graph_map_list[idx_graph][self.idx_graph_item_label],True,checkbox_label_width)
self.gbl.addWidget(self.graph_map_list[idx_graph][self.idx_graph_item_widget],grid_row,grid_col)
grid_row += 1
self.vbplotfixscale=ValBox(self,(0,99999),"Plot Fix Scale",self.curplotfixscale,90)
self.gbl.addWidget(self.vbplotfixscale,grid_row,grid_col)
grid_row+=1
# # Make space
# grid_row+=1
self.pbrefreshgraphs=QtGui.QPushButton("Refresh Graphs")
self.pbrefreshgraphs.setEnabled(False)
self.gbl.addWidget(self.pbrefreshgraphs,grid_row,grid_col)
grid_row += 1
# --------------------------------------------------------------------------------
# Columns 3-4
# --------------------------------------------------------------------------------
grid_col += col_span; col_span = 2
grid_row = 0
# plot list and plot mode combobox
row_span_entry_list = 21
self.lbentry=SXListWidget(self) # self.lbentry=e2ctf.MyListWidget(self)
self.lbentry.setSizePolicy(QtGui.QSizePolicy.Preferred,QtGui.QSizePolicy.Expanding)
self.lbentry.setMinimumWidth(220)
self.gbl.addWidget(self.lbentry,grid_row,grid_col,row_span_entry_list,col_span)
grid_row += row_span_entry_list
# --------------------------------------------------------------------------------
# Columns 5-6 (for Micrograph/CTER Entry) and 7-8 (for Histogram)
# --------------------------------------------------------------------------------
grid_col += col_span; col_span = 2
grid_row = 0
grid_col_2nd = grid_col + col_span
grid_col_3rd = grid_col_2nd + col_span
temp_label=QtGui.QLabel("Current Entry Info:",self)
self.gbl.addWidget(temp_label,grid_row,grid_col,1,2)
temp_label=QtGui.QLabel("Unapplied Thresholds:",self)
self.gbl.addWidget(temp_label,grid_row,grid_col_2nd,1,2)
temp_label=QtGui.QLabel("Applied Thresholds:",self)
self.gbl.addWidget(temp_label,grid_row,grid_col_3rd,1,2)
grid_row += 1
self.ssortedid=ValBox(self,(0,10000),"Sorted ID",0,90)
self.ssortedid.setEnabled(False)
self.ssortedid.intonly=True
self.gbl.addWidget(self.ssortedid,grid_row,grid_col)
grid_row+=1
self.add_value_widget(self.idx_cter_id, 0, 10000, grid_row, grid_col, intonly=True)
grid_row+=1
self.add_value_widget(self.idx_cter_select, 0, 1, grid_row, grid_col, intonly=True)
grid_row+=1
for idx_hist in xrange(self.n_idx_hist):
self.add_value_widget_with_threshold(idx_hist, grid_row, grid_col, grid_col_2nd, grid_col_3rd)
grid_row+=1
self.hist_map_list[0][self.idx_hist_item_unapply_widget_lower].setEnabled(True)
# self.hist_map_list[0][self.idx_hist_item_unapply_widget_upper].setEnabled(True)
self.add_value_widget(self.idx_cter_bfactor, 0, 1600, grid_row, grid_col)
grid_row+=1
# make space
# grid_row += 1
temp_label=QtGui.QLabel("Sort CTER Entries:",self)
self.gbl.addWidget(temp_label,grid_row,grid_col,1,2)
temp_label=QtGui.QLabel("Parameter for Histogram & Plot:",self)
self.gbl.addWidget(temp_label,grid_row,grid_col_2nd,1,2)
temp_label=QtGui.QLabel("Save/Load Thresholds:",self)
self.gbl.addWidget(temp_label,grid_row,grid_col_3rd,1,2)
grid_row += 1
self.ssort=QtGui.QComboBox(self)
for map_entry in self.sort_map_list:
idx_cter = map_entry[self.idx_sort_item_idx_cter]
self.ssort.addItem(self.value_map_list[idx_cter][self.idx_cter_item_label])
self.ssort.setCurrentIndex(self.cursort)
self.gbl.addWidget(self.ssort,grid_row,grid_col)
self.shist=QtGui.QComboBox(self)
for map_entry in self.hist_map_list:
idx_cter = map_entry[self.idx_hist_item_idx_cter]
self.shist.addItem(self.value_map_list[idx_cter][self.idx_cter_item_label])
self.shist.setCurrentIndex(self.curhist)
self.gbl.addWidget(self.shist,grid_row,grid_col_2nd,1,2)
self.sthresholdset=QtGui.QComboBox(self)
for map_entry in self.thresholdset_map_list:
self.sthresholdset.addItem(map_entry[self.idx_thresholdset_item_label])
self.sthresholdset.setCurrentIndex(self.curthresholdset)
self.gbl.addWidget(self.sthresholdset,grid_row,grid_col_3rd,1,2)
grid_row += 1
self.cbsortoder=CheckBox(None,"Decending",self.cursortoder)
self.gbl.addWidget(self.cbsortoder,grid_row,grid_col)
temp_label=QtGui.QLabel("Move Threshold",self)
self.gbl.addWidget(temp_label,grid_row,grid_col_2nd)
self.sthresholdcontrol=QtGui.QComboBox(self)
# self.sthresholdcontrol.setStyleSheet("color: rgb(255,0,0);") # NOTE: Toshio Moriya 2016/01/22 Unfortunately, this will overwrite the individual item color...
for idx_threshold_control in xrange(self.n_idx_threshold_control):
map_entry = self.threshold_control_map_list[idx_threshold_control]
self.sthresholdcontrol.addItem(map_entry[self.idx_threshold_control_item_label])
self.sthresholdcontrol.setItemData(idx_threshold_control, QtGui.QColor(map_entry[self.idx_threshold_control_item_color]), Qt.TextColorRole)
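# Tint each combo box item with the color of the threshold control it represents; Qt.TextColorRole sets the per-item text color.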
self.sthresholdcontrol.setCurrentIndex(self.curthresholdcontrol)
self.gbl.addWidget(self.sthresholdcontrol,grid_row,grid_col_2nd+1)
self.pbsavethresholdset=QtGui.QPushButton("Save")
self.pbsavethresholdset.setEnabled(False)
self.gbl.addWidget(self.pbsavethresholdset,grid_row,grid_col_3rd)
self.pbloadthresholdset=QtGui.QPushButton("Load")
self.pbloadthresholdset.setEnabled(False)
self.gbl.addWidget(self.pbloadthresholdset,grid_row,grid_col_3rd+1)
grid_row += 1
self.cbsortselect=CheckBox(None,"Sort Select",self.cursortselect)
self.gbl.addWidget(self.cbsortselect,grid_row,grid_col)
self.cbsyncsort=CheckBox(None,"Sync Sort",self.cursyncsort)
self.gbl.addWidget(self.cbsyncsort,grid_row,grid_col_2nd)
temp_label=QtGui.QLabel("Save Selection:",self)
self.gbl.addWidget(temp_label,grid_row,grid_col_3rd,1,2)
grid_row += 1
temp_label=QtGui.QLabel("Avg. counts per bin",self)
self.gbl.addWidget(temp_label,grid_row,grid_col_2nd)
self.vsentryperbin=ValBox(self,(0,10000),None,self.curentryperbin,90)
self.vsentryperbin.setIntonly(True)
self.gbl.addWidget(self.vsentryperbin,grid_row,grid_col_2nd+1)
temp_label=QtGui.QLabel("File Suffix",self)
self.gbl.addWidget(temp_label,grid_row,grid_col_3rd)
self.vfilesuffix=StringBox(self,None,"Trial00",90)
self.gbl.addWidget(self.vfilesuffix,grid_row,grid_col_3rd+1)
grid_row += 1
self.pbreaplysort=QtGui.QPushButton("Reapply Sort")
self.pbreaplysort.setEnabled(False)
self.gbl.addWidget(self.pbreaplysort,grid_row,grid_col)
self.pbapplyallthreshold=QtGui.QPushButton("Apply All Thresholds")
self.pbapplyallthreshold.setEnabled(False)
self.gbl.addWidget(self.pbapplyallthreshold,grid_row,grid_col_2nd,1,2)
self.pbsaveselection=QtGui.QPushButton("Save Selection")
self.pbsaveselection.setEnabled(False)
self.gbl.addWidget(self.pbsaveselection,grid_row,grid_col_3rd,1,2)
grid_row += 1
# this is just a spacer
self.gbl.setRowStretch(grid_row,2)
self.gbl.setColumnStretch(3,2)
# --------------------------------------------------------------------------------
# Set signal handler
# --------------------------------------------------------------------------------
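# NOTE: Old-style PyQt4 SIGNAL()/connect syntax; the custom EMAN2 value widgets (ValBox/CheckBox) appear to emit a plain "valueChanged" signal.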
QtCore.QObject.connect(self.pbopencter, QtCore.SIGNAL("clicked(bool)"),self.openCter)
QtCore.QObject.connect(self.cbmicdisplay, QtCore.SIGNAL("valueChanged"),self.newMicDisplay)
for idx_graph in xrange(self.n_idx_graph):
QtCore.QObject.connect(self.graph_map_list[idx_graph][self.idx_graph_item_widget], QtCore.SIGNAL("valueChanged"),self.updatePlotVisibility)
QtCore.QObject.connect(self.vbplotfixscale, QtCore.SIGNAL("valueChanged"),self.newPlotFixScale)
QtCore.QObject.connect(self.pbrefreshgraphs, QtCore.SIGNAL("clicked(bool)"),self.refreshGraphs)
QtCore.QObject.connect(self.lbentry,QtCore.SIGNAL("currentRowChanged(int)"),self.newEntry)
# QtCore.QObject.connect(self.lbentry,QtCore.SIGNAL("keypress"),self.entryKey)
QtCore.QObject.connect(self.lbentry,QtCore.SIGNAL("itemChanged(QListWidgetItem*)"),self.updateEntrySelect)
QtCore.QObject.connect(self.ssort,QtCore.SIGNAL("currentIndexChanged(int)"),self.newSort)
QtCore.QObject.connect(self.cbsortoder, QtCore.SIGNAL("valueChanged"),self.newSortOrder)
QtCore.QObject.connect(self.cbsortselect, QtCore.SIGNAL("valueChanged"),self.newSortSelect)
QtCore.QObject.connect(self.pbreaplysort, QtCore.SIGNAL("clicked(bool)"),self.reapplySort)
for idx_hist in xrange(self.n_idx_hist):
QtCore.QObject.connect(self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_lower],QtCore.SIGNAL("valueChanged"),self.newThresholdLower)
QtCore.QObject.connect(self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_upper],QtCore.SIGNAL("valueChanged"),self.newThresholdUpper)
# QtCore.QObject.connect(self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_lower],QtCore.SIGNAL("valueChanged"),self.updateHist)
# QtCore.QObject.connect(self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_lower],QtCore.SIGNAL("valueChanged"),self.updatePlotParam)
# QtCore.QObject.connect(self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_upper],QtCore.SIGNAL("valueChanged"),self.updateHist)
# QtCore.QObject.connect(self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_upper],QtCore.SIGNAL("valueChanged"),self.updatePlotParam)
QtCore.QObject.connect(self.shist,QtCore.SIGNAL("currentIndexChanged(int)"),self.newHist)
QtCore.QObject.connect(self.sthresholdcontrol,QtCore.SIGNAL("currentIndexChanged(int)"),self.newThresholdControl)
QtCore.QObject.connect(self.cbsyncsort, QtCore.SIGNAL("valueChanged"),self.newSyncSort)
QtCore.QObject.connect(self.vsentryperbin, QtCore.SIGNAL("valueChanged"),self.newEntryPerBin)
QtCore.QObject.connect(self.pbapplyallthreshold, QtCore.SIGNAL("clicked(bool)"),self.applyAllThresholds)
QtCore.QObject.connect(self.sthresholdset,QtCore.SIGNAL("currentIndexChanged(int)"),self.newThresholdSet)
QtCore.QObject.connect(self.pbsavethresholdset, QtCore.SIGNAL("clicked(bool)"),self.saveThresholdSet)
QtCore.QObject.connect(self.pbloadthresholdset, QtCore.SIGNAL("clicked(bool)"),self.loadThresholdSet)
QtCore.QObject.connect(self.pbsaveselection, QtCore.SIGNAL("clicked(bool)"),self.saveSelection)
self.setWindowTitle("sxgui_cter - Control Panel")
# Set default sizes & positions of windows in case this is the first run in this project directory
# These values were determined by printing the width and height in the resize event
win_height = 512 # Use the same height for all windows
win_height_margin = 46
main_win_width = 1200
graph_win_width = 980
img_win_width = win_height
# Top Left
win_top = 0
win_left = 0
win_width = graph_win_width
self.whistparam.qt_parent.resize(win_width,win_height)
self.whistparam.qt_parent.move(win_left,win_top)
self.wplotparam.qt_parent.resize(win_width,win_height)
self.wplotparam.qt_parent.move(win_left,win_top)
# Top Right
win_left = graph_win_width
win_width = main_win_width
self.resize(win_width,win_height)
self.move(win_left,win_top)
# Bottom Left
win_top = win_height + win_height_margin
win_left = 0
win_width = graph_win_width
self.wplotrotavgcoarse.qt_parent.resize(win_width,win_height)
self.wplotrotavgcoarse.qt_parent.move(win_left,win_top)
self.wplotrotavgfine.qt_parent.resize(win_width,win_height)
self.wplotrotavgfine.qt_parent.move(win_left,win_top)
# Bottom Right
# Set the image window
win_left = graph_win_width
win_width = img_win_width
img_size = 4096
scale_factor = float(win_width)/img_size
self.wimage.set_data(model_blank(img_size,img_size, bckg=1.0)) # resize does not work if no image is set
self.wimage.qt_parent.resize(win_width,win_height)
self.wimage.qt_parent.move(win_left,win_top)
self.wimage.scroll_to(-1 * img_size,-1 * img_size)
self.wimage.set_scale(scale_factor)
# Try to recover sizes & positions of windows of the previous GUI session
E2loadappwin("sxgui_cter","main",self)
E2loadappwin("sxgui_cter","plotparam",self.wplotparam.qt_parent)
E2loadappwin("sxgui_cter","histparam",self.whistparam.qt_parent)
E2loadappwin("sxgui_cter","plotcoarse",self.wplotrotavgcoarse.qt_parent)
E2loadappwin("sxgui_cter","plotfine",self.wplotrotavgfine.qt_parent)
E2loadappwin("sxgui_cter","image",self.wimage.qt_parent)
# E2loadappwin("sxgui_cter","fft",self.wfft.qt_parent)
# if self.cter_entry_list:
# # self.wfft.show()
# self.whistparam.show()
# self.wplotrotavgcoarse.show()
### This section is responsible for background updates
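# A 100 ms QTimer periodically calls timeOut(), which checks the needredisp flag and redraws outside of the Qt signal handlers; the busy flag guards against re-entrant updates.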
self.busy=False
# self.needupdate=True
self.needredisp=False
# self.procthread=None
# self.errors=None # used to communicate errors back from the reprocessing thread
self.timer=QTimer()
QtCore.QObject.connect(self.timer, QtCore.SIGNAL("timeout()"), self.timeOut)
self.timer.start(100)
# Finally, read CTER CTF file if necessary
if cter_ctf_file != None:
self.readCterCtfFile(os.path.relpath(cter_ctf_file))
def add_value_widget(self, idx_cter, val_min, val_max, grid_row, grid_col, intonly = False, label_width = 90):
param_label = self.value_map_list[idx_cter][self.idx_cter_item_label]
val_default = val_min
val_widget = ValBox(self,(val_min,val_max),param_label,val_default,label_width)
val_widget.setEnabled(False)
val_widget.intonly=intonly
self.gbl.addWidget(val_widget,grid_row,grid_col)
self.value_map_list[idx_cter][self.idx_cter_item_widget] = val_widget
def add_value_widget_with_threshold(self, idx_hist, grid_row, grid_col, grid_col_2nd, grid_col_3rd, intonly = False, label_width = 90):
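# Lays out one row per histogram parameter: the current value (1st column pair), the editable unapplied thresholds (2nd pair, blue = lower / red = upper), and the read-only applied thresholds (3rd pair).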
val_min = self.hist_map_list[idx_hist][self.idx_hist_item_val_min]
val_max = self.hist_map_list[idx_hist][self.idx_hist_item_val_max]
# Add widget for parameter value
self.add_value_widget(self.hist_map_list[idx_hist][self.idx_hist_item_idx_cter], val_min, val_max, grid_row, grid_col, intonly, label_width)
# Add widget for unapplied thresholds
self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_lower]=ValBox(self,(val_min,val_max),None,val_min,label_width)
self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_lower].setEnabled(False)
self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_lower].text.setStyleSheet("color: rgb(0,0,255);")
self.gbl.addWidget(self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_lower],grid_row,grid_col_2nd)
self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_upper]=ValBox(self,(val_min,val_max),None,val_max,label_width)
self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_upper].setEnabled(False)
self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_upper].text.setStyleSheet("color: rgb(255,0,0);")
self.gbl.addWidget(self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_upper],grid_row,grid_col_2nd+1)
# Add widget for applied thresholds
self.hist_map_list[idx_hist][self.idx_hist_item_apply_widget_lower]=ValBox(self,(val_min,val_max),None,val_min,label_width)
self.hist_map_list[idx_hist][self.idx_hist_item_apply_widget_lower].setEnabled(False)
self.hist_map_list[idx_hist][self.idx_hist_item_apply_widget_lower].text.setStyleSheet("color: rgb(0,0,255);")
self.gbl.addWidget(self.hist_map_list[idx_hist][self.idx_hist_item_apply_widget_lower],grid_row,grid_col_3rd)
self.hist_map_list[idx_hist][self.idx_hist_item_apply_widget_upper]=ValBox(self,(val_min,val_max),None,val_max,label_width)
self.hist_map_list[idx_hist][self.idx_hist_item_apply_widget_upper].setEnabled(False)
self.hist_map_list[idx_hist][self.idx_hist_item_apply_widget_upper].text.setStyleSheet("color: rgb(255,0,0);")
self.gbl.addWidget(self.hist_map_list[idx_hist][self.idx_hist_item_apply_widget_upper],grid_row,grid_col_3rd+1)
def readCterCtfFile(self, file_path):
"""Read all entries from a CTER CTF file into the list box"""
if not os.path.exists(file_path):
QtGui.QMessageBox.warning(None,"Warning","Cannot find CTER CTF file (%s). Please check the file path." % (file_path))
return
if os.path.basename(file_path).find("partres") == -1:
QtGui.QMessageBox.warning(None,"Warning","Invalid file name for CTER CTF File (%s). The file name must contain \"partres\"." % (file_path))
return
if file_path[-1*len(".txt"):] != ".txt":
QtGui.QMessageBox.warning(None,"Warning","Invalid file extension for CTER CTF File (%s). The file extension must be \".txt\"." % (file_path))
return
if os.path.dirname(file_path)[-1*len("partres"):] != "partres":
QtGui.QMessageBox.warning(None,"Warning","Invalid file path for CTER CTF File (%s). The file must be in \"partres\" directory." % (file_path))
return
new_entry_list = read_text_row(file_path)
if len(new_entry_list) == 0:
QtGui.QMessageBox.warning(self, "Warning", "The specified CTER CTF file (%s) does not contain any entries. Please try again." % (file_path))
return
# print "MRK_DEBUG: Detected %s entries in %s" % (len(new_entry_list), file_path)
# print "MRK_DEBUG: Num. of Columns is %d in %s" % (len(new_entry_list[0]), file_path)
if len(new_entry_list[0]) == self.n_idx_cter - self.n_idx_cter_extra:
# This CTER file format is the original one (before around 2016/01/29)
for cter_id in xrange(len(new_entry_list)):
# Add extra items first to make sure indices match
new_entry_list[cter_id] = [cter_id] + new_entry_list[cter_id] + ["", self.cter_box_size, 0.5, 0.0, 1]
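# Prepend the sorted ID and append the extra columns; the pwrot file name, CTF error limit, max power, and select state placeholders are filled in below.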
# Cut off frequency components higher than CTF limit
cter_box_size = new_entry_list[cter_id][self.idx_cter_box_size]
cter_def = new_entry_list[cter_id][self.idx_cter_def]
cter_cs = new_entry_list[cter_id][self.idx_cter_cs]
cter_vol = new_entry_list[cter_id][self.idx_cter_vol]
cter_apix = new_entry_list[cter_id][self.idx_cter_apix]
cter_limit_ab_freq, cter_limit_freq = ctflimit(cter_box_size, cter_def, cter_cs, cter_vol, cter_apix)
# NOTE: 2015/12/16 Toshio Moriya
# Limiting_frequency[cycle/A]: xr[cycle/A]. Max is Nyquist frequency = 1.0[cycle]/(2*apix[A/pixel]). <UNIT: [cycle/(A/pixel)/[pixel])] => [(cycle*pixel)/(A*pixel] => [cycle/A]>
# limiting_period(Angstrom resolution)[A/cycle]: 1.0/xr[cycle/A]. Min is Nyquist period = (2*apix[A/pixel]). <UNIT: [1/(cycle/A)] = [A/cycle]>
# Width of Fourier pixel [pixel/A]: fwpix = 1.0[pixel]/(2*apix[A/pixel])/box_half[pixel] = 1[pixel]/fullsize[A]) <UNIT: [pixel/(A/pixel)/(pixel)] = [pixel*(pixel/A)*(1/pixel) = [pixel/A]>
# Limiting_absolute_frequency [cycle/pixel] int(xr/fwpix+0.5) = <Unit:[(cycle/A)/(pixel/A)] = [(cycle/A)*(A/pixel)] = [cycle/pixel]>
# return Limiting_abs_frequency [cycle/pixel]Limiting_frequency[cycle/A] <= int(xr/fwpix+0.5),xr
new_entry_list[cter_id][self.idx_cter_error_ctf] = cter_limit_freq
# Set associated pwrot file path
assert os.path.dirname(file_path).find("partres") != -1
cter_pwrot_dir = os.path.dirname(file_path).replace("partres", "pwrot")
new_cter_pwrot_file_path = os.path.join(cter_pwrot_dir, "rotinf%04d.txt" % new_entry_list[cter_id][self.idx_cter_id])
new_entry_list[cter_id][self.idx_cter_pwrot_name] = new_cter_pwrot_file_path
# Set max value of pwrot related to this micrograph
new_rotinf_table = read_text_file(new_cter_pwrot_file_path, ncol=-1)
new_entry_list[cter_id][self.idx_cter_max_power] = max(new_rotinf_table[self.idx_rotinf_exp_with_astig])
# Always set selection state to 1 (selected)
new_entry_list[cter_id][self.idx_cter_select] = 1
else:
# This CTER file format must be the current one (after around 2016/01/29) or the output of this script
assert len(new_entry_list[0]) == self.n_idx_cter, "MRK_DEBUG: The number of columns (%d) has to be %d or %d in %s" % (len(new_entry_list[0]), self.n_idx_cter - self.n_idx_cter_extra, self.n_idx_cter, file_path)
# now set the new status
self.cter_partres_file_path = file_path
self.cter_entry_list = new_entry_list
# Set the values and ranges of thresholds
for idx_hist in xrange(self.n_idx_hist):
idx_cter = self.hist_map_list[idx_hist][self.idx_hist_item_idx_cter]
val_min = min(self.cter_entry_list, key=lambda x:x[idx_cter])[idx_cter]
val_max = max(self.cter_entry_list, key=lambda x:x[idx_cter])[idx_cter]
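# min()/max() with a key return the whole entry, so index with idx_cter again to extract the scalar value.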
self.hist_map_list[idx_hist][self.idx_hist_item_val_min] = val_min
self.hist_map_list[idx_hist][self.idx_hist_item_val_max] = val_max
self.hist_map_list[idx_hist][self.idx_hist_item_unapply_threshold_lower] = val_min
self.hist_map_list[idx_hist][self.idx_hist_item_unapply_threshold_upper] = val_max
# self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_lower].setRange(val_min, val_max)
self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_lower].setValue(val_min)
# self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_upper].setRange(val_min, val_max)
self.hist_map_list[idx_hist][self.idx_hist_item_unapply_widget_upper].setValue(val_max)
self.hist_map_list[idx_hist][self.idx_hist_item_apply_threshold_lower] = val_min
self.hist_map_list[idx_hist][self.idx_hist_item_apply_threshold_upper] = val_max
# self.hist_map_list[idx_hist][self.idx_hist_item_apply_widget_lower].setRange(val_min, val_max)
self.hist_map_list[idx_hist][self.idx_hist_item_apply_widget_lower].setValue(val_min)
# self.hist_map_list[idx_hist][self.idx_hist_item_apply_widget_upper].setRange(val_min, val_max)
self.hist_map_list[idx_hist][self.idx_hist_item_apply_widget_upper].setValue(val_max)
# Set disable status of histogram
if self.hist_map_list[self.curhist][self.idx_hist_item_val_min] == self.hist_map_list[self.curhist][self.idx_hist_item_val_max]:
idx_cter = self.hist_map_list[self.curhist][self.idx_hist_item_idx_cter]
param_label = self.value_map_list[idx_cter][self.idx_cter_item_label]
self.curhistdisable=True
if self.whistparam.isVisible():
self.whistparam.hide()
if self.wplotparam.isVisible():
self.wplotparam.hide()
QtGui.QMessageBox.information(self, "Information","All entries have the same value for the selected parameter (%s). The Parameter Histogram & Plot will not be shown." % (param_label))
# Always disable micrograph display upon loading new dataset
if self.wimage.isVisible():
self.wimage.hide()
self.cbmicdisplay.setValue(False)
self.curmicdisplay = False
self.updateEntryList()
# Set the number of entries
self.vbnentry.setValue(len(self.cter_entry_list))
# Set the range of histogram bin
# self.vsentryperbin.setRange(1,len(self.cter_entry_list))
self.vsentryperbin.setValue(self.curentryperbin)
self.updateUncheckCounts()
# Enable buttons
self.pbrefreshgraphs.setEnabled(True)
self.pbreaplysort.setEnabled(True)
self.pbapplyallthreshold.setEnabled(True)
self.pbsavethresholdset.setEnabled(True)
self.pbloadthresholdset.setEnabled(True)
self.pbsaveselection.setEnabled(True)
# NOTE: 2016/01/03 Toshio Moriya
# Force update related plots for scaling delay...
self.updateHist()
self.updatePlotParam()
self.needredisp = True
mic_dir = os.path.dirname(self.cter_entry_list[0][self.idx_cter_mic_name])
if os.path.exists(mic_dir):
self.cbmicdisplay.setEnabled(True)
else:
QtGui.QMessageBox.warning(None,"Warning","Cannot find the micrograph directory (%s). Please check your project directory. \n\nThe micrograph display option is disabled for this session." % (mic_dir))
def openCter(self,val=None):
"""Open CTER CTF file"""
file_path = str(QtGui.QFileDialog.getOpenFileName(self, "Open CTER CTF File", options = QtGui.QFileDialog.DontUseNativeDialog))
if file_path == "": return
self.readCterCtfFile(os.path.relpath(file_path))
def updateHist(self):
if self.whistparam == None: return # it's closed/not visible
if self.cter_partres_file_path == None: return # no cter ctf file is selected
if self.cter_entry_list == None: return # no cter ctf file is selected
if self.curhistdisable == True: return # do nothing while it is hidden
val_list = []
# Create a histogram for the selected parameter
idx_cter = self.hist_map_list[self.curhist][self.idx_hist_item_idx_cter]
for cter_entry in self.cter_entry_list:
val_list.append(cter_entry[idx_cter])
n_bin = 1
if len(self.cter_entry_list) < self.curentryperbin:
self.curentryperbin = len(self.cter_entry_list)
self.vsentryperbin.setValue(self.curentryperbin)
# self.vsentryperbin.setRange(1,len(self.cter_entry_list))
elif self.curentryperbin < 1:
self.curentryperbin = 1
self.vsentryperbin.setValue(self.curentryperbin)
# self.vsentryperbin.setRange(1,len(self.cter_entry_list))
n_bin = len(self.cter_entry_list) / self.curentryperbin
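# Python 2 integer division; the clamping above keeps 1 <= curentryperbin <= number of entries, so n_bin >= 1.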
assert len(val_list) >= n_bin
assert n_bin > 0
from statistics import hist_list
hist_x_list, hist_y_list = hist_list(val_list, n_bin)
# Pad with zero for better visual impression...
hist_x_list += [max(val_list)]
hist_y_list += [0]
self.whistparam.set_data((hist_x_list,hist_y_list),"hist_param",quiet=False,color=0,linetype=0,symtype=0)
# MRK_NOTE: 2015/12/17 Toshio Moriya
# This may NOT be a good place to update the following information...
idx_cter = self.hist_map_list[self.curhist][self.idx_hist_item_idx_cter]
param_label = self.value_map_list[idx_cter][self.idx_cter_item_label]
param_val = self.cter_entry_list[self.curentry][idx_cter]
# shape_name = "hist_param_shape_value"
# scr_x, scr_y = self.whistparam.plot2scr(param_val, 0.0)
# self.whistparam.add_shape(shape_name,EMShape(("scrline",0,1,0,scr_x,self.whistparam.scrlim[1],scr_x,self.whistparam.scrlim[1]+self.whistparam.scrlim[3],3)))
val_min = min(hist_y_list)
val_max = max(hist_y_list)
# threshold_lower_label = "Lower(Blue)"
unapply_threshold_lower_val = self.hist_map_list[self.curhist][self.idx_hist_item_unapply_threshold_lower]
apply_threshold_lower_val = self.hist_map_list[self.curhist][self.idx_hist_item_apply_threshold_lower]
# threshold_upper_label = "Upper(Red)"
unapply_threshold_upper_val = self.hist_map_list[self.curhist][self.idx_hist_item_unapply_threshold_upper]
apply_threshold_upper_val = self.hist_map_list[self.curhist][self.idx_hist_item_apply_threshold_upper]
self.whistparam.set_data(([param_val, param_val], [val_min, val_max]),"selected_val",quiet=False,color=3)
self.whistparam.set_data(([unapply_threshold_lower_val, unapply_threshold_lower_val], [val_min, val_max]),"unapply_threshold_lower_val",quiet=False,color=1,linetype=1)
self.whistparam.set_data(([apply_threshold_lower_val, apply_threshold_lower_val], [val_min, val_max]),"apply_threshold_lower_val",quiet=False,color=1)
self.whistparam.set_data(([unapply_threshold_upper_val, unapply_threshold_upper_val], [val_min, val_max]),"unapply_threshold_upper_val",quiet=False,color=2,linetype=1)
self.whistparam.set_data(([apply_threshold_upper_val, apply_threshold_upper_val], [val_min, val_max]),"apply_threshold_upper_val",quiet=False,color=2)
# shape_name = "hist_param_shape_unapply_threshold_lower"
# scr_x, scr_y = self.whistparam.plot2scr(unapply_threshold_lower_val, 0.0)
#self.whistparam.add_shape(shape_name,EMShape(("scrline",0,0,0.5,scr_x,self.whistparam.scrlim[1],scr_x,self.whistparam.scrlim[1]+self.whistparam.scrlim[3],1)))
# shape_name = "hist_param_shape_apply_threshold_lower"
# scr_x, scr_y = self.whistparam.plot2scr(apply_threshold_lower_val, 0.0)
# self.whistparam.add_shape(shape_name,EMShape(("scrline",0,0,1,scr_x,self.whistparam.scrlim[1],scr_x,self.whistparam.scrlim[1]+self.whistparam.scrlim[3],1)))
# shape_name = "hist_param_shape_unapply_threshold_upper"
# scr_x, scr_y = self.whistparam.plot2scr(unapply_threshold_upper_val, 0.0)
# self.whistparam.add_shape(shape_name,EMShape(("scrline",0.5,0,0,scr_x,self.whistparam.scrlim[1],scr_x,self.whistparam.scrlim[1]+self.whistparam.scrlim[3],1)))
# shape_name = "hist_param_shape_apply_threshold_upper"
# scr_x, scr_y = self.whistparam.plot2scr(apply_threshold_upper_val, 0.0)
# self.whistparam.add_shape(shape_name,EMShape(("scrline",1,0,0,scr_x,self.whistparam.scrlim[1],scr_x,self.whistparam.scrlim[1]+self.whistparam.scrlim[3],1)))
# shape_name = "hist_param_shape_label"
# if self.curhist in [self.idx_hist_error_def, self.idx_hist_error_astig, self.idx_hist_error_ctf] and param_val > 0.0:
# self.whistparam.add_shape(shape_name,EMShape(("scrlabel",0,0,0,self.whistparam.scrlim[0]+30,self.whistparam.scrlim[1]+self.whistparam.scrlim[3]-18,"%s(Green) %1.3g (%1.3g), %s %1.3g, %s %1.3g"%(param_label,param_val,1/param_val,threshold_lower_label,apply_threshold_lower_val,threshold_upper_label,apply_threshold_upper_val),120.0,-1)))
# # self.whistparam.add_shape(shape_name,EMShape(("scrlabel",0,0,0,self.whistparam.scrlim[0]+30,self.whistparam.scrlim[1]+self.whistparam.scrlim[3]-18,"%s %1.5g (%1.5g)"%(param_label,param_val,1/param_val),120.0,-1)))
# else:
# self.whistparam.add_shape(shape_name,EMShape(("scrlabel",0,0,0,self.whistparam.scrlim[0]+30,self.whistparam.scrlim[1]+self.whistparam.scrlim[3]-18,"%s(Green) %1.3g, %s %1.3g, %s %1.3g"%(param_label,param_val,threshold_lower_label,apply_threshold_lower_val,threshold_upper_label,apply_threshold_upper_val),120.0,-1)))
# # self.whistparam.add_shape(shape_name,EMShape(("scrlabel",0,0,0,self.whistparam.scrlim[0]+30,self.whistparam.scrlim[1]+self.whistparam.scrlim[3]-18,"%s %1.5g"%(param_label,param_val),120.0,-1)))
self.whistparam.setAxisParms(param_label,"Image Counts")
# x_margin = (hist_x_list[-1] - hist_x_list[0]) * 0.05
# NOTE: 2016/01/02 Toshio Moriya
# Disable manual rescale for now and use autoscale
# self.whistparam.rescale(min(val_list),max(val_list),0,max(hist_y_list) * 1.05)
self.whistparam.autoscale(True)
def updatePlotParam(self):
if self.wplotparam == None: return # it's closed/not visible
if self.cter_partres_file_path == None: return # no cter ctf file is selected
if self.cter_entry_list == None: return # no cter ctf file is selected
if self.curhistdisable == True: return # do nothing while it is hidden
x_list = []
y_list = []
# Create a graph for the selected parameter
idx_cter = self.hist_map_list[self.curhist][self.idx_hist_item_idx_cter]
for cter_id in xrange(len(self.cter_entry_list)):
x_list.append(cter_id)
y_list.append(self.cter_entry_list[cter_id][idx_cter])
# self.wplotparam.set_data((x_list,y_list),"plot_param",quiet=False,color=0)
self.wplotparam.set_data((x_list,y_list),"plot_param",quiet=False,color=0,linetype=0,symtype=0)
# Create a graph for the single parameter value of the selected entry
# MRK_NOTE: 2015/12/17 Toshio Moriya
# This may NOT be a good place to update the following information...
idx_cter = self.hist_map_list[self.curhist][self.idx_hist_item_idx_cter]
param_label = self.value_map_list[idx_cter][self.idx_cter_item_label]
param_val = self.cter_entry_list[self.curentry][idx_cter]
# threshold_lower_label = "Lower(Blue)"
unapply_threshold_lower_val = self.hist_map_list[self.curhist][self.idx_hist_item_unapply_threshold_lower]
apply_threshold_lower_val = self.hist_map_list[self.curhist][self.idx_hist_item_apply_threshold_lower]
# threshold_upper_label = "Upper(Red)"
unapply_threshold_upper_val = self.hist_map_list[self.curhist][self.idx_hist_item_unapply_threshold_upper]
apply_threshold_upper_val = self.hist_map_list[self.curhist][self.idx_hist_item_apply_threshold_upper]
y_list = [param_val]*len(x_list)
self.wplotparam.set_data((x_list,y_list),"selected_val",quiet=False,color=3)
y_list = [unapply_threshold_lower_val]*len(x_list)
self.wplotparam.set_data((x_list,y_list),"unapply_threshold_lower_val",quiet=False,color=1,linetype=1)
y_list = [apply_threshold_lower_val]*len(x_list)
self.wplotparam.set_data((x_list,y_list),"apply_threshold_lower_val",quiet=False,color=1)
y_list = [unapply_threshold_upper_val]*len(x_list)
self.wplotparam.set_data((x_list,y_list),"unapply_threshold_upper_val",quiet=False,color=2,linetype=1)
y_list = [apply_threshold_upper_val]*len(x_list)
self.wplotparam.set_data((x_list,y_list),"apply_threshold_upper_val",quiet=False,color=2)
# shape_name = "plot_param_shape_label"
# if self.curhist in [self.idx_hist_error_def, self.idx_hist_error_astig, self.idx_hist_error_ctf] and param_val > 0.0:
# self.wplotparam.add_shape(shape_name,EMShape(("scrlabel",0,0,0,self.wplotparam.scrlim[0]+30,self.wplotparam.scrlim[1]+self.wplotparam.scrlim[3]-18,"%s(Green) %1.3g (%1.3g), %s %1.3g, %s %1.3g"%(param_label,param_val,1/param_val,threshold_lower_label,apply_threshold_lower_val,threshold_upper_label,apply_threshold_upper_val),120.0,-1)))
# # self.wplotparam.add_shape(shape_name,EMShape(("scrlabel",0,0,0,self.wplotparam.scrlim[0]+30,self.wplotparam.scrlim[1]+self.wplotparam.scrlim[3]-18,"%s(Green) %1.5g (%1.5g)"%(param_label,param_val,1/param_val),120.0,-1)))
# else:
# self.wplotparam.add_shape(shape_name,EMShape(("scrlabel",0,0,0,self.wplotparam.scrlim[0]+30,self.wplotparam.scrlim[1]+self.wplotparam.scrlim[3]-18,"%s(Green) %1.3g, %s %1.3g, %s %1.3g"%(param_label,param_val,threshold_lower_label,apply_threshold_lower_val,threshold_upper_label,apply_threshold_upper_val),120.0,-1)))
# # self.wplotparam.add_shape(shape_name,EMShape(("scrlabel",0,0,0,self.wplotparam.scrlim[0]+30,self.wplotparam.scrlim[1]+self.wplotparam.scrlim[3]-18,"%s(Green) %1.5g"%(param_label,param_val),120.0,-1)))
self.wplotparam.setAxisParms("Sorted Image ID", param_label)
# NOTE: 2016/01/02 Toshio Moriya
# Use autoscale for now
self.wplotparam.autoscale(True)
def updatePlot(self):
if self.wplotrotavgcoarse == None: return # it's closed/not visible
if self.wplotrotavgfine == None: return # it's closed/not visible
if self.cter_pwrot_file_path == None: return # no cter entry is selected
# Now update the plots
if not os.path.exists(self.cter_pwrot_file_path):
QtGui.QMessageBox.warning(None,"Warning","Cannot find the pwrot file (%s). Please check the contents of the pwrot directory." % (self.cter_pwrot_file_path))
return
self.rotinf_table = read_text_file(self.cter_pwrot_file_path, ncol=-1)
# print "MRK_DEBUG: Last entry of the 1st column should be a micrograph name %s which is the same as " % os.path.basename(self.rotinf_table[0][-1])
mic_basename_rotinf = os.path.basename(self.rotinf_table[0][-1]) # the last entry of the 1st column should be the associated micrograph
mic_basename_partres = os.path.basename(self.cter_entry_list[self.curentry][self.idx_cter_mic_name])
if mic_basename_rotinf != mic_basename_partres:
QtGui.QMessageBox.warning(None,"Warning","Micrograph name (%s) in %s is not the same as the name (%s) in %s" % (mic_basename_rotinf, os.path.basename(self.cter_pwrot_file_path), mic_basename_partres, os.path.basename(self.cter_partres_file_path)))
return
# global_min = float("inf")
# global_max = float("-inf")
for idx_graph in xrange(self.n_idx_graph):
self.wplotrotavgcoarse.set_data((self.rotinf_table[self.idx_rotinf_freq],self.rotinf_table[self.graph_map_list[idx_graph][self.idx_graph_idx_rotinf]]),self.graph_map_list[idx_graph][self.idx_graph_item_name],quiet=False,color=idx_graph)
self.wplotrotavgfine.set_data((self.rotinf_table[self.idx_rotinf_freq],self.rotinf_table[self.graph_map_list[idx_graph][self.idx_graph_idx_rotinf]]),self.graph_map_list[idx_graph][self.idx_graph_item_name],quiet=False,color=idx_graph)
# val_min = min(self.rotinf_table[self.graph_map_list[idx_graph][self.idx_graph_idx_rotinf]])
# val_max = max(self.rotinf_table[self.graph_map_list[idx_graph][self.idx_graph_idx_rotinf]])
# if global_min > val_min:
# global_min = val_min
# if global_max < val_max:
# global_max = val_max
# NOTE: 2016/01/02 Toshio Moriya
# Disable manual rescale for now and use autoscale
# self.wplotrotavgcoarse.rescale(self.rotinf_table[self.idx_rotinf_freq][0],self.rotinf_table[self.idx_rotinf_freq][-1],0.0,1.0)
self.wplotrotavgcoarse.autoscale(True)
self.wplotrotavgfine.rescale(self.rotinf_table[self.idx_rotinf_freq][0],self.rotinf_table[self.idx_rotinf_freq][-1],0.0,self.curplotfixscale)
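# The coarse plot autoscales, while the zoomed plot's y-axis is fixed to [0, curplotfixscale].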
nyquist_freq = self.rotinf_table[self.idx_rotinf_freq][-1]
# print "MRK_DEBUG: nyquist_freq = %1.5g" % nyquist_freq
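# For each resolution limit below (astigmatism, defocus, CTF), draw a vertical marker line and a frequency label on both plots when the limit lies within the Nyquist range.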
error_name = "error_astig"
error_label = "Astig. Limit"
error_freq = self.cter_entry_list[self.curentry][self.idx_cter_error_astig]
# print "MRK_DEBUG: %s= %1.5g" % (error_name, error_freq)
if error_freq > 0.0 and error_freq <= nyquist_freq:
error_scr_x, error_scr_y = self.wplotrotavgcoarse.plot2scr(error_freq, 0.0)
self.wplotrotavgcoarse.add_shape(error_name,EMShape(("scrline",0,0,0.5,error_scr_x,self.wplotrotavgcoarse.scrlim[1],error_scr_x,self.wplotrotavgcoarse.scrlim[1]+self.wplotrotavgcoarse.scrlim[3],1)))
# self.wplotrotavgcoarse.set_data(([error_freq, error_freq], [global_min, global_max]),"astig_error_freq_limit",quiet=False,color=0,linetype=0)
self.wplotrotavgcoarse.add_shape("%s_freq"%(error_name),EMShape(("scrlabel",0,0,0,error_scr_x-260,self.wplotrotavgcoarse.scrlim[1]+self.wplotrotavgcoarse.scrlim[3]-18,"%s %1.5g (%1.5g)"%(error_label,error_freq,1.0/error_freq),120.0,-1)))
error_scr_x, error_scr_y = self.wplotrotavgfine.plot2scr(error_freq, 0.0)
self.wplotrotavgfine.add_shape(error_name,EMShape(("scrline",0,0,0.5,error_scr_x,self.wplotrotavgfine.scrlim[1],error_scr_x,self.wplotrotavgfine.scrlim[1]+self.wplotrotavgfine.scrlim[3],1)))
# self.wplotrotavgfine.set_data(([error_freq, error_freq], [global_min, global_max]),"astig_error_freq_limit",quiet=False,color=0,linetype=0)
self.wplotrotavgfine.add_shape("%s_freq"%(error_name),EMShape(("scrlabel",0,0,0,error_scr_x-260,self.wplotrotavgfine.scrlim[1]+self.wplotrotavgfine.scrlim[3]-18,"%s %1.5g (%1.5g)"%(error_label,error_freq,1.0/error_freq),120.0,-1)))
error_name = "error_def"
error_label = "Defocus Limit"
error_freq = self.cter_entry_list[self.curentry][self.idx_cter_error_def]
# print "MRK_DEBUG: %s= %1.5g" % (error_name, error_freq)
if error_freq > 0.0 and error_freq <= nyquist_freq:
error_scr_x, error_scr_y = self.wplotrotavgcoarse.plot2scr(error_freq, 0.0)
self.wplotrotavgcoarse.add_shape(error_name,EMShape(("scrline",0.5,0,0,error_scr_x,self.wplotrotavgcoarse.scrlim[1],error_scr_x,self.wplotrotavgcoarse.scrlim[1]+self.wplotrotavgcoarse.scrlim[3],1)))
# self.wplotrotavgcoarse.set_data(([error_freq, error_freq], [global_min, global_max]),"defocus_error_freq_limit",quiet=False,color=0,linetype=0)
self.wplotrotavgcoarse.add_shape("%s_freq"%(error_name),EMShape(("scrlabel",0,0,0,error_scr_x-260,self.wplotrotavgcoarse.scrlim[1]+self.wplotrotavgcoarse.scrlim[3]-36,"%s %1.5g (%1.5g)"%(error_label,error_freq,1.0/error_freq),120.0,-1)))
error_scr_x, error_scr_y = self.wplotrotavgfine.plot2scr(error_freq, 0.0)
self.wplotrotavgfine.add_shape(error_name,EMShape(("scrline",0.5,0,0,error_scr_x,self.wplotrotavgfine.scrlim[1],error_scr_x,self.wplotrotavgfine.scrlim[1]+self.wplotrotavgfine.scrlim[3],1)))
# self.wplotrotavgfine.set_data(([error_freq, error_freq], [global_min, global_max]),"defocus_error_freq_limit",quiet=False,color=0,linetype=0)
self.wplotrotavgfine.add_shape("%s_freq"%(error_name),EMShape(("scrlabel",0,0,0,error_scr_x-260,self.wplotrotavgfine.scrlim[1]+self.wplotrotavgfine.scrlim[3]-36,"%s %1.5g (%1.5g)"%(error_label,error_freq,1.0/error_freq),120.0,-1)))
error_name = "error_ctf"
error_label = "CTF Limit"
error_freq = self.cter_entry_list[self.curentry][self.idx_cter_error_ctf]
# print "MRK_DEBUG: %s= %1.5g" % (error_name, error_freq)
if error_freq > 0.0 and error_freq <= nyquist_freq:
error_scr_x, error_scr_y = self.wplotrotavgcoarse.plot2scr(error_freq, 0.0)
self.wplotrotavgcoarse.add_shape(error_name,EMShape(("scrline",0,0.5,0,error_scr_x,self.wplotrotavgcoarse.scrlim[1],error_scr_x,self.wplotrotavgcoarse.scrlim[1]+self.wplotrotavgcoarse.scrlim[3],1)))
# self.wplotrotavgcoarse.set_data(([error_freq, error_freq], [global_min, global_max]),"ctf_freq_limit")
self.wplotrotavgcoarse.add_shape("%s_freq"%(error_name),EMShape(("scrlabel",0,0,0,error_scr_x-260,self.wplotrotavgcoarse.scrlim[1]+self.wplotrotavgcoarse.scrlim[3]-54,"%s %1.5g (%1.5g)"%(error_label,error_freq,1.0/error_freq),120.0,-1)))
error_scr_x, error_scr_y = self.wplotrotavgfine.plot2scr(error_freq, 0.0)
self.wplotrotavgfine.add_shape(error_name,EMShape(("scrline",0,0.5,0,error_scr_x,self.wplotrotavgfine.scrlim[1],error_scr_x,self.wplotrotavgfine.scrlim[1]+self.wplotrotavgfine.scrlim[3],1)))
# self.wplotrotavgfine.set_data(([error_freq, error_freq], [global_min, global_max]),"ctf_freq_limit")
self.wplotrotavgfine.add_shape("%s_freq"%(error_name),EMShape(("scrlabel",0,0,0,error_scr_x-260,self.wplotrotavgfine.scrlim[1]+self.wplotrotavgfine.scrlim[3]-54,"%s %1.5g (%1.5g)"%(error_label,error_freq,1.0/error_freq),120.0,-1)))
self.wplotrotavgcoarse.setAxisParms("frequency (1/"+ "$\AA$" +")","power spectrum")
self.wplotrotavgfine.setAxisParms("frequency (1/"+ "$\AA$" +")","power spectrum")
self.updatePlotVisibility()
def updateEntryList(self):
"""Update the entry list box after sorting of CTER entries based on the current settings."""
# sort CTER entry list
assert (self.cter_entry_list != None)
self.cter_entry_list = sorted(self.cter_entry_list, key=lambda x: x[self.sort_map_list[self.cursort][self.idx_sort_item_idx_cter]], reverse=self.cursortoder)
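# Python's sort is stable, so the secondary sort by select state below preserves the parameter ordering within each group.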
if self.cursortselect:
# Additionally, sort the CTER entry list by select state
self.cter_entry_list = sorted(self.cter_entry_list, key=lambda x: x[self.idx_cter_select])
# else: # Do nothing
# Refresh entry list box
self.lbentry.clear()
newItemflags = Qt.ItemFlags(Qt.ItemIsSelectable)|Qt.ItemFlags(Qt.ItemIsEnabled)|Qt.ItemFlags(Qt.ItemIsUserCheckable)
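# Entries are selectable and user-checkable but not editable; each check box mirrors the per-entry select state.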
for cter_entry in self.cter_entry_list:
newItem = QtGui.QListWidgetItem(os.path.basename(cter_entry[self.idx_cter_mic_name]))
newItem.setFlags(newItemflags)
if cter_entry[self.idx_cter_select] == 1:
newItem.setCheckState(Qt.Checked)
else:
assert(cter_entry[self.idx_cter_select] == 0)
newItem.setCheckState(Qt.Unchecked)
self.lbentry.addItem(newItem)
# self.lbentry.addItem(os.path.basename(cter_entry[self.idx_cter_mic_name]))
self.newEntry(0)
self.lbentry.setCurrentRow(0)
def updateMicImg(self):
if not self.curmicdisplay: return # Micrograph display is disabled
if not os.path.exists(self.cter_mic_file_path):
QtGui.QMessageBox.warning(None,"Warning","Cannot find micrograph (%s). Please check your micrograph directory. \n\nA blank image is shown." % (self.cter_mic_file_path))
mic_img = EMData() # Set empty image...
img_size = 4096 # Use a typical default image size
mic_img = model_blank(img_size,img_size, bckg=1.0)
else:
mic_img = EMData(self.cter_mic_file_path) # read the image from disk
self.wimage.set_data(mic_img)
self.wimage.setWindowTitle("sxgui_cter - Micrograph - %s, %s" % (os.path.basename(self.cter_entry_list[self.curentry][self.idx_cter_mic_name]), os.path.basename(self.cter_pwrot_file_path)))
def newMicDisplay(self,val=None):
"""Change micrograph display status."""
assert(self.cbmicdisplay.getEnabled() == True)
if self.curmicdisplay == val: return
# now set the new status
self.curmicdisplay = val
if self.curmicdisplay and not self.wimage.isVisible():
self.wimage.show()
self.updateMicImg()
elif not self.curmicdisplay and self.wimage.isVisible():
self.wimage.hide()
def newEntry(self,currow):
"""Called when a new data set is selected from the CTER Entry list box."""
assert(self.cter_partres_file_path != None)
assert(self.cter_entry_list != None)
# always update the current row of cter entry list
# to get associated micrograph path and pwrot file path
self.curentry = currow # row can be the same even after resorting of the cter entry list
# Get associated pwrot file path of current entry
new_cter_pwrot_file_path = self.cter_entry_list[self.curentry][self.idx_cter_pwrot_name]
# Get associated micrograph path of current entry
new_cter_mic_file_path = self.cter_entry_list[self.curentry][self.idx_cter_mic_name]
# Changing row does not always change the pwrot file path after resorting of the cter entry list
# If same, skip the following processes
if self.cter_pwrot_file_path == new_cter_pwrot_file_path:
assert(self.cter_mic_file_path == new_cter_mic_file_path)
return
# now set the new item
assert(self.cter_pwrot_file_path != new_cter_pwrot_file_path)
self.cter_pwrot_file_path = new_cter_pwrot_file_path
assert(self.cter_mic_file_path != new_cter_mic_file_path)
self.cter_mic_file_path = new_cter_mic_file_path
# print "MRK_DEBUG: Row No. %d (CTER Entry No. %d) is selected from cter entry list box" % (self.curentry, self.cter_entry_list[self.curentry][self.idx_cter_id])
self.ssortedid.setValue(self.curentry,True)
for idx_cter in xrange(self.n_idx_cter):
if idx_cter not in [self.idx_cter_mic_name, self.idx_cter_pwrot_name]:
self.value_map_list[idx_cter][self.idx_cter_item_widget].setValue(self.cter_entry_list[self.curentry][idx_cter],True)
# Use colored text to indicate when the value falls outside the applied threshold range (blue below, red above)
for idx_hist in xrange(self.n_idx_hist):
lower_threshold = self.hist_map_list[idx_hist][self.idx_hist_item_apply_threshold_lower]
upper_threshold = self.hist_map_list[idx_hist][self.idx_hist_item_apply_threshold_upper]
idx_cter = self.hist_map_list[idx_hist][self.idx_hist_item_idx_cter]
param_val = self.cter_entry_list[self.curentry][idx_cter]
if lower_threshold <= param_val and param_val <= upper_threshold:
self.value_map_list[idx_cter][self.idx_cter_item_widget].text.setStyleSheet("color: rgb(0,0,0);")
elif param_val < lower_threshold:
self.value_map_list[idx_cter][self.idx_cter_item_widget].text.setStyleSheet("color: rgb(0,0,255);")
else:
assert(upper_threshold < param_val)
self.value_map_list[idx_cter][self.idx_cter_item_widget].text.setStyleSheet("color: rgb(255,0,0);")
# self.wfft.setWindowTitle("sxgui_cter - 2D FFT - "+fsp.split("/")[-1])
self.wplotrotavgcoarse.setWindowTitle("sxgui_cter - Plot - %s, %s" % (os.path.basename(self.cter_entry_list[self.curentry][self.idx_cter_mic_name]), os.path.basename(self.cter_pwrot_file_path)))
self.wplotrotavgfine.setWindowTitle("sxgui_cter - Plot Zoom - %s, %s" % (os.path.basename(self.cter_entry_list[self.curentry][self.idx_cter_mic_name]), os.path.basename(self.cter_pwrot_file_path)))
self.cter_mic_file_path = new_cter_mic_file_path
# Now update the image
self.updateMicImg()
self.needredisp = True
def updateEntrySelect(self, entry):
"""called when check status of an cter entry in list box is changed."""
assert(self.cter_partres_file_path != None)
assert(self.cter_entry_list != None)
newSelect = 1
if entry.checkState() == Qt.Unchecked:
newSelect = 0
entry_row = self.lbentry.row(entry)
self.cter_entry_list[entry_row][self.idx_cter_select] = newSelect
if self.curentry == entry_row:
self.value_map_list[self.idx_cter_select][self.idx_cter_item_widget].setValue(self.cter_entry_list[self.curentry][self.idx_cter_select],True)
self.updateUncheckCounts()
def updateUncheckCounts(self):
"""called whenever checked status of cter entries change."""
assert(self.cter_partres_file_path != None)
assert(self.cter_entry_list != None)
assert(len(self.cter_entry_list) > 0)
n_entry = len(self.cter_entry_list)
uncheck_counts = n_entry
for cter_entry in self.cter_entry_list:
uncheck_counts -= cter_entry[self.idx_cter_select]
assert(uncheck_counts >= 0 and uncheck_counts <= n_entry)
self.vbuncheckcounts.setValue(uncheck_counts,True)
self.vbuncheckratio.setValue(float(uncheck_counts)/n_entry,True)
def reapplySort(self,item = None):
"""Called when reapply button is clicked."""
if self.cter_partres_file_path == None: return # no cter ctf file is selected
if self.cter_entry_list == None: return # no cter ctf file is selected
self.updateEntryList()
def newSort(self,cursort):
"""Sort CTER entries by selected parameter values."""
if self.cursort == cursort: return
# now set the new item
self.cursort = cursort
if self.cter_partres_file_path == None: return # no cter ctf file is selected
if self.cter_entry_list == None: return # no cter ctf file is selected
self.updateEntryList()
def newSortOrder(self, sortoder):
"""Change sorting order of CTER entries."""
if self.cursortoder == sortoder: return
# now set the new status
self.cursortoder = sortoder
if self.cter_partres_file_path == None: return # no cter ctf file is selected
if self.cter_entry_list == None: return # no cter ctf file is selected
self.updateEntryList()
def newSortSelect(self, sortselect):
"""Change sort select status of CTER entries."""
if self.cursortselect == sortselect: return
# now set the new status
self.cursortselect = sortselect
if self.cter_partres_file_path == None: return # no cter ctf file is selected
if self.cter_entry_list == None: return # no cter ctf file is selected
self.updateEntryList()
def newThresholdLower(self):
threshold_lower = self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_lower].getValue()
if threshold_lower < self.hist_map_list[self.curhist][self.idx_hist_item_val_min]:
threshold_lower = self.hist_map_list[self.curhist][self.idx_hist_item_val_min]
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_lower].setValue(threshold_lower)
elif threshold_lower > self.hist_map_list[self.curhist][self.idx_hist_item_val_max]:
threshold_lower = self.hist_map_list[self.curhist][self.idx_hist_item_val_max]
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_lower].setValue(threshold_lower)
# else: # Do nothing
# now set the new threshold
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_threshold_lower] = threshold_lower
self.needredisp=True
def newThresholdUpper(self):
threshold_upper = self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_upper].getValue()
if threshold_upper < self.hist_map_list[self.curhist][self.idx_hist_item_val_min]:
threshold_upper = self.hist_map_list[self.curhist][self.idx_hist_item_val_min]
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_upper].setValue(threshold_upper)
elif threshold_upper > self.hist_map_list[self.curhist][self.idx_hist_item_val_max]:
threshold_upper = self.hist_map_list[self.curhist][self.idx_hist_item_val_max]
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_upper].setValue(threshold_upper)
# else: # Do nothing
# now set the new threshold
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_threshold_upper] = threshold_upper
self.needredisp=True
def newHist(self,currow):
"Called when a new row is selected from the Histogram list box."
if self.curhist == currow: return
# Disable old item
if self.curthresholdcontrol == self.idx_threshold_control_lower:
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_lower].setEnabled(False)
elif self.curthresholdcontrol == self.idx_threshold_control_upper:
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_upper].setEnabled(False)
else:
assert(self.curthresholdcontrol == self.idx_threshold_control_edit_only)
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_lower].setEnabled(False)
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_upper].setEnabled(False)
# now set the new item and enable it
self.curhist=currow
# print "MRK_DEBUG: Row No. %d is selected from histogram list box" % (self.curhist)
# Check whether all entries share the same value for the selected parameter
if self.hist_map_list[self.curhist][self.idx_hist_item_val_min] == self.hist_map_list[self.curhist][self.idx_hist_item_val_max]:
idx_cter = self.hist_map_list[self.curhist][self.idx_hist_item_idx_cter]
param_label = self.value_map_list[idx_cter][self.idx_cter_item_label]
self.curhistdisable=True
if self.whistparam.isVisible():
self.whistparam.hide()
if self.wplotparam.isVisible():
self.wplotparam.hide()
QtGui.QMessageBox.information(self, "Information","All entries have the same value for the selected parameter (%s). The Parameter Histogram & Plot will not be shown." % (param_label))
else:
if self.curthresholdcontrol == self.idx_threshold_control_lower:
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_lower].setEnabled(True)
elif self.curthresholdcontrol == self.idx_threshold_control_upper:
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_upper].setEnabled(True)
else:
assert(self.curthresholdcontrol == self.idx_threshold_control_edit_only)
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_lower].setEnabled(True)
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_upper].setEnabled(True)
idx_cter = self.hist_map_list[self.curhist][self.idx_hist_item_idx_cter]
param_label = self.value_map_list[idx_cter][self.idx_cter_item_label]
self.whistparam.setWindowTitle("sxgui_cter - %s Histogram" % (param_label))
self.wplotparam.setWindowTitle("sxgui_cter - %s Sort Plot" % (param_label))
if self.cursyncsort == True:
idx_sort = self.hist_map_list[self.curhist][self.idx_hist_item_idx_sort]
if (idx_sort != self.cursort):
self.newSort(idx_sort)
self.ssort.setCurrentIndex(idx_sort)
# else: assert(idx_sort == self.cursort) # Do nothing
# else: assert(self.cursyncsort == False) # Do nothing
if self.cter_partres_file_path == None: return # no cter ctf file is selected
if self.cter_entry_list == None: return # no cter ctf file is selected
self.curhistdisable=False
if not self.whistparam.isVisible():
self.whistparam.show()
if not self.wplotparam.isVisible():
self.wplotparam.show()
# NOTE: 2016/01/03 Toshio Moriya
# Force update related plots for scaling delay...
self.updateHist()
self.updatePlotParam()
if self.cursyncsort == True:
self.updateEntryList()
self.needredisp = True
def newThresholdControl(self, currow):
"Called when a new row is selected from the Threshold Control list box."
if self.curthresholdcontrol == currow: return
# Disable old item
if self.curthresholdcontrol == self.idx_threshold_control_lower:
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_lower].setEnabled(False)
elif self.curthresholdcontrol == self.idx_threshold_control_upper:
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_upper].setEnabled(False)
else:
assert(self.curthresholdcontrol == self.idx_threshold_control_edit_only)
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_lower].setEnabled(False)
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_upper].setEnabled(False)
# now set the new item and enable it
self.curthresholdcontrol=currow
# print "MRK_DEBUG: Row No. %d is selected from threshold control list box" % (self.curthresholdcontrol)
if self.curthresholdcontrol == self.idx_threshold_control_lower:
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_lower].setEnabled(True)
elif self.curthresholdcontrol == self.idx_threshold_control_upper:
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_upper].setEnabled(True)
else:
assert(self.curthresholdcontrol == self.idx_threshold_control_edit_only)
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_lower].setEnabled(True)
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_upper].setEnabled(True)
def newSyncSort(self, syncsort):
"""Change the sync sort enable state."""
if self.cursyncsort == syncsort: return
# now set the new status
self.cursyncsort = syncsort
self.ssort.setEnabled(not self.cursyncsort)
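# While sync sort is on, the sort parameter follows the histogram parameter, so the manual sort combo box is disabled.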
if self.cursyncsort == True:
idx_sort = self.hist_map_list[self.curhist][self.idx_hist_item_idx_sort]
if (idx_sort != self.cursort):
self.newSort(idx_sort)
self.ssort.setCurrentIndex(idx_sort)
# else: assert(idx_sort == self.cursort) # Do nothing
# else: assert(self.cursyncsort == False) # Do nothing
def newEntryPerBin(self,curentryperbin):
if self.curentryperbin == curentryperbin: return
# now set the new entry per bin
self.curentryperbin = curentryperbin
if self.cter_partres_file_path == None: return # no cter ctf file is selected
if self.cter_entry_list == None: return # no cter ctf file is selected
# NOTE: 2016/01/03 Toshio Moriya
# Force update related plots for scaling delay...
self.updateHist()
self.updatePlotParam()
self.needredisp = True
def applyAllThresholds(self,item = None):
if self.cter_partres_file_path == None: return # no cter ctf file is selected
if self.cter_entry_list == None: return # no cter ctf file is selected
reply = QtGui.QMessageBox.question(self, "Warning", "Applying all threshold settings will wipe the previous selection states, including manual settings. Do you really want to continue?", QtGui.QMessageBox.Yes | QtGui.QMessageBox.No, QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.No:
return
# Set the select status of all CTER entries based on the threshold values
for cter_entry in self.cter_entry_list:
new_select_state = 1
for idx_hist in xrange(self.n_idx_hist):
threshold_lower = self.hist_map_list[idx_hist][self.idx_hist_item_unapply_threshold_lower]
threshold_upper = self.hist_map_list[idx_hist][self.idx_hist_item_unapply_threshold_upper]
self.hist_map_list[idx_hist][self.idx_hist_item_apply_threshold_lower] = threshold_lower
self.hist_map_list[idx_hist][self.idx_hist_item_apply_threshold_upper] = threshold_upper
self.hist_map_list[idx_hist][self.idx_hist_item_apply_widget_lower].setValue(threshold_lower)
self.hist_map_list[idx_hist][self.idx_hist_item_apply_widget_upper].setValue(threshold_upper)
idx_cter = self.hist_map_list[idx_hist][self.idx_hist_item_idx_cter]
param_val = cter_entry[idx_cter]
if param_val < threshold_lower or threshold_upper < param_val:
new_select_state = 0
# else: # Do nothing
cter_entry[self.idx_cter_select] = new_select_state
self.updateEntryList()
self.updateUncheckCounts()
def newThresholdSet(self, currow):
"Called when a new row is selected from the Threshold Set list box."
if self.curthresholdset == currow: return
# now set the new item
self.curthresholdset=currow
def writeThresholdSet(self, file_path_out, idx_thresholdset):
assert(self.cter_partres_file_path != None)
assert(self.cter_entry_list != None)
file_out = open(file_path_out,"w")
# Write lines to check consistency upon loading
file_out.write("# @@@@@ gui_cter thresholds - ")
# file_out.write(EMANVERSION + " (CVS" + CVSDATESTAMP[6:-2] +")")
file_out.write(EMANVERSION + " (GITHUB: " + DATESTAMP +")" )
file_out.write(" @@@@@ \n")
file_out.write("# Associated CTER CTF File == %s\n" % (self.cter_partres_file_path))
file_out.write("# Saved Threshold Set == %s\n" % (self.thresholdset_map_list[idx_thresholdset][self.idx_thresholdset_item_label]))
file_out.write("# [Parameter ID] [Parameter Name] [Lower Threshold] [Upper Threshold]\n")
# Assign the indices of the target threshold values
idx_threshold_lower = self.idx_hist_item_unapply_threshold_lower
idx_threshold_upper = self.idx_hist_item_unapply_threshold_upper
if idx_thresholdset == self.idx_thresholdset_applied:
idx_threshold_lower = self.idx_hist_item_apply_threshold_lower
idx_threshold_upper = self.idx_hist_item_apply_threshold_upper
for idx_hist in xrange(self.n_idx_hist):
map_entry = self.hist_map_list[idx_hist]
idx_cter = map_entry[self.idx_hist_item_idx_cter]
param_label = self.value_map_list[idx_cter][self.idx_cter_item_label]
# NOTE: 2016/01/26 Toshio Moriya
# Use the precision for double to minimise precision loss by save & load operations
file_out.write("%2d %s == %1.15g %1.15g \n" % (idx_hist, param_label, map_entry[idx_threshold_lower], map_entry[idx_threshold_upper]))
file_out.close()
def readThresholdSet(self, file_path_in, idx_thresholdset):
assert(self.cter_partres_file_path != None)
assert(self.cter_entry_list != None)
file_in = open(file_path_in,"r")
# Check whether this parameter file is a threshold file
line_in = file_in.readline()
if line_in.find("@@@@@ gui_cter thresholds") != -1:
# loop through the rest of lines
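# Each non-comment line has the form: <hist index> <parameter label> == <lower threshold> <upper threshold>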
for line_in in file_in:
if line_in[0] == "#":
continue
tokens_in = line_in.split("==")
assert(len(tokens_in) == 2)
tokens_label = tokens_in[0].split()
assert(len(tokens_label) >= 2)
idx_hist = int(tokens_label[0]) # index of hist_map_list
map_entry = self.hist_map_list[idx_hist]
tokens_val = tokens_in[1].split()
assert(len(tokens_val) == 2)
threshold_lower = float(tokens_val[0])
threshold_upper = float(tokens_val[1])
map_entry[self.idx_hist_item_unapply_threshold_lower] = threshold_lower
map_entry[self.idx_hist_item_unapply_threshold_upper] = threshold_upper
map_entry[self.idx_hist_item_unapply_widget_lower].setValue(threshold_lower)
map_entry[self.idx_hist_item_unapply_widget_upper].setValue(threshold_upper)
self.newThresholdLower()
self.newThresholdUpper()
if idx_thresholdset == self.idx_thresholdset_applied:
self.applyAllThresholds()
else:
QtGui.QMessageBox.warning(self, "Warning", "The specified file is not a threshold file.")
file_in.close()
def saveThresholdSet(self,item = None):
if self.cter_partres_file_path == None: return # no cter ctf file is selected
if self.cter_entry_list == None: return # no cter ctf file is selected
title_string = "Save %s Thresholds" % self.thresholdset_map_list[self.curthresholdset][self.idx_thresholdset_item_label]
file_path_out = str(QtGui.QFileDialog.getSaveFileName(self, title_string, options = QtGui.QFileDialog.DontUseNativeDialog))
if file_path_out == "": return
self.writeThresholdSet(os.path.relpath(file_path_out), self.curthresholdset)
def loadThresholdSet(self,item = None):
if self.cter_partres_file_path == None: return # no cter ctf file is selected
if self.cter_entry_list == None: return # no cter ctf file is selected
reply = QtGui.QMessageBox.question(self, "Warning", "Loading thresholds will wipe the previous threshold settings. Do you really want to continue?", QtGui.QMessageBox.Yes | QtGui.QMessageBox.No, QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.No:
return
title_string = "Load %s Thresholds" % self.thresholdset_map_list[self.curthresholdset][self.idx_thresholdset_item_label]
file_path_in = str(QtGui.QFileDialog.getOpenFileName(self, title_string, options = QtGui.QFileDialog.DontUseNativeDialog))
if file_path_in == "": return
self.readThresholdSet(os.path.relpath(file_path_in), self.curthresholdset)
def saveSelection(self,item = None):
if self.cter_partres_file_path == None: return # no cter ctf file is selected
if self.cter_entry_list == None: return # no cter ctf file is selected
assert(os.path.basename(self.cter_partres_file_path).find("partres") != -1)
assert(self.cter_partres_file_path[-1*len(".txt"):] == ".txt")
assert(os.path.dirname(self.cter_partres_file_path)[-1*len("partres"):] == "partres")
file_suffix = self.vfilesuffix.getValue()
file_path_out_select = os.path.join(os.path.dirname(self.cter_partres_file_path), "%s_partres_select.txt" % (file_suffix))
file_path_out_discard = os.path.join(os.path.dirname(self.cter_partres_file_path), "%s_partres_discard.txt" % (file_suffix))
file_path_out_mic_select = os.path.join(os.path.dirname(self.cter_partres_file_path), "%s_micrographs_select.txt" % (file_suffix))
file_path_out_mic_discard = os.path.join(os.path.dirname(self.cter_partres_file_path), "%s_micrographs_discard.txt" % (file_suffix))
file_path_out_thresholds = os.path.join(os.path.dirname(self.cter_partres_file_path), "%s_thresholds.txt" % (file_suffix))
existing_file_path = None
if os.path.exists(file_path_out_select):
existing_file_path = file_path_out_select
elif os.path.exists(file_path_out_discard):
existing_file_path = file_path_out_discard
elif os.path.exists(file_path_out_mic_select):
existing_file_path = file_path_out_mic_select
elif os.path.exists(file_path_out_mic_discard):
existing_file_path = file_path_out_mic_discard
elif os.path.exists(file_path_out_thresholds):
existing_file_path = file_path_out_thresholds
# else: # Do nothing
if existing_file_path != None:
reply = QtGui.QMessageBox.question(self, "Warning", "The file (%s) already exists. Do you want to overwrite the file?" % (existing_file_path), QtGui.QMessageBox.Yes | QtGui.QMessageBox.No, QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.No:
return
# Save selection in CTER Format
file_out_select = open(file_path_out_select,"w")
file_out_discard = open(file_path_out_discard,"w")
save_cter_entry_list = sorted(self.cter_entry_list, key=lambda x: x[self.idx_cter_id])
for cter_entry in save_cter_entry_list:
file_out = file_out_select
if cter_entry[self.idx_cter_select] == 0:
file_out = file_out_discard
# else: assert(cter_entry[self.idx_cter_select] == 1) # do nothing
# # Save with the original format (before around 2016/01/29)
# for idx_cter in xrange(self.n_idx_cter):
# if idx_cter in [self.idx_cter_id, self.idx_cter_pwrot_name, self.idx_cter_box_size, self.idx_cter_error_ctf, self.idx_cter_max_power, self.idx_cter_select]:
# # Do nothing
# continue
# elif idx_cter in [self.idx_cter_mic_name]:
# file_out.write(" %s" % cter_entry[idx_cter])
# else:
# file_out.write(" %12.5g" % cter_entry[idx_cter])
# file_out.write("\n")
# Save with the current format (in use since around 2016/01/29)
for idx_cter in xrange(self.n_idx_cter):
if idx_cter in [self.idx_cter_mic_name, self.idx_cter_pwrot_name]:
file_out.write(" %s" % cter_entry[idx_cter])
else:
file_out.write(" %12.5g" % cter_entry[idx_cter])
file_out.write("\n")
file_out_select.close()
file_out_discard.close()
# Save selection in Micrograph List Format
file_out_mic_select = open(file_path_out_mic_select,"w")
file_out_mic_discard = open(file_path_out_mic_discard,"w")
for cter_entry in save_cter_entry_list:
file_out = file_out_mic_select
if cter_entry[self.idx_cter_select] == 0:
file_out = file_out_mic_discard
# else: assert(cter_entry[self.idx_cter_select] == 1) # do nothing
file_out.write(" %s\n" % cter_entry[self.idx_cter_mic_name])
file_out_mic_select.close()
file_out_mic_discard.close()
# Save the associated applied threshold
self.writeThresholdSet(file_path_out_thresholds, self.idx_thresholdset_applied)
QtGui.QMessageBox.information(self, "Information","The following files are saved in %s:\n\nCTER CTF List - Selected: %s\n\nCTER CTF List - Discarded: %s\n\nMicrograph - Selected: %s\n\nMicrograph - Discarded: %s\n\nApplied Threshold Set: %s" % (os.path.dirname(self.cter_partres_file_path), file_path_out_select, file_path_out_discard, file_path_out_mic_select, file_path_out_mic_discard, file_path_out_thresholds))
def timeOut(self):
if self.busy: return
# Redisplay before spawning thread for more interactive display
if self.needredisp:
try:
self.redisplay()
except:
print "Recieved unexpected exception from redisplay() in timeOut(): "
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(exc_type, exc_value, exc_traceback)
# MRK_NOTE: 2015/12/17 Toshio Moriya
# Another way to print out exception info...
# lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
# print ''.join('!! ' + line for line in lines)
pass
def redisplay(self):
self.needredisp=False
self.busy=True
if self.cter_entry_list != None:
# if not self.wfft.isVisible():
# self.wfft.show()
if not self.whistparam.isVisible() and not self.curhistdisable:
self.whistparam.show()
if not self.wplotparam.isVisible() and not self.curhistdisable:
self.wplotparam.show()
if not self.wplotrotavgcoarse.isVisible():
self.wplotrotavgcoarse.show()
if not self.wplotrotavgfine.isVisible():
self.wplotrotavgfine.show()
if not self.wimage.isVisible() and self.curmicdisplay:
self.wimage.show()
# if self.cter_entry_list != None:
# self.wimage.raise_()
# self.wfft.raise_()
# self.wplotrotavgcoarse.raise_()
# self.whistparam.raise_()
self.updateHist()
self.updatePlotParam()
self.updatePlot()
self.busy=False
# def entryKey(self,event):
# if event.key()>=Qt.Key_0 and event.key()<=Qt.Key_9 :
# q=int(event.key())-Qt.Key_0
# self.squality.setValue(q)
# elif event.key() == Qt.Key_Left:
# self.sdef.setValue(self.sdef.getValue()-0.03)
# elif event.key() == Qt.Key_Right:
# self.sdef.setValue(self.sdef.getValue()+0.03)
# elif event.key()==Qt.Key_I :
# print "MRK_DEBUG: Not used now"
# self.doImport()
# elif event.key()==Qt.Key_U :
# print "MRK_DEBUG: Not used now"
# self.unImport()
# elif event.key()==Qt.Key_F :
# print "MRK_DEBUG: Not used now"
# self.doRefit()
def closeEvent(self,event):
E2saveappwin("sxgui_cter","main",self)
if self.cter_entry_list != None:
E2saveappwin("sxgui_cter","plotparam",self.wplotparam.qt_parent)
E2saveappwin("sxgui_cter","histparam",self.whistparam.qt_parent)
E2saveappwin("sxgui_cter","plotcoarse",self.wplotrotavgcoarse.qt_parent)
E2saveappwin("sxgui_cter","plotfine",self.wplotrotavgfine.qt_parent)
E2saveappwin("sxgui_cter","image",self.wimage.qt_parent)
# E2saveappwin("sxgui_cter","fft",self.wfft.qt_parent)
event.accept()
QtGui.qApp.exit(0)
def updatePlotVisibility(self,val=None):
if self.wplotrotavgcoarse == None: return # it's closed/not visible
if self.wplotrotavgfine == None: return # it's closed/not visible
if self.cter_pwrot_file_path == None: return # no cter entry is selected
for idx_graph in xrange(self.n_idx_graph):
item_widget = self.graph_map_list[idx_graph][self.idx_graph_item_widget]
name = self.graph_map_list[idx_graph][self.idx_graph_item_name]
if self.wplotrotavgcoarse.visibility[name] != item_widget.getValue():
self.wplotrotavgcoarse.visibility[name] = item_widget.getValue()
self.wplotrotavgcoarse.full_refresh()
self.wplotrotavgcoarse.updateGL()
for idx_graph in xrange(self.n_idx_graph):
item_widget = self.graph_map_list[idx_graph][self.idx_graph_item_widget]
name = self.graph_map_list[idx_graph][self.idx_graph_item_name]
if self.wplotrotavgfine.visibility[name] != item_widget.getValue():
self.wplotrotavgfine.visibility[name] = item_widget.getValue()
self.wplotrotavgfine.full_refresh()
self.wplotrotavgfine.updateGL()
def newPlotFixScale(self,curplotfixscale):
if self.curplotfixscale == curplotfixscale: return
# now set the new entry per bin
self.curplotfixscale = curplotfixscale
if self.cter_partres_file_path == None: return # no cter ctf file is selected
if self.cter_entry_list == None: return # no cter ctf file is selected
# NOTE: 2016/01/03 Toshio Moriya
# Force update related plots for scaling delay...
# self.updatePlotParam()
self.needredisp = True
def refreshGraphs(self,item):
self.needredisp = True
def plotparammouseup(self,event):
if self.curthresholdcontrol == self.idx_threshold_control_edit_only:
return
# Swap control if shift button is pressed
is_not_reverse_control = True
modifiers = event.modifiers()
if modifiers & QtCore.Qt.ShiftModifier:
is_not_reverse_control = False
plot_x,plot_y=self.wplotparam.scr2plot(event.x(),event.y())
if self.curthresholdcontrol == self.idx_threshold_control_lower:
if is_not_reverse_control:
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_lower].setValue(plot_y)
self.newThresholdLower()
else:
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_upper].setValue(plot_y)
self.newThresholdUpper()
else:
assert(self.curthresholdcontrol == self.idx_threshold_control_upper)
if is_not_reverse_control:
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_upper].setValue(plot_y)
self.newThresholdUpper()
else:
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_lower].setValue(plot_y)
self.newThresholdLower()
def histparammouseup(self,event):
if self.curthresholdcontrol == self.idx_threshold_control_edit_only:
return
# Swap control if shift button is pressed
is_not_reverse_control = True
modifiers = event.modifiers()
if modifiers & QtCore.Qt.ShiftModifier:
is_not_reverse_control = False
hist_x,hist_y=self.whistparam.scr2plot(event.x(),event.y())
if self.curthresholdcontrol == self.idx_threshold_control_lower:
if is_not_reverse_control:
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_lower].setValue(hist_x)
self.newThresholdLower()
else:
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_upper].setValue(hist_x)
self.newThresholdUpper()
else:
assert(self.curthresholdcontrol == self.idx_threshold_control_upper)
if is_not_reverse_control:
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_upper].setValue(hist_x)
self.newThresholdUpper()
else:
self.hist_map_list[self.curhist][self.idx_hist_item_unapply_widget_lower].setValue(hist_x)
self.newThresholdLower()
if __name__ == "__main__":
main()
|
[
"moriya@m14081a.client.mpi-dortmund.mpg.de"
] |
moriya@m14081a.client.mpi-dortmund.mpg.de
|
469d32d1dde946bd9338717e45a700cd2e19a8da
|
f6033ffeeef233e719f0dfdf246ab351c1f2e0be
|
/final_webspider.py
|
342ad4c59097cd7f43df90a7cebabc5ee7f53a76
|
[] |
no_license
|
aoiheaven/pixiv_webspider
|
d8161216d1308bd3b0cceb07c92fc213cfc3c659
|
56da366f2a72c1ed918ab75b5306d45e06978830
|
refs/heads/master
| 2022-04-01T15:56:44.221228
| 2020-02-05T09:25:58
| 2020-02-05T09:25:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,167
|
py
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#----------------------------------------------------------------------------------------------
# Title: WebSpiderForPixivTargetOnly
# Version: 0.98
# Author: AoiHeaven(MINGJIAN WANG)
# Date: 12/16/17
# Platform & Language: Mac OS & Python 2.7
# Input: None
# Function: Save the batch of pictures from PixivIllustrator automatically
# Reference: https://www.cnblogs.com/fightfordream/p/6421498.html
#----------------------------------------------------------------------------------------------
import requests
from bs4 import BeautifulSoup
import os
import time
import re
import random
import sys
reload(sys)
sys.setdefaultencoding('utf8')
#avoid the Error of decoding
se = requests.session()
class Pixiv():
def __init__(self):
self.base_url = 'https://accounts.pixiv.net/login?lang=zh&source=pc&view_type=page&ref=wwwtop_accounts_index'
self.login_url = 'https://accounts.pixiv.net/api/login?lang=zh'
self.target_url = 'https://www.pixiv.net/member_illust.php?id=2000392&tag=%E6%9D%B1%E6%96%B9&type=all&p='
#the target website, the end of the url, "&type=all&p=" means the rank of the pages! Attention Please!
self.main_url = 'https://www.pixiv.net'
self.headers = {
'Referer': 'https://accounts.pixiv.net/login?lang=zh&source=pc&view_type=page&ref=wwwtop_accounts_index',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) '
'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36'
}
self.pixiv_id = ''
self.password = ''
self.post_key = []
self.return_to = 'http://www.pixiv.net/'
self.load_path = '/Users/wangmingjian/Pictures/python_pixiv_webspider/carbon_image'
self.ip_list = []
def login(self):
post_key_html = se.get(self.base_url, headers=self.headers).text
post_key_soup = BeautifulSoup(post_key_html, 'lxml')
self.post_key = post_key_soup.find('input')['value']
# acquire the postkey from the web feedback
data = {
'pixiv_id': self.pixiv_id,
'password': self.password,
'return_to': self.return_to,
'post_key': self.post_key
}
se.post(self.login_url, data=data, headers=self.headers)
def get_proxy(self):
html = requests.get('http://haoip.cc/tiqu.htm')
ip_list_temp = re.findall(r'r/>(.*?)<b', html.text, re.S)
for ip in ip_list_temp:
i = re.sub('\n', '', ip)
self.ip_list.append(i.strip())
print(i.strip())
''' Gets blocked by anti-crawling measures; rewritten to use proxies instead
def get_tml(self, url):
response = se.get(url, headers=self.headers)
return response
'''
def get_html(self, url, timeout, proxy=None, num_entries=5):
if proxy is None:
try:
return se.get(url, headers=self.headers, timeout=timeout)
except:
if num_entries > 0:
print('Error fetching the page; retrying in 5 seconds,', num_entries, 'attempts remaining')
time.sleep(5)
return self.get_html(url, timeout, num_entries = num_entries - 1)
else:
print('Switching to a proxy')
time.sleep(5)
ip = ''.join(str(random.choice(self.ip_list))).strip()
now_proxy = {'http': ip}
return self.get_html(url, timeout, proxy = now_proxy)
else:
try:
return se.get(url, headers=self.headers, proxies=proxy, timeout=timeout)
except:
if num_entries > 0:
print('Changing proxy; retrying in 5 seconds,', num_entries, 'attempts remaining')
time.sleep(5)
ip = ''.join(str(random.choice(self.ip_list))).strip()
now_proxy = {'http': ip}
return self.get_html(url, timeout, proxy = now_proxy, num_entries = num_entries - 1)
else:
print('Proxy failed; falling back to a direct connection')
return self.get_html(url, timeout)
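# ---------------------------------------------------------------------
# Illustrative sketch (not part of the original spider) of the retry /
# proxy fallback implemented by get_html() above. The calls below are
# hypothetical usage, kept as comments so the script's behaviour is
# unchanged:
#   p = Pixiv()
#   p.get_proxy()                       # fill p.ip_list from haoip.cc
#   resp = p.get_html(p.main_url, 3)    # direct fetch, 3 s timeout
# On failure it sleeps 5 s and retries up to 5 times directly, then
# keeps retrying through random proxies drawn from p.ip_list.
# ---------------------------------------------------------------------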
def get_img(self, html, page_num):
li_soup = BeautifulSoup(html, 'lxml') # html of page page_num is passed in
li_list = li_soup.find_all('li', attrs={'class', 'image-item'}) # locate the li elements
#li_list = li_soup.find_all('li class=\"image-item\"') #demo
print('get_list succeed')
#print(li_soup)
#print(li_list)
for li in li_list:
href = li.find('a')['href'] # extract the first href directly
#print('get_href succeed')
print(href)
jump_to_url = self.main_url + href # jump to the target url
#print('get_jump_to_url succeed')
jump_to_html = self.get_html(jump_to_url, 3).text # fetch the artwork page html
#print('get_jump_to_html succeed')
img_soup = BeautifulSoup(jump_to_html, 'lxml')
img_info = img_soup.find('div', attrs={'class', 'works_display'})\
.find('div', attrs={'class', '_layout-thumbnail ui-modal-trigger'})
# locate the information at the target position
if img_info is None: # some entries have no url; without this continue an error is raised
continue
self.download_img(img_info, jump_to_url, page_num) # download this image
def download_img(self, img_info, href, page_num):
title = img_info.find('img')['alt'] # extract the title
src = img_info.find('img')['src'] # extract the image location
src_headers = self.headers
src_headers['Referer'] = href # add a Referer header, otherwise the server returns 403; obtained the same way as for the login above
try:
html = requests.get(src, headers=src_headers)
img = html.content
except: # fetching occasionally fails; just skip this image
print('Failed to fetch this image')
return False
title = title.replace('?', '_').replace('/', '_').replace('\\', '_').replace('*', '_').replace('|', '_')\
.replace('>', '_').replace('<', '_').replace(':', '_').replace('"', '_').strip()
# strip characters not allowed in file names; strip() also removes the trailing newline
if os.path.exists(os.path.join(self.load_path, str(page_num), title + '.jpg')):
for i in range(1, 100):
if not os.path.exists(os.path.join(self.load_path, str(page_num), title + str(i) + '.jpg')):
title = title + str(i)
break
# on a name clash, append a number
print('Saving image named: ' + title)
with open(title + '.jpg', 'ab') as f: # images must be written in binary mode
f.write(img)
print('Finished saving this image')
def mkdir(self, path):
path = path.strip()
is_exist = os.path.exists(os.path.join(self.load_path, path))
if not is_exist:
print('Creating a folder named ' + path)
os.makedirs(os.path.join(self.load_path, path))
os.chdir(os.path.join(self.load_path, path))
return True
else:
print('A folder named ' + path + ' already exists')
os.chdir(os.path.join(self.load_path, path))
return False
def work(self):
self.login()
for page_num in range(1, 10): # ===> set to 9 pages; use any page count when crawling weekly rankings etc.
path = str(page_num) # one folder per page
self.mkdir(path) # create the folder
print(self.target_url + str(page_num))
now_html = self.get_html(self.target_url + str(page_num), 3) # fetch the page html
self.get_img(now_html.text, page_num) # fetch the images
print('Page {page} saved'.format(page=page_num))
time.sleep(1) # avoid being blocked for crawling too fast (originally sleep(2))
pixiv = Pixiv()
pixiv.work()
|
[
"devilqmcq@gmail.com"
] |
devilqmcq@gmail.com
|
08181b9dd59d9758e825019d3f9602d76e20b20e
|
2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02
|
/PyTorch/contrib/cv/semantic_segmentation/HRnet-OCR/loss/rmi_utils.py
|
e66da6bac3e74361bc03788f446191958bbbb517
|
[
"BSD-3-Clause",
"GPL-1.0-or-later",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
Ascend/ModelZoo-PyTorch
|
4c89414b9e2582cef9926d4670108a090c839d2d
|
92acc188d3a0f634de58463b6676e70df83ef808
|
refs/heads/master
| 2023-07-19T12:40:00.512853
| 2023-07-17T02:48:18
| 2023-07-17T02:48:18
| 483,502,469
| 23
| 6
|
Apache-2.0
| 2022-10-15T09:29:12
| 2022-04-20T04:11:18
|
Python
|
UTF-8
|
Python
| false
| false
| 6,900
|
py
|
# BSD 3-Clause License
#
# Copyright (c) 2017 xxxx
# All rights reserved.
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ============================================================================
# This code is adapted from: https://github.com/ZJULearning/RMI
# python 2.X, 3.X compatibility
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import torch
import torch.nn.functional as F
__all__ = ['map_get_pairs', 'log_det_by_cholesky']
def map_get_pairs(labels_4D, probs_4D, radius=3, is_combine=True):
"""get map pairs
Args:
labels_4D : labels, shape [N, C, H, W]
probs_4D : probabilities, shape [N, C, H, W]
radius : the square radius
Return:
tensor with shape [N, C, 2 * radius * radius, H - (radius - 1), W - (radius - 1)] when is_combine is True; otherwise two tensors of shape [N, C, radius * radius, H - (radius - 1), W - (radius - 1)]
"""
# pad to ensure the following slice operation is valid
#pad_beg = int(radius // 2)
#pad_end = radius - pad_beg
# the original height and width
label_shape = labels_4D.size()
h, w = label_shape[2], label_shape[3]
new_h, new_w = h - (radius - 1), w - (radius - 1)
# https://pytorch.org/docs/stable/nn.html?highlight=f%20pad#torch.nn.functional.pad
#padding = (pad_beg, pad_end, pad_beg, pad_end)
#labels_4D, probs_4D = F.pad(labels_4D, padding), F.pad(probs_4D, padding)
# get the neighbors
la_ns = []
pr_ns = []
#for x in range(0, radius, 1):
for y in range(0, radius, 1):
for x in range(0, radius, 1):
la_now = labels_4D[:, :, y:y + new_h, x:x + new_w]
pr_now = probs_4D[:, :, y:y + new_h, x:x + new_w]
la_ns.append(la_now)
pr_ns.append(pr_now)
if is_combine:
# for calculating RMI
pair_ns = la_ns + pr_ns
p_vectors = torch.stack(pair_ns, dim=2)
return p_vectors
else:
# for other purpose
la_vectors = torch.stack(la_ns, dim=2)
pr_vectors = torch.stack(pr_ns, dim=2)
return la_vectors, pr_vectors
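def map_get_pairs_shape_demo():
    """Editorial sketch (not in the original file): a shape check for
    map_get_pairs() above; all tensor sizes here are illustrative."""
    labels = torch.rand(2, 21, 32, 32)
    probs = torch.rand(2, 21, 32, 32)
    # is_combine=True stacks the 3 x 3 label and prob neighbourhoods, so
    # dim 2 holds 2 * 3 * 3 = 18 slices while H and W shrink by radius - 1
    pairs = map_get_pairs(labels, probs, radius=3, is_combine=True)
    assert list(pairs.shape) == [2, 21, 18, 30, 30]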
def map_get_pairs_region(labels_4D, probs_4D, radius=3, is_combine=0, num_classeses=21):
"""get map pairs
Args:
labels_4D : labels, shape [N, C, H, W].
probs_4D : probabilities, shape [N, C, H, W].
radius : The side length of the square region.
Return:
A tensor with shape [N, C, radius * radius, H // radius, W // radius]
"""
kernel = torch.zeros([num_classeses, 1, radius, radius]).type_as(probs_4D)
padding = radius // 2
# get the neighbours
la_ns = []
pr_ns = []
for y in range(0, radius, 1):
for x in range(0, radius, 1):
kernel_now = kernel.clone()
kernel_now[:, :, y, x] = 1.0
la_now = F.conv2d(labels_4D, kernel_now, stride=radius, padding=padding, groups=num_classeses)
pr_now = F.conv2d(probs_4D, kernel_now, stride=radius, padding=padding, groups=num_classeses)
la_ns.append(la_now)
pr_ns.append(pr_now)
if is_combine:
# for calculating RMI
pair_ns = la_ns + pr_ns
p_vectors = torch.stack(pair_ns, dim=2)
return p_vectors
else:
# for other purpose
la_vectors = torch.stack(la_ns, dim=2)
pr_vectors = torch.stack(pr_ns, dim=2)
return la_vectors, pr_vectors
def log_det_by_cholesky(matrix):
"""
Args:
matrix: matrix must be a positive definite matrix.
shape [N, C, D, D].
Ref:
https://github.com/tensorflow/tensorflow/blob/r1.13/tensorflow/python/ops/linalg/linalg_impl.py
"""
# This uses the property that the log det(A) = 2 * sum(log(real(diag(C))))
# where C is the cholesky decomposition of A.
chol = torch.cholesky(matrix)
#return 2.0 * torch.sum(torch.log(torch.diagonal(chol, dim1=-2, dim2=-1) + 1e-6), dim=-1)
return 2.0 * torch.sum(torch.log(torch.diagonal(chol, dim1=-2, dim2=-1) + 1e-8), dim=-1)
def batch_cholesky_inverse(matrix):
"""
Args: matrix, 4-D tensor, [N, C, M, M].
matrix must be a symmetric positive definite matrix.
"""
chol_low = torch.cholesky(matrix, upper=False)
chol_low_inv = batch_low_tri_inv(chol_low)
return torch.matmul(chol_low_inv.transpose(-2, -1), chol_low_inv)
def batch_low_tri_inv(L):
"""
Batched inverse of lower triangular matrices
Args:
L : a lower triangular matrix
Ref:
https://www.pugetsystems.com/labs/hpc/PyTorch-for-Scientific-Computing
"""
n = L.shape[-1]
invL = torch.zeros_like(L)
for j in range(0, n):
invL[..., j, j] = 1.0 / L[..., j, j]
for i in range(j + 1, n):
S = 0.0
for k in range(0, i + 1):
S = S - L[..., i, k] * invL[..., k, j].clone()
invL[..., i, j] = S / L[..., i, i]
return invL
def log_det_by_cholesky_test():
"""
test for function log_det_by_cholesky()
"""
a = torch.randn(1, 4, 4)
a = torch.matmul(a, a.transpose(2, 1))
print(a)
res_1 = torch.logdet(torch.squeeze(a))
res_2 = log_det_by_cholesky(a)
print(res_1, res_2)
def batch_inv_test():
"""
test for function batch_cholesky_inverse()
"""
a = torch.randn(1, 1, 4, 4)
a = torch.matmul(a, a.transpose(-2, -1))
print(a)
res_1 = torch.inverse(a)
res_2 = batch_cholesky_inverse(a)
print(res_1, '\n', res_2)
def mean_var_test():
x = torch.randn(3, 4)
y = torch.randn(3, 4)
x_mean = x.mean(dim=1, keepdim=True)
x_sum = x.sum(dim=1, keepdim=True) / 2.0
y_mean = y.mean(dim=1, keepdim=True)
y_sum = y.sum(dim=1, keepdim=True) / 2.0
x_var_1 = torch.matmul(x - x_mean, (x - x_mean).t())
x_var_2 = torch.matmul(x, x.t()) - torch.matmul(x_sum, x_sum.t())
xy_cov = torch.matmul(x - x_mean, (y - y_mean).t())
xy_cov_1 = torch.matmul(x, y.t()) - x_sum.matmul(y_sum.t())
print(x_var_1)
print(x_var_2)
print(xy_cov, '\n', xy_cov_1)
if __name__ == '__main__':
batch_inv_test()
|
[
"wangjiangben@huawei.com"
] |
wangjiangben@huawei.com
|
6c5014f6f02fdf02d6ce5f1d6903a83d55880334
|
1a8e56f3de28e36ba6af1d2f5a17169af5e2576c
|
/learners/a2c.py
|
4ed7e329ed25bed015b09e3fa0b221cac403d158
|
[] |
no_license
|
talolard/rlstocks
|
71b57c53a93ee0bf05716f19064ced7653ab4b24
|
8ed7893aeb4ba3dc4491fb89fcbab9630f2f99f7
|
refs/heads/master
| 2020-04-16T13:58:47.298611
| 2019-01-14T11:42:08
| 2019-01-14T11:42:08
| 165,650,018
| 21
| 14
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,221
|
py
|
import tensorflow as tf
import tensorflow_probability as tfp
import numpy as np
class PolicyEstimator():
"""
Policy Function approximator. Actions are weightings over a portfolio of n stocks + cash
"""
def __init__(self, learning_rate=0.01, scope="policy_estimator",num_stocks=2,period=10):
with tf.variable_scope(scope):
self.price_history_in = tf.placeholder(tf.float32, [period, num_stocks],
"prices") # HIST prices, current, and position
self.price_history = tf.reshape(self.price_history_in, [period * num_stocks])
self.portfolio= tf.placeholder(tf.float32, [num_stocks+1,], "prices") # HIST prices, current, and position
self.target = tf.placeholder(dtype=tf.float32, name="target") # Position size
state = tf.concat([self.price_history,self.portfolio],axis=0)
state = tf.expand_dims(state,0)
# This is just linear classifier
l1 = tf.contrib.layers.fully_connected(
inputs=state,
num_outputs=32,
activation_fn=tf.nn.tanh,
weights_initializer=tf.initializers.glorot_uniform)
l1 = tf.contrib.layers.layer_norm(l1)
l2 = tf.contrib.layers.fully_connected(
inputs=l1,
num_outputs=32,
activation_fn=tf.nn.tanh,
weights_initializer=tf.initializers.glorot_uniform)
l2 = tf.contrib.layers.layer_norm(l1+l2)
self.alphas= tf.contrib.layers.fully_connected(
inputs=l2,
num_outputs=num_stocks+1,
activation_fn=tf.nn.relu,
weights_initializer=tf.initializers.glorot_uniform)
self.alphas += 1 # shift the ReLU outputs so every Dirichlet concentration parameter is >= 1
self.dirichlet = tfp.distributions.Dirichlet(self.alphas)
self.action = self.dirichlet._sample_n(1)
self.action = tf.squeeze(self.action)
# Loss and train op
self.loss = -self.dirichlet.log_prob(self.action) * self.target
# Add cross entropy cost to encourage exploration
# self.loss -= 1e-1 * self.dirichlet.entropy()
self.optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
self.train_op = self.optimizer.minimize(
self.loss, global_step=tf.contrib.framework.get_global_step())
def predict(self, state_dict, sess=None):
sess = sess or tf.get_default_session()
return sess.run(self.action, {
self.price_history_in: state_dict['prices'],
self.portfolio:state_dict['portfolio']
})
def update(self, state_dict, target, action, sess=None):
sess = sess or tf.get_default_session()
feed_dict = {self.price_history_in: state_dict['prices'],
self.portfolio:state_dict['portfolio']
, self.target: target, self.action: action}
_, loss = sess.run([self.train_op, self.loss], feed_dict)
return loss
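# Sketch of the Dirichlet action head above (all numbers hypothetical):
# with alphas = [1.5, 1.2, 2.3] each sample is a point on the simplex,
# i.e. non-negative weights summing to 1 -- a complete allocation over
# num_stocks assets plus the cash slot. Kept as comments so nothing
# executes at import time:
#   d = tfp.distributions.Dirichlet([1.5, 1.2, 2.3])
#   w = d.sample(1)   # shape (1, 3); each row sums to 1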
class ValueEstimator():
"""
Value Function approximator, e.g. the critic
"""
def __init__(self, learning_rate=0.01, scope="value_estimator", num_stocks=2, period=10):
with tf.variable_scope(scope):
self.price_history_in = tf.placeholder(tf.float32, [ period,num_stocks],
"prices") # HIST prices, current, and position
self.price_history = tf.reshape(self.price_history_in,[period*num_stocks])
self.portfolio = tf.placeholder(tf.float32, [num_stocks + 1, ],
"prices") # HIST prices, current, and position
self.target = tf.placeholder(dtype=tf.float32, name="target") # Position size
state = tf.concat([self.price_history, self.portfolio],axis=0)
state = tf.expand_dims(state,0)
state = tf.contrib.layers.layer_norm(state)
# This is just linear classifier
l1 = tf.contrib.layers.fully_connected(
inputs=tf.expand_dims(state, 0),
num_outputs=16,
activation_fn=tf.nn.relu,
weights_initializer=tf.initializers.glorot_uniform)
l1 = tf.contrib.layers.layer_norm(l1)
self.estimate = tf.contrib.layers.fully_connected(
inputs=l1,
num_outputs=1,
activation_fn=tf.nn.tanh,
weights_initializer=tf.initializers.glorot_uniform)
self.estimate = tf.squeeze(self.estimate)
self.loss = tf.squared_difference(self.estimate, self.target)
self.optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
self.train_op = self.optimizer.minimize(
self.loss, global_step=tf.contrib.framework.get_global_step())
def predict(self, state_dict, sess=None):
sess = sess or tf.get_default_session()
return sess.run(self.estimate, {
self.price_history_in: state_dict['prices'],
self.portfolio: state_dict['portfolio']
})
def update(self, state_dict, target, sess=None):
sess = sess or tf.get_default_session()
feed_dict = {self.price_history_in: state_dict['prices'],
self.portfolio: state_dict['portfolio']
, self.target: target, }
_, loss = sess.run([self.train_op, self.loss], feed_dict)
return loss
import collections
import itertools
def reinforce(estimator_policy, estimator_value,price_func,EnvFactory, num_episodes, discount_factor=1.0, length=1000,num_stocks=3,lookback=10):
"""
REINFORCE (Monte Carlo Policy Gradient) Algorithm. Optimizes the policy
function approximator using policy gradient.
Args:
env: OpenAI environment.
estimator_policy: Policy Function to be optimized
estimator_value: Value function approximator, used as a baseline
num_episodes: Number of episodes to run for
discount_factor: Time-discount factor
Yields:
The environment after each completed episode.
"""
# Keeps track of useful statistics
Transition = collections.namedtuple("Transition", ["state", "action", "reward", "next_state", "done"])
envs = []
for i_episode in range(num_episodes):
# Reset the environment and pick the first action
env = EnvFactory(price_func,num_stocks,length,starting_value=1000,lookback=lookback)
env.time = lookback +1 # Make sure enough prices in the buffer
episode = []
state, reward, done = env.step(np.array([0]*num_stocks+[1])) # Set portfolio to all cash
# One step in the environment
eps = 0#(1-1/(i_episode+1))**2
for t in itertools.count():
# Take a step
nextPortfolio = estimator_policy.predict(state)
if np.random.binomial(1,eps):
nextPortfolio = np.random.dirichlet([0.1]*num_stocks+[0.2])
next_state, reward, done = env.step(nextPortfolio)
# Keep track of the transition
episode.append(Transition(
state=state, action=nextPortfolio, reward=reward, next_state=next_state, done=done))
# Calculate TD Target
value_next = estimator_value.predict(next_state)
td_target = reward + discount_factor * value_next
td_error = td_target - estimator_value.predict(state)
# Update the value estimator
val_loss = estimator_value.update(state, td_target)
# Update the policy estimator
# using the td error as our advantage estimate
pol_loss = estimator_policy.update(state, td_error, nextPortfolio)
# Print out which step we're on, useful for debugging.
print("\rStep {} @ Episode {}/{}, {} - {} - ".format(
t, i_episode + 1, num_episodes,val_loss,pol_loss ), end="")
if t > length or done:
break
state = next_state
print("Val {}".format(env.account_value))
yield env
return envs
|
[
"talolard@gmail.com"
] |
talolard@gmail.com
|
1627febc64096a819a2aec1aae029007383fd203
|
d55aeffe163ac7b3c4f4e43979e15c03ef34f61e
|
/stdlib/python2.5/compileall.py
|
b21d95f82c94ac092444ce6e930a753d1b81aebd
|
[
"MIT",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-python-cwi",
"Python-2.0",
"GPL-1.0-or-later"
] |
permissive
|
kmod/icbd
|
ed7487d41ad2089b3cb1e1f2212f419d868cb8bd
|
9636564eb3993afa07c6220d589bbd1991923d74
|
refs/heads/master
| 2022-11-24T01:33:14.831751
| 2016-11-13T00:40:55
| 2016-11-13T00:40:55
| 18,765,115
| 7
| 5
|
MIT
| 2022-11-21T17:11:18
| 2014-04-14T15:09:36
|
Python
|
UTF-8
|
Python
| false
| false
| 5,283
|
py
|
"""Module/script to "compile" all .py files to .pyc (or .pyo) file.
When called as a script with arguments, this compiles the directories
given as arguments recursively; the -l option prevents it from
recursing into directories.
Without arguments, it compiles all modules on sys.path, without
recursing into subdirectories. (Even though it should do so for
packages -- for now, you'll have to deal with packages separately.)
See module py_compile for details of the actual byte-compilation.
"""
import os
import sys
import py_compile
__all__ = ["compile_dir","compile_path"]
def compile_dir(dir, maxlevels=10, ddir=None,
force=0, rx=None, quiet=0):
"""Byte-compile all modules in the given directory tree.
Arguments (only dir is required):
dir: the directory to byte-compile
maxlevels: maximum recursion level (default 10)
ddir: if given, purported directory name (this is the
directory name that will show up in error messages)
force: if 1, force compilation, even if timestamps are up-to-date
quiet: if 1, be quiet during compilation
"""
if not quiet:
print 'Listing', dir, '...'
try:
names = os.listdir(dir)
except os.error:
print "Can't list", dir
names = []
names.sort()
success = 1
for name in names:
fullname = os.path.join(dir, name)
if ddir is not None:
dfile = os.path.join(ddir, name)
else:
dfile = None
if rx is not None:
mo = rx.search(fullname)
if mo:
continue
if os.path.isfile(fullname):
head, tail = name[:-3], name[-3:]
if tail == '.py':
cfile = fullname + (__debug__ and 'c' or 'o')
ftime = os.stat(fullname).st_mtime
try: ctime = os.stat(cfile).st_mtime
except os.error: ctime = 0
if (ctime > ftime) and not force: continue
if not quiet:
print 'Compiling', fullname, '...'
try:
ok = py_compile.compile(fullname, None, dfile, True)
except KeyboardInterrupt:
raise KeyboardInterrupt
except py_compile.PyCompileError,err:
if quiet:
print 'Compiling', fullname, '...'
print err.msg
success = 0
except IOError, e:
print "Sorry", e
success = 0
else:
if ok == 0:
success = 0
elif maxlevels > 0 and \
name != os.curdir and name != os.pardir and \
os.path.isdir(fullname) and \
not os.path.islink(fullname):
if not compile_dir(fullname, maxlevels - 1, dfile, force, rx, quiet):
success = 0
return success
def compile_path(skip_curdir=1, maxlevels=0, force=0, quiet=0):
"""Byte-compile all module on sys.path.
Arguments (all optional):
skip_curdir: if true, skip current directory (default true)
maxlevels: max recursion level (default 0)
force: as for compile_dir() (default 0)
quiet: as for compile_dir() (default 0)
"""
success = 1
for dir in sys.path:
if (not dir or dir == os.curdir) and skip_curdir:
print 'Skipping current directory'
else:
success = success and compile_dir(dir, maxlevels, None,
force, quiet=quiet)
return success
def main():
"""Script main program."""
import getopt
try:
opts, args = getopt.getopt(sys.argv[1:], 'lfqd:x:')
except getopt.error, msg:
print msg
print "usage: python compileall.py [-l] [-f] [-q] [-d destdir] " \
"[-x regexp] [directory ...]"
print "-l: don't recurse down"
print "-f: force rebuild even if timestamps are up-to-date"
print "-q: quiet operation"
print "-d destdir: purported directory name for error messages"
print " if no directory arguments, -l sys.path is assumed"
print "-x regexp: skip files matching the regular expression regexp"
print " the regexp is search for in the full path of the file"
sys.exit(2)
maxlevels = 10
ddir = None
force = 0
quiet = 0
rx = None
for o, a in opts:
if o == '-l': maxlevels = 0
if o == '-d': ddir = a
if o == '-f': force = 1
if o == '-q': quiet = 1
if o == '-x':
import re
rx = re.compile(a)
if ddir:
if len(args) != 1:
print "-d destdir require exactly one directory argument"
sys.exit(2)
success = 1
try:
if args:
for dir in args:
if not compile_dir(dir, maxlevels, ddir,
force, rx, quiet):
success = 0
else:
success = compile_path()
except KeyboardInterrupt:
print "\n[interrupt]"
success = 0
return success
if __name__ == '__main__':
exit_status = int(not main())
sys.exit(exit_status)
|
[
"kevmod@gmail.com"
] |
kevmod@gmail.com
|
1545e96bbe9cf81b0d127a0adf253cf60d3d651f
|
4771e0beace4a7ba40b3135f749a9cef176291d3
|
/mozio_app/migrations/0004_alter_provider_email.py
|
172f782ecd9760ff53d889de70d4aebb9bf4956d
|
[] |
no_license
|
Olamidun/mozio
|
8b0b0151ee873684ab7fc7e50a887198945225ce
|
fd8dfb19e2dae1db116d0e183e5e6b24e18263ab
|
refs/heads/master
| 2023-06-21T09:10:31.629210
| 2021-07-22T17:22:12
| 2021-07-22T17:22:12
| 388,116,028
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 466
|
py
|
# Generated by Django 3.2.5 on 2021-07-21 22:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mozio_app', '0003_provider_email'),
]
operations = [
migrations.AlterField(
model_name='provider',
name='email',
field=models.EmailField(default='babayega@gmail.com', max_length=254, unique=True),
preserve_default=False,
),
]
|
[
"kolapoolamidun@gmail.com"
] |
kolapoolamidun@gmail.com
|
7b9b67436fd2a69964c80526b776d635c6a16e01
|
b133ca39198761fd0f6e6e13da0bdd563a223f2e
|
/orderspot/migrations/0002_auto_20200601_1612.py
|
196d7df3eae47e2cf8d15987d467d18fabd2f46f
|
[] |
no_license
|
Niko24x/OrderSpot-web
|
d75534f0d0f499238ec3d91f374ea5d6bb1e4cee
|
f49241760391a389b0bc222ee8aa3e3b91a6adb8
|
refs/heads/master
| 2023-05-06T00:34:53.531544
| 2021-05-13T04:46:42
| 2021-05-13T04:46:42
| 268,131,891
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,037
|
py
|
# Generated by Django 3.0.3 on 2020-06-01 22:12
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('orderspot', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='encabezadopedido',
name='usuario',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='detallepedido',
name='pedido',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='orderspot.EncabezadoPedido'),
),
migrations.AddField(
model_name='detallepedido',
name='producto',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='orderspot.Producto'),
),
]
|
[
"jcojom@udv.edu.gt"
] |
jcojom@udv.edu.gt
|
5aaa95303a3f1a04d3034effc3cb95d6d7066b04
|
c16edb928fe1cef190f48a8a82a7bbf14b96fac8
|
/venv/Scripts/pip3-script.py
|
5a264ab2d63c6a7a4d209b4e3c3b5399c534dcd6
|
[] |
no_license
|
jjkim01/flask-RF-thread-TempAlerter
|
fb284ce858af249c2f00c57de339453ae9473c6b
|
5aff34ff4b331e6c0980cf7600392c035a80be69
|
refs/heads/master
| 2022-07-15T03:44:40.772126
| 2020-05-09T06:16:41
| 2020-05-09T06:16:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 415
|
py
|
#!C:\Users\LGUser\PycharmProjects\embadded\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3')()
)
|
[
"girls9759@naver.com"
] |
girls9759@naver.com
|
a9e2f8acd0bcfeb442ac861559e8fb1159104383
|
8cbb4ad2eea115cf227274700ddb768a02a91117
|
/armorDetect.py
|
d76ee2014f3d633be2eda0eaf56190fb8a5e0d2a
|
[] |
no_license
|
zzhtg/robovirsual
|
7670d8d7de463c1b00f9062a15ab224a33e365de
|
b19d0f1afac4657b54319c9c841ea16e65e1b0bd
|
refs/heads/master
| 2020-04-08T04:08:58.271725
| 2019-05-19T05:00:52
| 2019-05-19T05:00:52
| 159,004,888
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,237
|
py
|
# coding=utf-8
import cv2
import numpy as np
import math
import SvmTrain as st
from scipy.spatial.distance import pdist
from scipy.spatial.distance import squareform
length_threshold = 1.0
width_threshold = 1.0
aspect_threshold = [0.9, 7.0]
ortho_threshold = [0.2, 0.2, 0.9]
target_num = None
ortho_mode = False
bet_mode = False
error_text = False
class Armor():
def __init__(self, pos, dist, length, width, aspect, ortho, num, lightL, lightR):
self.pos = pos
[a, b, x, y] = self.pos
self.mid = [math.ceil((a + x) / 2), math.ceil((b + y) / 2)] # armor-plate center coordinates
self.dist = dist # distance
self.length = length # light-bar length-difference ratio
self.width = width # light-bar width-difference ratio
self.aspect = aspect # armor aspect ratio
self.ortho = ortho # orthogonality measures
self.digit = num # recognized digit
self.lightL = lightL # left light-bar object
self.lightR = lightR # right light-bar object
def show(self, frame, kalman, KalmanPreview = False):
kalman.predict(frame, self.pos, KalmanPreview)
def length_dif_det(l_left, l_right):
"""
Input: l_left (left light-bar length), l_right (right light-bar length)
Purpose: check whether the current light-bar pair satisfies the length-difference condition
Output: True or False, plus the length-difference ratio """
length_dif = abs(l_left-l_right) / max(l_left, l_right)
return length_dif <= length_threshold, length_dif
def width_dif_det(w_left, w_right):
"""
Input: w_left (left light-bar width), w_right (right light-bar width)
Purpose: check whether the current light-bar pair satisfies the width-difference condition
Output: True or False, plus the width-difference ratio
"""
w_left, w_right = [i+1 for i in [w_left, w_right]]
width_dif = abs(w_left-w_right) / max(w_left, w_right)
return width_dif <= width_threshold, width_dif
def armor_aspect_det(x_l, y_l, x_r, y_r, l_l, l_r, w_l, w_r):
"""
Input: minimum-bounding-rectangle fit info (x, y, w, h) for the left and right light bars
Purpose: check whether the current light-bar pair satisfies the aspect-ratio condition
Output: True or False, plus the armor aspect ratio
"""
armor_aspect = math.sqrt((y_r-y_l)**2 + (x_r-x_l)**2) / max(l_l, l_r, w_l, w_r)
return aspect_threshold[1] >= armor_aspect >= aspect_threshold[0], armor_aspect
angle_bias = 0
def ortho_pixel(frame, l_pixel, r_pixel):
"""
Input: the four corner coordinates of the left and right light bars
Purpose: find the two coordinates defining each bar's direction vector and the center vector
Output: two coordinates each for the center line, the left bar, and the right bar
"""
lxmid = np.average(l_pixel[0:4, 0])
lymid = np.average(l_pixel[0:4, 1])
rxmid = np.average(r_pixel[0:4, 0])
rymid = np.average(r_pixel[0:4, 1])
l_l = squareform(pdist(l_pixel, metric="euclidean")) # 将distA数组变成一个矩阵
rr = squareform(pdist(r_pixel, metric="euclidean"))
ano_l = np.argsort(l_l) # 对于左灯条的第0个顶点而言,找出能够组成短边的另一个点的序号
ano_r = np.argsort(rr) # 对于左灯条的第0个顶点而言,找出能够组成短边的另一个点的序号
directlxmid, directlymid = (l_pixel[0, :] + l_pixel[ano_l[0, 1], :]) / 2
directrxmid, directrymid = (r_pixel[0, :] + r_pixel[ano_r[0, 1], :]) / 2
vec_mid = [[lxmid, lymid], [rxmid, rymid]] # vector connecting the two bar centers
vec_light_l = [[lxmid, lymid], [directlxmid, directlymid]] # direction vector of light bar 1
vec_light_r = [[rxmid, rymid], [directrxmid, directrymid]] # direction vector of light bar 2
if ortho_mode: # debug: draw the vectors and print
cv2.line(frame, tuple(vec_mid[0]), tuple(vec_mid[1]), (255, 0, 0), 5)
cv2.line(frame, tuple(vec_light_l[0]), tuple(vec_light_l[1]), (255, 0, 0), 5)
cv2.line(frame, tuple(vec_light_r[0]), tuple(vec_light_r[1]), (255, 0, 0), 5)
return vec_mid, vec_light_l, vec_light_r
def ortho_angle(vec_mid, vec_light_l, vec_light_r):
"""
Input: the two coordinates of each light bar and of the center line
Purpose: measure the orthogonality between the center-connecting vector and the two bar direction vectors
Output: True or False, plus the angles between each bar's direction vector and the center vector
"""
global angle_bias
vec_mid = [vec_mid[0][i] - vec_mid[1][i] for i in range(2)]
vec_light_l = [vec_light_l[0][i] - vec_light_l[1][i] for i in range(2)]
vec_light_r = [vec_light_r[0][i] - vec_light_r[1][i] for i in range(2)]
abs_c = math.sqrt(vec_mid[0]**2 + vec_mid[1]**2)
abs_l = math.sqrt(vec_light_l[0]**2 + vec_light_l[1]**2)
abs_r = math.sqrt(vec_light_r[0]**2 + vec_light_r[1]**2)
# distance estimation, using k / ((image length / real length + image length / real width) / 2)
long_rate = abs_c / 13
short_rate = (abs_l + abs_r) / 4.5
dist = 3500.0 / (long_rate + short_rate)
# print("long= ", long_rate, "short = ", short_rate, "dis = ", )
inl = (vec_mid[0] * vec_light_l[0] + vec_mid[1] * vec_light_l[1]) # inner product
inr = (vec_mid[0] * vec_light_r[0] + vec_mid[1] * vec_light_r[1])
inp = (vec_light_l[0]*vec_light_r[0] + vec_light_l[1] * vec_light_r[1])
angle_l = inl / (abs_c * abs_l) # cosine of the angle between the left vector and the center vector
angle_r = inr / (abs_c * abs_r) # cosine of the angle between the right vector and the center vector
angle_p = inp / (abs_l * abs_r) # cosine of the angle between the left and right vectors
angle_bias = (math.atan2(vec_mid[0], vec_mid[1]) / math.pi * 180.0)
return_flag = (abs(angle_l) < ortho_threshold[0] and
abs(angle_r) < ortho_threshold[1] and
abs(angle_p) > ortho_threshold[2])
# range 60~120 degrees; both light bars must satisfy it
return return_flag, angle_l, angle_r, angle_p, (dist, long_rate, short_rate)
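# Worked example of the distance formula above (numbers hypothetical):
# with abs_c = 130 px, long_rate = 130 / 13 = 10.0; with
# abs_l + abs_r = 45 px, short_rate = 45 / 4.5 = 10.0, giving
# dist = 3500.0 / (10.0 + 10.0) = 175 in whatever unit the 3500
# calibration constant encodes.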
def between_light_detect(frame, armor, lightcenter, left, right, lens):
for index in range(lens):
if index == left or index == right:
continue
x, y, l, s = lightcenter[index]
midx = math.ceil(x + l / 2.0)
midy = math.ceil(y + s / 2.0)
x1, y1, x2, y2 = armor.pos
lx = math.ceil(x1 + l / 2.0)
rx = math.ceil(x2 - l / 2.0)
uy = math.ceil(y1 + s / 2.0)
dy = math.ceil(y2 - s / 2.0)
if(rx >= midx >= lx and uy >= midy >= dy):
if(bet_mode): # if an extra light bar sits inside the armor region, reject this pairing
cv2.rectangle(frame, (lx, uy), (rx, dy), (255, 0, 0), 2)
return (midx, midy), (lx, uy, rx, dy)
return True
def armor_detect(svm, frame, lightgroup, num_preview = False, train_mode=False, file="F:\\traindata\\", ):
"""
Input: group (minimum-bounding-rectangle fit info of candidate light bars)
Purpose: compare the rectangles pairwise and find light-bar pairs that could form an armor plate
Output: area (list of candidate armor rectangles: size, top-left corner, and the averaged light-bar dimensions)
"""
image = frame.copy()
armorgroup = []
lens = len(lightgroup)
lightcenter = [light.rect for light in lightgroup]
num = 0
for left in range(lens):
for right in range(left + 1, lens):
if(lightgroup[left].rect[0] > lightgroup[right].rect[0]):
left, right = right, left
[x_l, y_l, long_l, short_l] = lightgroup[left].rect
[x_r, y_r, long_r, short_r] = lightgroup[right].rect
l_, length_dif = length_dif_det(long_l, long_r) # length-difference check: difference of the two bar lengths / the longer length < 36%
if not l_:
if(error_text):
print("length_dif=", length_dif)
continue
w_, width_dif = width_dif_det(short_l, short_r) # width-difference check: difference of the two bar widths / the larger width < 68%
if not w_:
if(error_text):
print("width_dif=", width_dif)
continue
a_, armor_aspect = armor_aspect_det(x_l, y_l, x_r, y_r, long_l, long_r, short_l, short_r) # aspect-ratio check: 2.7~4.5
if not a_:
if(error_text):
print("armor_aspect=", armor_aspect)
continue
l_pixel = cv2.boxPoints(lightgroup[left].raw)
r_pixel = cv2.boxPoints(lightgroup[right].raw)
# the first point has the largest y, the third the smallest y, the second the smallest x, the fourth the largest x
vec_mid, vec_light_l, vec_light_r = ortho_pixel(frame, l_pixel, r_pixel)
o_, ortho_l_value, ortho_r_value, angle_p, dist_group = ortho_angle(vec_mid, vec_light_l, vec_light_r)
if not o_:
if(error_text):
print("ortho_l_value=", ortho_l_value,"ortho_r_value=", ortho_r_value, "angle_p=", angle_p)
continue
x = sorted(np.append(l_pixel[0:4, 0], r_pixel[0:4, 0]))
y = sorted(np.append(l_pixel[0:4, 1], r_pixel[0:4, 1]))
pos = [i for i in [x[0], y[0], x[7], y[7]]]
# digit detection
h = (long_l+long_r)/2
x1, y1, x2, y2 = int((y[0] + y[7]) / 2 - h), int((y[0] + y[7]) / 2 + h), int(
(x[0] + x[7]) / 2 - h * 0.75), int((x[0] + x[7]) / 2 + h * 0.75)
min_digit = 0
max_right_digit = image.shape[0]
max_left_digit = image.shape[1]
x1 = min_digit if x1 < min_digit else x1 # rewritten with ternary operators
y1 = max_right_digit if y1 > max_right_digit else y1
x2 = min_digit if x2 < min_digit else x2
y2 = max_left_digit if y2 > max_left_digit else y2
digit = image[x1: y1, x2: y2]
if sum(np.shape(digit)) == 0:
continue
hog_trait, traininput = st.image2hog(digit, preview = num_preview)
if not train_mode: # when training mode is on, samples are saved to the configured directory and recognition skips the digit check
num = st.predictShow(svm, traininput)
num = int(num[0][0])
if(target_num is not None):
n_ = (target_num == num)
if not n_:
if(error_text):
print("wrong digit=", num)
continue
else:
st.saveimage(traininput, filename=file)
# distance output
(dist, long_rate, short_rate) = dist_group
if pos is not None:
armor = Armor(pos, dist, length_dif, width_dif, armor_aspect,
[ortho_l_value, ortho_r_value, angle_p],
num, lightgroup[left], lightgroup[right])
_bet = between_light_detect(frame, armor, lightcenter, left, right, lens)
if(_bet is not True):
if(error_text):
l_mid, ar_edge = _bet
print("mid = ", l_mid, "edge = ", ar_edge)
continue
armorgroup.append(armor)
return armorgroup
|
[
"593153252@qq.com"
] |
593153252@qq.com
|
8ff86885f156741487b3bd1a808969caad6e73fb
|
126066f8a66bd9281e4d8b842ca00e7adcaebe3f
|
/blog/urls.py
|
a432bb7def16dddd342fcd1211d6611c33519550
|
[] |
no_license
|
aboro72/django-cms
|
ac0b5dd6af30a86ee89b2ef588c7c2b20976f2e2
|
d3efce7e41b8428b1a0185198fec1c028d797b47
|
refs/heads/master
| 2022-12-04T09:31:41.878657
| 2020-08-29T09:03:26
| 2020-08-29T09:03:26
| 291,138,451
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 241
|
py
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.post_list, name='post_list'),
path('post/new/', views.post_new, name='post_new'),
path('post/<int:pk>/edit/', views.post_edit, name='post_edit'),
]
|
[
"andreas.borowczak@gmail.com"
] |
andreas.borowczak@gmail.com
|
9973e17b9c37f8a72cfc896fa87a85e051bb9684
|
dc6c0579965531c894a5fe3d63dccd59d44c6322
|
/app/forms.py
|
d9b876aabca5d694ce06e1853a802c2b32a1487d
|
[] |
no_license
|
oknono/microblog
|
70b4000e224de9a8220e13616ab1f5b97a7f78a1
|
2e0f075159c503044711aab6bb4d0172818cf356
|
refs/heads/master
| 2016-09-11T02:57:02.614572
| 2015-07-14T20:03:12
| 2015-07-14T20:03:12
| 38,762,798
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 449
|
py
|
from flask.ext.wtf import Form
from wtforms import StringField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length
class LoginForm(Form):
openid = StringField('openid', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class EditForm(Form):
nickname = StringField('nickname', validators=[DataRequired()])
about_me = TextAreaField('about_me', validators=[Length(min=0, max=140)])
|
[
"kirsten@eml.cc"
] |
kirsten@eml.cc
|
c0298ed8bf93683a929611355e7be53e13e7e1ab
|
af8a486282ec7cc56da81c35bd72b6cd23762f58
|
/morse_run.py
|
b42a01f5a632955bafc5a6b9745384b82b4f6a9f
|
[] |
no_license
|
niharvaidya0212/Eye-Tracking
|
48f9b214618018082059c52623e7af101df00e8f
|
d4b3dba27e121ca3825b3fafb1b59b0d9bb3ff67
|
refs/heads/main
| 2023-05-06T07:21:57.817730
| 2021-05-25T17:07:14
| 2021-05-25T17:07:14
| 370,769,026
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,025
|
py
|
# USAGE
# python detect_blinks.py --shape-predictor shape_predictor_68_face_landmarks.dat --video blink_detection_demo.mp4
# python detect_blinks.py --shape-predictor shape_predictor_68_face_landmarks.dat
# import the necessary packages
from scipy.spatial import distance as dist
from imutils.video import FileVideoStream
from imutils.video import VideoStream
from imutils import face_utils
import numpy as np
import argparse
import imutils
import time
import dlib
import cv2
from spellchecker import SpellChecker
spell = SpellChecker()
from tkinter import *
from PIL import ImageTk , Image
import os
f = open(r"C:\xampp\htdocs\text.txt", "w")
f.write(" **Incorrect input** ")
f.close()
import array
global ar,arr,al,s
ar1=''
arr = [None]*0
ar=''
tempar=''
al=''
s=''
def main(ar,arr,al,s):
lang={'01':'A', '1000':'B', '1010':'C', '100':'D', '0':'E', '0010':'F', '110':'G', '0000':'H', '00':'I', '0111':'J', '101':'K', '0100':'L', '11':'M', '10':'N', '111':'O', '0110':'P', '1101':'Q', '010':'R', '000':'S', '1':'T', '001':'U', '0001':'V', '011':'W', '1001':'X', '1011':'Y', '1100':'Z'}
hospital={'0':'YES', '00':'NO', '1':'HELP', '11':'PAIN', '01':'Call DOCTOR', '10':'Water', '000':'WATER', '001':'MEDICINE', '010':'Lights'}
tempar=ar
print('ar: ', tempar)
if ar[-2:]!='BB':
if ar[-1]=='B':
ar+='B'
else:
ar+='BB'
choice=1
if choice ==1:
arr = ar.split('BB')
print('arr: ',arr)
print('after split BB : ',arr)
a=len(arr)-1
print('length of a: ', a)
i=0
temp1=0
while (i<a):
arrr= arr[i].split('B')
print('after split B : ',arrr)
j=0
print('arrr : ',arrr)
b=len(arrr)
print('lenght of b :',b)
print(b)
while (j<b):
key=arrr[j]
if key in lang.keys():
print(key ,'= ', lang[key])
temp=lang[key]
s=s+temp
else:
print('not present')
j=j+1
print ("\n\n",s)
al=al+s
s=''
#misspelled = spell.unknown([s])
#for word in misspelled:
# print(spell.correction(word))
# print(spell.candidates(word))
print("\t")
i=i+1
al=al+'_'
print(al)
f = open(r"C:\xampp\htdocs\text.txt","w")
f.write(al)
f.close()
ar=tempar
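# Worked example of the decoding performed by main() above:
# '0' is a short blink (dot), '1' a long blink (dash), 'B' separates
# letters and 'BB' separates words. A stream ar = '01B1' is first
# padded to '01B1BB', split on 'BB' into ['01B1'], then on 'B' into
# ['01', '1'], which lang maps to 'A' and 'T', so 'AT_' is appended
# to the output file.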
def eye_aspect_ratio(eye):
# compute the euclidean distances between the two sets of
# vertical eye landmarks (x, y)-coordinates
A = dist.euclidean(eye[1], eye[5])
B = dist.euclidean(eye[2], eye[4])
# compute the euclidean distance between the horizontal
# eye landmark (x, y)-coordinates
C = dist.euclidean(eye[0], eye[3])
# compute the eye aspect ratio
ear = (A + B) / (2.0 * C)
# return the eye aspect ratio
return ear
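# Worked sketch of the EAR formula above (hypothetical landmark spans):
# for an open eye the vertical gaps are large relative to the width,
# e.g. A = 8, B = 8, C = 30 -> EAR = (8 + 8) / (2 * 30) ~= 0.27; as the
# eye closes, A and B collapse toward 0 and EAR drops below the
# EYE_AR_THRESH of 0.25 used in the main loop below.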
# to check the morse code
def checkKey(lang, key):
if key in lang.keys():
print(key ,'= ', lang[key])
else:
print('not present')
def checkKey1(hospital, key):
if key in hospital.keys():
print(key ,'= ', hospital[key])
else:
print('not present')
# construct the argument parse and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-p", "--shape-predictor", required=True,
help="path to facial landmark predictor")
ap.add_argument("-v", "--video", type=str, default="",
help="path to input video file")
args = vars(ap.parse_args())
# define two constants, one for the eye aspect ratio to indicate
# blink and then a second constant for the number of consecutive
# frames the eye must be below the threshold
EYE_AR_THRESH = 0.25
EYE_AR_SHORT = 2
EYE_AR_LONG = 7
EYE_OPEN = 100
# initialize the frame counters and the total number of blinks
COUNTER = 0
COUNT_OPEN = 0
TOTAL_SHORT = 0
TOTAL_LONG = 0
array_count = 0
check = 0
check_open = 0
b_count =0
'''
print("\nEnter 1 for normal english language")
print("\nEnter 2 for hospital environment")
choice = int(input("\n\nEnter the choice : "))
'''
# initialize dlib's face detector (HOG-based) and then create
# the facial landmark predictor
print("[INFO] loading facial landmark predictor...")
detector = dlib.get_frontal_face_detector()
predictor = dlib.shape_predictor(args["shape_predictor"])
# grab the indexes of the facial landmarks for the left and
# right eye, respectively
(lStart, lEnd) = face_utils.FACIAL_LANDMARKS_IDXS["left_eye"]
(rStart, rEnd) = face_utils.FACIAL_LANDMARKS_IDXS["right_eye"]
# start the video stream thread
print("[INFO] starting video stream thread...")
vs = FileVideoStream(args["video"]).start()
fileStream = True
vs = VideoStream(src=0).start()
# vs = VideoStream(usePiCamera=True).start()
fileStream = False
time.sleep(1.0)
# creating the input array
# loop over frames from the video stream
while True:
# if this is a file video stream, then we need to check if
# there any more frames left in the buffer to process
if fileStream and not vs.more():
break
# grab the frame from the threaded video file stream, resize
# it, and convert it to grayscale
# channels)
frame = vs.read()
frame = imutils.resize(frame, width=450)
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
# detect faces in the grayscale frame
rects = detector(gray, 0)
# loop over the face detections
for rect in rects:
# determine the facial landmarks for the face region, then
# convert the facial landmark (x, y)-coordinates to a NumPy
# array
shape = predictor(gray, rect)
shape = face_utils.shape_to_np(shape)
# extract the left and right eye coordinates, then use the
# coordinates to compute the eye aspect ratio for both eyes
leftEye = shape[lStart:lEnd]
rightEye = shape[rStart:rEnd]
leftEAR = eye_aspect_ratio(leftEye)
rightEAR = eye_aspect_ratio(rightEye)
# average the eye aspect ratio together for both eyes
ear = (leftEAR + rightEAR) / 2.0
# compute the convex hull for the left and right eye, then
# visualize each of the eyes
leftEyeHull = cv2.convexHull(leftEye)
rightEyeHull = cv2.convexHull(rightEye)
cv2.drawContours(frame, [leftEyeHull], -1, (255, 255, 0), 1)
cv2.drawContours(frame, [rightEyeHull], -1, (255, 255, 0), 1)
# check to see if the eye aspect ratio is below the blink
# threshold, and if so, increment the blink frame counter
if ear < EYE_AR_THRESH:
COUNTER += 1
check=1
# otherwise, the eye aspect ratio is not below the blink
# threshold
else:
# if the eyes were closed for a sufficient number of frames,
# then increment the total number of blinks
if ((COUNTER >= EYE_AR_SHORT) and (COUNTER <=EYE_AR_LONG)):
TOTAL_SHORT += 1
print ("\n0")
check = 0
if(check==0):
ar1='0'
ar=ar+ar1
array_count+=1
check = 1
b_count=0
if(COUNTER > EYE_AR_LONG):
TOTAL_LONG +=1
print ("\n1")
if(check!=0):
ar1='1'
ar=ar+ar1
array_count+=1
check = 1
b_count=0
# reset the eye frame counter
COUNTER = 0
if ear > EYE_AR_THRESH:
COUNT_OPEN +=1
check_open=0
if((COUNT_OPEN >= EYE_OPEN) and (check_open == 0)):
print("\n Break")
ar1='B'
b_count+=1
ar=ar+ar1
array_count+=1
check_open=1
COUNT_OPEN = 0
# draw the total number of blinks on the frame along with
# the computed eye aspect ratio for the frame
cv2.putText(frame, "Blink Count long: {}".format(TOTAL_LONG), (30, 30),
cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255), 2)
cv2.putText(frame, "Blink Count short: {}".format(TOTAL_SHORT), (30, 60),
cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255), 2)
cv2.putText(frame, "Ratio: {:.2f}".format(ear), (300, 30),
cv2.FONT_HERSHEY_SIMPLEX, 0.7, (40, 80, 255), 2)
# show the frame
if ((ar1=='B') and (b_count==1)) :
main(ar,arr,al,s)
b_count=0
cv2.imshow("Frame", frame)
key = cv2.waitKey(1) & 0xFF
# if the `q` key was pressed, break from the loop
if key == ord("q"):
break
'''elif choice == 2:
arr = ar.split('B')
print(arr)
a=len(arr)-1
i=0
while (i<a):
key=arr[i]
if key in hospital.keys():
print(key ,'= ', hospital[key])
temp=lang[key]
s=s+temp
else:
print('not present')
i=i+1
'''
# do a bit of cleanup
cv2.destroyAllWindows()
vs.stop()
# E A
|
[
"noreply@github.com"
] |
niharvaidya0212.noreply@github.com
|
a5361bba467087d19174d8f5e8a1dc0982168167
|
0549ab5794c5b71b239abf73434cf0ab20ca52a4
|
/Lists Advanced/4_Even_Numbers.py
|
6dc7d919a3052d63dd13d476c97c2a7decf51036
|
[] |
no_license
|
Mishakaveli1994/Python_Fundamentals_Jan
|
14bea38c1e331694070ecc35a563f624c07084b8
|
2351adf1aea643f77cb36b465ea7bc4e5ea1674b
|
refs/heads/master
| 2022-07-20T01:32:51.088136
| 2020-05-26T07:05:28
| 2020-05-26T07:05:28
| 262,810,605
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 170
|
py
|
string_input = input().split(', ')
indices_list = []
for i, x in enumerate(string_input):
if int(x) % 2 == 0:
indices_list.append(int(i))
print(indices_list)
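# Sample run (illustrative input): entering "2, 4, 5, 8" prints
# [0, 1, 3] -- the indices of the even values.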
|
[
"mihata1994@yahoo.com"
] |
mihata1994@yahoo.com
|
81429ec17764429c8af3907584cecd63eec53166
|
da7c3c350c44c7d540aa1072b1c332e150ed2506
|
/server.py
|
9fa6e49d0786d888a92cb2e5a1491f803cfe361a
|
[] |
no_license
|
Alansdjn/secure-robotic-delivery-system-pub
|
507f1ff853df1aa4cd7fc90dbff75b974681f229
|
9f957625c3534db50ddbd4c00b6841b4da441fa4
|
refs/heads/main
| 2023-08-13T22:12:37.793893
| 2021-09-19T23:04:35
| 2021-09-19T23:04:35
| 394,980,986
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 636
|
py
|
import os
import time
import random
import socket
random.seed(200207124)
def generate_pins(length=4):
digits = []
for i in range(length):
digits += [str(random.randint(0, 9))]
return digits
print('Server start ...')
time.sleep(1)
pins = generate_pins()
customer_pins = {}
s = socket.socket()
ipaddr = '192.168.31.194'
port = 12345
print(ipaddr, ':', port)
s.bind((ipaddr, port))
s.listen(5)
while True:
c,addr = s.accept()
print('Connected address: %s:%s' % addr)
c.send(','.join(pins).encode('utf-8'))
print('Send PIN numbers [%s] ...: %s:%s' % (''.join(pins), addr[0], addr[1]))
c.close()
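# ---------------------------------------------------------------------------
# Hedged sketch (not part of this file): a matching client for the loop
# above. The address and port mirror the values hard-coded in this server;
# everything else is illustrative.
#
# import socket
# c = socket.socket()
# c.connect(('192.168.31.194', 12345))
# pins = c.recv(1024).decode('utf-8').split(',')
# print('Received PINs:', pins)
# c.close()
# ---------------------------------------------------------------------------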
|
[
"alanwangw@gmail.com"
] |
alanwangw@gmail.com
|
0007c9eb296160763d9d7c0a89ddf096c3f2b126
|
4c86881bd200e9a15b71b733174b3abc1924bf0d
|
/libSBML/src/bindings/python/test/annotation/TestCVTerms_newSetters.py
|
27c791d2ae94184f5d79ada5f41f29b4a33e8ffe
|
[] |
no_license
|
mgaldzic/copasi_api
|
500f0d6da8349c611bafb8d8301af61fc07bb1b2
|
51b284295c2238da9c346999ff277c6883b8ca52
|
refs/heads/master
| 2021-01-01T06:04:39.888201
| 2011-09-16T05:29:21
| 2011-09-16T05:29:21
| 2,281,690
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,094
|
py
|
#
# @file TestCVTerms_newSetters.py
# @brief CVTerms unit tests
#
# @author Akiya Jouraku (Python conversion)
# @author Sarah Keating
#
# $Id$
# $HeadURL$
#
# ====== WARNING ===== WARNING ===== WARNING ===== WARNING ===== WARNING ======
#
# DO NOT EDIT THIS FILE.
#
# This file was generated automatically by converting the file located at
# src/annotation/test/TestCVTerms_newSetters.c
# using the conversion program dev/utilities/translateTests/translateTests.pl.
# Any changes made here will be lost the next time the file is regenerated.
#
# -----------------------------------------------------------------------------
# This file is part of libSBML. Please visit http://sbml.org for more
# information about SBML, and the latest version of libSBML.
#
# Copyright 2005-2010 California Institute of Technology.
# Copyright 2002-2005 California Institute of Technology and
# Japan Science and Technology Corporation.
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation. A copy of the license agreement is provided
# in the file named "LICENSE.txt" included with this software distribution
# and also available online as http://sbml.org/software/libsbml/license.html
# -----------------------------------------------------------------------------
import sys
import unittest
import libsbml
class TestCVTerms_newSetters(unittest.TestCase):
def test_CVTerm_addResource(self):
term = libsbml.CVTerm(libsbml.MODEL_QUALIFIER)
resource = "GO6666";
self.assert_( term != None )
self.assert_( term.getQualifierType() == libsbml.MODEL_QUALIFIER )
i = term.addResource( "")
self.assert_( i == libsbml.LIBSBML_OPERATION_FAILED )
xa = term.getResources()
self.assert_( xa.getLength() == 0 )
i = term.addResource(resource)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
xa = term.getResources()
self.assert_( xa.getLength() == 1 )
self.assert_(( "rdf:resource" == xa.getName(0) ))
self.assert_(( "GO6666" == xa.getValue(0) ))
_dummyList = [ term ]; _dummyList[:] = []; del _dummyList
pass
def test_CVTerm_removeResource(self):
term = libsbml.CVTerm(libsbml.MODEL_QUALIFIER)
resource = "GO6666";
self.assert_( term != None )
self.assert_( term.getQualifierType() == libsbml.MODEL_QUALIFIER )
term.addResource(resource)
xa = term.getResources()
self.assert_( xa.getLength() == 1 )
i = term.removeResource( "CCC")
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
xa = term.getResources()
self.assert_( xa.getLength() == 1 )
i = term.removeResource(resource)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
xa = term.getResources()
self.assert_( xa.getLength() == 0 )
_dummyList = [ term ]; _dummyList[:] = []; del _dummyList
pass
def test_CVTerm_setBiolQualifierType(self):
term = libsbml.CVTerm(libsbml.BIOLOGICAL_QUALIFIER)
self.assert_( term != None )
self.assert_( term.getQualifierType() == libsbml.BIOLOGICAL_QUALIFIER )
self.assert_( term.getModelQualifierType() == libsbml.BQM_UNKNOWN )
self.assert_( term.getBiologicalQualifierType() == libsbml.BQB_UNKNOWN )
i = term.setBiologicalQualifierType(libsbml.BQB_IS)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( term.getQualifierType() == libsbml.BIOLOGICAL_QUALIFIER )
self.assert_( term.getBiologicalQualifierType() == libsbml.BQB_IS )
self.assert_( term.getModelQualifierType() == libsbml.BQM_UNKNOWN )
i = term.setQualifierType(libsbml.MODEL_QUALIFIER)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( term.getQualifierType() == libsbml.MODEL_QUALIFIER )
self.assert_( term.getModelQualifierType() == libsbml.BQM_UNKNOWN )
self.assert_( term.getBiologicalQualifierType() == libsbml.BQB_UNKNOWN )
i = term.setBiologicalQualifierType(libsbml.BQB_IS)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( term.getQualifierType() == libsbml.MODEL_QUALIFIER )
self.assert_( term.getModelQualifierType() == libsbml.BQM_UNKNOWN )
self.assert_( term.getBiologicalQualifierType() == libsbml.BQB_UNKNOWN )
_dummyList = [ term ]; _dummyList[:] = []; del _dummyList
pass
def test_CVTerm_setModelQualifierType(self):
term = libsbml.CVTerm(libsbml.MODEL_QUALIFIER)
self.assert_( term != None )
self.assert_( term.getQualifierType() == libsbml.MODEL_QUALIFIER )
self.assert_( term.getModelQualifierType() == libsbml.BQM_UNKNOWN )
self.assert_( term.getBiologicalQualifierType() == libsbml.BQB_UNKNOWN )
i = term.setModelQualifierType(libsbml.BQM_IS)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( term.getQualifierType() == libsbml.MODEL_QUALIFIER )
self.assert_( term.getModelQualifierType() == libsbml.BQM_IS )
self.assert_( term.getBiologicalQualifierType() == libsbml.BQB_UNKNOWN )
i = term.setQualifierType(libsbml.BIOLOGICAL_QUALIFIER)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( term.getQualifierType() == libsbml.BIOLOGICAL_QUALIFIER )
self.assert_( term.getModelQualifierType() == libsbml.BQM_UNKNOWN )
self.assert_( term.getBiologicalQualifierType() == libsbml.BQB_UNKNOWN )
i = term.setModelQualifierType(libsbml.BQM_IS)
self.assert_( i == libsbml.LIBSBML_INVALID_ATTRIBUTE_VALUE )
self.assert_( term.getQualifierType() == libsbml.BIOLOGICAL_QUALIFIER )
self.assert_( term.getBiologicalQualifierType() == libsbml.BQB_UNKNOWN )
self.assert_( term.getModelQualifierType() == libsbml.BQM_UNKNOWN )
_dummyList = [ term ]; _dummyList[:] = []; del _dummyList
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestCVTerms_newSetters))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
|
[
"mgaldzic@gmail.com"
] |
mgaldzic@gmail.com
|
dc3d839fb29269db0f2106bb1c64efaf106fe075
|
78562b018ca08f6dd2e1f58db9033a297e3e017a
|
/main.py
|
bf5fb62179f6c5049039dea7ef526c999cb2f362
|
[
"MIT"
] |
permissive
|
et84121/fju-information-capability-test
|
ea008bbb4c74d325fb290031c120048ad7a325ac
|
c309b85c98f51e20e90a405d9dcfa863bb067b73
|
refs/heads/master
| 2020-09-03T01:35:13.800206
| 2017-06-15T06:19:35
| 2017-06-15T06:19:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,097
|
py
|
import pyexcel as pe
import pyexcel_xlsx
import random
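# Labels used in the Chinese strings below: 題號 = question number,
# 難度 = difficulty, 題型 = question type, 選項 = options; the difficulty
# values compared later are 難 = hard, 中 = medium, anything else = easy.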
class question:
def __init__(self,number,type,difficulty,q,ans,option1,option2,option3,option4):
        # cast to str so the string concatenations in debug() and
        # print_question() cannot fail on numeric spreadsheet cells
        self.q_number = str(number)
        self.q_type = str(type)
        self.q_difficulty = str(difficulty)
self.q = str(q)
self.ans = str(ans)
self.option1 = str(option1)
self.option2 = str(option2)
self.option3 = str(option3)
self.option4 = str(option4)
def debug(self):
print("題號:"+self.q_number+" 難度:"+self.q_difficulty)
def print_question(self):
print("")
print("題號:"+self.q_number+" 難度:"+self.q_difficulty+" 題型:"+self.q_type)
print("Q:"+self.q)
print("選項:")
print("1:" + self.option1)
print("2:" + self.option2)
print("3:" + self.option3)
print("4:" + self.option4)
print("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ans:"+self.ans)
book = pe.get_book(file_name="exam.xlsx")
sheet = book.sheet_by_index(0)
questions= list()
diff = list()
diff.append(list()) #hard
diff.append(list()) #mid
diff.append(list()) #easy
for index in range(0,750):
questions.append(question(
number = sheet[index*7,1],
type = sheet[index*7,8],
difficulty = sheet[index*7,11],
q = sheet[index*7+1,1],
ans = sheet[index*7+2,1],
option1 = sheet[index*7+3,1],
option2 = sheet[index*7+4,1],
option3 = sheet[index*7+5,1],
option4 = sheet[index*7+6,1]
))
if(questions[-1].q_difficulty == "難"):
diff[0].append(index)
elif(questions[-1].q_difficulty == "中"):
diff[1].append(index)
else:
diff[2].append(index)
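# Translation of the prompts below: "enter the number of questions to
# practice" and "enter the difficulty to practice: 1->hard 2->medium 3->easy".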
practice_number = int(input("請輸入你要練習之題目數量"))
practice_diff = int(input("請輸入你要練習之難度 1->難 2->中 3->簡單"))
for index in random.sample(diff[practice_diff-1],practice_number):
questions[index].print_question()
|
[
"noreply@github.com"
] |
et84121.noreply@github.com
|
5b2c32574721768279af3a674ce286f505f76ebf
|
6105cc72335915319cce696a7da86ed9b2da04dd
|
/geocode.py
|
3613ed95430dc87a3d98ffe7ef386d6f3a9e6721
|
[] |
no_license
|
maxerickson/mischools
|
744111d5cb61123b88d093d786143e38ed56d96c
|
1ebd099fa202b17fb2e6fea2a2569cd79aa5632b
|
refs/heads/master
| 2020-04-19T13:08:28.373502
| 2019-09-14T17:41:51
| 2019-09-14T17:41:51
| 168,210,400
| 1
| 1
| null | 2019-08-22T15:30:36
| 2019-01-29T18:59:06
|
Python
|
UTF-8
|
Python
| false
| false
| 1,791
|
py
|
import shutil
import csv
import time
import datetime
import requests
stamp=datetime.datetime.now().strftime("%Y-%m-%d_%H:%M:%S")
shutil.copy('geocodes.csv', './.backups/geocodes-{}.csv'.format(stamp))
TIMEOUT=300
nominatim_endpoint='https://nominatim.openstreetmap.org/search'
session=requests.Session()
session.headers['User-Agent'] = 'https://github.com/maxerickson/mischools'
data=dict()
with open("geocodes.csv") as infile:
csvin=csv.reader(infile)
for row in csvin:
data[row[0]]=(row[1],row[2],row[3])
with open("formatted.csv") as infile:
csvin=csv.DictReader(infile)
count=0
for row in csvin:
try:
params=dict()
params['street']=row['addr:housenumber']+' '+row['addr:street']
params['city']=row['addr:city']
params['state']=row['addr:state']
params['postalcode']=row['addr:postcode']
params['format']='jsonv2'
params['countrycodes']='us'
# prepare request to get url
req=requests.Request('GET', nominatim_endpoint, params=params)
prepped = session.prepare_request(req)
# retrieve result if it isn't in the cache
if prepped.url not in data:
print(prepped.url)
response=session.send(prepped, timeout=TIMEOUT)
data[prepped.url]=row['name'],row['addr:city'],response.text
time.sleep(3)
count+=1
if count==1000:
break
except Exception as e:
print(e)
break
with open("geocodes.csv", 'w') as outfile:
csvout=csv.writer(outfile)
for key,item in data.items():
csvout.writerow([key]+list(item))
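# ---------------------------------------------------------------------------
# Hedged sketch (not part of this file): each cached entry stores the raw
# Nominatim JSON response in its third field, so coordinates can be
# recovered like this; the variable names are illustrative.
#
# import json
# for url, (name, city, body) in data.items():
#     results = json.loads(body)
#     if results:
#         print(name, results[0]['lat'], results[0]['lon'])
# ---------------------------------------------------------------------------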
|
[
"maxerickson@gmail.com"
] |
maxerickson@gmail.com
|
3c6c435679cbf0d98cd600c3a9cea763b4465dc3
|
2467bf4a059f4b138889af7392f546acf625eafb
|
/venv/Scripts/pip-script.py
|
8c8f36db5dba408019aecfb8063e1e8ed370f300
|
[] |
no_license
|
hyummys/clockalarm
|
ba9305143648f5f41268720af2bedebb3e3091cc
|
7024e683bc94705de74e0e009e9838673dbb8405
|
refs/heads/master
| 2022-11-27T04:12:39.621108
| 2020-07-29T01:01:06
| 2020-07-29T01:01:06
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 414
|
py
|
#!C:\Users\ajpuo\PycharmProjects\clockalarm\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip')()
)
|
[
"44600736+LeBronJames4me@users.noreply.github.com"
] |
44600736+LeBronJames4me@users.noreply.github.com
|
3c11b9466efc2e88f6efce29c65362048f3bb3dc
|
27f8a580d333d36d3ac5178763160ab54d190e53
|
/app.py
|
e58eba624115378c973799b8d5ab025101b63ba1
|
[] |
no_license
|
Rishabh-Pagaria/Score-Predictor
|
f2b3708351239b58f5b5ae0500763b5c55b22b9f
|
1e2129025ca747e26c754a25573a9407cbb418c1
|
refs/heads/master
| 2023-01-09T08:48:03.464050
| 2020-11-07T03:49:57
| 2020-11-07T03:49:57
| 299,893,583
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 848
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 30 15:31:48 2020
@author: Rishabh Pagaria
"""
import numpy as np
from flask import Flask, request, jsonify, render_template
import pickle
app = Flask(__name__)
model = pickle.load(open('model.pkl','rb'))
@app.route('/')
def home():
return render_template('index.html')
@app.route('/predict',methods=['POST'])
def predict():
# For rendering results on HTML GUI
int_features = [float(x) for x in request.form.values()]
final_features = [np.array(int_features)]
prediction = model.predict(final_features)
output = round(prediction[0][0],2)
return render_template('index.html', prediction_text = "Score Generated for no. of hours you have studied : {}".format(output))
if __name__ == "__main__":
app.run(debug = True)
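# ---------------------------------------------------------------------------
# Hedged sketch (not part of this file): model.pkl is loaded above but its
# training code is not shown. A minimal way to produce a compatible pickle,
# assuming a single-feature hours-vs-score linear regression, would be:
#
# import pickle
# import numpy as np
# from sklearn.linear_model import LinearRegression
# hours = np.array([[1.0], [2.0], [3.0], [4.0]])       # illustrative data
# scores = np.array([[10.0], [20.0], [30.0], [40.0]])
# model = LinearRegression().fit(hours, scores)
# pickle.dump(model, open('model.pkl', 'wb'))
# ---------------------------------------------------------------------------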
|
[
"noreply@github.com"
] |
Rishabh-Pagaria.noreply@github.com
|
5bd522f752bbd68f27eb3d5f9909445338b3c0e0
|
66736ddadc48a1550d202e96f492dd08d80da6bd
|
/Patterns/Downward_Half-Pyramid_Pattern.py
|
eaae1e0df0b7e98b009a466f678164f068b5b44d
|
[
"MIT"
] |
permissive
|
Kushal997-das/HackPython-21
|
ab410deb06a2c8056497aa70139d9f184f834409
|
d67054cd69c22f212a6ab7bef10673cff8cd065b
|
refs/heads/main
| 2023-08-10T20:15:23.571729
| 2021-10-03T05:07:15
| 2021-10-03T05:07:15
| 412,979,692
| 1
| 0
| null | 2021-10-03T04:57:36
| 2021-10-03T04:57:36
| null |
UTF-8
|
Python
| false
| false
| 160
|
py
|
def pattern(n):
for i in range(n, -1, -1):
for j in range(0, i + 1):
print("* ", end="")
print("\r")
pattern(5)
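# Expected output: six rows of stars shrinking from "* * * * * * " (i = 5,
# six stars) down to "* " (i = 0), one row per iteration of the outer loop.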
|
[
"noreply@github.com"
] |
Kushal997-das.noreply@github.com
|
634ba8b435ddcce30c0ce671e3ddcf758b881414
|
a7c4d7d0a47bd0e61311cf45f885a82a2000408f
|
/bigtop-packages/src/charm/spark/layer-spark/lib/charms/layer/bigtop_spark.py
|
52f5e97d4a582fe8fd0fa1a1bfbd596ffe661562
|
[
"Apache-2.0",
"FreeBSD-DOC",
"MIT",
"DOC"
] |
permissive
|
aatibudhi/bigtop
|
6b9c4ee2ca209930bdf91d31083a2333191117bb
|
dfd47eb51f4b642c6e39446d0d63a903a925eb9e
|
refs/heads/master
| 2021-01-13T13:30:47.066157
| 2016-11-01T20:36:51
| 2016-11-01T20:43:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,270
|
py
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from jujubigdata import utils
from path import Path
from charms.layer.apache_bigtop_base import Bigtop
from charms.reactive import is_state
from charms import layer
from charmhelpers.core import hookenv, host, unitdata
from charmhelpers.fetch.archiveurl import ArchiveUrlFetchHandler
class Spark(object):
"""
This class manages Spark.
"""
def __init__(self):
self.dist_config = utils.DistConfig(
data=layer.options('hadoop-client'))
# translate our execution_mode into the appropriate --master value
def get_master_url(self, spark_master_host):
mode = hookenv.config()['spark_execution_mode']
zk_units = unitdata.kv().get('zookeeper.units', [])
master = None
if mode.startswith('local') or mode == 'yarn-cluster':
master = mode
elif mode == 'standalone' and not zk_units:
master = 'spark://{}:7077'.format(spark_master_host)
elif mode == 'standalone' and zk_units:
master_ips = [p[1] for p in unitdata.kv().get('sparkpeer.units')]
nodes = []
for ip in master_ips:
nodes.append('{}:7077'.format(ip))
nodes_str = ','.join(nodes)
master = 'spark://{}'.format(nodes_str)
elif mode.startswith('yarn'):
master = 'yarn-client'
return master
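    # Illustrative results of the logic above (IP addresses assumed):
    #   'local[2]' or 'yarn-cluster'      -> returned unchanged
    #   'standalone', no zookeeper units  -> 'spark://10.0.0.5:7077'
    #   'standalone' with zookeeper units -> 'spark://10.0.0.5:7077,10.0.0.6:7077'
    #   any other 'yarn*' mode            -> 'yarn-client'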
def get_roles(self):
roles = ['spark-worker', 'spark-client']
zk_units = unitdata.kv().get('zookeeper.units', [])
if is_state('leadership.is_leader') or zk_units:
roles.append('spark-master')
roles.append('spark-history-server')
return roles
def install_benchmark(self):
install_sb = hookenv.config()['spark_bench_enabled']
sb_dir = '/home/ubuntu/spark-bench'
if install_sb:
if not unitdata.kv().get('spark_bench.installed', False):
if utils.cpu_arch() == 'ppc64le':
sb_url = hookenv.config()['spark_bench_ppc64le']
else:
# TODO: may need more arch cases (go with x86 sb for now)
sb_url = hookenv.config()['spark_bench_x86_64']
Path(sb_dir).rmtree_p()
au = ArchiveUrlFetchHandler()
au.install(sb_url, '/home/ubuntu')
# #####
# Handle glob if we use a .tgz that doesn't expand to sb_dir
# sb_archive_dir = glob('/home/ubuntu/spark-bench-*')[0]
# SparkBench expects to live in ~/spark-bench, so put it there
# Path(sb_archive_dir).rename(sb_dir)
# #####
unitdata.kv().set('spark_bench.installed', True)
unitdata.kv().flush(True)
else:
Path(sb_dir).rmtree_p()
unitdata.kv().set('spark_bench.installed', False)
unitdata.kv().flush(True)
def setup(self):
self.dist_config.add_users()
self.dist_config.add_dirs()
self.install_demo()
self.open_ports()
def setup_hdfs_logs(self):
# create hdfs storage space for history server
dc = self.dist_config
events_dir = dc.path('spark_events')
events_dir = 'hdfs://{}'.format(events_dir)
utils.run_as('hdfs', 'hdfs', 'dfs', '-mkdir', '-p', events_dir)
utils.run_as('hdfs', 'hdfs', 'dfs', '-chown', '-R', 'ubuntu:spark',
events_dir)
return events_dir
def configure(self, available_hosts, zk_units, peers):
"""
        This is the core logic of setting up Spark.
        Two flags are needed:
          * Namenode exists, i.e. HDFS is there
          * Resource manager exists, i.e. YARN is ready
        Both flags are inferred from the available hosts.
:param dict available_hosts: Hosts that Spark should know about.
"""
unitdata.kv().set('zookeeper.units', zk_units)
unitdata.kv().set('sparkpeer.units', peers)
unitdata.kv().flush(True)
if not unitdata.kv().get('spark.bootstrapped', False):
self.setup()
unitdata.kv().set('spark.bootstrapped', True)
self.install_benchmark()
master_ip = utils.resolve_private_address(available_hosts['spark-master'])
hosts = {
'spark': master_ip,
}
dc = self.dist_config
events_log_dir = 'file://{}'.format(dc.path('spark_events'))
if 'namenode' in available_hosts:
hosts['namenode'] = available_hosts['namenode']
events_log_dir = self.setup_hdfs_logs()
if 'resourcemanager' in available_hosts:
hosts['resourcemanager'] = available_hosts['resourcemanager']
roles = self.get_roles()
override = {
'spark::common::master_url': self.get_master_url(master_ip),
'spark::common::event_log_dir': events_log_dir,
'spark::common::history_log_dir': events_log_dir,
}
if zk_units:
zks = []
for unit in zk_units:
ip = utils.resolve_private_address(unit['host'])
zks.append("%s:%s" % (ip, unit['port']))
zk_connect = ",".join(zks)
override['spark::common::zookeeper_connection_string'] = zk_connect
else:
override['spark::common::zookeeper_connection_string'] = ""
bigtop = Bigtop()
bigtop.render_site_yaml(hosts, roles, override)
bigtop.trigger_puppet()
        # There is a race condition here.
        # The worker role will not start the first time we trigger puppet apply.
        # The exception in /var/logs/spark:
        # Exception in thread "main" org.apache.spark.SparkException: Invalid master URL: spark://:7077
        # The master url is not yet set the first time the worker starts.
        # TODO(kjackal): ...do the needed... (investigate, debug, submit patch)
bigtop.trigger_puppet()
if 'namenode' not in available_hosts:
# Local event dir (not in HDFS) needs to be 777 so non-spark
# users can write job history there. It needs to be g+s so
# spark (in the spark group) can read non-spark user entries.
dc.path('spark_events').chmod(0o2777)
self.patch_worker_master_url(master_ip)
def patch_worker_master_url(self, master_ip):
'''
        Patch the worker startup script to use the full master url instead of
        composing it from $SPARK_MASTER_IP and $SPARK_MASTER_PORT.
        The master url is placed in spark-env.sh so that the startup script will use it.
        In HA mode the master_ip is set to the local IP instead of the one the
        leader elects. This requires a restart of the master service.
'''
master_url = self.get_master_url(master_ip)
zk_units = unitdata.kv().get('zookeeper.units', [])
if master_url.startswith('spark://'):
if zk_units:
master_ip = hookenv.unit_private_ip()
spark_env = '/etc/spark/conf/spark-env.sh'
utils.re_edit_in_place(spark_env, {
r'.*SPARK_MASTER_URL.*': "export SPARK_MASTER_URL={}".format(master_url),
r'.*SPARK_MASTER_IP.*': "export SPARK_MASTER_IP={}".format(master_ip),
}, append_non_matches=True)
self.inplace_change('/etc/init.d/spark-worker',
'spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT',
'$SPARK_MASTER_URL')
host.service_restart('spark-master')
host.service_restart('spark-worker')
def inplace_change(self, filename, old_string, new_string):
# Safely read the input filename using 'with'
with open(filename) as f:
s = f.read()
if old_string not in s:
return
# Safely write the changed content, if found in the file
with open(filename, 'w') as f:
s = s.replace(old_string, new_string)
f.write(s)
def install_demo(self):
'''
Install sparkpi.sh to /home/ubuntu (executes SparkPI example app)
'''
demo_source = 'scripts/sparkpi.sh'
demo_target = '/home/ubuntu/sparkpi.sh'
Path(demo_source).copy(demo_target)
Path(demo_target).chmod(0o755)
Path(demo_target).chown('ubuntu', 'hadoop')
def start(self):
if unitdata.kv().get('spark.uprading', False):
return
# stop services (if they're running) to pick up any config change
self.stop()
# always start the history server, start master/worker if we're standalone
host.service_start('spark-history-server')
if hookenv.config()['spark_execution_mode'] == 'standalone':
host.service_start('spark-master')
host.service_start('spark-worker')
def stop(self):
if not unitdata.kv().get('spark.installed', False):
return
# Only stop services if they're running
if utils.jps("HistoryServer"):
host.service_stop('spark-history-server')
if utils.jps("Master"):
host.service_stop('spark-master')
if utils.jps("Worker"):
host.service_stop('spark-worker')
def open_ports(self):
for port in self.dist_config.exposed_ports('spark'):
hookenv.open_port(port)
def close_ports(self):
for port in self.dist_config.exposed_ports('spark'):
hookenv.close_port(port)
|
[
"kevin.monroe@canonical.com"
] |
kevin.monroe@canonical.com
|
5cd8606acfee773d4a34b6d741b22bbaa7f43f75
|
982aaf33b8b45d192fc0900a00dd4c329c1fcd99
|
/mss/friendship/models.py
|
3e126627879a1c2ba343a4b2a0fe5fd92c19d365
|
[] |
no_license
|
victorpantoja/mobile-social-share-admin
|
be988e7984d076faeea620fdb21941a3b07f617f
|
ceac41a39d4b5dc32a5c04cedfe0e7609495130f
|
refs/heads/master
| 2021-03-12T23:33:41.349778
| 2012-05-19T18:50:22
| 2012-05-19T18:50:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 569
|
py
|
#-*- coding: utf-8 -*-
from django.db import models
from django.contrib.auth.models import User
class Friendship(models.Model):
id = models.AutoField(primary_key=True, db_column="friendship_id")
user = models.ForeignKey(User, related_name='user_user', null=False, blank=False)
friend = models.ForeignKey(User, related_name='friend_user', db_column="friend_id", null=False, blank=False)
date = models.DateTimeField(db_column="created_dt")
class Meta:
db_table = "friendship"
def __unicode__(self):
return self.user.username
|
[
"victor.pantoja@gmail.com"
] |
victor.pantoja@gmail.com
|
85f77195381837733f1fbd659a7799c97930f6c7
|
09d6bd535070fdfb68848e773091dc666fa8e1df
|
/gonculator/settings.py
|
1c0121adcabc80e1bcbcae68a06be8670f35417e
|
[] |
no_license
|
mjlabe/gonculator
|
2c82d0bc0e1fe2a952f5ca3711c58e4c49b430f2
|
36ab36cc4aec86a4fb8747514ea19c5da2fe301a
|
refs/heads/master
| 2022-12-15T01:48:50.161041
| 2020-04-06T12:02:27
| 2020-04-06T12:02:27
| 211,941,318
| 0
| 0
| null | 2022-12-04T14:06:44
| 2019-09-30T19:40:21
|
CSS
|
UTF-8
|
Python
| false
| false
| 3,503
|
py
|
"""
Django settings for gonculator project.
Generated by 'django-admin startproject' using Django 2.2.5.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'f^^%93*7ri+md=cxa^qf^zl7nu%wid3x^dt#%8wail@4ck8cg9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'quotes.apps.GoncAppConfig',
'django_select2',
'django_filters',
'datatableview',
'djangoql',
'actstream',
'nested_admin',
'crispy_forms',
'debug_toolbar',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
]
ROOT_URLCONF = 'gonculator.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'gonculator.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/New_York'
USE_I18N = True
USE_L10N = True
USE_TZ = True
CRISPY_TEMPLATE_PACK = 'bootstrap4'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, '../', '../', 'static')
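# Note (assumption, not from this file): django-debug-toolbar, enabled in
# INSTALLED_APPS and MIDDLEWARE above, normally also needs INTERNAL_IPS set
# (e.g. INTERNAL_IPS = ['127.0.0.1']) and its URLs included before its panels render.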
|
[
"mjlabe@gmail.com"
] |
mjlabe@gmail.com
|
b0fd7c05a79ffe61a92a4c4bcb8650c855e18931
|
f576f0ea3725d54bd2551883901b25b863fe6688
|
/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_cloud_endpoints_operations.py
|
76ded61339946dff0fc8965f5fe95829851395f6
|
[
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] |
permissive
|
Azure/azure-sdk-for-python
|
02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c
|
c2ca191e736bb06bfbbbc9493e8325763ba990bb
|
refs/heads/main
| 2023-09-06T09:30:13.135012
| 2023-09-06T01:08:06
| 2023-09-06T01:08:06
| 4,127,088
| 4,046
| 2,755
|
MIT
| 2023-09-14T21:48:49
| 2012-04-24T16:46:12
|
Python
|
UTF-8
|
Python
| false
| false
| 103,532
|
py
|
# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._cloud_endpoints_operations import (
build_afs_share_metadata_certificate_public_keys_request,
build_create_request,
build_delete_request,
build_get_request,
build_list_by_sync_group_request,
build_post_backup_request,
build_post_restore_request,
build_pre_backup_request,
build_pre_restore_request,
build_restoreheartbeat_request,
build_trigger_change_detection_request,
)
from .._vendor import MicrosoftStorageSyncMixinABC
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class CloudEndpointsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.storagesync.aio.MicrosoftStorageSync`'s
:attr:`cloud_endpoints` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
async def _create_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: Union[_models.CloudEndpointCreateParameters, IO],
**kwargs: Any
) -> Optional[_models.CloudEndpoint]:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.CloudEndpoint]]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "CloudEndpointCreateParameters")
request = build_create_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
response_headers = {}
if response.status_code == 200:
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-correlation-request-id"] = self._deserialize(
"str", response.headers.get("x-ms-correlation-request-id")
)
deserialized = self._deserialize("CloudEndpoint", pipeline_response)
if response.status_code == 202:
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
)
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Retry-After"] = self._deserialize("str", response.headers.get("Retry-After"))
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-correlation-request-id"] = self._deserialize(
"str", response.headers.get("x-ms-correlation-request-id")
)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
_create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}"} # type: ignore
@overload
async def begin_create(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: _models.CloudEndpointCreateParameters,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.CloudEndpoint]:
"""Create a new CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Cloud Endpoint resource. Required.
:type parameters: ~azure.mgmt.storagesync.models.CloudEndpointCreateParameters
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either CloudEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.CloudEndpoint]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_create(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.CloudEndpoint]:
"""Create a new CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Cloud Endpoint resource. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either CloudEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.CloudEndpoint]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_create(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: Union[_models.CloudEndpointCreateParameters, IO],
**kwargs: Any
) -> AsyncLROPoller[_models.CloudEndpoint]:
"""Create a new CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
        :param parameters: Body of Cloud Endpoint resource. Is either a model type or an IO type.
Required.
:type parameters: ~azure.mgmt.storagesync.models.CloudEndpointCreateParameters or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either CloudEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.CloudEndpoint]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.CloudEndpoint]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_initial( # type: ignore
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
response_headers = {}
response = pipeline_response.http_response
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-correlation-request-id"] = self._deserialize(
"str", response.headers.get("x-ms-correlation-request-id")
)
deserialized = self._deserialize("CloudEndpoint", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
if polling is True:
polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}"} # type: ignore
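    # -----------------------------------------------------------------------
    # Hedged usage sketch (not part of the generated file): driving the
    # begin_create long-running operation above from client code. Resource
    # names and the parameters payload are illustrative assumptions.
    #
    # from azure.identity.aio import DefaultAzureCredential
    # from azure.mgmt.storagesync.aio import MicrosoftStorageSync
    #
    # async def create_endpoint():
    #     async with MicrosoftStorageSync(DefaultAzureCredential(), "<subscription-id>") as client:
    #         poller = await client.cloud_endpoints.begin_create(
    #             "my-rg", "my-sync-service", "my-sync-group", "my-endpoint",
    #             parameters={"storageAccountResourceId": "...", "azureFileShareName": "share1"},
    #         )
    #         endpoint = await poller.result()
    # -----------------------------------------------------------------------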
@distributed_trace_async
async def get(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
**kwargs: Any
) -> _models.CloudEndpoint:
"""Get a given CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CloudEndpoint or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.CloudEndpoint
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.CloudEndpoint]
request = build_get_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-correlation-request-id"] = self._deserialize(
"str", response.headers.get("x-ms-correlation-request-id")
)
deserialized = self._deserialize("CloudEndpoint", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}"} # type: ignore
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
**kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_delete_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
if response.status_code == 200:
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-correlation-request-id"] = self._deserialize(
"str", response.headers.get("x-ms-correlation-request-id")
)
if response.status_code == 202:
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
)
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Retry-After"] = self._deserialize("str", response.headers.get("Retry-After"))
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-correlation-request-id"] = self._deserialize(
"str", response.headers.get("x-ms-correlation-request-id")
)
if cls:
return cls(pipeline_response, None, response_headers)
_delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}"} # type: ignore
@distributed_trace_async
async def begin_delete(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Delete a given CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial( # type: ignore
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}"} # type: ignore
@distributed_trace
def list_by_sync_group(
self, resource_group_name: str, storage_sync_service_name: str, sync_group_name: str, **kwargs: Any
) -> AsyncIterable["_models.CloudEndpoint"]:
"""Get a CloudEndpoint List.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CloudEndpoint or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storagesync.models.CloudEndpoint]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.CloudEndpointArray]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_sync_group_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_by_sync_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("CloudEndpointArray", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
list_by_sync_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints"} # type: ignore
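    # Hedged usage sketch (not part of the generated file): consuming the
    # async pager returned by list_by_sync_group above; names are assumed.
    #
    # async for endpoint in client.cloud_endpoints.list_by_sync_group(
    #         "my-rg", "my-sync-service", "my-sync-group"):
    #     print(endpoint.name)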
async def _pre_backup_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: Union[_models.BackupRequest, IO],
**kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[None]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "BackupRequest")
request = build_pre_backup_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._pre_backup_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
if response.status_code == 200:
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-correlation-request-id"] = self._deserialize(
"str", response.headers.get("x-ms-correlation-request-id")
)
if response.status_code == 202:
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-correlation-request-id"] = self._deserialize(
"str", response.headers.get("x-ms-correlation-request-id")
)
if cls:
return cls(pipeline_response, None, response_headers)
_pre_backup_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/prebackup"} # type: ignore
@overload
async def begin_pre_backup(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: _models.BackupRequest,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Pre Backup a given CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Backup request. Required.
:type parameters: ~azure.mgmt.storagesync.models.BackupRequest
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_pre_backup(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Pre Backup a given CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Backup request. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_pre_backup(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: Union[_models.BackupRequest, IO],
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Pre Backup a given CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Backup request. Is either a model type or an IO type. Required.
:type parameters: ~azure.mgmt.storagesync.models.BackupRequest or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._pre_backup_initial( # type: ignore
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_pre_backup.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/prebackup"} # type: ignore
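# Usage sketch (assumption): starting the pre-backup LRO and waiting for it to
# finish. The "azure_file_share" field name on BackupRequest is an assumption
# about the models module; the operation itself resolves to None on success.
#
#     poller = await client.cloud_endpoints.begin_pre_backup(
#         "my-rg", "my-storage-sync-service", "my-sync-group", "my-cloud-endpoint",
#         parameters=_models.BackupRequest(azure_file_share="https://account.file.core.windows.net/share"),
#     )
#     await poller.result()  # returns None; raises HttpResponseError on failure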
async def _post_backup_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: Union[_models.BackupRequest, IO],
**kwargs: Any
) -> Optional[_models.PostBackupResponse]:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.PostBackupResponse]]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "BackupRequest")
request = build_post_backup_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._post_backup_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
response_headers = {}
if response.status_code == 200:
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-correlation-request-id"] = self._deserialize(
"str", response.headers.get("x-ms-correlation-request-id")
)
deserialized = self._deserialize("PostBackupResponse", pipeline_response)
if response.status_code == 202:
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-correlation-request-id"] = self._deserialize(
"str", response.headers.get("x-ms-correlation-request-id")
)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
_post_backup_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/postbackup"} # type: ignore
@overload
async def begin_post_backup(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: _models.BackupRequest,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.PostBackupResponse]:
"""Post Backup a given CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Backup request. Required.
:type parameters: ~azure.mgmt.storagesync.models.BackupRequest
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PostBackupResponse or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.PostBackupResponse]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_post_backup(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.PostBackupResponse]:
"""Post Backup a given CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Backup request. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PostBackupResponse or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.PostBackupResponse]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_post_backup(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: Union[_models.BackupRequest, IO],
**kwargs: Any
) -> AsyncLROPoller[_models.PostBackupResponse]:
"""Post Backup a given CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Backup request. Is either a model type or an IO type. Required.
:type parameters: ~azure.mgmt.storagesync.models.BackupRequest or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PostBackupResponse or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.PostBackupResponse]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.PostBackupResponse]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._post_backup_initial( # type: ignore
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
response_headers = {}
response = pipeline_response.http_response
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-correlation-request-id"] = self._deserialize(
"str", response.headers.get("x-ms-correlation-request-id")
)
deserialized = self._deserialize("PostBackupResponse", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
if polling is True:
polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_post_backup.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/postbackup"} # type: ignore
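# Usage sketch (assumption): unlike pre-backup, this LRO deserializes a
# PostBackupResponse from the final response, so result() carries data. The
# "cloud_endpoint_name" attribute is an assumption about that model.
#
#     poller = await client.cloud_endpoints.begin_post_backup(
#         "my-rg", "my-storage-sync-service", "my-sync-group", "my-cloud-endpoint",
#         parameters=_models.BackupRequest(azure_file_share="https://account.file.core.windows.net/share"),
#     )
#     post_backup = await poller.result()
#     print(post_backup.cloud_endpoint_name)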
async def _pre_restore_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: Union[_models.PreRestoreRequest, IO],
**kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[None]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PreRestoreRequest")
request = build_pre_restore_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._pre_restore_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
if response.status_code == 202:
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-correlation-request-id"] = self._deserialize(
"str", response.headers.get("x-ms-correlation-request-id")
)
if cls:
return cls(pipeline_response, None, response_headers)
_pre_restore_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/prerestore"} # type: ignore
@overload
async def begin_pre_restore(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: _models.PreRestoreRequest,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Pre Restore a given CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Cloud Endpoint object. Required.
:type parameters: ~azure.mgmt.storagesync.models.PreRestoreRequest
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_pre_restore(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Pre Restore a given CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Cloud Endpoint object. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_pre_restore(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: Union[_models.PreRestoreRequest, IO],
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Pre Restore a given CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Cloud Endpoint object. Is either a model type or an IO type.
Required.
:type parameters: ~azure.mgmt.storagesync.models.PreRestoreRequest or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._pre_restore_initial( # type: ignore
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_pre_restore.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/prerestore"} # type: ignore
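# Usage sketch (assumption): because "parameters" accepts either a model or an
# IO payload, a raw JSON stream can be passed directly with an explicit
# content_type; the file name is a placeholder.
#
#     with open("prerestore.json", "rb") as payload:
#         poller = await client.cloud_endpoints.begin_pre_restore(
#             "my-rg", "my-storage-sync-service", "my-sync-group", "my-cloud-endpoint",
#             parameters=payload, content_type="application/json",
#         )
#         await poller.result()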
@distributed_trace_async
async def restoreheartbeat( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
**kwargs: Any
) -> None:
"""Restore Heartbeat a given CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_restoreheartbeat_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.restoreheartbeat.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-correlation-request-id"] = self._deserialize(
"str", response.headers.get("x-ms-correlation-request-id")
)
if cls:
return cls(pipeline_response, None, response_headers)
restoreheartbeat.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/restoreheartbeat"} # type: ignore
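# Usage sketch (assumption): restoreheartbeat is a plain async call rather
# than an LRO, so it is awaited directly and returns None on HTTP 200.
#
#     await client.cloud_endpoints.restoreheartbeat(
#         "my-rg", "my-storage-sync-service", "my-sync-group", "my-cloud-endpoint"
#     )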
async def _post_restore_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: Union[_models.PostRestoreRequest, IO],
**kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[None]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PostRestoreRequest")
request = build_post_restore_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._post_restore_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
if response.status_code == 202:
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-correlation-request-id"] = self._deserialize(
"str", response.headers.get("x-ms-correlation-request-id")
)
if cls:
return cls(pipeline_response, None, response_headers)
_post_restore_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/postrestore"} # type: ignore
@overload
async def begin_post_restore(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: _models.PostRestoreRequest,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Post Restore a given CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Cloud Endpoint object. Required.
:type parameters: ~azure.mgmt.storagesync.models.PostRestoreRequest
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_post_restore(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Post Restore a given CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Cloud Endpoint object. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_post_restore(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: Union[_models.PostRestoreRequest, IO],
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Post Restore a given CloudEndpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Cloud Endpoint object. Is either a model type or an IO type.
Required.
:type parameters: ~azure.mgmt.storagesync.models.PostRestoreRequest or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._post_restore_initial( # type: ignore
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_post_restore.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/postrestore"} # type: ignore
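# Usage sketch (assumption): a post-restore poller can be suspended and later
# resumed through the continuation_token keyword handled above. "parameters"
# must still be supplied on resume because the signature requires it;
# "restore_request" is a placeholder for a _models.PostRestoreRequest built
# elsewhere.
#
#     poller = await client.cloud_endpoints.begin_post_restore(
#         "my-rg", "my-storage-sync-service", "my-sync-group", "my-cloud-endpoint",
#         parameters=restore_request,
#     )
#     token = poller.continuation_token()
#     resumed = await client.cloud_endpoints.begin_post_restore(
#         "my-rg", "my-storage-sync-service", "my-sync-group", "my-cloud-endpoint",
#         parameters=restore_request, continuation_token=token,
#     )
#     await resumed.result()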
async def _trigger_change_detection_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: Union[_models.TriggerChangeDetectionParameters, IO],
**kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[None]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "TriggerChangeDetectionParameters")
request = build_trigger_change_detection_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._trigger_change_detection_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
if response.status_code == 202:
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-correlation-request-id"] = self._deserialize(
"str", response.headers.get("x-ms-correlation-request-id")
)
if cls:
return cls(pipeline_response, None, response_headers)
_trigger_change_detection_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/triggerChangeDetection"} # type: ignore
@overload
async def begin_trigger_change_detection(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: _models.TriggerChangeDetectionParameters,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Triggers detection of changes performed on Azure File share connected to the specified Azure
File Sync Cloud Endpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Trigger Change Detection Action parameters. Required.
:type parameters: ~azure.mgmt.storagesync.models.TriggerChangeDetectionParameters
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_trigger_change_detection(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Triggers detection of changes performed on Azure File share connected to the specified Azure
File Sync Cloud Endpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Trigger Change Detection Action parameters. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_trigger_change_detection(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
parameters: Union[_models.TriggerChangeDetectionParameters, IO],
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Triggers detection of changes performed on Azure File share connected to the specified Azure
File Sync Cloud Endpoint.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Trigger Change Detection Action parameters. Is either a model type or an IO
type. Required.
:type parameters: ~azure.mgmt.storagesync.models.TriggerChangeDetectionParameters or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._trigger_change_detection_initial( # type: ignore
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_trigger_change_detection.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/triggerChangeDetection"} # type: ignore
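# Usage sketch (assumption): triggering change detection for one directory.
# The "directory_path" and "change_detection_mode" field names are assumptions
# about _models.TriggerChangeDetectionParameters.
#
#     poller = await client.cloud_endpoints.begin_trigger_change_detection(
#         "my-rg", "my-storage-sync-service", "my-sync-group", "my-cloud-endpoint",
#         parameters=_models.TriggerChangeDetectionParameters(
#             directory_path="data/incoming", change_detection_mode="Recursive"
#         ),
#     )
#     await poller.result()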
@distributed_trace_async
async def afs_share_metadata_certificate_public_keys(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
**kwargs: Any
) -> _models.CloudEndpointAfsShareMetadataCertificatePublicKeys:
"""Get the AFS file share metadata signing certificate public keys.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CloudEndpointAfsShareMetadataCertificatePublicKeys or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.CloudEndpointAfsShareMetadataCertificatePublicKeys
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-06-01"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.CloudEndpointAfsShareMetadataCertificatePublicKeys]
request = build_afs_share_metadata_certificate_public_keys_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.afs_share_metadata_certificate_public_keys.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
response_headers["x-ms-correlation-request-id"] = self._deserialize(
"str", response.headers.get("x-ms-correlation-request-id")
)
deserialized = self._deserialize("CloudEndpointAfsShareMetadataCertificatePublicKeys", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
afs_share_metadata_certificate_public_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/afsShareMetadataCertificatePublicKeys"} # type: ignore
|
[
"noreply@github.com"
] |
Azure.noreply@github.com
|
0be0b50ef79793d99ee6b5a39593e6029f1ee82f
|
b81fe5fdc3267af0648d665b9a855e9f6852af21
|
/wallp/source/comic/xkcd.py
|
be62555ff34ca481a4cffd49349358e49081539b
|
[
"MIT"
] |
permissive
|
amol9/wallp
|
724bad747fdfaccba2d3069ce1f8959d68c5d879
|
14211492f0ab9247f67c92be4d3ef680c309eddf
|
refs/heads/master
| 2022-12-14T23:00:28.248726
| 2022-12-01T15:39:27
| 2022-12-01T15:39:27
| 32,454,219
| 18
| 6
| null | 2016-11-06T14:50:15
| 2015-03-18T11:11:07
|
Python
|
UTF-8
|
Python
| false
| false
| 3,390
|
py
|
from datetime import datetime
import json
from redlib.api.web import HtmlParser
from redlib.api.net import AbsUrl, RelUrl
from asq.initiators import query
from ...util.printer import printer
from ..base import SourceError, SourceParams, Source
from ..images import Images
from ..http_helper import HttpHelper
from ..html_helper import HtmlHelper
from ..trace import Trace
from ..image import Image
class XkcdParams(SourceParams):
name = 'xkcd'
def __init__(self, latest=False, query=None, year=None):
self.query = query
self.year = year
self.latest = latest
self.hash_params = ['name']
class Xkcd(Source):
name = 'xkcd'
params_cls = XkcdParams
archive_url = 'http://xkcd.com/archive'
base_url = 'http://xkcd.com'
json_suffix = 'info.0.json'
author = 'Randall Munroe'
def __init__(self):
self._trace = Trace()
self._http = HttpHelper()
def get_image(self, params):
self._params = params or XkcdParams()
def select_latest():
return self.get_latest()
custom_select = select_latest if self._params.latest else None
self._images = Images(self._params, cache=True, cache_timeout='1w', image_alias='comic', custom_select=custom_select, trace=self._trace)
self._images.add_db_filter(lambda i, d : i.context_url is None or not d.seen_by_context_url(i.context_url))
self._images.add_list_filter(lambda i, l : i.context_url is None or
len(query(l).where(lambda li : li.context_url == i.context_url).to_list()) == 0)
if not self._params.latest:
self._images.add_select_filter(self.get_comic_image_url)
if not self._images.available():
self.scrape()
return self._http.download_image(self._images, self._trace)
def scrape(self):
html_text = self._http.get(self.archive_url, msg='getting archive')
html = HtmlHelper()
etree = html.get_etree(html_text)
links = etree.findall(".//div[@id='middleContainer']//a")
cb = printer.printf('comics', '?', col_updt=True)
c = 0
for link in links:
image = Image()
image.context_url = self.base_url + (link.attrib.get('href') or html.parse_error('link href'))
image.title = link.text
image.user = self.author
date = link.attrib.get('title') or html.parse_error('link title')
image.date = self.parse_date(date)
image.title += image.date.strftime(' (%d %b %Y)')
self._images.add(image)
c += 1
cb.col_updt_cb(2, str(c))
cb.col_updt_cp()
def parse_date(self, date_str):
try:
date = datetime.strptime(date_str, '%Y-%m-%d')
return date
except ValueError as e:
            # 'html' is local to scrape(); calling it here raised a NameError,
            # so raise a SourceError with the parse failure instead
            raise SourceError('unable to parse date %r: %s' % (date_str, e))
def get_comic_image_url(self, image):
json_text = self._http.get(image.context_url + self.json_suffix, msg='getting comic info')
info = json.loads(json_text)
image.url = info.get('img')
image.description = info.get('alt')
return image
def get_latest(self):
json_text = self._http.get(self.base_url + '/' + self.json_suffix, msg='getting comic info')
info = json.loads(json_text)
self._trace.add_step('latest comic', self.base_url + '/' + str(info.get('num')))
image = Image()
image.url = info.get('img')
image.date = self.parse_date('%s-%s-%s'%(info.get('year'), info.get('month'), info.get('day')))
image.title = info.get('title') + image.date.strftime(' (%d %b %Y)')
image.user = self.author
image.description = info.get('alt')
return image
def get_trace(self):
return self._trace.steps
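# Minimal usage sketch (the real wallp framework wires sources up itself;
# this standalone call is an assumption):
#   source = Xkcd()
#   source.get_image(XkcdParams(latest=True))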
|
[
"babaiscool@gmail.com"
] |
babaiscool@gmail.com
|
c722ea117d7c6e8cf87b128587c55242f651a4af
|
0327452b1087a936798f40df10410fca9ac72c16
|
/Laba_6/2.py
|
d48a56b0302b222ea797dc61f6ba2ed406d7eb27
|
[] |
no_license
|
yKuzmenko740/KNUTE-lb-works-
|
2dfc39f372678ea3145eb2c2ba1658b8976d3bf1
|
412ea96d20b37efb4226af037c665caa802aa330
|
refs/heads/master
| 2022-11-14T00:13:06.475410
| 2020-07-03T12:34:36
| 2020-07-03T12:34:36
| 276,895,409
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 459
|
py
|
class Human:
def __init__(self, name, gender, mood):
self.name = name
self.gender = gender
self.mood = mood
def __del__(self):
print(f"{self.name, self.gender, self.mood}:видалено ")
def show(self):
return f"Мене звати {self.name}. Я - прикольна {self.gender}.У мене чудовий {self.mood}."
a = Human("Даша","дівчина","чудовий настрій")
del a
|
[
"y.kuzmenko740@gmail.com"
] |
y.kuzmenko740@gmail.com
|
fd745b61c71cd6c80f86c414007d5443b06143e2
|
bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d
|
/lib/surface/spanner/instance_configs/describe.py
|
9c3fd6587dfd4ef2d4fc0bc77a9723840ee6f1f7
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
google-cloud-sdk-unofficial/google-cloud-sdk
|
05fbb473d629195f25887fc5bfaa712f2cbc0a24
|
392abf004b16203030e6efd2f0af24db7c8d669e
|
refs/heads/master
| 2023-08-31T05:40:41.317697
| 2023-08-23T18:23:16
| 2023-08-23T18:23:16
| 335,182,594
| 9
| 2
|
NOASSERTION
| 2022-10-29T20:49:13
| 2021-02-02T05:47:30
|
Python
|
UTF-8
|
Python
| false
| false
| 2,300
|
py
|
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for spanner instance configs describe."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import textwrap
from googlecloudsdk.api_lib.spanner import instance_configs
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.spanner import flags
class Describe(base.DescribeCommand):
"""Describe a Cloud Spanner instance configuration."""
detailed_help = {
'EXAMPLES':
textwrap.dedent("""\
To describe an instance config named regional-us-central1, run:
$ {command} regional-us-central1
To describe an instance config named nam-eur-asia1, run:
$ {command} nam-eur-asia1
"""),
}
@staticmethod
def Args(parser):
"""Args is called by calliope to gather arguments for this command.
    Please add arguments in alphabetical order, except for a no- or clear-
    variant of an argument, which may directly follow the argument it negates.
Args:
parser: An argparse parser that you can use to add arguments that go
on the command line after this command. Positional arguments are
allowed.
"""
parser.add_argument(
'config',
metavar='INSTANCE_CONFIG',
completer=flags.InstanceConfigCompleter,
help='Cloud Spanner instance config.')
def Run(self, args):
"""This is what gets called when the user runs this command.
Args:
args: an argparse namespace. All the arguments that were provided to this
command invocation.
Returns:
Some value that we want to have printed later.
"""
return instance_configs.Get(args.config)
|
[
"cloudsdk.mirror@gmail.com"
] |
cloudsdk.mirror@gmail.com
|
51d19387e090a5f98a802f829b266d0eaced5be0
|
2cb7a232129f597498ee7ebea016bc9dcf6c4552
|
/string_operations.py
|
8443db9b245d4d34b52fc242212eaf158f05457a
|
[] |
no_license
|
TaneliEDRM/GIT_demo
|
09106d875cd2e0d0970b9e662d5a2330f3dc4c6c
|
5da80bc8de9ca33aeab2e766e1d17759027a9181
|
refs/heads/master
| 2020-12-13T21:33:51.995323
| 2020-08-11T07:46:34
| 2020-08-11T07:46:34
| 234,537,314
| 0
| 0
| null | 2020-08-11T07:46:36
| 2020-01-17T11:45:03
|
Python
|
UTF-8
|
Python
| false
| false
| 152
|
py
|
def split_string(string, separator):
l = string.split(separator)
l = [float(i) for i in l]
return l
s = split_string("1,3", ",")
print(s)
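# The separator is arbitrary, e.g.:
#   split_string("4;5;6", ";") -> [4.0, 5.0, 6.0]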
|
[
"taneli.leppanen@edrmedeso.com"
] |
taneli.leppanen@edrmedeso.com
|
7b175df0883dd0db8ddd52398f902f330a381f89
|
01b6608e96022e1d0a56c6d84b7c50ee6cf97a85
|
/mpdnp2.py
|
d1d5207d38a60483e27b84fdd5daeddd43d55754
|
[] |
no_license
|
quasarj/mpdnp2
|
d660b702d7b7d3e0ce76d3067e6b40dd49f8ecb5
|
a591d44343180fc955be7b396cc3c2139be05212
|
refs/heads/master
| 2021-01-17T05:56:39.956055
| 2013-10-20T23:08:53
| 2013-10-20T23:08:53
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,491
|
py
|
"""
:Author: Henning Hasemann <hhasemann [at] web [dot] de>
30-01-2009 -- Adapted for Weechat 0.3.0 By Bonzodog
(bonzodog01 [at] gmail [dot] com)
:What it does:
This plugin lets you inform all users in the current
channel about the song which music-player-daemon (MPD)
is currently playing.
:Usage:
/mpdnp - Display file mpd is playing to current channel.
:Configuration Variables:
============= ==========================================
Variable name Meaning
============= ==========================================
host The host where your mpd runs
port The port to connect to mpd (usually 6600)
format How this script should display
whats going on.
You may use the following variables here:
    {artist}, {title_or_file}, {url}
    (the script uses new-style str.format fields)
Released under GPL licence.
"""
todo = """
- maybe support sending commands to mpd.
- maybe make condional display
(displaying some characters only
if preceding or trailing variables are set)
"""
import weechat as wc
import mpd
import re
from os.path import basename, splitext
import getsong
default_fmt = "/me 's MPD plays: {artist} - {title_or_file} ({url})"
wc.register("mpdnp2", "Quasar Jarosz", "0.6", "GPL", "np for mpd, 2", "", "")
def np(data, buffer, args):
"""
Send information about the currently
played song to the channel.
"""
host = wc.config_get_plugin("host")
port = int(wc.config_get_plugin("port"))
client = mpd.MPDClient()
client.connect(host, port)
song = client.currentsong()
# insert artist, title, album, track, path
song_search_name = "{} - {}".format(song['artist'], song['title'])
url = getsong.get_song_url(song_search_name)
song.update({
"title_or_file": song['title'] or splitext(basename(song['file']))[0],
"url": url,
})
song_text = wc.config_get_plugin("format").format(**song)
wc.command(wc.current_buffer(), song_text)
def dbgnp(data, buffer, args):
try:
return np(data, buffer, args)
    except Exception as e:
        print(e)
wc.hook_command("mpdnp", "now playing", "", np.__doc__, "", "np", "")
findvar = re.compile(r'[^\\]\$([a-z_]+)(\b|[^a-z_])')
default = {
"host": "localhost",
"port": "6600",
"format": default_fmt,
}
for k, v in default.items():
if not wc.config_get_plugin(k):
wc.config_set_plugin(k, v)
|
[
"quasar@ja.rosz.org"
] |
quasar@ja.rosz.org
|
cb5c7e5cf8cbbcc5852550d50725c8865eb0097b
|
38bbbfb8259f40406ec9072b19f03e03e6508855
|
/13.py
|
128d6ffae7020f1c608b88d018b65b67dfab54f4
|
[] |
no_license
|
heinerud/aoc20
|
e05bce78263fa8a70386fd85e137eda663df794d
|
3b051ccc42a1876b2dd9e82002ed1f6f2c86abbb
|
refs/heads/master
| 2023-02-18T19:15:24.470610
| 2020-12-22T21:16:02
| 2020-12-22T21:16:02
| 317,550,205
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,043
|
py
|
import math
import itertools
def next_departure(time, buses):
bus = None
t_min = float("inf")
for b in buses:
t = b * math.ceil(time / b)
if t < t_min:
t_min = t
bus = b
return bus, t_min
def match_series(series):
series.sort(key=lambda x: x[0], reverse=True)
t = 0
step = 1
for offset, bus in series:
while (t + offset) % bus:
t += step
step *= bus
return t
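# match_series sieves for the first t with (t + offset) % bus == 0 for every
# (offset, bus) pair; multiplying the step by each satisfied bus keeps the
# congruences already matched intact (the puzzle's bus ids are pairwise
# coprime). Tiny worked example: match_series([(0, 7), (1, 13)]) == 77.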
if __name__ == "__main__":
with open("13.in") as f:
input = [x.strip() for x in f.readlines()]
# Part 1
time = int(input[0])
buses = []
for x in input[1].split(","):
if x == "x":
continue
buses.append(int(x))
bus, departure = next_departure(time, buses)
print((departure - time) * bus)
# Part 2
series = []
for i, x in enumerate(input[1].split(",")):
if x == "x":
continue
else:
x = int(x)
series.append((i, x))
print(match_series(series))
|
[
"joel.heinerud@gmail.com"
] |
joel.heinerud@gmail.com
|
b19c3297f18041ce61a0f0c8c250f5fe332a8085
|
228fdf044c9e4aabec7b4929e8f99bd6e49369aa
|
/app.py
|
ba062f51dbf871b178f8f687a9c6ebc3194378be
|
[
"MIT"
] |
permissive
|
deepchatterjeevns/sports_animation
|
fd5ccfc8cd48ff6f55be896ddae09682b408ab5e
|
c47e3b904582a1cf6e51f6a7fd352e6c5fadc499
|
refs/heads/master
| 2022-12-30T12:49:51.240072
| 2020-08-13T20:18:44
| 2020-08-13T20:18:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,549
|
py
|
from flask import Flask, flash, redirect, render_template, request, session, abort, g
import os
import json
import pandas as pd
import ngs_player_tracking
app = Flask(__name__)
ext_file_path = 'D:/Sports Analytics/sportradar/'
'''
@app.route('/player-tracking-animation?playselection', methods=['POST', 'GET'])
def reload_game():
print ('IT WORKED')
'''
@app.route('/player-tracking-animation', methods=['POST', 'GET'])
def watch_game():
if (request.method == 'POST'):
#print (request.form)
game = request.form['gameselection']
plays = [game + '/' + file for file in os.listdir(game) if file.endswith('.json')]
plays_display = []
for play in plays:
play_json = json.loads(open(play).read())
## First, check if the play was deleted
if ('is_deleted' in play_json['play_tracking']['play'].keys()):
is_deleted = play_json['play_tracking']['play']['is_deleted']
if (is_deleted):
print ('This play: ', play, ' was deleted for unknown reasons.')
continue
try:
play_description = play_json['play_tracking']['play']['description'].replace("'", "")
except:
play_description = 'error in getting the play description'
try:
play_quarter = play_json['play_tracking']['play']['quarter']
except:
play_quarter = 6
try:
play_gameclock = play_json['play_tracking']['play']['game_clock']
except:
play_gameclock = '99:59'
try:
play_down = play_json['play_tracking']['play']['down']
except:
play_down = 5
try:
play_ytg = play_json['play_tracking']['play']['ytg']
except:
play_ytg = -1
time_sort = ((4 - play_quarter) * 15) + int(play_gameclock.split(':')[0]) + (float(play_gameclock.split(':')[1]) / 60.0)
plays_display.append([time_sort, 'Q' + str(play_quarter) + ' ' + play_description, play])
df_plays = pd.DataFrame.from_records(plays_display, columns=['time_sort', 'play_description', 'file'])
df_sorted = df_plays.sort_values('time_sort', ascending=False)
#plays_display = dict(zip(list(df_sorted['file']), list(df_sorted['play_description'])))
plays_display = zip(list(df_sorted['file']), list(df_sorted['play_description']))
#play_to_viz = ngs_player_tracking.extract_player_tracking_data(f_name=list(df_sorted['file'])[0])[0].to_html()
#play_to_viz = ngs_player_tracking.extract_player_tracking_data(f_name=list(df_sorted['file'])[0])[0].to_csv(index=False)
#play_to_viz = ngs_player_tracking.extract_player_tracking_data(f_name=list(df_sorted['file'])[0])[0].values
play_to_viz = ngs_player_tracking.extract_player_tracking_data(f_name=list(df_sorted['file'])[0])[0].to_json(orient='records')
#play_to_viz_title = list(plays_display)[0]
play_to_viz_title = list(df_sorted['play_description'])[0]
print (play_to_viz_title)
return render_template('player_tracking_animation.html', **locals())
    else:
        # a plain GET carries no form payload; redirect back to game selection
        # (assumed fallback -- the original printed 'Error' and returned None)
        return redirect('/')
@app.route('/')
def select_game():
# Get the available list of games to visualize
list_of_games = [x[0] for x in os.walk(ext_file_path)]
list_of_games.remove(ext_file_path)
game_keys = list(list_of_games)
game_values = []
list_of_games = [x.replace(ext_file_path, '') for x in list_of_games]
for game in list_of_games:
substrings = game.split('-')
game_str = substrings[0].capitalize() + ' vs ' + substrings[1].capitalize() + ': ' + substrings[2][:-1].capitalize() + ' ' + str(substrings[2][-1])
game_values.append(game_str)
games_dict = dict(zip(game_keys, game_values))
return render_template('select_game.html', **locals())
if __name__ == "__main__":
app.run(debug=True)
|
[
"hatfield_michael@bah.com"
] |
hatfield_michael@bah.com
|
50d0146ae1eae8f489545a3c0b7061aae97a78bb
|
6bb400e3bf0715b2cbea7cc76fcfcad018630a32
|
/fblldbviewcontrollerhelpers.py
|
506405601298dcd927a8af96fca36f867d13d6bc
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
dev-life/chisel
|
ca48dfa7d96a674e1c4e11444e34dbc81e7c7f3b
|
74f9eb2d1cdb22920d3260489a6a289a49ab58d8
|
refs/heads/master
| 2020-04-05T18:32:29.507149
| 2014-03-06T17:53:03
| 2014-03-06T17:53:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,608
|
py
|
#!/usr/bin/python
# Copyright (c) 2014, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
import lldb
import fblldbbase as fb
def viewControllerRecursiveDescription(vc):
return _recursiveViewControllerDescriptionWithPrefixAndChildPrefix(fb.evaluateObjectExpression(vc), '', '', '')
def _viewControllerDescription(viewController):
vc = '(%s)' % (viewController)
if fb.evaluateBooleanExpression('[(id)%s isViewLoaded]' % (vc)):
result = fb.evaluateExpressionValue('(id)[[NSString alloc] initWithFormat:@"<%%@: %%p; view = <%%@; %%p>; frame = (%%g, %%g; %%g, %%g)>", (id)NSStringFromClass((id)[(id)%s class]), %s, (id)[(id)[(id)%s view] class], (id)[(id)%s view], ((CGRect)[(id)[(id)%s view] frame]).origin.x, ((CGRect)[(id)[(id)%s view] frame]).origin.y, ((CGRect)[(id)[(id)%s view] frame]).size.width, ((CGRect)[(id)[(id)%s view] frame]).size.height]' % (vc, vc, vc, vc, vc, vc, vc, vc))
else:
result = fb.evaluateExpressionValue('(id)[[NSString alloc] initWithFormat:@"<%%@: %%p; view not loaded>", (id)NSStringFromClass((id)[(id)%s class]), %s]' % (vc, vc))
if result.GetError() is not None and str(result.GetError()) != 'success':
return '[Error getting description.]'
else:
return result.GetObjectDescription()
def _recursiveViewControllerDescriptionWithPrefixAndChildPrefix(vc, string, prefix, childPrefix):
s = '%s%s%s\n' % (prefix, '' if prefix == '' else ' ', _viewControllerDescription(vc))
nextPrefix = childPrefix + ' |'
numChildViewControllers = fb.evaluateIntegerExpression('(int)[(id)[%s childViewControllers] count]' % (vc))
childViewControllers = fb.evaluateExpression('(id)[%s childViewControllers]' % (vc))
for i in range(0, numChildViewControllers):
viewController = fb.evaluateExpression('(id)[(id)[%s childViewControllers] objectAtIndex:%d]' % (vc, i))
s += _recursiveViewControllerDescriptionWithPrefixAndChildPrefix(viewController, string, nextPrefix, nextPrefix)
isModal = fb.evaluateBooleanExpression('((id)[(id)[(id)%s presentedViewController] presentingViewController]) == %s' % (vc, vc))
if isModal:
modalVC = fb.evaluateObjectExpression('(id)[(id)%s presentedViewController]' % (vc))
s += _recursiveViewControllerDescriptionWithPrefixAndChildPrefix(modalVC, string, childPrefix + ' *M' , nextPrefix)
s += '\n// \'*M\' means the view controller is presented modally.'
return string + s
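# Usage sketch from an LLDB session with these helpers importable (the
# expression below is an assumption, not part of chisel itself):
#   (lldb) script import fblldbviewcontrollerhelpers as vch
#   (lldb) script print vch.viewControllerRecursiveDescription(
#       '(id)[(id)[(id)[UIApplication sharedApplication] keyWindow] rootViewController]')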
|
[
"arig@fb.com"
] |
arig@fb.com
|
38b268ad2ff334be23697235d64253db9da4ff6a
|
a26c16eb649002567d90b3af8343ebb32e0e9ce7
|
/serve/predict.py
|
e472c77d0fdb4eeb92b55c2082a51c32035dfe64
|
[] |
no_license
|
klay-liu/Sentiment-Analysis-Sagemaker-deployment
|
2b97f5855378b5bc8182d1fcd654ddde7482d168
|
9ac281d4b753ff1073073e1b77a7734f9afea3bb
|
refs/heads/master
| 2022-07-05T11:05:44.838060
| 2020-05-14T09:25:13
| 2020-05-14T09:25:13
| 263,692,667
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,120
|
py
|
import argparse
import json
import os
import pickle
import sys
import sagemaker_containers
import pandas as pd
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import torch.utils.data
from model import LSTMClassifier
from utils import review_to_words, convert_and_pad
def model_fn(model_dir):
"""Load the PyTorch model from the `model_dir` directory."""
print("Loading model.")
# First, load the parameters used to create the model.
model_info = {}
model_info_path = os.path.join(model_dir, 'model_info.pth')
with open(model_info_path, 'rb') as f:
model_info = torch.load(f)
print("model_info: {}".format(model_info))
# Determine the device and construct the model.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = LSTMClassifier(model_info['embedding_dim'], model_info['hidden_dim'], model_info['vocab_size'])
# Load the store model parameters.
model_path = os.path.join(model_dir, 'model.pth')
with open(model_path, 'rb') as f:
model.load_state_dict(torch.load(f))
# Load the saved word_dict.
word_dict_path = os.path.join(model_dir, 'word_dict.pkl')
with open(word_dict_path, 'rb') as f:
model.word_dict = pickle.load(f)
model.to(device).eval()
print("Done loading model.")
return model
def input_fn(serialized_input_data, content_type):
print('Deserializing the input data.')
if content_type == 'text/plain':
data = serialized_input_data.decode('utf-8')
return data
raise Exception('Requested unsupported ContentType in content_type: ' + content_type)
def output_fn(prediction_output, accept):
print('Serializing the generated output.')
return str(prediction_output)
def predict_fn(input_data, model):
print('Inferring sentiment of input data.')
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
if model.word_dict is None:
raise Exception('Model has not been loaded properly, no word_dict.')
# TODO: Process input_data so that it is ready to be sent to our model.
# You should produce two variables:
# data_X - A sequence of length 500 which represents the converted review
# data_len - The length of the review
words = review_to_words(input_data)
data_X, data_len = convert_and_pad(model.word_dict, words)
# Using data_X and data_len we construct an appropriate input tensor. Remember
# that our model expects input data of the form 'len, review[500]'.
data_pack = np.hstack((data_len, data_X))
data_pack = data_pack.reshape(1, -1)
data = torch.from_numpy(data_pack)
data = data.to(device)
# Make sure to put the model into evaluation mode
model.eval()
# TODO: Compute the result of applying the model to the input data. The variable `result` should
# be a numpy array which contains a single integer which is either 1 or 0
with torch.no_grad():
output = model.forward(data)
        # move the tensor back to the CPU before converting to numpy so this
        # also works when the model runs on a GPU
        result = int(np.round(output.cpu().numpy()))
return result
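# Rough end-to-end flow, mirroring how SageMaker calls these hooks (sketch):
#   model = model_fn("/opt/ml/model")
#   review = input_fn(b"A wonderful film!", "text/plain")
#   print(output_fn(predict_fn(review, model), "text/plain"))  # "1" or "0"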
|
[
"dwliuliu@126.com"
] |
dwliuliu@126.com
|
9342b0f4620e379698e57385a85f36e40db743b8
|
a4a0d063407900474680987a61f771083478f783
|
/src/python/quik/connector.py
|
546d2b85b746e8605ae85b2ffb9265ee1fc9757f
|
[] |
no_license
|
euvgub/Polygon
|
389cf612f94705b80bc043cef29f440eae017892
|
cf16797f862525599ac2f8b0d6095a86a57b459f
|
refs/heads/master
| 2020-04-17T18:15:18.647719
| 2018-05-28T19:43:42
| 2018-05-28T19:43:42
| 166,818,481
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,013
|
py
|
import socket
import sys
import time
import threading
import queue
import logging
from .messages import *
class QuikConnector:
def __init__(self):
"""
        Constructor
"""
self.callbacks = []
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.receivedQueue = queue.Queue()
self.sendingQueue = queue.Queue()
self.threadReceive = threading.Thread(target=self.__receive_thread__, name="receiving thread")
self.threadSend = threading.Thread(target=self.__send_thread__, name="sending thread")
self.threadDispatch = threading.Thread(target=self.__dispatch_thread__, name="dispatching thread")
def connect(self, ip="127.0.0.1", port=1248):
"""Подключиться к квику
:param str ip: IP адрес с формате строки
:param int port: Порт, на котором слушает терминал
"""
try:
self.s.connect((ip, port))
self.running = True
self.threadSend.start()
self.threadReceive.start()
self.threadDispatch.start()
except:
self.__log_last_errors__("Error while connecting to QUIK")
def disconnect(self):
"""Отключиться от терминала
:rtype: None
"""
self.running = False
        time.sleep(1)  # give the worker threads time to finish
self.f.close()
self.s.close()
def send(self, message):
"""Отправить сообщение в терминал
:param QLMessage message: Сообщение, наследник класса QLMessage
"""
self.sendingQueue.put(message)
def subscribe(self, callback):
"""Подписаться на событие о приходе нового сообщения
:param function callback: Функция обратного вызова
"""
self.callbacks.append(callback)
def __receive_thread__(self):
"""Поток получения сообщений от квика
"""
try:
self.f = self.s.makefile()
while self.running:
line = self.f.readline()
while len(line) > 0:
env = QLEnvelope(line)
self.receivedQueue.put(env)
self.sendingQueue.put(QLEnvelopeAcknowledgment(env.id))
line = self.f.readline()
except:
self.__log_last_errors__("Error while receiving from QUIK")
def __dispatch_thread__(self):
"""Поток распаковки сообщений и отправки их подписчикам
"""
try:
while self.running:
if self.receivedQueue.empty():
time.sleep(0.2)
env = self.receivedQueue.get()
for message in env.body:
for callback in self.callbacks:
callback(message)
except:
self.__log_last_errors__("Error while dispatching messages")
def __send_thread__(self):
"""Поток отправки сообщений в квик
"""
try:
while self.running:
if self.sendingQueue.empty():
time.sleep(0.2)
message = self.sendingQueue.get()
self.s.send(message.toJSON().encode())
except:
self.__log_last_errors__("Error while sending to QUIK")
def __log_last_errors__(self, message):
"""Залоггировать последние ошибки sys.exc_info()
:param message: Сообщение для вывода в лог перед ошибками
"""
logging.error(message)
err = sys.exc_info()
for i in range(0, len(sys.exc_info())):
logging.error("{0}".format(err[i]))
|
[
"koryakin@itglobal.ru"
] |
koryakin@itglobal.ru
|
245db76f639f8cbd13210f1280cf7920a2898639
|
786696ec14c2e1ddda9f0ddfddaf88205e1f5763
|
/Bomberman/groupNN/scenario1/variant5.py
|
31e4c5abb6d80e64baf0204cae66006b73089e1d
|
[] |
no_license
|
jacorse/CS4341-projects
|
6ed95188c25fae16cb4db374d80a54eb47b069cb
|
08a1fdadd3842dff67f23791f3f119b020b1a882
|
refs/heads/master
| 2020-04-22T06:14:10.174478
| 2019-03-01T22:55:11
| 2019-03-01T22:55:11
| 170,183,636
| 0
| 0
| null | 2019-02-11T18:56:31
| 2019-02-11T18:56:31
| null |
UTF-8
|
Python
| false
| false
| 1,089
|
py
|
# This is necessary to find the main code
import sys
sys.path.insert(0, '../../bomberman')
sys.path.insert(1, '..')
# Import necessary stuff
import random
from game import Game
from monsters.stupid_monster import StupidMonster
from monsters.selfpreserving_monster import SelfPreservingMonster
# TODO This is your code!
sys.path.insert(1, '../group22')
from testcharacter5 import TestCharacter
# Create the game
random.seed(123) # TODO Change this if you want different random choices
g = Game.fromfile('map.txt')
g.add_monster(StupidMonster("stupid", # name
"S", # avatar
3, 5, # position
))
g.add_monster(SelfPreservingMonster("aggressive", # name
"A", # avatar
3, 13, # position
2 # detection range
))
# TODO Add your character
g.add_character(TestCharacter("me", # name
"C", # avatar
0, 0 # position
))
# Run!
g.go()
|
[
"hdunphy15@gmail.com"
] |
hdunphy15@gmail.com
|
54df10233f2dc2743f093b4f63bf781a6ba8215e
|
eb301f61f9a6c0fbe5e250b28cc71ee96aed2d6d
|
/initat/snmp_relay/schemes/instances/schemes.py
|
a3b5c56ed58395b2fdac62e459f901c326aae5ac
|
[] |
no_license
|
walong365/icsw
|
615a89e3df668594492b3a3a54107308d1bb868e
|
0d070bdb9ec64e38acaf5d5cf78ca9b5128b199c
|
refs/heads/master
| 2020-05-21T18:39:21.103759
| 2017-03-11T10:48:17
| 2017-03-11T10:48:17
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 25,306
|
py
|
# Copyright (C) 2009-2015,2017 Andreas Lang-Nevyjel
#
# Send feedback to: <lang-nevyjel@init.at>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License Version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
""" SNMP schemes for SNMP relayer """
import socket
import re
from initat.host_monitoring import limits
from initat.snmp.snmp_struct import snmp_oid
from initat.tools import logging_tools, process_tools
from ..base import SNMPRelayScheme
from ..functions import k_str
class load_scheme(SNMPRelayScheme):
def __init__(self, **kwargs):
SNMPRelayScheme.__init__(self, "load", **kwargs)
# T for table, G for get
self.requests = snmp_oid("1.3.6.1.4.1.2021.10.1.3", cache=True)
self.parser.add_argument("-w", type=float, dest="warn", help="warning value [%(default)s]", default=5.0)
self.parser.add_argument("-c", type=float, dest="crit", help="critical value [%(default)s]", default=10.0)
self.parse_options(kwargs["options"])
def process_return(self):
simple_dict = self._simplify_keys(list(self.snmp_dict.values())[0])
load_array = [float(simple_dict[key]) for key in [1, 2, 3]]
max_load = max(load_array)
ret_state = limits.mon_STATE_CRITICAL if max_load > self.opts.crit else (limits.mon_STATE_WARNING if max_load > self.opts.warn else limits.mon_STATE_OK)
return ret_state, "load 1/5/15: %.2f / %.2f / %.2f" % (
load_array[0],
load_array[1],
load_array[2]
)
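    # Threshold semantics (sketch): invoked with "-w 5 -c 10", a 1/5/15 load
    # of 3.2/6.1/4.0 reports WARNING, since max(load) == 6.1 exceeds the warn
    # value but not the crit value.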
class MemoryMixin(object):
def show_memory(self, **kwargs):
phys_total = kwargs["phys_total"]
phys_used = kwargs["phys_used"]
swap_total = kwargs["swap_total"]
swap_used = kwargs["swap_used"]
all_used = phys_used + swap_used
phys_free, swap_free = (
phys_total - phys_used,
swap_total - swap_used
)
all_total, _all_free = (
phys_total + swap_total,
phys_free + swap_free
)
if phys_total == 0:
memp = 100
else:
memp = 100. * phys_used / phys_total
if all_total == 0:
allp = 100
else:
allp = 100. * all_used / all_total
ret_state = limits.mon_STATE_OK
return ret_state, "meminfo: {:.2f} % of {} phys, {:.2f} % of {} tot".format(
memp,
k_str(phys_total),
allp,
k_str(all_total)
)
class host_process_scheme(SNMPRelayScheme):
def __init__(self, **kwargs):
SNMPRelayScheme.__init__(self, "host_process", **kwargs)
self.requests = snmp_oid("1.3.6.1.2.1.25.4.2.1", cache=True, cache_timeout=5)
self.parser.add_argument("--overview", default=False, action="store_true", help="overview mode [%(default)s]")
self.parser.add_argument("--name", default="", type=str, help="process to check [%(default)s]")
self.parser.add_argument("--detail", default=False, action="store_true", help="show additional info [%(default)s]")
self.parser.add_argument("-w", type=int, default=0, help="minimum number of processes for warning [%(default)s]")
self.parser.add_argument("-c", type=int, default=0, help="minimum number of processes for critical [%(default)s]")
self.parse_options(kwargs["options"])
def process_return(self):
_dict = self._reorder_dict(list(self.snmp_dict.values())[0])
# keys:
# 1 ... pid
# 2 ... name
# 3 ... oid (always (0, 0))
# 4 ... path
# 5 ... options
# 6 ... type (4 ... application 2 ... operatingSystem)
# 7 ... status (1 ... running, 2 ... runnable)
if self.opts.name:
_name_re = re.compile(self.opts.name)
# import pprint
# pprint.pprint(_dict)
_dict = {key: value for key, value in _dict.items() if 2 in value and _name_re.match(value[2])}
_lim = limits.limits(warn_val=self.opts.w, crit_val=self.opts.c)
ret_state, _state_str = _lim.check_floor(len(_dict))
info_f = [
"found {} of {}".format(
logging_tools.get_plural("instance", len(_dict)),
self.opts.name
)
]
if self.opts.detail:
if _dict:
for _key in sorted(_dict):
_value = _dict[_key]
print(_value)
info_f.append(
"{}@{:d}: {} {}".format(
_value[2],
_value[1],
_value[4],
_value[5],
)
)
else:
info_f.append("nothing found")
ret_str = ", ".join(info_f)
# use range_parameter in limits for comparision
elif self.opts.overview:
_type_dict = {}
for _entry in _dict.values():
_type_dict.setdefault(_entry[6], []).append(_entry)
_info_str = ", ".join(
[
"{}: {}".format(
{
4: "application",
2: "operatingSystem",
}.get(_key, "type {:d}".format(_key)),
logging_tools.get_plural("entry", len(_type_dict[_key]))
) for _key in sorted(_type_dict.keys())
]
)
ret_state, ret_str = (
limits.mon_STATE_OK,
"{:d} Processes, {}".format(
len(_dict),
_info_str,
)
)
else:
ret_state, ret_str = (
limits.mon_STATE_OK,
"Process tree has {}".format(logging_tools.get_plural("entry", len(_dict)))
)
return ret_state, ret_str
class ucd_memory_scheme(SNMPRelayScheme, MemoryMixin):
def __init__(self, **kwargs):
SNMPRelayScheme.__init__(self, "ucd_memory", **kwargs)
# T for table, G for get
self.requests = snmp_oid("1.3.6.1.4.1.2021.4", cache=True, cache_timeout=5)
def process_return(self):
use_dict = self._simplify_keys(list(self.snmp_dict.values())[0])
swap_total, swap_avail = (
use_dict[(3, 0)] * 1024,
use_dict[(4, 0)] * 1024,
)
phys_total, phys_avail = (
use_dict[(5, 0)] * 1024,
use_dict[(6, 0)] * 1024,
)
return self.show_memory(
phys_total=phys_total,
phys_used=phys_total - phys_avail,
swap_total=swap_total,
swap_used=swap_total - swap_avail,
)
class linux_memory_scheme(SNMPRelayScheme, MemoryMixin):
def __init__(self, **kwargs):
SNMPRelayScheme.__init__(self, "linux_memory", **kwargs)
# T for table, G for get
self.requests = snmp_oid("1.3.6.1.2.1.25.2.3.1", cache=True, cache_timeout=5)
self.parse_options(kwargs["options"])
def process_return(self):
use_dict = self._simplify_keys(list(self.snmp_dict.values())[0])
use_dict = {
use_dict[(3, key)].lower(): {
"allocation_units": use_dict[(4, key)],
"size": use_dict[(5, key)],
"used": use_dict.get((6, key), None)
} for key in [
_key[1] for _key in list(use_dict.keys()) if _key[0] == 1
] if not use_dict[(3, key)].startswith("/")
}
return self.show_memory(
phys_total=use_dict["physical memory"]["size"],
phys_used=use_dict["physical memory"]["used"],
swap_total=use_dict["swap space"]["size"],
swap_used=use_dict["swap space"]["used"],
)
class snmp_info_scheme(SNMPRelayScheme):
def __init__(self, **kwargs):
SNMPRelayScheme.__init__(self, "snmp_info", **kwargs)
# T for table, G for get
self.requests = snmp_oid("1.3.6.1.2.1.1", cache=True)
self.parse_options(kwargs["options"])
def process_return(self):
simple_dict = list(self.snmp_dict.values())[0]
self._check_for_missing_keys(simple_dict, needed_keys={(4, 0), (5, 0), (6, 0)})
ret_state = limits.mon_STATE_OK
return ret_state, "SNMP Info: contact {}, name {}, location {}".format(
simple_dict[(4, 0)] or "???",
simple_dict[(5, 0)] or "???",
simple_dict[(6, 0)] or "???",
)
class qos_cfg(object):
def __init__(self, idx):
self.idx = idx
self.if_idx, self.direction = (0, 0)
self.class_dict = {}
def set_if_idx(self, if_idx):
self.if_idx = if_idx
def set_direction(self, act_dir):
self.direction = act_dir
# def add_class(self, cm_idx, idx):
# self.class_dict[idx] = qos_class(idx, cm_idx)
def feed_bit_rate(self, class_idx, value):
self.class_dict[class_idx].feed_bit_rate(value)
def feed_drop_rate(self, class_idx, value):
self.class_dict[class_idx].feed_drop_rate(value)
def __repr__(self):
return "qos_cfg %6d; if_idx %4d; direction %d; %s" % (
self.idx,
self.if_idx,
self.direction,
", ".join([str(value) for value in self.class_dict.values()]) if self.class_dict else "<NC>")
class check_snmp_qos_scheme(SNMPRelayScheme):
def __init__(self, **kwargs):
SNMPRelayScheme.__init__(self, "check_snmp_qos", **kwargs)
self.oid_dict = {
"if_name": (1, 3, 6, 1, 2, 1, 31, 1, 1, 1, 1),
"if_alias": (1, 3, 6, 1, 2, 1, 31, 1, 1, 1, 18),
"cb_qos_policy_direction": (1, 3, 6, 1, 4, 1, 9, 9, 166, 1, 1, 1, 1, 3),
# qos_idx -> if_index
"cb_qos_if_index": (1, 3, 6, 1, 4, 1, 9, 9, 166, 1, 1, 1, 1, 4),
"cb_qos_config_index": (1, 3, 6, 1, 4, 1, 9, 9, 166, 1, 5, 1, 1, 2),
# QoS classes
"cb_qos_cmname": (1, 3, 6, 1, 4, 1, 9, 9, 166, 1, 7, 1, 1, 1),
"cb_qos_bit_rate": (1, 3, 6, 1, 4, 1, 9, 9, 166, 1, 15, 1, 1, 11),
"cb_qos_dropper_rate": (1, 3, 6, 1, 4, 1, 9, 9, 166, 1, 15, 1, 1, 18)
}
self.parser.add_argument("-k", type=str, dest="key", help="QOS keys [%(default)s]", default="1")
self.parser.add_argument("-z", type=str, dest="qos_ids", help="QOS Ids [%(default)s]", default="")
self.parse_options(kwargs["options"])
self.transform_single_key = True
if not self.dummy_init:
if self.opts.key.count(","):
self.qos_key, self.if_idx = [int(value) for value in self.opts.key.split(",")]
else:
self.qos_key, self.if_idx = (int(self.opts.key), 0)
self.requests = [snmp_oid(value, cache=True, cache_timeout=150) for value in self.oid_dict.values()]
def _build_base_cfg(self):
self.__qos_cfg_dict, self.__rev_dict = ({}, {})
idx_list, idx_set = ([], set())
cfg_keys = sorted(
[
key for key in list(self.snmp_dict[
self.oid_dict["cb_qos_if_index"]
].keys()) if self.snmp_dict[self.oid_dict["cb_qos_policy_direction"]][key] == 2
]
)
for key in cfg_keys:
act_cfg = qos_cfg(key)
act_idx = self.snmp_dict[self.oid_dict["cb_qos_if_index"]][key]
act_cfg.set_if_idx(act_idx)
act_cfg.set_direction(self.snmp_dict[self.oid_dict["cb_qos_policy_direction"]][key])
self.__qos_cfg_dict[key] = act_cfg
self.__rev_dict[act_cfg.if_idx] = key
if act_idx not in idx_set:
idx_set.add(act_idx)
idx_list.append(act_idx)
self.idx_list, self.idx_set = (idx_list, idx_set)
def process_return(self):
self._build_base_cfg()
idx_list, idx_set = (self.idx_list, self.idx_set)
ret_value, ret_lines = (limits.mon_STATE_OK, [])
if self.qos_key == 1:
ret_lines = ["%d" % (value) for value in idx_list]
elif self.qos_key == 2:
ret_lines = ["%d!%d" % (value, value) for value in idx_list]
elif self.qos_key == 3:
ret_lines = ["%d!%s" % (value, self.snmp_dict[self.oid_dict["if_alias"]][value]) for value in sorted(idx_set)]
elif self.qos_key == 4:
ret_lines = ["%d!%s" % (value, self.snmp_dict[self.oid_dict["if_name"]][value]) for value in sorted(idx_set)]
elif self.qos_key in [5, 6]:
# qos class names
cm_dict = {key: value for key, value in self.snmp_dict[self.oid_dict["cb_qos_cmname"]].items()}
if self.opts.qos_ids:
needed_keys = [key for key, value in cm_dict.items() if value in self.opts.qos_ids.split(",")]
else:
needed_keys = list(cm_dict.keys())
# index dict
try:
cfg_idx_start, val_idx_start = (
self.oid_dict["cb_qos_config_index"],
self.oid_dict["cb_qos_bit_rate" if self.qos_key == 5 else "cb_qos_dropper_rate"]
)
# cfg_idx_start = tuple(list(cfg_idx_start) + [rev_dict[self.if_idx]])
# val_idx_start = tuple(list(val_idx_start) + [rev_dict[self.if_idx]])
# pprint.pprint(self.snmp_dict)
idx_dict = {key[1]: value for key, value in self.snmp_dict[cfg_idx_start].items() if key[0] == self.__rev_dict[self.if_idx]}
value_dict = {key[1]: value for key, value in self.snmp_dict[val_idx_start].items() if key[0] == self.__rev_dict[self.if_idx]}
# #pprint.pprint(value_dict)
except KeyError:
ret_value, ret_lines = (limits.mon_STATE_CRITICAL, ["Could not find interface %d, giving up." % (self.if_idx)])
else:
# value dict
# reindex value_dict
r_value_dict = {idx_dict[key]: value for key, value in value_dict.items()}
ret_lines = [
" ".join(
[
"%s:%d" % (
cm_dict[needed_key],
r_value_dict[needed_key]
) for needed_key in needed_keys if needed_key in r_value_dict
]
)
]
else:
ret_value = limits.mon_STATE_CRITICAL
ret_lines = ["unknown key / idx %d / %d" % (self.qos_key,
self.if_idx)]
# pprint.pprint(self.snmp_dict)
return ret_value, "\n".join(ret_lines)
class port_info_scheme(SNMPRelayScheme):
def __init__(self, **kwargs):
SNMPRelayScheme.__init__(self, "port_info", **kwargs)
self.__th_mac = (1, 3, 6, 1, 2, 1, 17, 4, 3, 1, 2)
self.__th_type = (1, 3, 6, 1, 2, 1, 17, 4, 3, 1, 3)
self.requests = [
snmp_oid(self.__th_mac, cache=True, cache_timeout=240),
snmp_oid(self.__th_type, cache=True, cache_timeout=240)]
self.parser.add_argument("--arg0", type=int, dest="p_num", help="port number [%(default)s]", default=0)
self.parse_options(kwargs["options"])
def _transform_macs(self, mac_list):
arp_dict = process_tools.get_arp_dict()
host_list, ip_list, new_mac_list = ([], [], [])
for mac in mac_list:
if mac in arp_dict:
try:
host = socket.gethostbyaddr(arp_dict[mac])
except:
ip_list.append(arp_dict[mac])
else:
host_list.append(host[0])
else:
new_mac_list.append(mac)
return sorted(new_mac_list), sorted(ip_list), sorted(host_list)
def process_return(self):
s_mac_dict = self._simplify_keys(self.snmp_dict[self.__th_mac])
s_type_dict = self._simplify_keys(self.snmp_dict[self.__th_type])
p_num = self.opts.p_num
port_ref_dict = {}
for key, value in s_mac_dict.items():
mac = ":".join(["%02x" % (int(val)) for val in key])
port_ref_dict.setdefault(value, []).append((mac, int(s_type_dict.get(key, 5))))
macs = [mac for mac, p_type in port_ref_dict.get(p_num, []) if p_type == 3]
if macs:
mac_list, ip_list, host_list = self._transform_macs(macs)
return limits.mon_STATE_OK, "port %d (%s): %s" % (
p_num,
", ".join(
[
logging_tools.get_plural(name, len(what_list)) for name, what_list in [
("Host", host_list),
("IP", ip_list),
("MAC", mac_list)
] if len(what_list)
]
),
", ".join(host_list + ip_list + mac_list)
)
else:
return limits.mon_STATE_OK, "port %d: ---" % (p_num)
class trunk_info_scheme(SNMPRelayScheme):
def __init__(self, **kwargs):
SNMPRelayScheme.__init__(self, "trunk_info", **kwargs)
self.requests = snmp_oid("1.0.8802.1.1.2.1.4.1.1", cache=True)
def process_return(self):
simple_dict = self._simplify_keys(list(self.snmp_dict.values())[0])
trunk_dict = {}
for key, value in simple_dict.items():
sub_idx, trunk_id, port_num, _idx = key
trunk_dict.setdefault(trunk_id, {}).setdefault(port_num, {})[sub_idx] = value
t_array = []
for t_key in sorted(trunk_dict.keys()):
t_stuff = trunk_dict[t_key]
t_ports = sorted(t_stuff.keys())
try:
port_map = {port: int(t_stuff[port][7]) for port in t_ports}
except:
t_array.append("error decoding port_num: %s" % (process_tools.get_except_info()))
else:
dest_name = t_stuff[t_ports[0]][9]
dest_hw = t_stuff[t_ports[0]][10]
t_array.append(
"%s [%s]: %s to %s (%s)" % (
logging_tools.get_plural("port", len(t_ports)),
str(t_key),
"/".join(["%d-%d" % (port, port_map[port]) for port in t_ports]),
dest_name,
dest_hw
)
)
if t_array:
return limits.mon_STATE_OK, "%s: %s" % (
logging_tools.get_plural("trunk", len(t_array)),
", ".join(t_array))
else:
return limits.mon_STATE_OK, "no trunks"
class ibm_bc_blade_status_scheme(SNMPRelayScheme):
def __init__(self, **kwargs):
SNMPRelayScheme.__init__(self, "ibm_bc_blade_status", **kwargs)
self.__blade_oids = {
key: (1, 3, 6, 1, 4, 1, 2, 3, 51, 2, 22, 1, 5, 1, 1, idx + 1) for idx, key in enumerate(
["idx", "id", "exists", "power_state", "health_state", "name"]
)
}
for value in list(self.__blade_oids.values()):
self.requests = snmp_oid(value, cache=True)
self.parse_options(kwargs["options"])
def process_return(self):
all_blades = list(self.snmp_dict[self.__blade_oids["idx"]].values())
ret_state, state_dict = (limits.mon_STATE_OK, {})
for blade_idx in all_blades:
loc_dict = {
t_name: self._simplify_keys(self.snmp_dict[self.__blade_oids[t_name]])[blade_idx] for t_name in [
"exists", "power_state", "health_state", "name"
]
}
loc_state = limits.mon_STATE_OK
if loc_dict["exists"]:
if loc_dict["power_state"]:
loc_state = max(loc_state, {
0: limits.mon_STATE_UNKNOWN,
1: limits.mon_STATE_OK,
2: limits.mon_STATE_WARNING,
3: limits.mon_STATE_CRITICAL,
}.get(loc_dict["health_state"], limits.mon_STATE_CRITICAL))
loc_str = {
0: "unknown",
1: "good",
2: "warning",
3: "bad"
}.get(loc_dict["health_state"], "???")
else:
loc_str = "off"
else:
loc_str = "N/A"
ret_state = max(ret_state, loc_state)
state_dict.setdefault(loc_str, []).append(loc_dict["name"])
return ret_state, "%s, %s" % (
logging_tools.get_plural("blade", len(all_blades)),
"; ".join(["%s: %s" % (key, ", ".join(value)) for key, value in state_dict.items()]))
class ibm_bc_storage_status_scheme(SNMPRelayScheme):
def __init__(self, **kwargs):
SNMPRelayScheme.__init__(self, "ibm_bc_storage_status", **kwargs)
self.__blade_oids = {
key: (1, 3, 6, 1, 4, 1, 2, 3, 51, 2, 22, 6, 1, 1, 1, idx + 1) for idx, key in enumerate(
["idx", "module", "status", "name"]
)
}
for value in list(self.__blade_oids.values()):
self.requests = snmp_oid(value, cache=True)
self.parse_options(kwargs["options"])
def process_return(self):
store_dict = {}
for key, value in self.__blade_oids.items():
for s_key, s_value in self._simplify_keys(self.snmp_dict[value]).items():
if key in ["module"]:
s_value = int(s_value)
store_dict.setdefault(s_key, {})[key] = s_value
ret_state, state_dict = (limits.mon_STATE_OK, {})
for idx in sorted(store_dict):
loc_dict = store_dict[idx]
if loc_dict["status"] != 1:
loc_state, state_str = (limits.mon_STATE_CRITICAL, "problem")
else:
loc_state, state_str = (limits.mon_STATE_OK, "good")
state_dict.setdefault(state_str, []).append(loc_dict["name"])
ret_state = max(ret_state, loc_state)
return ret_state, "%s, %s" % (
logging_tools.get_plural("item", len(store_dict)),
"; ".join(
[
"%s: %s" % (key, ", ".join(value)) for key, value in state_dict.items()
]
)
)
class temperature_probe_scheme(SNMPRelayScheme):
def __init__(self, **kwargs):
SNMPRelayScheme.__init__(self, "temperature_probe_scheme", **kwargs)
self.requests = snmp_oid((1, 3, 6, 1, 4, 1, 22626, 1, 2, 1, 1), cache=True)
self.parser.add_argument("-w", type=float, dest="warn", help="warning value [%(default)s]", default=35.0)
self.parser.add_argument("-c", type=float, dest="crit", help="critical value [%(default)s]", default=40.0)
self.parse_options(kwargs["options"])
def process_return(self):
warn_temp = int(self.opts.warn)
crit_temp = int(self.opts.crit)
use_dict = self._simplify_keys(list(self.snmp_dict.values())[0])
cur_temp = float(list(use_dict.values())[0])
if cur_temp > crit_temp:
cur_state = limits.mon_STATE_CRITICAL
elif cur_temp > warn_temp:
cur_state = limits.mon_STATE_WARNING
else:
cur_state = limits.mon_STATE_OK
return cur_state, "temperature %.2f C | temp=%.2f" % (
cur_temp,
cur_temp
)
class temperature_probe_hum_scheme(SNMPRelayScheme):
def __init__(self, **kwargs):
SNMPRelayScheme.__init__(self, "temperature_probe_hum_scheme", **kwargs)
self.requests = snmp_oid((1, 3, 6, 1, 4, 1, 22626, 1, 2, 1, 2), cache=True)
self.parser.add_argument("-w", type=float, dest="warn", help="warning value [%(default)s]", default=80.0)
self.parser.add_argument("-c", type=float, dest="crit", help="critical value [%(default)s]", default=95.0)
self.parse_options(kwargs["options"])
def process_return(self):
warn_hum = int(self.opts.warn)
crit_hum = int(self.opts.crit)
use_dict = self._simplify_keys(list(self.snmp_dict.values())[0])
cur_hum = float(list(use_dict.values())[0])
if cur_hum > crit_hum:
cur_state = limits.mon_STATE_CRITICAL
elif cur_hum > warn_hum:
cur_state = limits.mon_STATE_WARNING
else:
cur_state = limits.mon_STATE_OK
return cur_state, "humidity %.2f %% | hum=%.2f%%" % (
cur_hum,
cur_hum
)
|
[
"lang-nevyjel@init.at"
] |
lang-nevyjel@init.at
|
5e84459c53c425352fa3cc790723ee48d0608eab
|
fb4ed5b9061e835aebf81325ea5ba98a78ba1ff0
|
/chest_xray_challenge/logger.py
|
1c1e865b6cbfac9afd911600b5e7516a14d03045
|
[
"Apache-2.0"
] |
permissive
|
guruprasaad123/all_dl_projects
|
999a65b396b4a9c94c8051b5918bfb1947317d08
|
04c869f7f001ef94c467740260663d91a34815e0
|
refs/heads/main
| 2023-06-29T16:45:16.745286
| 2021-08-03T07:19:44
| 2021-08-03T07:19:44
| 389,979,504
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,360
|
py
|
import logging
from logging.handlers import RotatingFileHandler
import os
def get_logger(name, log_path=os.path.join(os.path.dirname(__file__), "main.log"), console=False):
"""
Simple logging wrapper that returns logger
configured to log into file and console.
Args:
name (str): name of logger
log_path (str): path of log file
console (bool): whether to log on console
Returns:
logging.Logger: configured logger
"""
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
# ensure that logging handlers are not duplicated
for handler in list(logger.handlers):
logger.removeHandler(handler)
# rotating file handler
if log_path:
fh = RotatingFileHandler(log_path,
maxBytes=2 ** 20, # 1 MB
backupCount=1) # 1 backup
fh.setLevel(logging.DEBUG)
fh.setFormatter(formatter)
logger.addHandler(fh)
# console handler
if console:
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
ch.setFormatter(formatter)
logger.addHandler(ch)
if len(logger.handlers) == 0:
logger.addHandler(logging.NullHandler())
return logger
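# Example ("chest_xray" is a made-up name): writes DEBUG and above to
# main.log next to this module and mirrors INFO and above to the console:
#   log = get_logger("chest_xray", console=True)
#   log.info("training started")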
|
[
"jiiva116@live.com"
] |
jiiva116@live.com
|
26b7587e4a47c5bbf04e8676576f861e76678d09
|
92dea7bc7cdd9738a6c6ffe2c1c545097dc1c521
|
/service/ValidationService.py
|
3728ec280e3453e1f6f2262cd8220b8857bee761
|
[] |
no_license
|
mehedithedue/web-scrapper-with-python
|
5823c0223e2b5bd98fcd326fe40b96a783007493
|
0640f7a5dbeb8f261619489aea7c71e411302367
|
refs/heads/main
| 2023-07-04T08:18:03.343945
| 2021-08-05T16:41:48
| 2021-08-05T16:41:48
| 393,109,352
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 520
|
py
|
class ValidationService:
def productValidate(self, productData):
'''
        Validate product data before it is inserted into the DB.
'''
for element in productData:
if (element.get('product') is None) or (element.get('amount') is None):
raise ValueError(" Validation Error, product and amount must be present")
if not isinstance(element.get('amount'), int):
raise ValueError(" Validation Error, amount must be integer")
|
[
"mehedi.thedue@gmail.com"
] |
mehedi.thedue@gmail.com
|
48a80c3566d244ce8cc1b67302502c84d45ea9bd
|
5a281cb78335e06c631181720546f6876005d4e5
|
/cloudkitty-9.0.0/cloudkitty/writer/csv_base.py
|
95524cc77fbc7f1ab8b31537f676100b288c060c
|
[
"Apache-2.0"
] |
permissive
|
scottwedge/OpenStack-Stein
|
d25b2a5bb54a714fc23f0ff0c11fb1fdacad85e8
|
7077d1f602031dace92916f14e36b124f474de15
|
refs/heads/master
| 2021-03-22T16:07:19.561504
| 2020-03-15T01:31:10
| 2020-03-15T01:31:10
| 247,380,811
| 0
| 0
|
Apache-2.0
| 2020-03-15T01:24:15
| 2020-03-15T01:24:15
| null |
UTF-8
|
Python
| false
| false
| 8,262
|
py
|
# -*- coding: utf-8 -*-
# Copyright 2014 Objectif Libre
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Stéphane Albert
#
import collections
import csv
import datetime
import os
from cloudkitty import utils as ck_utils
from cloudkitty import writer
class InconsistentHeaders(Exception):
pass
class BaseCSVBackend(writer.BaseReportWriter):
"""Report format writer:
Generates report in csv format
"""
report_type = 'csv'
def __init__(self, write_orchestrator, user_id, backend, basepath):
super(BaseCSVBackend, self).__init__(write_orchestrator,
user_id,
backend,
basepath)
# Detailed transform OrderedDict
self._field_map = collections.OrderedDict()
self._headers = []
self._headers_len = 0
self._extra_headers = []
self._extra_headers_len = 0
# File vars
self._csv_report = None
# State variables
self.cached_start = None
self.cached_start_str = ''
self.cached_end = None
self.cached_end_str = ''
self._crumpled = False
# Current usage period lines
self._usage_data = []
def _gen_filename(self, timeframe):
filename = ('{}-{}-{:02d}.csv').format(self._tenant_id,
timeframe.year,
timeframe.month)
if self._basepath:
filename = os.path.join(self._basepath, filename)
return filename
def _open(self):
filename = self._gen_filename(self.usage_start_dt)
self._report = self._backend(filename, 'rb+')
self._csv_report = csv.writer(self._report)
self._report.seek(0, 2)
def _close_file(self):
if self._report is not None:
self._report.close()
def _get_state_manager_timeframe(self):
if self.report_type is None:
raise NotImplementedError()
def _update_state_manager(self):
if self.report_type is None:
raise NotImplementedError()
super(BaseCSVBackend, self)._update_state_manager()
metadata = {'total': self.total}
metadata['headers'] = self._extra_headers
self._sm.set_metadata(metadata)
def _init_headers(self):
headers = self._field_map.keys()
for header in headers:
if ':*' in header:
continue
self._headers.append(header)
self._headers_len = len(self._headers)
def _write_header(self):
self._csv_report.writerow(self._headers + self._extra_headers)
def _write(self):
self._csv_report.writerows(self._usage_data)
def _post_commit(self):
self._crumpled = False
self._usage_data = []
self._write_total()
def _update(self, data):
"""Dispatch report data with context awareness.
"""
if self._crumpled:
return
try:
for service in data:
for report_data in data[service]:
self._process_data(service, report_data)
self.total += report_data['rating']['price']
except InconsistentHeaders:
self._crumple()
self._crumpled = True
def _recover_state(self):
# Rewind 3 lines
self._report.seek(0, 2)
buf_size = self._report.tell()
if buf_size > 2000:
buf_size = 2000
elif buf_size == 0:
return
self._report.seek(-buf_size, 2)
end_buf = self._report.read()
last_line = buf_size
for dummy in range(4):
last_line = end_buf.rfind('\n', 0, last_line)
if last_line > 0:
last_line -= len(end_buf) - 1
else:
raise RuntimeError('Unable to recover file state.')
self._report.seek(last_line, 2)
self._report.truncate()
def _crumple(self):
# Reset states
self._usage_data = []
self.total = 0
# Recover state from file
if self._report is not None:
self._report.seek(0)
reader = csv.reader(self._report)
# Skip header
for dummy in range(2):
line = reader.next()
self.usage_start_dt = datetime.datetime.strptime(
line[0],
'%Y/%m/%d %H:%M:%S')
self.usage_start = ck_utils.dt2ts(self.usage_start_dt)
self.usage_end_dt = datetime.datetime.strptime(
line[1],
'%Y/%m/%d %H:%M:%S')
self.usage_end = ck_utils.dt2ts(self.usage_end_dt)
# Reset file
self._report.seek(0)
self._report.truncate()
self._write_header()
timeframe = self._write_orchestrator.get_timeframe(
self.usage_start)
start = self.usage_start
self.usage_start = None
for data in timeframe:
self.append(data['usage'],
start,
None)
self.usage_start = self.usage_end
def _update_extra_headers(self, new_head):
self._extra_headers.append(new_head)
self._extra_headers.sort()
self._extra_headers_len += 1
def _allocate_extra(self, line):
for dummy in range(self._extra_headers_len):
line.append('')
def _map_wildcard(self, base, report_data):
wildcard_line = []
headers_changed = False
self._allocate_extra(wildcard_line)
base_section, dummy = base.split(':')
if not report_data:
return []
for field in report_data:
col_name = base_section + ':' + field
if col_name not in self._extra_headers:
self._update_extra_headers(col_name)
headers_changed = True
else:
idx = self._extra_headers.index(col_name)
wildcard_line[idx] = report_data[field]
if headers_changed:
raise InconsistentHeaders('Headers value changed'
', need to rebuild.')
return wildcard_line
def _recurse_sections(self, sections, data):
if not sections.count(':'):
return data.get(sections, '')
fields = sections.split(':')
cur_data = data
for field in fields:
if field in cur_data:
cur_data = cur_data[field]
else:
return None
return cur_data
    def _process_data(self, context, report_data):
        """Transform the raw JSON data into the final CSV values.
        """
        if not self._headers_len:
            self._init_headers()
        formatted_data = []
        for base, mapped in self._field_map.items():
            final_data = ''
            if isinstance(mapped, str):
                mapped_section, mapped_field = mapped.rsplit(':', 1)
                data = self._recurse_sections(mapped_section, report_data)
                if mapped_field == '*':
                    # Wildcard mapping: expand every field of the section
                    # into its own extra column
                    extra_fields = self._map_wildcard(base, data)
                    formatted_data.extend(extra_fields)
                    continue
                elif mapped_section in report_data:
                    data = report_data[mapped_section]
                    if mapped_field in data:
                        final_data = data[mapped_field]
            elif mapped is not None:
                # Non-string mappings are callables producing the value
                final_data = mapped(context, report_data)
            formatted_data.append(final_data)
        self._usage_data.append(formatted_data)
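# Illustrative sketch (not part of the original file) of the rebuild cycle
# above: given a hypothetical field-map entry such as {'extra:*': 'metadata:*'},
# a report line whose 'metadata' section introduces a field not yet present in
# self._extra_headers makes _map_wildcard() raise InconsistentHeaders;
# _update() then calls _crumple(), which rereads the usage timestamps,
# truncates the file, rewrites the enlarged header, and replays the whole
# timeframe so every row is formatted against the new column set.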
|
[
"Wayne Gong@minbgong-winvm.cisco.com"
] |
Wayne Gong@minbgong-winvm.cisco.com
|
dccea37e778c3555ab4fe16fdf907963027530a6
|
bfb9ccb48891e882cbbdce6fc164ee1abf8f8c5d
|
/PythonDynamoDB/alpha/CreateTable.py
|
c87b3e7ad42e88d999ff0266d75601097f53f81c
|
[] |
no_license
|
mwduncan2018/AWS_Practice
|
5f88a5de3d17ae222e716a20fdc8bc41041a8bec
|
04b16a8e7f68e5897be0be33155e27a9a199ed8c
|
refs/heads/master
| 2022-04-25T14:18:04.639496
| 2020-04-25T18:27:00
| 2020-04-25T18:27:00
| 254,909,521
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,516
|
py
|
#
# Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# This file is licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
from __future__ import print_function # Python 2/3 compatibility
import boto3
#dynamodb = boto3.resource('dynamodb', region_name='us-east-1', endpoint_url="http://localhost:8000")
dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
table = dynamodb.create_table(
TableName='YouTubeChannel',
KeySchema=[
{
'AttributeName': 'channel_name',
'KeyType': 'HASH' #Partition key
},
{
'AttributeName': 'creator_name',
'KeyType': 'RANGE' #Sort key
}
],
AttributeDefinitions=[
{
'AttributeName': 'channel_name',
'AttributeType': 'S'
},
{
'AttributeName': 'creator_name',
'AttributeType': 'S'
},
],
ProvisionedThroughput={
'ReadCapacityUnits': 10,
'WriteCapacityUnits': 10
}
)
print("Hmmmm?? What is the table status?")
print("Table status:", table.table_status)
|
[
"ec2-user@ip-10-0-0-75.ec2.internal"
] |
ec2-user@ip-10-0-0-75.ec2.internal
|
1832becbc37d3f749a808823b4235f0525ddf89f
|
4be2f655d1b450e233912091d6f570d72b161305
|
/blogdown/__init__.py
|
7610ea584bb2ce6601bc1e2ea3ee8b4543290515
|
[
"BSD-3-Clause"
] |
permissive
|
rodricios/blogdown
|
1468d980d634bcf35835f7b4f98fe52c52f92a31
|
112ee6d6b9d89fbcad4dd706bda505e3739c04f7
|
refs/heads/master
| 2021-01-18T16:55:56.362308
| 2015-05-22T02:59:02
| 2015-05-22T02:59:02
| 35,073,404
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 35
|
py
|
#from blogdown import *
import gen  # Python 2 implicit relative import; Python 3 needs "from . import gen"
|
[
"rodrigopala91@gmail.com"
] |
rodrigopala91@gmail.com
|
13a19ab37a9793a63821b1a0c0eb50a87231119c
|
485e11308711e36ff15427d9b34cfa50f4321b77
|
/moviebase-project/showtimes/tests/conftest.py
|
0ab8c4f9ba1a60f15b5b40cffe618bf61378912d
|
[] |
no_license
|
Swierad/REST_API
|
4a1839ef805470f7c7875f70f8a3eba1acf74687
|
b87114719046e56534ae83de784d70d30ef7c7b2
|
refs/heads/main
| 2023-01-11T01:08:33.963009
| 2020-11-11T15:18:42
| 2020-11-11T15:18:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 198
|
py
|
import pytest
from showtimes.models import Cinema, Screening
@pytest.fixture
def fake_cinema_data(db):
    # "db" is pytest-django's database fixture; save() below needs database
    # access. (Assumption: the project uses pytest-django, as the Django
    # models and pytest fixtures suggest.)
    cinema = Cinema()
    cinema.name = 'example1'
    cinema.city = 'example1a'
    cinema.save()
    return cinema
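# Hypothetical usage sketch (not part of the original file), assuming
# pytest-django supplies the test database:
#
#     def test_fake_cinema_is_saved(fake_cinema_data):
#         assert Cinema.objects.filter(name='example1').exists()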
|
[
"swierad@gmail.com"
] |
swierad@gmail.com
|
3cf54681e3c98c6274ddba5600320cbd95ecbf77
|
16d22787f345a467ddd57b6a45261788954e4691
|
/LeetCode/14.py
|
906118793cecff03fb2177b6e5e1d54d77c30418
|
[
"MIT"
] |
permissive
|
dynamoh/Interview-solved
|
6b6f5680ad9235188c26942671e788d254d1950f
|
cf13af0cc9db6e851d26677877647e6121c6ec30
|
refs/heads/master
| 2021-07-11T03:50:09.112220
| 2020-10-18T07:57:30
| 2020-10-18T07:57:30
| 303,154,831
| 0
| 0
|
MIT
| 2020-10-11T15:43:57
| 2020-10-11T15:43:56
| null |
UTF-8
|
Python
| false
| false
| 1,244
|
py
|
"""
Longest Common Prefix
Write a function to find the longest common prefix string amongst an array of strings.
If there is no common prefix, return an empty string "".
Example 1:
Input: ["flower","flow","flight"]
Output: "fl"
Example 2:
Input: ["dog","racecar","car"]
Output: ""
Explanation: There is no common prefix among the input strings.
"""
class Solution(object):
def longestCommonPrefix(self, strs):
"""
:type strs: List[str]
:rtype: str
"""
result = ''
if len(strs) == 0:
return ''
        # The shortest string bounds the longest possible prefix
        count = min(len(s) for s in strs)
for i in range(1, count+1):
prefix = strs[0][:i]
for s in strs:
if s[:i] != prefix:
return result
result = prefix
return result
def optimized(self, strs):
result = ""
for n in zip(*strs):
if (len(set(n))) == 1:
result = result + n[0]
else:
return result
return result
s = Solution()
# print(s.longestCommonPrefix(input().split()))
print(s.optimized(input().split()))  # input() replaces Python 2's raw_input()
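# Quick sanity check without stdin (sketch based on the docstring examples):
#     s.optimized(["flower", "flow", "flight"])  # -> "fl"
#     s.optimized(["dog", "racecar", "car"])     # -> ""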
|
[
"aarakeri@apple.com"
] |
aarakeri@apple.com
|
69e2067b42c75a595b9836d9dfff546555d5bb54
|
feb06ec473a0550692d56c5fdd34bdf4ba42a12c
|
/core-py/server.py
|
ba17df3a0e1ab7de8b2fcafe15a13ef1fedc0b2c
|
[] |
no_license
|
altafsubhan/python-xsa-setup
|
c473ae4ea1f073e971e10d2b2a5c29097ec28ab7
|
30ca6684f2a9bcf9c76c04e9e9fd46bfb704019e
|
refs/heads/master
| 2020-03-20T20:22:40.048240
| 2018-06-14T18:32:51
| 2018-06-14T18:32:51
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,326
|
py
|
import os
import logging
from flask import Flask
from cfenv import AppEnv
from flask import request
from flask import abort
from sap import xssec
from hdbcli import dbapi
from cf_logging import flask_logging
app = Flask(__name__)
env = AppEnv()
flask_logging.init(app, logging.INFO)
port = int(os.environ.get('PORT', 3000))
hana = env.get_service(label='hana')
uaa_service = env.get_service(name='myuaa').credentials
@app.route('/')
def hello():
logger = logging.getLogger('route.logger')
logger.info('Someone accessed us')
if 'authorization' not in request.headers:
abort(403)
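    # Strip the "Bearer " prefix (7 characters) to get the raw JWT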
access_token = request.headers.get('authorization')[7:]
security_context = xssec.create_security_context(access_token, uaa_service)
isAuthorized = security_context.check_scope('openid')
if not isAuthorized:
abort(403)
logger.info('Authorization successful')
conn = dbapi.connect(address=hana.credentials['host'], port=int(hana.credentials['port']), user=hana.credentials['user'], password=hana.credentials['password'])
cursor = conn.cursor()
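    # DUMMY is SAP HANA's built-in one-row system table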
cursor.execute("select CURRENT_UTCTIMESTAMP from DUMMY", {})
ro = cursor.fetchone()
cursor.close()
conn.close()
return "Current time is: " + str(ro["CURRENT_UTCTIMESTAMP"])
if __name__ == '__main__':
app.run(port=port)
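# Example request against a deployed instance (sketch; the route and token are
# placeholders, not values taken from this repo):
#
#     curl -H "Authorization: Bearer <JWT issued by the myuaa service>" \
#          https://<app-route>/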
|
[
"muhammad.altaf@sap.com"
] |
muhammad.altaf@sap.com
|
938371c2d74f6821b018e1157bb4b991c1116cf2
|
02bdcaf3816cba8f75559c692bf8e1bb01986700
|
/venv/main.py
|
883d22d1a9a774a741ce9b705b0dbb4050f92194
|
[] |
no_license
|
kobg/WD2020
|
74d3148402e0f093e78ebae63c21c0fb77bc305f
|
d5148be5db4f0230e6aecdbcebaf8518677d09e5
|
refs/heads/master
| 2021-02-25T22:40:30.517694
| 2020-04-21T20:51:01
| 2020-04-21T20:51:01
| 245,473,436
| 0
| 0
| null | 2020-04-03T16:24:33
| 2020-03-06T16:56:03
| null |
UTF-8
|
Python
| false
| false
| 2,376
|
py
|
#JAKUB GOŚCINIAK IO GR I/2
from akcje import *
from screen import *
from plik import *
import random
import os
map = []  # move history (name shadows the built-in map())
def addstep(x,y,i):
if i<1:
map.append(funkcje.move(point={"pozx": x, "pozy": y}, vector={"pozx": x, "pozy": y}))
map.append(funkcje.move(point={"pozx":map[i][0],"pozy":map[i][1]},vector={"pozx":x,"pozy":y}))
def newgame():
addstep(random.randint(-1,51),random.randint(-1,51),0)
def loadgame(map):
i=0
for step in map:
addstep(step[0],step[1],i)
i+=1
def game(steps):
i=len(steps)-1
while True:
x=steps[i][0]
y=steps[i][1]
display.canvas(x,y,steps)
        movement = input()  # input() already returns a string
os.system("cls")
if movement in "wasdQS":
if movement =='S':
print("Gra zapisana")
savemode.save(steps)
continue
if movement.lower() == 'w':
addstep(x,y+1,i)
i+=1
continue
if movement.lower() =='s':
addstep(x, y-1,i)
i += 1
continue
if movement.lower() =='a':
addstep(x-1, y,i)
i += 1
continue
if movement.lower() =='d':
addstep(x+1,y,i)
i += 1
continue
if movement =='Q':
exit()
def sterowanie():  # controls/help screen
    print("Type a movement key, then confirm with ENTER.")
    print("w -> up by 1")
    print("a -> left by 1")
    print("s -> down by 1")
    print("d -> right by 1")
    print("Press ENTER to return to the main menu.")
    input()
def menu():
while True:
os.system("cls")
print("Nowa gra: 1")
print("Wczytaj gre: 2")
print("Sterowanie: 3")
print("Wyjdz: 0")
wybor = int(input())
if wybor==1:
newgame()
game(map)
return
if wybor==2:
loaded_map = savemode.load()
if len(loaded_map) == 0:
print("Brak zapisu")
input()
continue
else:
loadgame(loaded_map)
game(map)
return
if wybor==3:
sterowanie()
menu()
return
if wybor==0:
exit()
menu()
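# Flow summary (descriptive note, not part of the original file): menu() is the
# entry point; option 1 seeds the move list with a random start via addstep(),
# game() appends one step per w/a/s/d key, 'S' saves the steps through
# savemode.save(), and option 2 replays a saved game with loadgame().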
|
[
"kubapoland1@gmail.com"
] |
kubapoland1@gmail.com
|