| blob_id | directory_id | path | content_id | detected_licenses | license_type | repo_name | snapshot_id | revision_id | branch_name | visit_date | revision_date | committer_date | github_id | star_events_count | fork_events_count | gha_license_id | gha_event_created_at | gha_created_at | gha_language | src_encoding | language | is_vendor | is_generated | length_bytes | extension | content | authors | author_id |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3310db16db0b7e706cf311d53bfc028b406cebcf
|
7403b7cbf2745b2ecc109c2339a78fc6c5f4911d
|
/evaluation_scripts/auswertung_mw_stdabw.py
|
6f18f5e3c7ebe6203cce0f19a6a1211f7cf4b53b
|
[] |
no_license
|
T3K14/Carcassonne
|
6f16976fc080d49cec97a0b745a341e314442b0b
|
9f82cb89c34608be48c05574710148ebcf3f21b5
|
refs/heads/master
| 2020-04-07T08:15:00.375686
| 2019-10-12T11:41:21
| 2019-10-12T11:41:21
| 158,206,258
| 3
| 0
| null | 2018-11-22T15:15:21
| 2018-11-19T10:56:38
|
Python
|
UTF-8
|
Python
| false
| false
| 8,629
|
py
|
import numpy as np
# meeples
Orte1 = [2, 2, 2, 2, 2, 1, 1, 1, 1, 1]
Strassen1 = [2, 2, 2, 2, 2, 1, 1, 1, 1, 1]
Wiesen1 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
Kloester1 = [0, 0, 0, 0, 0, 2, 2, 2, 2, 2]
Orte2 = [1, 1, 1, 1, 1, 2, 2, 2, 2, 2]
Strassen2 = [1, 1, 1, 1, 1, 2, 2, 2, 2, 2]
Wiesen2 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
Kloester2 = [2, 2, 2, 2, 2, 0, 0, 0, 0, 0]
# points scored per feature type
Orte1_punkte = [5, 5, 5, 5, 5, 4, 4, 4, 4, 4]
Strassen1_punkte = [6, 6, 6, 6, 6, 2, 2, 2, 2, 2]
Wiesen1_punkte = [3, 3, 3, 3, 3, 3, 3, 3, 6, 3]
Kloester1_punkte = [0, 0, 0, 0, 0, 10, 10, 11, 11, 11]
Orte2_punkte = [4, 4, 4, 4, 4, 8, 8, 8, 8, 8]
Strassen2_punkte = [3, 4, 3, 3, 4, 5, 5, 6, 6, 6]
Wiesen2_punkte = [3, 3, 3, 3, 3, 6, 6, 6, 6, 6]
Kloester2_punkte = [9, 10, 9, 9, 10, 0, 0, 0, 0, 0]
# total scores
Player1_ergebnisse = [14, 14, 14, 14, 14, 19, 19, 20, 23, 20]
Player2_ergebnisse = [19, 21, 19, 19, 21, 19, 19, 20, 20, 20]
gesamtmeeples1 = Orte1[:]
gesamtmeeples2 = Orte2[:]
for i in range(len(Orte1)):
    gesamtmeeples1[i] += Strassen1[i]
    gesamtmeeples1[i] += Wiesen1[i]
    gesamtmeeples1[i] += Kloester1[i]
for i in range(len(Orte2)):
    gesamtmeeples2[i] += Strassen2[i]
    gesamtmeeples2[i] += Wiesen2[i]
    gesamtmeeples2[i] += Kloester2[i]
print(gesamtmeeples2)
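# Helper functions: calculate_mittelwert returns the arithmetic mean of a list;
# calculate_standardabweichung returns the sample standard deviation (same as np.std(liste, ddof=1)).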
def calculate_mittelwert(liste):
    mittelwert1 = 0
    for wert in liste:
        mittelwert1 += wert
    # guard against empty input (a list of all zeros has mean 0, not None)
    if len(liste) > 0:
        return mittelwert1 / len(liste)
    else:
        return None
def calculate_standardabweichung(liste, mittelwert):
    abw = []
    for wert in liste:
        abw.append((wert-mittelwert)**2)
    summe = 0
    for wert in abw:
        summe += wert
    return np.sqrt(summe / (len(liste) - 1))
print('\nPlayer1 hat durchschnittlich folgende Punktezahlen gemacht:')
print(f'Erste Hälfte: {calculate_mittelwert(Player1_ergebnisse[:int(len(Player1_ergebnisse)/2)])} +- {calculate_standardabweichung(Player1_ergebnisse[:int(len(Player1_ergebnisse)/2)], calculate_mittelwert(Player1_ergebnisse[:int(len(Player1_ergebnisse)/2)]))},'
f'\nzweite Hälfte: {calculate_mittelwert(Player1_ergebnisse[int(len(Player1_ergebnisse)/2):])} +- {calculate_standardabweichung(Player1_ergebnisse[int(len(Player1_ergebnisse)/2):], calculate_mittelwert(Player1_ergebnisse[int(len(Player1_ergebnisse)/2):]))},'
f'\ngesamt: {calculate_mittelwert(Player1_ergebnisse)} +- {calculate_standardabweichung(Player1_ergebnisse, calculate_mittelwert(Player1_ergebnisse))}')
print('\nPlayer2 hat durchschnittlich folgende Punktezahlen gemacht:')
print(f'Erste Hälfte: {calculate_mittelwert(Player2_ergebnisse[:int(len(Player2_ergebnisse)/2)])} +- {calculate_standardabweichung(Player2_ergebnisse[:int(len(Player2_ergebnisse)/2)], calculate_mittelwert(Player2_ergebnisse[:int(len(Player2_ergebnisse)/2)]))},'
f'\nzweite Hälfte: {calculate_mittelwert(Player2_ergebnisse[int(len(Player2_ergebnisse)/2):])} +- {calculate_standardabweichung(Player2_ergebnisse[int(len(Player2_ergebnisse)/2):], calculate_mittelwert(Player2_ergebnisse[int(len(Player2_ergebnisse)/2):]))},'
f'\ngesamt: {calculate_mittelwert(Player2_ergebnisse)} +- {calculate_standardabweichung(Player2_ergebnisse, calculate_mittelwert(Player2_ergebnisse))}')
print('\ndurchschnittlich auf Gebiete gesetzt Meeples:')
print('Player1:')
print('Orte: erste Hälfte: ', calculate_mittelwert(Orte1[:int(len(Orte1)/2)]), ', zweite Hälfte: ', calculate_mittelwert(Orte1[int(len(Orte1)/2):]), ', gesamt:', calculate_mittelwert(Orte1), '+-', calculate_standardabweichung(Orte1, calculate_mittelwert(Orte1)))
print('Strassen: erste Hälfte: ', calculate_mittelwert(Strassen1[:int(len(Strassen1)/2)]), ', zweite Hälfte: ', calculate_mittelwert(Strassen1[int(len(Strassen1)/2):]), ', gesamt:', calculate_mittelwert(Strassen1), '+-', calculate_standardabweichung(Strassen1, calculate_mittelwert(Strassen1)))
print('Wiesen: erste Hälfte: ', calculate_mittelwert(Wiesen1[:int(len(Wiesen1)/2)]), ', zweite Hälfte: ', calculate_mittelwert(Wiesen1[int(len(Wiesen1)/2):]), ', gesamt:', calculate_mittelwert(Wiesen1), '+-', calculate_standardabweichung(Wiesen1, calculate_mittelwert(Wiesen1)))
print('Kloester: erste Hälfte: ', calculate_mittelwert(Kloester1[:int(len(Kloester1)/2)]), ', zweite Hälfte: ', calculate_mittelwert(Kloester1[int(len(Kloester1)/2):]), ', gesamt:', calculate_mittelwert(Kloester1), '+-', calculate_standardabweichung(Kloester1, calculate_mittelwert(Kloester1)))
print('gesamt: ', calculate_mittelwert(gesamtmeeples1), '+- ', calculate_standardabweichung(gesamtmeeples1, calculate_mittelwert(gesamtmeeples1)))
print('\n\nPlayer2:')
print('Orte: erste Hälfte: ', calculate_mittelwert(Orte2[:int(len(Orte2)/2)]), ', zweite Hälfte: ', calculate_mittelwert(Orte2[int(len(Orte2)/2):]), ', gesamt:', calculate_mittelwert(Orte2), '+-', calculate_standardabweichung(Orte2, calculate_mittelwert(Orte2)))
print('Strassen: erste Hälfte: ', calculate_mittelwert(Strassen2[:int(len(Strassen2)/2)]), ', zweite Hälfte: ', calculate_mittelwert(Strassen2[int(len(Strassen2)/2):]), ', gesamt:', calculate_mittelwert(Strassen2), '+-', calculate_standardabweichung(Strassen2, calculate_mittelwert(Strassen2)))
print('Wiesen: erste Hälfte: ', calculate_mittelwert(Wiesen2[:int(len(Wiesen2)/2)]), ', zweite Hälfte: ', calculate_mittelwert(Wiesen2[int(len(Wiesen2)/2):]), ', gesamt:', calculate_mittelwert(Wiesen2), '+-', calculate_standardabweichung(Wiesen2, calculate_mittelwert(Wiesen2)))
print('Kloester: erste Hälfte: ', calculate_mittelwert(Kloester2[:int(len(Kloester2)/2)]), ', zweite Hälfte: ', calculate_mittelwert(Kloester2[int(len(Kloester2)/2):]), ', gesamt:', calculate_mittelwert(Kloester2), '+-', calculate_standardabweichung(Kloester2, calculate_mittelwert(Kloester2)))
print('gesamt: ', calculate_mittelwert(gesamtmeeples2), '+- ', calculate_standardabweichung(gesamtmeeples2, calculate_mittelwert(gesamtmeeples2)))
print('\ndurchschnittlich gemachte Punkte mit den Gebieten:')
print('Player1:')
print('Orte: erste Hälfte: ', calculate_mittelwert(Orte1_punkte[:int(len(Orte1)/2)]), ', zweite Hälfte: ', calculate_mittelwert(Orte1_punkte[int(len(Orte1_punkte)/2):]), ', gesamt:', calculate_mittelwert(Orte1_punkte), '+-', calculate_standardabweichung(Orte1_punkte, calculate_mittelwert(Orte1_punkte)))
print('Strassen: erste Hälfte: ', calculate_mittelwert(Strassen1_punkte[:int(len(Strassen1)/2)]), ', zweite Hälfte: ', calculate_mittelwert(Strassen1_punkte[int(len(Strassen1_punkte)/2):]), ', gesamt:', calculate_mittelwert(Strassen1_punkte), '+-', calculate_standardabweichung(Strassen1_punkte, calculate_mittelwert(Strassen1_punkte)))
print('Wiesen: erste Hälfte: ', calculate_mittelwert(Wiesen1_punkte[:int(len(Wiesen1)/2)]), ', zweite Hälfte: ', calculate_mittelwert(Wiesen1_punkte[int(len(Wiesen1_punkte)/2):]), ', gesamt:', calculate_mittelwert(Wiesen1_punkte), '+-', calculate_standardabweichung(Wiesen1_punkte, calculate_mittelwert(Wiesen1_punkte)))
print('Kloester: erste Hälfte: ', calculate_mittelwert(Kloester1_punkte[:int(len(Kloester1)/2)]), ', zweite Hälfte: ', calculate_mittelwert(Kloester1_punkte[int(len(Kloester1_punkte)/2):]), ', gesamt:', calculate_mittelwert(Kloester1_punkte), '+-', calculate_standardabweichung(Kloester1_punkte, calculate_mittelwert(Kloester1_punkte)))
print('Player2:')
print('Orte: erste Hälfte: ', calculate_mittelwert(Orte2_punkte[:int(len(Orte2)/2)]), ', zweite Hälfte: ', calculate_mittelwert(Orte2_punkte[int(len(Orte2_punkte)/2):]), ', gesamt:', calculate_mittelwert(Orte2_punkte), '+-', calculate_standardabweichung(Orte2_punkte, calculate_mittelwert(Orte2_punkte)))
print('Strassen: erste Hälfte: ', calculate_mittelwert(Strassen2_punkte[:int(len(Strassen2)/2)]), ', zweite Hälfte: ', calculate_mittelwert(Strassen2_punkte[int(len(Strassen2_punkte)/2):]), ', gesamt:', calculate_mittelwert(Strassen2_punkte), '+-', calculate_standardabweichung(Strassen2_punkte, calculate_mittelwert(Strassen2_punkte)))
print('Wiesen: erste Hälfte: ', calculate_mittelwert(Wiesen2_punkte[:int(len(Wiesen2)/2)]), ', zweite Hälfte: ', calculate_mittelwert(Wiesen2_punkte[int(len(Wiesen2_punkte)/2):]), ', gesamt:', calculate_mittelwert(Wiesen2_punkte), '+-', calculate_standardabweichung(Wiesen2_punkte, calculate_mittelwert(Wiesen2_punkte)))
print('Kloester: erste Hälfte: ', calculate_mittelwert(Kloester2_punkte[:int(len(Kloester2)/2)]), ', zweite Hälfte: ', calculate_mittelwert(Kloester2_punkte[int(len(Kloester2_punkte)/2):]), ', gesamt:', calculate_mittelwert(Kloester2_punkte), '+-', calculate_standardabweichung(Kloester2_punkte, calculate_mittelwert(Kloester2_punkte)))
|
[
"robert97752@gmail.com"
] |
robert97752@gmail.com
|
6c46584afeafccb229c8951b1084318a37bcf398
|
7255e170e5aa2767a94468bfc4c2a48a09bb7514
|
/ejercicio5/fileman_celery/upload_file_app/url.py
|
24d0d11b15fbd2a123876f5aa569cc4fec01f55e
|
[] |
no_license
|
fremeva/email-microservices
|
04ed33c7c8ca5a576d658fa4daf28cd262b14691
|
7c764a70992c326b46aced48a6fdbcd1cbf5424b
|
refs/heads/master
| 2020-03-21T16:46:29.152424
| 2018-07-03T17:48:21
| 2018-07-03T17:48:21
| 138,792,782
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 164
|
py
|
from django.conf.urls import url
from .views import model_form_upload
app_name = 'uloploadapp'
urlpatterns = [
url(r'^$', model_form_upload, name='upload'),
]
|
[
"fredymv03@gmail.com"
] |
fredymv03@gmail.com
|
d8561fb670b357cc2ca8606b595e9ac5669d49e1
|
e7bfd5fae17cb6e58700a5b94d0d2234fff2e198
|
/trader.py
|
1384d032c4ba6a5dbf60b18844061a512cd133e2
|
[] |
no_license
|
virtualpeer/PytohBackTraderBot
|
2447a2e45d7331b35bc44d38ba6601aed42785ec
|
f5ea36ab99560f7393c01f23565e8bdac948f8e8
|
refs/heads/master
| 2022-12-19T14:14:29.273915
| 2020-09-24T13:25:01
| 2020-09-24T13:25:01
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 712
|
py
|
import backtrader as bt
import datetime
import pandas as pd
from strategies import GoldenCross
# from strategies.GoldenCross import GoldenCross
# Init cerebro
cerebro = bt.Cerebro()
cerebro.broker.setcash(1000000)
# Create a Data Feed
spy_prices = pd.read_csv('data/spy.csv', index_col='Date', parse_dates=True)
feed = bt.feeds.PandasData(dataname=spy_prices)
# Add Data
cerebro.adddata(feed)
# Run Strategy
cerebro.addstrategy(GoldenCross)
cerebro.run()
cerebro.plot()
# # Add Sizer
# cerebro.addsizer(backtrader.sizers.FixedSize, stake = 1000)
# print('Starting Portfolio Value: %.2f' % cerebro.broker.getvalue())
# print('Final Portfolio Value: %.2f' % cerebro.broker.getvalue())
# # cerebro.plot()
|
[
"fennellosc@gmail.com"
] |
fennellosc@gmail.com
|
af36fc03448e75a4ca6ac789df88869a4f585ea2
|
52d9c6d005b2e91f489fdd817059b1217efd711d
|
/users/dflt_style_changes-1.py
|
f81506a21653adad6e6c52fa446642141babe048
|
[] |
no_license
|
yuhaihaiyu/matplotlib.github.com
|
2a785654d4a0e4a9b6d1876b0aae96b6b5d20fc5
|
fbe748c706e92f9ccb660eab656deaebe179a6af
|
refs/heads/master
| 2023-07-31T08:04:16.716267
| 2021-10-02T06:00:49
| 2021-10-02T06:00:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 39
|
py
|
../stable/users/dflt_style_changes-1.py
|
[
"quantum.analyst@gmail.com"
] |
quantum.analyst@gmail.com
|
81498da355e96df3728c5a14fca97664116f0786
|
13dc7d936642e0b0aafa4b9003a3c66bae655007
|
/features/multiple_births_test.py
|
69d714470b039b2eaaa82bc62f29a54e2ca6e86b
|
[] |
no_license
|
davidhu34/ssw555tm082021Spring
|
922eb4f329e4af35531e0539abebb9d1d2ef394d
|
d99fc3ec995554118c7cff47311965f2ed289793
|
refs/heads/dev
| 2023-04-14T09:05:32.104805
| 2021-04-27T19:22:07
| 2021-04-27T19:22:07
| 342,986,614
| 0
| 0
| null | 2021-04-27T18:47:32
| 2021-02-28T00:17:13
|
Python
|
UTF-8
|
Python
| false
| false
| 1,034
|
py
|
from gedcom.testing import GedcomTestCase
from features.multiple_births import siblings_born_at_same_time,too_many_siblings
class MultipleBirthsTest(GedcomTestCase):
    def test_siblings_born_at_same_time(self) -> None:
        """ test if more than 5 siblings are born at once """
        self.assert_file_validation_fails(
            'incorrect_siblings_born_at_same_time', siblings_born_at_same_time,
            ["ERROR US14 at line 39: too many siblings born at once(0835-02-10) in family(F01)"])
        self.assert_file_validation_passes(
            'correct_siblings_born_at_same_time', siblings_born_at_same_time)
    def test_too_many_siblings(self) -> None:
        """ test if there are more than 15 siblings in the family """
        self.assert_file_validation_fails(
            'incorrect_too_many_siblings', too_many_siblings,
            ['ERROR US15 at line 172: too many siblings in family: (F01)'])
        self.assert_file_validation_passes(
            'correct_too_many_siblings', too_many_siblings)
|
[
"32401263+JackBittker@users.noreply.github.com"
] |
32401263+JackBittker@users.noreply.github.com
|
e134e1df982498b8d8cb8c20fcfe5af7a380980e
|
b9c41602616c626eaf6bf193b7a0fc7fe7b7be28
|
/xmlpython.py
|
f8caaa78d76e5adffd6e8a0b648ff65f37fe8f98
|
[] |
no_license
|
Pelero016/Python_Libro
|
a725d82e6068e7909d78a0c14eb28c5998215790
|
b76742220a404b6744d61ef7491258c8e999c574
|
refs/heads/main
| 2023-02-12T20:52:44.287777
| 2021-01-09T17:04:00
| 2021-01-09T17:04:00
| 320,736,661
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,485
|
py
|
import urllib.request, urllib.parse, urllib.error
import xml.etree.ElementTree as ET
import ssl
api_key = False
# If you have a Google Places API key, enter it here
# api_key = 'AIzaSy___IDByT70'
# https://developers.google.com/maps/documentation/geocoding/intro
if api_key is False:
    api_key = 42
    serviceurl = 'http://py4e-data.dr-chuck.net/xml?'
else:
    serviceurl = 'https://maps.googleapis.com/maps/api/geocode/xml?'
# Ignore SSL certificate errors
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
while True:
    address = input('Enter location: ')
    if len(address) < 1: break
    parms = dict()
    parms['address'] = address
    if api_key is not False: parms['key'] = api_key
    url = "http://py4e-data.dr-chuck.net/comments_1080923.xml"
    print('Retrieving', url)
    uh = urllib.request.urlopen(url, context=ctx)
    data = uh.read()
    print('Retrieved', len(data), 'characters')
    print(data.decode())
    tree = ET.fromstring(data)
    count = 0
    counts = tree.findall('.//comment')
    print(len(counts))
    for subtree in counts:
        count += int(subtree.find('count').text)
    # results = tree.findall('result')
    # lat = results[0].find('geometry').find('location').find('lat').text
    # lng = results[0].find('geometry').find('location').find('lng').text
    # location = results[0].find('formatted_address').text
    #print('lat', lat, 'lng', lng)
    #print(location)
    print(count)
|
[
"brayancalderonamorocho@gmail.com"
] |
brayancalderonamorocho@gmail.com
|
488c591d3bb093588b60fc6dbc71bc4a50e20f77
|
294578eee80bc89e0411e41b4a351561bd4fbbd2
|
/tempbyhfmjal.py
|
c5531e89104da0958997fa2cf50bb89032512623
|
[] |
no_license
|
HackerSpot2001/Voilent-Python-with-Python3
|
947a09dd7ba6bd07fecefef89beb443670987b99
|
bca716d9f88d3c2657a24476ee93860b7c59bc6a
|
refs/heads/master
| 2023-07-22T15:41:34.839458
| 2023-07-07T17:05:07
| 2023-07-07T17:05:07
| 375,223,373
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 112
|
py
|
# NOTE: this fragment assumes shellCode (bytes) and memoryWithShell (an executable memory buffer) are defined earlier in the script.
from ctypes import create_string_buffer, cast, CFUNCTYPE, c_void_p
Shell = create_string_buffer(shellCode, len(shellCode))
shell = cast(memoryWithShell, CFUNCTYPE(c_void_p))
shell()
|
[
"abhisheksagar513@gmail.com"
] |
abhisheksagar513@gmail.com
|
522b13731417729f6f1f9ab66e0e2143a7b4c88f
|
98fbb63f49942daf17208ba0c0590b3d7bb43140
|
/ch6_dynamic_typing/shared_ref.py
|
c4b8953c9591696e053020f82d52283d61e11603
|
[] |
no_license
|
dsm-kbl/python-snippets
|
b0b546abdfaf2a676c060cdf4afda1828ec4169e
|
0ef3d48967c8334b26e1cf392cfba089beaca08a
|
refs/heads/master
| 2023-05-14T08:29:17.138695
| 2021-06-02T12:02:23
| 2021-06-02T12:02:23
| 299,259,889
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 140
|
py
|
L1 = [2, 3, 4]
L2 = L1[:] # Copy L1 to L2
print(L1 == L2)
print(L1 is L2) # It checks if the references are pointing to the same object
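# Expected output: True (equal contents), then False (the slice created a distinct list object)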
|
[
"dibyam@amazon.com"
] |
dibyam@amazon.com
|
6fcc046029e432c4919bb48b6005dfe3799068c3
|
0e06b376dbdf22417a77d82ac09e8f992707e532
|
/Seccion 11/metodos_cadenas.py
|
7581327e315ba90071aba6aabe24c461ea77d535
|
[] |
no_license
|
juanPabloCesarini/cursoPYTHON2021
|
266ea8cbc8f405f9f2917b29ae1165bc03e94612
|
c55d75ab68eb66ec9e5b1da9f5a10b8406bdefe4
|
refs/heads/main
| 2023-03-28T10:31:36.911390
| 2021-03-24T23:48:17
| 2021-03-24T23:48:17
| 342,884,214
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 507
|
py
|
nombre = input("Ingresa tu nombre: ")
print("el nombre es: ", nombre.upper())
nombre = input("Ingresa tu nombre: ")
print("el nombre es: ", nombre.lower())
nombre = input("Ingresa tu nombre: ")
print("el nombre es: ", nombre.capitalize())
frase= input("Ingresa una frase: ")
letra= input("Letra a buscar: ")
print(letra, "se encuentra: ",frase.count(letra), "veces")
print(letra, "se encuentra en la posicion: ",frase.find(letra))
print(frase.split(" "))
## THERE ARE MORE; LOOK THEM UP IN THE PYTHON DOCS
|
[
"juanpcesarini@hotmail.com"
] |
juanpcesarini@hotmail.com
|
e72b8ae7d25be4cb758724f93d68d547ab21c7f9
|
2a3f3546dc7a1395a0c71b9a95468e7895ed642d
|
/Caesar.py
|
693ec4aa73e49f8c71d62ad80b725ca54e3a8cb3
|
[] |
no_license
|
seanon414/Crypto
|
68eb90a356c3784301562ab0015cfe8df2b5e87a
|
25b5e3d58ecf8c2e65054b864bfd0b5634744202
|
refs/heads/master
| 2021-01-10T18:38:09.163497
| 2014-02-20T03:44:40
| 2014-02-20T03:44:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,143
|
py
|
"""
Cryto assignment 1
Encrypting and decryting caesar cipher
"""
plain = 'Hello World'
cipher = 'Khoor Zruog'
sky = 'Greetings, Exalted One. Allow me to introduce myself. I am Luke Skywalker, Jedi Knight and friend to Captain Solo.'
gibber = 'L FDQ VHH LW LQ BRXU HBHV BRX KDYH WKH ORRN RI D PDQ ZKR DFFHSWV ZKDW KH VHHV EHFDXVH KH LV HASHFWLQJ WR ZDNH XS LURQLFDOOB WKLV LV QRW IDU IURP WKH WUXWK GR BRX EHOLHYH LQ IDWH QHR'
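# builder() shifts each letter of the upper-cased text forward by 3 (a classic Caesar cipher),
# so builder(plain) yields 'KHOOR ZRUOG'; breaker() applies the inverse shift of -3.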
def builder(plain):
    text = ''
    for char in plain.upper():
        if char.isalpha():
            temp = ord(char) + 3
            if temp > ord('Z'):
                temp -= 26
            blah = chr(temp)
        else:
            blah = char
        text += blah
    return text
print (''.join(builder(plain)))
print (''.join(builder(sky)))
def breaker(cipher):
    text = ''
    for char in cipher.upper():
        if char.isalpha():
            temp = ord(char) - 3
            if temp < ord('A'):
                temp += 26
            blah = chr(temp)
        else:
            blah = char
        text += blah
    return text
print (''.join(breaker(cipher)))
print (''.join(breaker(gibber)))
|
[
"seanoneill@SCB227072210.adelphi.internal"
] |
seanoneill@SCB227072210.adelphi.internal
|
70d93dc6d373c483b6e5449605a161072d94d17c
|
a59d537800bc958fc136ff4a0c5d2474b0641404
|
/example.py
|
0e6130cc4ab10d84a1e987a0f286c02f6d622f1a
|
[] |
no_license
|
aleskruba/relationship
|
6cce95524917d0dc02128e38015a62d3d1ce26e7
|
997dd6322ec46a1763d2d5e50c4567533718447b
|
refs/heads/main
| 2023-02-25T15:29:39.855536
| 2021-02-01T13:07:08
| 2021-02-01T13:07:08
| 334,951,994
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,187
|
py
|
from flask import Flask, render_template, redirect, url_for, request
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///newtest.db'
db = SQLAlchemy(app)
class User(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    author = db.Column(db.String(50))
class Lang(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    language = db.Column(db.String(50))
class Level(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    level = db.Column(db.String(50))
@app.route("/")
def author():
    return render_template('author.html')
@app.route("/languages")
def languages():
    return render_template('languages.html')
@app.route("/level")
def level():
    return render_template('level.html')
@app.route("/main")
def main():
    post = User.query.all()
    post1 = Lang.query.all()
    post2 = Level.query.all()
    return render_template('main.html',post=post,post1=post1,post2=post2)
@app.route('/adduser', methods=['POST'])
def adduser():
    if request.method == 'POST':
        author = request.form['author']
        post = User(author=author)
        db.session.add(post)
        db.session.commit()
        return redirect(url_for('languages'))
    return render_template('author.html')
@app.route('/addlanguage', methods=['POST','GET'])
def addlanguage():
    if request.method == 'POST':
        language = request.form['language']
        post1 = Lang(language=language)
        db.session.add(post1)
        db.session.commit()
        return redirect(url_for('addlevel'))
    return render_template('languages.html')
@app.route('/addlevel', methods=['POST','GET'])
def addlevel():
    if request.method == 'POST':
        level = request.form['level']
        post2 = Level(level=level)
        db.session.add(post2)
        db.session.commit()
        return redirect(url_for('main'))
    return render_template('level.html')
@app.route("/resetdb")
def resetdb():
    db.drop_all()
    db.create_all()
    return redirect(url_for('author'))
if __name__ == '__main__':
    app.run(debug=True)
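# Note: the User, Lang and Level tables must exist before the forms are used; visiting /resetdb
# (db.drop_all() followed by db.create_all()) recreates them in sqlite:///newtest.db.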
|
[
"noreply@github.com"
] |
aleskruba.noreply@github.com
|
55e9754351f6b1e3ca6f60883588906568cc0c46
|
c034c27e96f518515840f9c25fb9f4be22cb9b18
|
/csv_to_ansible_inv.py
|
98bf8458e2ac5f002822ca129869e9755f7d970b
|
[] |
no_license
|
br41thw8/python
|
c9aa5e963f9240353ca254ada79e0317fee179f5
|
2569f7146027893f258786801a7b796ca70e88ba
|
refs/heads/master
| 2022-04-14T17:21:59.707881
| 2020-04-07T20:46:30
| 2020-04-07T20:46:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,630
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 7 10:09:46 2019
@author: gnasses
**Convert CSV list of devices to ansible hostname file**
(Used to create Ansible inventory files from a TVM Vulnerability group)
1. Open Vuln Group
2. Right click on Configuration item
3. Select Export, then Excel (large file will have to be emailed)
4. Open Excel file, filter out any servers or unwanted devices, delete all rows and columns except CI names
5. Save as CSV
6. Run script and enter source CSV filename and filename for Ansible inventory
"""
import csv
from netmiko import Netmiko
import util
#util.py located on ns python github
#prompt user for source and destination file
sourcefile = input("Enter the Name of the CSV file to convert to Ansible Inventory: ")
dstfile = input("Enter the Name of the destination Inventory: ")
print ()
print ("-- Working on CSV File -- ")
#open CSV file and generate list
with open(sourcefile, 'r') as f:
reader = csv.reader(f)
devlist = list(reader)
print (devlist)
print ("csv file processed")
#close out the file
f.close()
print ("-- Filtering Duplicates -- ")
#filter out non-unique entries using a new list
devlist1 = []
for dev in devlist:
dev = (str(dev)[2:-2])
# print (dev)
if dev not in devlist1:
devlist1.append(dev)
inv = []
print ("duplicates filtered")
"""
Code below uses try/except/finally blocks to be sure to close router connections and revert lists
and also continue the loops in case the script has a problem.
Leverages the Netmiko function, and RO automation user in the utils.py, and the n9kswitch1 router to ping/resolve.
Please adapt to other routers or hosts as appropriate.
"""
print("-- Collecting Device IP Addresses -- ")
try:
n9kswitch1 = util.CiscoDeviceRO(host="cisctc01ipt01")
net_connect = Netmiko(**n9kswitch1.__dict__)  # use the n9kswitch1 device object created above
for dev in devlist1:
try:
print (dev)
ping1 = net_connect.send_command('ping ' + dev)
if "%" not in ping1:
ip = ping1.splitlines()[1].split()[6][:-1]
else:
ip = ()
print ("couldn't ping or couldn't resolve name!")
if ip:
invline = (dev + " ansible_host=" + ip)
inv.append(invline)
print ("ok")
net_connect.disconnect()
except:
print ("unable to collect device IP")
net_connect.disconnect()
print ("inv generated")
except:
print ("it broke making inventory")
finally:
net_connect.disconnect()
devlist = []
devlist1 = []
print ("addresses collected")
print("-- Connecting to devices to sort by Network OS -- ")
# sort this list into NXOS and non-ios devices
nxos_inv = []
ios_inv = []
unknown_inv = []
"""
Nested Try/Except blocks are used here to keep the loops running if a host has a problem or timeout
to print a message to the screen so you could investigate if you were watching as it ran.
"""
try:
for invline in inv:
inv_host = invline.split()[0]
print (inv_host)
try:
device = util.CiscoDeviceRO(host=inv_host)
net_connect = Netmiko(**device.__dict__)
ver = net_connect.send_command('show version')
except:
print ("could not connect to device or run show version")
try:
if "NX-OS" in ver:
nxos_inv.append(invline)
print ("nxos device")
elif "IOS" in ver:
ios_inv.append(invline)
print ("ios device")
elif "ios" in ver:
ios_inv.append(invline)
print ("ios device")
else:
# sw_ver = "unknown"
unknown_inv.append(invline)
print ("neither nxos, nor ios")
except:
print ("could not sort, device not added to Inventory")
print ("inv sorted by OS")
except:
print ("it broke checking Network OS")
finally:
net_connect.disconnect()
# write line to output file and reset vars
outF = open(dstfile, 'w')
outF.write("[NXOS]")
outF.write("\n")
for item in nxos_inv:
outF.write(item)
outF.write("\n")
outF.write("\n")
outF.write("[IOS]")
outF.write("\n")
for item in ios_inv:
outF.write(item)
outF.write("\n")
outF.write("\n")
outF.write("[UNKNOWN]")
outF.write("\n")
for item in unknown_inv:
outF.write(item)
outF.write("\n")
outF.write("\n")
outF.close()
print (" ** Inventory file created successfully ** ")
inv = []
nxos_inv = []
ios_inv = []
unknown_inv = []
|
[
"noreply@github.com"
] |
br41thw8.noreply@github.com
|
aa2b84595a402e7291b27e7f04961c7dbf738303
|
edd1b4165abf73d44d03d20cbd554368fe23dd82
|
/task5.py
|
893eab583138cb372d80b241dfb81c5c172f1f82
|
[] |
no_license
|
DimonYarkin/repogeet
|
6a570b8a36c0bb9fb0e7d6e92620e5b5bce54264
|
722a6d7244ec995d45b21cf7eb645d4270aa29a7
|
refs/heads/master
| 2022-12-16T07:14:05.686392
| 2020-09-08T21:56:00
| 2020-09-08T21:56:00
| 293,505,827
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 664
|
py
|
profit = float(input("Введите выручку фирмы "))
costs = float(input("Введите издержки фирмы "))
if profit > costs:
    print(f"Фирма работает с прибылью. Рентабельность выручки составила {profit / costs:.2f}")
    workers = int(input("Введите количество сотрудников фирмы "))
    print(f"Прибыль в расчете на одного сотрудника составила {profit / workers:.2f}")
elif profit == costs:
    print("Фирма работает в ноль")
else:
    print("Фирма работает в убыток")
|
[
"noreply@github.com"
] |
DimonYarkin.noreply@github.com
|
c6336d5c2f37c0f888125e21d3747fec799f9684
|
69c477f674592e3dd87cd10f32fc0893d20db030
|
/models/distill.py
|
3a66df629791011e576ef712628f199f9cd10c42
|
[] |
no_license
|
vietnamican/Distill
|
76a59b4a4a5a358ec08b74722257f4f5ee902aa6
|
97c52b5e614c5bee78e5a282e732d4b93078cc11
|
refs/heads/main
| 2023-05-04T12:37:35.016435
| 2021-05-23T08:43:04
| 2021-05-23T08:43:04
| 369,752,339
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,089
|
py
|
import torch
from torch import optim
from .base import Base
from .get_intermediate_layer import GetIntermediateLayer
class Distill(Base):
    def __init__(self, teacher, student, teacher_layer_list, student_layer_list, connectors, config, is_kd=False):
        super().__init__()
        self.teacher = teacher
        self.student = student
        self.teacher_layer_list = teacher_layer_list
        self.student_layer_list = student_layer_list
        self.connectors = connectors
        self.is_kd = is_kd
        self.teacher_features_factory = GetIntermediateLayer()
        self.student_features_factory = GetIntermediateLayer()
        self.teacher_features_factory.register_forward_hook(
            teacher, teacher_layer_list)
        self.student_features_factory.register_forward_hook(
            student, student_layer_list)
        self.config = config
        self.freeze_with_prefix('teacher')
    def forward(self, x):
        with torch.no_grad():
            self.teacher(x)
        self.student(x)
        return self.teacher_features_factory.profile, self.student_features_factory.profile
    def training_step(self, batch, batch_idx):
        x, y = batch
        teacher_profile, student_profile = self.forward(x)
        train_loss = 0
        for teacher_layer, student_layer, connector in zip(self.teacher_layer_list, self.student_layer_list, self.connectors):
            shared_input = teacher_profile[teacher_layer]['input']
            teacher_module = teacher_profile[teacher_layer]['module']
            student_module = student_profile[student_layer]['module']
            teacher_output = teacher_module(shared_input)
            student_output = student_module(shared_input)
            loss = connector(teacher_output, student_output)
            self.log("train_{}".format(teacher_layer), loss)
            train_loss += loss
        return train_loss
    def validation_step(self, batch, batch_idx):
        x, y = batch
        teacher_profile, student_profile = self.forward(x)
        val_Loss = 0
        for teacher_layer, student_layer, connector in zip(self.teacher_layer_list, self.student_layer_list, self.connectors):
            shared_input = teacher_profile[teacher_layer]['input']
            teacher_module = teacher_profile[teacher_layer]['module']
            student_module = student_profile[student_layer]['module']
            teacher_output = teacher_module(shared_input)
            student_output = student_module(shared_input)
            loss = connector(teacher_output, student_output)
            self.log("val_{}".format(teacher_layer), loss)
            val_Loss += loss
        return val_Loss
    def configure_optimizers(self):
        max_epochs = self.config.max_epochs
        steps = [step*max_epochs for step in self.config.steps]
        optimizer = optim.SGD(self.student.parameters(),
                              lr=0.01, momentum=0.9, weight_decay=5e-4)
        lr_scheduler = optim.lr_scheduler.MultiStepLR(
            optimizer, steps, gamma=0.1)
        return [optimizer], [lr_scheduler]
|
[
"vietnamican@gmail.com"
] |
vietnamican@gmail.com
|
92aaab5544643db4ea478ec9dddcefaeb0d0de21
|
79ea42a1d2dc6260e1403785fa716b96c109c65e
|
/src/abstract_rl/plots/plot.py
|
269dddeadb8bf731a818666fb9e3a12182289742
|
[
"MIT"
] |
permissive
|
kosmitive/abstract_rl
|
b468391120c523e4efb23b8ba4b1ff7b8c745c88
|
13038a1a5a93c78374ba869c9e75221c2b73d290
|
refs/heads/main
| 2023-05-03T02:04:24.263600
| 2021-05-25T21:12:34
| 2021-05-25T21:12:34
| 370,822,105
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 848
|
py
|
import matplotlib.pyplot as plt
class Plot:
"""This class can be used to give a class access to subplots managed
by itself."""
def __init__(self, title, grid_shape):
"""Constructs a new LayoutManagedPlot. Creates subplots according to grid_shapes.
"""
# common settings
self.title = title
if grid_shape is None:
self.fig = plt.figure()
self.axes = plt.gca()
else:
self.fig, self.axes = plt.subplots(*grid_shape)
self.fig.suptitle(title)
# build up grid
def get_axes(self):
"""Get all axes"""
return self.axes
def show(self):
"""This method stops the interactive mode if activated
and shows the plot."""
plt.show()
def save(self, filename):
self.fig.savefig(filename)
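# Typical use (illustrative values): p = Plot("training curves", (1, 2)); ax_left, ax_right = p.get_axes(); p.show()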
|
[
"m.semmler.94@googlemail.com"
] |
m.semmler.94@googlemail.com
|
ec6bf10464f97711aeddc9958f1f2efcb2be231f
|
bec8f235b1392542560166dd02c2f0d88c949a24
|
/examples/twisted/websocket/broadcast/server.py
|
ff8c3171874c94b755038cbd24663fb9d944b3c8
|
[
"Apache-2.0"
] |
permissive
|
gourneau/AutobahnPython
|
f740f69b9ecbc305a97a5412ba3bb136a4bdec69
|
5193e799179c2bfc3b3f8dda86ccba69646c7ee3
|
refs/heads/master
| 2021-01-15T22:02:32.459491
| 2014-07-02T13:34:57
| 2014-07-02T13:34:57
| 21,437,288
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,691
|
py
|
###############################################################################
##
## Copyright (C) 2011-2013 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
import sys
from twisted.internet import reactor
from twisted.python import log
from twisted.web.server import Site
from twisted.web.static import File
from autobahn.twisted.websocket import WebSocketServerFactory, \
WebSocketServerProtocol, \
listenWS
class BroadcastServerProtocol(WebSocketServerProtocol):
    def onOpen(self):
        self.factory.register(self)
    def onMessage(self, payload, isBinary):
        if not isBinary:
            msg = "{} from {}".format(payload.decode('utf8'), self.peer)
            self.factory.broadcast(msg)
    def connectionLost(self, reason):
        WebSocketServerProtocol.connectionLost(self, reason)
        self.factory.unregister(self)
class BroadcastServerFactory(WebSocketServerFactory):
    """
    Simple broadcast server broadcasting any message it receives to all
    currently connected clients.
    """
    def __init__(self, url, debug = False, debugCodePaths = False):
        WebSocketServerFactory.__init__(self, url, debug = debug, debugCodePaths = debugCodePaths)
        self.clients = []
        self.tickcount = 0
        self.tick()
    def tick(self):
        self.tickcount += 1
        self.broadcast("tick %d from server" % self.tickcount)
        reactor.callLater(1, self.tick)
    def register(self, client):
        if not client in self.clients:
            print("registered client {}".format(client.peer))
            self.clients.append(client)
    def unregister(self, client):
        if client in self.clients:
            print("unregistered client {}".format(client.peer))
            self.clients.remove(client)
    def broadcast(self, msg):
        print("broadcasting message '{}' ..".format(msg))
        for c in self.clients:
            c.sendMessage(msg.encode('utf8'))
            print("message sent to {}".format(c.peer))
class BroadcastPreparedServerFactory(BroadcastServerFactory):
    """
    Functionally same as above, but optimized broadcast using
    prepareMessage and sendPreparedMessage.
    """
    def broadcast(self, msg):
        print("broadcasting prepared message '{}' ..".format(msg))
        preparedMsg = self.prepareMessage(msg)
        for c in self.clients:
            c.sendPreparedMessage(preparedMsg)
            print("prepared message sent to {}".format(c.peer))
if __name__ == '__main__':
    if len(sys.argv) > 1 and sys.argv[1] == 'debug':
        log.startLogging(sys.stdout)
        debug = True
    else:
        debug = False
    ServerFactory = BroadcastServerFactory
    #ServerFactory = BroadcastPreparedServerFactory
    factory = ServerFactory("ws://localhost:9000",
                            debug = debug,
                            debugCodePaths = debug)
    factory.protocol = BroadcastServerProtocol
    factory.setProtocolOptions(allowHixie76 = True)
    listenWS(factory)
    webdir = File(".")
    web = Site(webdir)
    reactor.listenTCP(8080, web)
    reactor.run()
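# Run with "python server.py" (append "debug" for verbose logging); WebSocket clients connect to
# ws://localhost:9000 while static files from the current directory are served on http://localhost:8080.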
|
[
"tobias.oberstein@tavendo.de"
] |
tobias.oberstein@tavendo.de
|
79c66d925c7a9c8dcbea9a1f8296b224b8821906
|
c831ed60548ec75cae2110f3f139dac57984f749
|
/examples/nn_potentials/cu_training.py
|
8d59bca1d0b72ab8617f4319b143d1c5fae344d8
|
[] |
no_license
|
eligardella/ml4chem
|
ab83f76e04bc623fe14dbd43fd5bf97b7bd3a6b3
|
09cd235271147655bd1a6150ea6c64b54add19a0
|
refs/heads/master
| 2020-06-20T20:27:07.352841
| 2019-07-29T20:56:19
| 2019-07-29T20:56:19
| 197,237,913
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,326
|
py
|
import sys
from ase.io import Trajectory
from dask.distributed import Client, LocalCluster
sys.path.append("../../")
from ml4chem import Potentials
from ml4chem.fingerprints import Gaussian
from ml4chem.models.neuralnetwork import NeuralNetwork
from ml4chem.utils import logger
def train():
    # Load the images with ASE
    images = Trajectory("cu_training.traj")
    # Arguments for fingerprinting the images
    normalized = True
    # Arguments for building the model
    n = 10
    activation = "relu"
    # Arguments for training the potential
    convergence = {"energy": 5e-3}
    epochs = 100
    lr = 1.0e-2
    weight_decay = 0.0
    regularization = 0.0
    calc = Potentials(
        fingerprints=Gaussian(
            cutoff=6.5, normalized=normalized, save_preprocessor="model.scaler"
        ),
        model=NeuralNetwork(hiddenlayers=(n, n), activation=activation),
        label="cu_training",
    )
    optimizer = ("adam", {"lr": lr, "weight_decay": weight_decay})
    calc.train(
        training_set=images,
        epochs=epochs,
        regularization=regularization,
        convergence=convergence,
        optimizer=optimizer,
    )
if __name__ == "__main__":
    logger(filename="cu_training.log")
    cluster = LocalCluster()
    client = Client(cluster, asynchronous=True)
    train()
|
[
"muammarelkhatib@gmail.com"
] |
muammarelkhatib@gmail.com
|
bc62c8f31f6e6ee722223582fb287117a6e3584d
|
3d8a2251504164a9a7ffa6484a2ed7933ea3c590
|
/practice_dictionaries_set1.py
|
983ec5dbe24596036253b95cf833136252cb4ccf
|
[] |
no_license
|
kjergens/NextGen
|
6b05187b10a867827c18c9a7d5a78a8cd6ab2bf2
|
b98d2ed8288713a7f522608e56b19816a7cf0f5e
|
refs/heads/master
| 2020-03-23T08:19:43.333176
| 2018-07-19T16:45:44
| 2018-07-19T16:45:44
| 141,320,376
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,193
|
py
|
# Set 1
# 1
# residents = {'Puffin': 104, 'Sloth': 105, 'Burmese Python': 106}
# print(residents['Sloth'])
# 2
# menu = {}
# menu['Sunday'] = 16.78
# print(menu['Sunday'])
#
# menu['Monday'] = 3
# menu['Tuesday'] = 0
#
# # BONUS: print each value on its own line
# for key in menu:
# print(key, ":", menu[key])
# 3
# key - animal_name : value - location
# zoo_animals = { 'Unicorn' : 'Cotton Candy House',
# 'Sloth' : 'Rainforest Exhibit',
# 'Bengal Tiger' : 'Jungle House',
# 'Atlantic Puffin' : 'Arctic Exhibit',
# 'Rockhopper Penguin' : 'Arctic Exhibit'}
#
# del zoo_animals['Sloth'] # kill the sloth
# del zoo_animals['Bengal Tiger'] # kill the tiger
#
# zoo_animals['Rockhopper Penguin'] = 'Children Zoo Area'
#
# for k in zoo_animals:
# print(k, ":", zoo_animals[k])
# 4
# webster = {
# "Aardvark" : "A star of a popular children's cartoon show.",
# "Baa" : "The sound a goat makes.",
# "Carpet": "Goes on the floor.",
# "Dab": "A small amount."
# }
#
# for k in webster:
# print(k, ":", webster[k])
#5
my_dict={'title':'Stranger Things', 'is_show':True, 'seasons':2}
print(my_dict.items())
for k, v in my_dict.items():
    print(k, ":", v)
|
[
"katie.jergens@gmail.com"
] |
katie.jergens@gmail.com
|
a8beaefc2f3e2bc33229e58d142280504ebae0cd
|
a80d16f9a47db055f6c22bf1e8d8cfd10a84e56c
|
/game_v2/card/base.py
|
37765faea10836e4a92143afd7419070fece6d74
|
[] |
no_license
|
JanzenLiu/uno-dev
|
70ce53b5067cf4b444aafa2d358f106b4108b58a
|
9e39cf8232f43d038feeb3ffa6c239e5db52fb5d
|
refs/heads/master
| 2020-03-28T22:09:40.470068
| 2019-09-11T18:57:58
| 2019-09-11T18:57:58
| 149,209,225
| 3
| 0
| null | 2019-03-23T05:14:27
| 2018-09-18T01:13:35
|
Python
|
UTF-8
|
Python
| false
| false
| 3,862
|
py
|
from enum import Enum, unique
from colorama import init
from colorama import Fore, Back, Style
init()
colormap = {
"WILD": Fore.BLACK + Back.WHITE,
"RED": Fore.LIGHTRED_EX,
"GREEN": Fore.LIGHTGREEN_EX,
"BLUE": Fore.LIGHTBLUE_EX,
"YELLOW": Fore.LIGHTYELLOW_EX
}
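# colorama foreground/background codes for each card color; CardColor.__call__ below wraps a
# string in the matching code and resets styling with Style.RESET_ALL.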
@unique
class CardColor(Enum):
WILD = 0 # just a conceptual type
RED = 1
GREEN = 2
BLUE = 3
YELLOW = 4
def __call__(self, string):
return "{}{}{}".format(colormap[self.name], string, Style.RESET_ALL)
@staticmethod
def option_set(ignore_wild=True):
options = set([option.value for option in CardColor])
if ignore_wild:
options.remove(CardColor.WILD)
return options
@staticmethod
def option_string(ignore_wild=True):
if ignore_wild:
return "\n".join(["{}) {}".format(option.value, option(option.name))
for option in CardColor if option != CardColor.WILD])
else:
return "\n".join(["{}) {}".format(option.value, option(option.name))
for option in CardColor])
@unique
class CardType(Enum):
ABSTRACT = -1 # just a conceptual type
NUMBER = 0
REVERSE = 1
SKIP = 2
WILDCARD = 3
DRAW_2 = 4
DRAW_4 = 5
class Card(object):
def __init__(self, card_type, color, score):
assert isinstance(card_type, CardType)
assert isinstance(color, CardColor)
assert isinstance(score, int)
self.card_type = card_type
self.color = color
self.score = score
self.short_name = None # to be overridden
def __repr__(self):
return self.color("{}({})".format(type(self).__name__, self.format_attribute()))
def __str__(self):
return self.color("{}({})".format(type(self).__name__, self.format_attribute()))
def format_attribute(self):
pass
def check_playable(self, current_color, current_value, current_type, current_to_draw):
assert isinstance(current_color, CardColor) and current_color != CardColor.WILD
assert isinstance(current_value, int) and -1 <= current_value <= 9
assert isinstance(current_type, CardType) and current_type != CardType.ABSTRACT
assert isinstance(current_to_draw, int) and current_to_draw >= 0
return self._check_playable(current_color, current_value, current_type, current_to_draw)
def _check_playable(self, current_color, current_value, current_type, current_to_draw):
return False # to override
# =============
# Type Checkers
# =============
# to override
def is_number(self):
return False
def is_reverse(self):
return False
def is_skip(self):
return False
def is_wildcard(self):
return False
def is_draw2(self):
return False
def is_draw4(self):
return False
def is_action(self):
return False
def is_weak_action(self):
return False
def is_strong_action(self):
return False
def is_draw_action(self):
return False
class ActionCard(Card):
def __init__(self, card_type, color, score):
assert card_type != CardType.NUMBER
super().__init__(card_type, color, score)
def is_action(self):
return True
class WeakActionCard(ActionCard):
def __init__(self, card_type, color):
assert color != CardColor.WILD # might be extended?
super().__init__(card_type, color, 20)
def format_attribute(self):
return self.color.name
def is_weak_action(self):
return True
class StrongActionCard(ActionCard):
def __init__(self, card_type):
super().__init__(card_type, CardColor.WILD, 50)
def format_attribute(self):
return ""
def is_strong_action(self):
return True
|
[
"chengtsung_liu@yahoo.com"
] |
chengtsung_liu@yahoo.com
|
b623a5b5e9c0b4e9acc55503388c50eae27c6f9b
|
360fef062f2bf2accdc9b708f94ad069dadd94d9
|
/Resources/__init__.py
|
41580712b197dd1a18b1ef4792092ffa39da4b6a
|
[] |
no_license
|
UlisesRamirez/Clicker
|
e361af236633cfc581ec2e6d1a146ad0b9ab2704
|
d3c7966f4a992822007f0cb86ab448bd6599037f
|
refs/heads/master
| 2020-09-11T08:36:01.851921
| 2019-12-04T00:48:44
| 2019-12-04T00:48:44
| 222,007,510
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 164
|
py
|
import sys
sys.path.insert(0, 'C:/Users/Ulises/Desktop/Projects/Python/Clicker')
from Resources.MoneyClass import Money
from Resources.UpgradesClass import Upgrade
|
[
"ulisesm.ramirez@gmail.com"
] |
ulisesm.ramirez@gmail.com
|
045e16d809ee51ce633497ad26cf76404a396b2f
|
6927d519f6ac056328580d0e97057d85c694bfa3
|
/VRPTW/src/construction_simple_1.py
|
02acafa024eb82fbf9e92076eb9cc91c6428f14a
|
[] |
no_license
|
petersonzilli/or
|
b83a12c0ad6a1f676ab209550921cb03cc56bfa3
|
06c9a193712a92475e19280acda20c7051bb909a
|
refs/heads/master
| 2021-05-09T10:33:45.058112
| 2018-01-27T11:22:38
| 2018-01-27T11:22:38
| 118,967,803
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,258
|
py
|
# -*- coding: utf-8 -*-
"""
Complete a full solution for VRPMTW given a partial solution
Author: Peterson Katagiri Zilli <peterson.zilli@gmail.com>
Date: 2018-01-25
Important Notes:
- Time Windows are sorted while reading the instance; this is important for the simple construction algorithm
"""
from solution import Solution
import random
import sys
class ConstructionSimple1:
"""Instance Class for VRPMTW"""
def __init__(self, instance, solution=None):
self.instance = instance
if not solution:
solution = Solution(self.instance)
self.solution = solution
# preprocessing instance for the algo
for c in self.instance.customers:
c.time_windows = sorted(c.time_windows)
def complete_solution(self, random_seed=0):
"""
Builds a solution loosely based on Solomon (1987) and Russell (1995) sequential construction heuristics
"""
#print('Phase 1: determining seed customers between the not used customers')
random.seed(random_seed)
seed_customers = [c.number for c in self.instance.customers if c.route == None]
random.shuffle(seed_customers)
#print(seed_customers)
#print('Phase 2: inserting each customer to the route which yields the least incurred cost')
"""
The algorithm is simple:
for the next customer of seed_customers - ordered and not already used
for each route:
for each one of the possible positions in the route:
calculate the aditional cost of adding the customer 'c'
if it is better than the best, update the best
perform the best insertion (and update route and solution costs)
remove the best insertion customer from the seed customers
"""
while len(seed_customers) > 0:
# reset best move
best_insertion_cost = sys.float_info.max
best_insertion_customer = None
best_insertion_previous_customer = None
c = seed_customers[0]
tbic = self.instance.customers[c]
for r in self.solution.routes:
previous_customer = r.first_customer
# verify if capacity constraint is ok
if not self.verify_constraint_demand(tbic, r):
continue
while previous_customer.next_customer != None:
pc = previous_customer
nc = pc.next_customer
delta_cost = self.instance.dist(pc, tbic) + self.instance.dist(tbic, nc) - self.instance.dist(pc, nc)
if delta_cost < best_insertion_cost and self.verify_constraint_tw(tbic, pc):
best_insertion_cost = delta_cost
best_insertion_customer = tbic
best_insertion_previous_customer = pc
previous_customer = nc
#print('best_insertion cost: ', best_insertion_cost)
#print('best_insertion customer: ', best_insertion_customer.number)
#print('best_insertion prev customer: ', best_insertion_previous_customer.number)
#print('best_insertion route: ', best_insertion_previous_customer.route)
best_insertion_previous_customer.route.insert(best_insertion_customer, best_insertion_previous_customer)
seed_customers.remove(best_insertion_customer.number)
#self.solution.print_solution()
#input('waiting...')
return self.solution
def verify_constraint_demand(self, tbic, r):
"""Verify demand constraints for the route"""
return self.instance.capacity >= r.total_demand + tbic.demand
def verify_constraint_tw(self, tbic, pc):
""" Verify arival and waiting time from the inserted cutomer to the end"""
# verify tw constraints on tbic
aux_arival = pc.arival + pc.waiting_time + pc.service_time + self.instance.dist(pc, tbic)
active_tw = None
for tw in list(tbic.time_windows):
if aux_arival <= tw[1]:
active_tw = tuple(tw)
break
# if there is no active tw then it is an infeasible insertion
if not active_tw:
return False
aux_waiting_time = max(active_tw[0], aux_arival) - aux_arival
# verify tw constraints on pc.next "[...] and beyond!" - Buzz Lightyear
c = pc.next_customer
pc = tbic
while c:
aux_arival = aux_arival + aux_waiting_time + pc.service_time + self.instance.dist(pc, c)
active_tw = None
for tw in c.time_windows:
if aux_arival <= tw[1]:
active_tw = tuple(tw)
break
# if there is no active tw then it is an infeasible insertion
if not active_tw:
return False
aux_waiting_time = max(active_tw[0], aux_arival) - aux_arival
# update customer to be tested
pc = c
c = pc.next_customer
return True
|
[
"peterson.zilli@gmail.com"
] |
peterson.zilli@gmail.com
|
37c6df28baee31a44b9aaff421245af8c00e3af1
|
1d92b947c6729ddbd43c4c41de4aaa3e76eaa936
|
/pkgs/fc/agent/fc/manage/monitor.py
|
c630426213cf56ac73d5cb9bb37fcfc4252e711c
|
[] |
no_license
|
ckauhaus/fc-nixos
|
2a64742855cadddd6caa10c744211a39b8bcdee2
|
98b204b5d54b7c2c837d0cba4f4d608d97ab88fb
|
refs/heads/fc-19.03-dev
| 2020-12-21T14:54:45.550884
| 2019-03-29T11:15:52
| 2019-03-29T13:19:47
| 236,466,025
| 0
| 0
| null | 2020-01-28T11:33:32
| 2020-01-27T10:27:54
|
Nix
|
UTF-8
|
Python
| false
| false
| 3,810
|
py
|
"""Manages local Sensu monitoring checks"""
import argparse
import datetime
import json
import shlex
import socket
import sys
import urllib.parse
import xmlrpc.client
def get_directory(enc, url):
    user = socket.gethostname()
    password = enc['parameters']['directory_password']
    parts = urllib.parse.urlsplit(url)
    if not socket.getdefaulttimeout():
        socket.setdefaulttimeout(300)
    return xmlrpc.client.ServerProxy('%s://%s:%s@%s%s' % (
        parts.scheme, user, password, parts.netloc, parts.path))
def get_sensucheck_configuration(servicechecks):
    checks = {}
    result = {'checks': checks}
    for servicecheck in servicechecks:
        name = 'directory.servicecheck.{rg}.{id}'.format(
            rg=servicecheck['resource_group'],
            id=servicecheck['id'])
        url = urllib.parse.urlsplit(servicecheck['url'])
        path = '?'.join([p for p in [url.path, url.query] if p])
        command = [
            'check_http', '-4',
            '-H', shlex.quote(url.hostname)]
        if url.port:
            command.extend(['-p', str(url.port)])
        if path:
            command.extend(['-u', shlex.quote(path)])
        if url.scheme == 'https':
            command.append('-S')
        if url.username:
            auth_pair = ':'.join([url.username, url.password or ''])
            command.extend(['-a', auth_pair])
        checks[name] = dict(
            command=' '.join(command),
            interval=120,
            subscribers='default',
            handler='directory',
            type='metric',
            check_id=servicecheck['id'],
            standalone=True)
    return result
def write_checks(directory=None, config_file=None, **kw):
    servicechecks = directory.list_servicechecks()
    sensu_checks = get_sensucheck_configuration(servicechecks)
    try:
        with open(config_file, 'r') as f:
            old_check_configuration = json.load(f)
    except IOError:
        old_check_configuration = None
    if old_check_configuration != sensu_checks:
        with open(config_file, 'w') as f:
            json.dump(sensu_checks, f, indent=2, sort_keys=True)
def handle_result(directory=None, enc=None, **kw):
    result = json.load(sys.stdin)
    check = result['check']
    # XXX We should aggregate multiple results into one call.
    directory.register_servicecheck_results([
        dict(
            id=check['check_id'],
            result=check['output'],
            state=check['status'],
            last_check=datetime.datetime.fromtimestamp(
                check['executed']).isoformat())])
def main():
    parser = argparse.ArgumentParser(description='Flying Circus Monitoring')
    parser.add_argument('-E', '--enc', default='/etc/nixos/enc.json',
                        help='Path to enc.json (default: %(default)s)')
    subparsers = parser.add_subparsers(help='sub-command help',
                                       dest='subparser_name')
    checks_parser = subparsers.add_parser('configure-checks')
    checks_parser.set_defaults(func=write_checks)
    checks_parser.add_argument(
        '-c', '--config-file',
        help='Where to put the check configuration (default: %(default)s)',
        default='/etc/local/sensu-client/directory_servicechecks.json')
    result_parser = subparsers.add_parser('handle-result')
    result_parser.set_defaults(func=handle_result)
    args = parser.parse_args()
    if not args.subparser_name:
        parser.print_usage()
        parser.exit("No command given.")
    with open(args.enc) as f:
        enc = json.load(f)
    directory = get_directory(
        enc,
        'https://directory.fcio.net/v2/api') # RING0
    kw = vars(args)
    kw.pop('enc', None)
    args.func(directory=directory, enc=enc, **kw)
if __name__ == '__main__':
    main()
|
[
"kc@flyingcircus.io"
] |
kc@flyingcircus.io
|
9bd24db49a7dd8925fb5158cff5980e5f8e55213
|
cef8e528087eeb0ece37bb908ffea7cf81ade0db
|
/p30.py
|
876efda6403fabfdc9ea032908ae0be32bfb5490
|
[] |
no_license
|
ashrafulemon/python
|
f03ef07c77885bd00e73674e8767796c9bcac058
|
5d257288a52195500cf1f6b5cc1e017dae0fdcc0
|
refs/heads/main
| 2023-01-31T11:08:17.985674
| 2020-12-16T14:59:39
| 2020-12-16T14:59:39
| 322,011,688
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 143
|
py
|
#3.6 calculate hours minutes seconds
ts=11725
h=ts//(3600)
ts=ts%3600
m=ts//60
s=ts%60
print("{} hours ,{} minutes ,{} seconds".format(h,m,s))
|
[
"emon118.bd@gmail.com"
] |
emon118.bd@gmail.com
|
821779bac3d8bc0210df53b1c8ff26ce43477be3
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_044/ch84_2020_04_13_13_56_41_678562.py
|
da10f174f55747b6eb7ed555591ff9a14a30c706
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 312
|
py
|
def inverte_dicionario(dicionario):
    dn = {}
    for n in dicionario.values():
        if n not in dn:
            # build a fresh list per value; reusing (and clearing) one shared list
            # would leave every entry aliasing the same object
            ls = []
            for f, m in dicionario.items():
                if n == m:
                    ls.append(f)
            dn[n] = ls
    return dn
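# e.g. inverte_dicionario({'a': 1, 'b': 1, 'c': 2}) returns {1: ['a', 'b'], 2: ['c']}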
|
[
"you@example.com"
] |
you@example.com
|
ab3578e2923369a39677d534ff9b10f1b3f6fdb6
|
d8fe7116d778e381dc21abc1e38d6aa798b2a982
|
/ch25-inheritance/human.py
|
5b471d81e77c35a0c7385352c88e2ffe0363b729
|
[] |
no_license
|
eejay73/python-bootcamp
|
f474c9b6e7462d084f98b3d6ab0fc9434443b2e3
|
85160f1197bc18dcd1fdc9c8e4a6f70d20c64a5a
|
refs/heads/master
| 2020-03-11T10:02:38.976973
| 2018-05-13T07:17:31
| 2018-05-13T07:17:31
| 129,930,218
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 775
|
py
|
#!/usr/bin/env python3
"""
Chapter 25 - Inheritance: Exploring properties (@property decorator)
@author Ellery Penas
@version 2018.05.11
"""
class Human:
    def __init__(self, first, last, age):
        self.first = first
        self.last = last
        self._age = max(age, 0)
    # exposes the age property setter/getter interface;
    # users need only access the "age" property, not the underlying _age attribute
    @property
    def age(self):
        return self._age
    @age.setter
    def age(self, age):
        self._age = max(age, 0)
    @property
    def full_name(self):
        return f"{self.first} {self.last}"
def main():
    """main line function"""
    john = Human("John", "Doe", 24)
    print(john.age)
    print(john.full_name)
if __name__ == "__main__":
    main()
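# Running this module prints 24 followed by "John Doe".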
|
[
"coachejay@gmail.com"
] |
coachejay@gmail.com
|
60bdc23307b368ff56e0beba99cf76d30a0442eb
|
ad6a7e282095e813ae1a854190b0a7536a137ea8
|
/ConsolidateMap.py
|
87721f6ba1305457208a02a660ee662e5e0420cb
|
[] |
no_license
|
MikeQuartic/CompassGIS_DEV
|
80e172ebce326923edcf411b0037f758558c9e67
|
8845c5837f481d5bbd21400ba09d495d8274ee9f
|
refs/heads/master
| 2021-01-10T16:22:17.794184
| 2016-03-23T18:56:51
| 2016-03-23T18:56:51
| 54,585,631
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,100
|
py
|
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------
# ConsolidateMap.py
# Created on: 2014-07-08
#
# Description: This is to extract the data layers needed from sde and
# place in a localfile.gdb for the CompassGIS Application.
# ---------------------------------------------------------------------------
# Import arcpy module
import os.path, arcpy, logging, smtplib, sys
from datetime import datetime
from arcpy import env
env.overwriteOutput = True
#Set up logger. If you need to debug, change level=logging.INFO to logging.DEBUG
#(TODO Change the path when moving to another machine)
fileLog = r'E:\Data\PUD_CompassGIS\LocalExtractedFGDBs\Scripts\ConsolidateMap.log'
logging.basicConfig(filename = fileLog, level=logging.DEBUG)
#Header for the log file
logging.info('--------------------------------------------------------------')
logging.info(' ' + str(datetime.now()))
logging.info(' Running ConsolidateMap.py')
logging.info('--------------------------------------------------------------')
# Local variables:
# this is the where the data is coming from. In this case the infrastructure data is coming from SDE and the station data is coming from a query layer on atlasdev
CompassGIS_fgdb_source_mxd = arcpy.GetParameterAsText(0)
##CompassGIS_fgdb_source_mxd = sys.argv[0]
# this is where the data is going to. This extract data will be used by the arcserver service
Extract = arcpy.GetParameterAsText(1)
##Extract = sys.argv[1]
# Process: Consolidate Map
arcpy.env.overwriteOutput = True
try:
    arcpy.ConsolidateMap_management(CompassGIS_fgdb_source_mxd, Extract, "CONVERT", "CONVERT_ARCSDE", "DEFAULT", "ALL")
    logging.info('ConsolidateMap_management ran successfully')
    Subject = 'ConsolidateMap_management ran successfully'
    EmailText = 'ConsolidateMap_management ran successfully'
except:
    logging.error(arcpy.GetMessages())
    Subject = 'ERROR with ConsolidateMap_management'
    EmailText = 'Check error log at E:\\Data\\PUD_CompassGIS\\LocalExtractedFGDBs\\Scripts \n ERROR: ' + arcpy.GetMessages()
# Setup Email notification
#TODO change: the sender to prod when moved to prod
try:
    sender = 'vmgisdev04@sannet.gov'
    receivers = ['mike@quarticsolutions.com'] #'chris@quarticsolutions.com','tyler@quarticsolutions.com','drew@quarticsolutions.com', 'rob@quarticsolutions.com', 'timo@quarticsolutions.com']
except Exception as e:
    logging.error('ERROR: Email not sent!')
    logging.error('ERROR: ' + str(e))
message = """\
From: %s
To: %s
Subject: %s
%s
""" % (sender, ", ".join(receivers), Subject, EmailText)
try:
    smtpObj = smtplib.SMTP('smtp-out.sannet.gov')
    smtpObj.sendmail(sender,receivers, message)
except Exception as e:
    logging.error('Email NOT sent!' + str(e))
logging.info('--------------------------------------------------------------')
logging.info(' ' + str(datetime.now()))
logging.info(' Finished ConsolidateMap.py')
logging.info('--------------------------------------------------------------')
|
[
"mike@quarticsolutions.com"
] |
mike@quarticsolutions.com
|
07587e0f98236cb80d07ba561a4bad48fd023f89
|
9eae72aeb54495b3e04ee51261d5638cbfa4a5fc
|
/policyquote/urls.py
|
d35c207d34ffe0e8b01297d57b023aa81a348433
|
[] |
no_license
|
abinabraham/policyquote_v1
|
18bd6d1fb29985a811a2f28eb57e033f3ffd38f3
|
b29836984fcbe6bb8ed01f1a1da40c6d202c2d7f
|
refs/heads/main
| 2023-05-11T15:09:16.591557
| 2021-06-02T21:22:14
| 2021-06-02T21:22:14
| 373,302,897
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 357
|
py
|
from django.contrib import admin
from django.urls import path, include
#swagger URL
from rest_framework_swagger.views import get_swagger_view
schema_view = get_swagger_view(title='POLICYQUOTE APIs')
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('apps.accounts.urls')),
path('api-auth/', include('rest_framework.urls'))
]
|
[
"abinabrahamcs@gmail.com"
] |
abinabrahamcs@gmail.com
|
4ae121c2acf517b1d7a41b2410cce4ffb2fd7ecc
|
d66b0ff3c41fa48e478469997eff96107656f0f2
|
/test.py
|
6356fac78f2453472b7d1a69eee31e18f89cd019
|
[] |
no_license
|
lazurey/then
|
8c559795795543145dda1fca234e09fbea751a3c
|
1bffae4ee0eb3e15e9ccebdc3ebd1f0f9c785815
|
refs/heads/master
| 2021-01-16T19:32:37.890115
| 2013-01-22T07:29:07
| 2013-01-22T07:29:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,947
|
py
|
#coding:utf-8
from google.appengine.api import xmpp
from google.appengine.ext import webapp
from google.appengine.ext.webapp import xmpp_handlers
from google.appengine.ext.webapp.util import run_wsgi_app
import base64
import urllib,urllib2
from Funcs import Funcs
class XMPPHandler(webapp.RequestHandler):
def post(self):
msg = xmpp.Message(self.request.POST)
b = msg.sender.split('/', 1)
email = b[0]
func = Funcs()
if msg.body[0:5].lower() == '/name':
a = msg.body.split('=', 2)
name = a[1]
pwd = a[2]
func.save(name, pwd, email)
msg.reply("已绑定, 因为目前无法验证用户, 输错自负.")
elif msg.body[0:5].lower() == '/help':
msg.reply("绑定用户请回复: /name=ID=密码\nID和密码以一个'='间隔, 请不要加入其他字符\n目前无法验证用户, 如有错误请重新绑定, 谢谢!")
msg.reply("查看目前绑定用户请回复: /see \n如果长时间未回复, 请重新绑定, 谢谢!")
elif msg.body[0:4].lower() == '/new':
msg.reply("这里是查看新消息\n目前这里是一片空白")
elif msg.body[0:4].lower() == '/see':
res = func.checkUser(email)
msg.reply(res)
else:
            msg.reply('然后呢?')  # "And then?"
auth = func.getUser(email)
if len(auth) > 1 :
data = msg.body.encode('utf-8')
url = 'http://api.fanfou.com/statuses/update.xml'
headers = {"Content-type":"application/x-www-form-urlencoded", "Accept":"text/xml", "Authorization": auth}
params = urllib.urlencode({"status": data, "source":"then"})
req = urllib2.Request(url, params, headers)
response = urllib2.urlopen(req)
application = webapp.WSGIApplication([('/_ah/xmpp/message/chat/', XMPPHandler)], debug=True)
def main():
run_wsgi_app(application)
if __name__ == "__main__":
main()
|
[
"lazureys@gmail.com"
] |
lazureys@gmail.com
|
369c9528c8d546e48f98a4fa3200eca9a649d445
|
2090cda8924fe51f30805460fcdd2e365d532da6
|
/kippo/commands/gcc.py
|
bb6b7f9db58e8720bda4e5b43c9d1ac8fa03f16c
|
[] |
no_license
|
SecPascal/kippo
|
f88b4969748460b85ecfdc4f868c7b2e655f35e2
|
40e654017e641273bb2613c4e780d9b3453f6850
|
refs/heads/master
| 2020-12-24T21:00:42.689081
| 2015-04-17T09:10:28
| 2015-04-17T09:10:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,164
|
py
|
# Copyright (c) 2013 Bas Stottelaar <basstottelaar [AT] gmail [DOT] com>
import time
import re
import getopt
import random
from twisted.internet import reactor
from kippo.core.honeypot import HoneyPotCommand
commands = {}
class command_gcc(HoneyPotCommand):
# Name of program. Under OSX, you might consider i686-apple-darwin11-llvm-gcc-X.X
APP_NAME = "gcc"
    # GCC version, used in help, version and the command-line name gcc-X.X
APP_VERSION = (4, 7, 2)
# Random binary data, which looks awesome. You could change this to whatever you want, but this
    # data will be put in the actual file and thus exposed to our hacker when he/she cats the file.
RANDOM_DATA = "\x6a\x00\x48\x89\xe5\x48\x83\xe4\xf0\x48\x8b\x7d\x08\x48\x8d\x75\x10\x89\xfa" \
"\x83\xc2\x01\xc1\xe2\x03\x48\x01\xf2\x48\x89\xd1\xeb\x04\x48\x83\xc1\x08\x48" \
"\x83\x39\x00\x75\xf6\x48\x83\xc1\x08\xe8\x0c\x00\x00\x00\x89\xc7\xe8\xb9\x00" \
"\x00\x00\xf4\x90\x90\x90\x90\x55\x48\x89\xe5\x48\x83\xec\x40\x89\x7d\xfc\x48" \
"\x89\x75\xf0\x48\x8b\x45\xf0\x48\x8b\x00\x48\x83\xf8\x00\x75\x0c\xb8\x00\x00" \
"\x00\x00\x89\xc7\xe8\x8c\x00\x00\x00\x48\x8b\x45\xf0\x48\x8b\x40\x08\x30\xc9" \
"\x48\x89\xc7\x88\xc8\xe8\x7e\x00\x00\x00\x89\xc1\x89\x4d\xdc\x48\x8d\x0d\xd8" \
"\x01\x00\x00\x48\x89\xcf\x48\x89\x4d\xd0\xe8\x72\x00\x00\x00\x8b\x4d\xdc\x30" \
"\xd2\x48\x8d\x3d\xa4\x00\x00\x00\x89\xce\x88\x55\xcf\x48\x89\xc2\x8a\x45\xcf" \
"\xe8\x53\x00\x00\x00\x8b\x45\xdc\x88\x05\xc3\x01\x00\x00\x8b\x45\xdc\xc1\xe8" \
"\x08\x88\x05\xb8\x01\x00\x00\x8b\x45\xdc\xc1\xe8\x10\x88\x05\xad\x01\x00\x00" \
"\x8b\x45\xdc\xc1\xe8\x18\x88\x05\xa2\x01\x00\x00\x48\x8b\x45\xd0\x48\x89\x45" \
"\xe0\x48\x8b\x45\xe0\xff\xd0\x8b\x45\xec\x48\x83\xc4\x40\x5d\xc3\xff\x25\x3e" \
"\x01\x00\x00\xff\x25\x40\x01\x00\x00\xff\x25\x42\x01\x00\x00\xff\x25\x44\x01" \
"\x00\x00\x4c\x8d\x1d\x1d\x01\x00\x00\x41\x53\xff\x25\x0d\x01\x00\x00\x90\x68" \
"\x00\x00\x00\x00\xe9\xe6\xff\xff\xff\x68\x0c\x00\x00\x00\xe9\xdc\xff\xff\xff" \
"\x68\x1d\x00\x00\x00\xe9\xd2\xff\xff\xff\x68\x2b\x00\x00\x00\xe9\xc8\xff\xff" \
"\xff\x01\x00\x00\x00\x1c\x00\x00\x00\x00\x00\x00\x00\x1c\x00\x00\x00\x00\x00" \
"\x00\x00\x1c\x00\x00\x00\x02\x00\x00\x00\x00\x0e\x00\x00\x34\x00\x00\x00\x34" \
"\x00\x00\x00\xf5\x0e\x00\x00\x00\x00\x00\x00\x34\x00\x00\x00\x03\x00\x00\x00" \
"\x0c\x00\x02\x00\x14\x00\x02\x00\x00\x00\x00\x01\x40\x00\x00\x00\x00\x00\x00" \
"\x01\x00\x00\x00"
def start(self):
"""
        Parse as much as possible of the GCC syntax and generate the output
        that is requested. The file that is generated can be read (and will
        output garbage taken from an actual file), but when executed it will
        generate a segmentation fault.
        The input files are expected to exist, but can be empty.
Verified syntaxes, including non-existing files:
* gcc test.c
* gcc test.c -o program
* gcc test1.c test2.c
* gcc test1.c test2.c -o program
* gcc test.c -o program -lm
* gcc -g test.c -o program -lm
* gcc test.c -DF_CPU=16000000 -I../etc -o program
* gcc test.c -O2 -o optimized_program
* gcc test.c -Wstrict-overflow=n -o overflowable_program
Others:
* gcc
* gcc -h
* gcc -v
* gcc --help
* gcc --version
"""
output_file = None
input_files = 0
complete = True
# Parse options or display no files
try:
opts, args = getopt.gnu_getopt(self.args, 'ESchvgo:x:l:I:W:D:X:O:', ['help', 'version', 'param'])
except getopt.GetoptError as err:
self.no_files()
return
# Parse options
for o, a in opts:
if o in ("-v"):
self.version(short=False)
return
elif o in ("--version"):
self.version(short=True)
return
elif o in ("-h"):
self.arg_missing("-h")
return
elif o in ("--help"):
self.help()
return
elif o in ("-o"):
if len(a) == 0:
self.arg_missing("-o")
else:
output_file = a
# Check for *.c or *.cpp files
for value in args:
if '.c' in value.lower():
sourcefile = self.fs.resolve_path(value, self.honeypot.cwd)
if self.fs.exists(sourcefile):
input_files = input_files + 1
else:
self.writeln("%s: %s: No such file or directory" % (command_gcc.APP_NAME, value))
complete = False
# To generate, or not
if input_files > 0 and complete:
func = lambda: self.generate_file(output_file if output_file else 'a.out')
timeout = 0.1 + random.random()
# Schedule call to make it more time consuming and real
self.scheduled = reactor.callLater(timeout, func)
else:
self.no_files()
def ctrl_c(self):
""" Make sure the scheduled call will be canceled """
if getattr(self, 'scheduled', False):
self.scheduled.cancel()
def no_files(self):
""" Notify user there are no input files, and exit """
self.writeln( """gcc: fatal error: no input files
compilation terminated.""" )
self.exit()
def version(self, short):
""" Print long or short version, and exit """
# Generate version number
version = '.'.join([ str(v) for v in command_gcc.APP_VERSION[:3] ])
version_short = '.'.join([ str(v) for v in command_gcc.APP_VERSION[:2] ])
if short:
data = ( """%s (Debian %s-8) %s
Copyright (C) 2010 Free Software Foundation, Inc.
This is free software; see the source for copying conditions. There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.""" % (command_gcc.APP_NAME, version, version) )
else:
data = ( """Using built-in specs.
COLLECT_GCC=gcc
COLLECT_LTO_WRAPPER=/usr/lib/gcc/x86_64-linux-gnu/4.7/lto-wrapper
Target: x86_64-linux-gnu
Configured with: ../src/configure -v --with-pkgversion=\'Debian %s-5\' --with-bugurl=file:///usr/share/doc/gcc-%s/README.Bugs --enable-languages=c,c++,fortran,objc,obj-c++ --prefix=/usr --program-suffix=-%s --enable-shared --enable-multiarch --enable-linker-build-id --with-system-zlib --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --with-gxx-include-dir=/usr/include/c++/%s --libdir=/usr/lib --enable-nls --enable-clocale=gnu --enable-libstdcxx-debug --enable-objc-gc --with-arch-32=i586 --with-tune=generic --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=x86_64-linux-gnu
Thread model: posix
gcc version %s (Debian %s-5)""" % (version, version_short, version_short, version_short, version, version))
# Write
self.writeln(data)
self.exit()
def generate_file(self, outfile):
data = ""
# TODO: make sure it is written to temp file, not downloads
safeoutfile = '%s/%s_%s' % \
(self.honeypot.env.cfg.get('honeypot', 'download_path'),
time.strftime('%Y%m%d%H%M%S'),
re.sub('[^A-Za-z0-9]', '_', outfile))
# Data contains random garbage from an actual file, so when
# catting the file, you'll see some 'real' compiled data
for i in range(random.randint(3, 15)):
if random.randint(1, 3) == 1:
data = data + command_gcc.RANDOM_DATA[::-1]
else:
data = data + command_gcc.RANDOM_DATA
# Write random data
with open(safeoutfile, 'wb') as f: f.write(data)
# Output file
outfile = self.fs.resolve_path(outfile, self.honeypot.cwd)
# Create file for the honeypot
self.fs.mkfile(outfile, 0, 0, len(data), 33188)
self.fs.update_realfile(self.fs.getfile(outfile), safeoutfile)
# Segfault command
class segfault_command(HoneyPotCommand):
def call(self):
self.write("Segmentation fault\n")
        # Register the 'newly compiled file' as a command that segfaults
self.honeypot.commands[outfile] = segfault_command
# Done
self.exit()
def arg_missing(self, arg):
""" Print missing argument message, and exit """
self.writeln("%s: argument to '%s' is missing" % (command_gcc.APP_NAME, arg))
self.exit()
def help(self):
""" Print help info, and exit """
version = '.'.join([ str(v) for v in command_gcc.APP_VERSION[:2] ])
self.writeln( """Usage: gcc [options] file...
Options:
-pass-exit-codes Exit with highest error code from a phase
--help Display this information
--target-help Display target specific command line options
--help={common|optimizers|params|target|warnings|[^]{joined|separate|undocumented}}[,...]
Display specific types of command line options
(Use '-v --help' to display command line options of sub-processes)
--version Display compiler version information
-dumpspecs Display all of the built in spec strings
-dumpversion Display the version of the compiler
-dumpmachine Display the compiler's target processor
-print-search-dirs Display the directories in the compiler's search path
-print-libgcc-file-name Display the name of the compiler's companion library
-print-file-name=<lib> Display the full path to library <lib>
-print-prog-name=<prog> Display the full path to compiler component <prog>
-print-multiarch Display the target's normalized GNU triplet, used as
a component in the library path
-print-multi-directory Display the root directory for versions of libgcc
-print-multi-lib Display the mapping between command line options and
multiple library search directories
-print-multi-os-directory Display the relative path to OS libraries
-print-sysroot Display the target libraries directory
-print-sysroot-headers-suffix Display the sysroot suffix used to find headers
-Wa,<options> Pass comma-separated <options> on to the assembler
-Wp,<options> Pass comma-separated <options> on to the preprocessor
-Wl,<options> Pass comma-separated <options> on to the linker
-Xassembler <arg> Pass <arg> on to the assembler
-Xpreprocessor <arg> Pass <arg> on to the preprocessor
-Xlinker <arg> Pass <arg> on to the linker
-save-temps Do not delete intermediate files
-save-temps=<arg> Do not delete intermediate files
-no-canonical-prefixes Do not canonicalize paths when building relative
prefixes to other gcc components
-pipe Use pipes rather than intermediate files
-time Time the execution of each subprocess
-specs=<file> Override built-in specs with the contents of <file>
-std=<standard> Assume that the input sources are for <standard>
--sysroot=<directory> Use <directory> as the root directory for headers
and libraries
-B <directory> Add <directory> to the compiler's search paths
-v Display the programs invoked by the compiler
-### Like -v but options quoted and commands not executed
-E Preprocess only; do not compile, assemble or link
-S Compile only; do not assemble or link
-c Compile and assemble, but do not link
-o <file> Place the output into <file>
-pie Create a position independent executable
-shared Create a shared library
-x <language> Specify the language of the following input files
Permissible languages include: c c++ assembler none
'none' means revert to the default behavior of
guessing the language based on the file's extension
Options starting with -g, -f, -m, -O, -W, or --param are automatically
passed on to the various sub-processes invoked by gcc. In order to pass
other options on to these processes the -W<letter> options must be used.
For bug reporting instructions, please see:
<file:///usr/share/doc/gcc-4.7/README.Bugs>.""")
self.exit()
# Definitions
commands['/usr/bin/gcc'] = command_gcc
commands['/usr/bin/gcc-%s' % ('.'.join([ str(v) for v in command_gcc.APP_VERSION[:2] ]))] = command_gcc
|
[
"michel@oosterhof.net"
] |
michel@oosterhof.net
|
29e05b23a191b02695c803250f347ec537a63b62
|
b79a4b534484ebaac5a850609d43130c95fde18d
|
/r3.py
|
98853a30787b4ad8b9c24bf330cb3197d21415ce
|
[] |
no_license
|
howardgogo/chat
|
d9418da74e3311b4f0424e9bb962391dfd9f77bc
|
50baa4cf22f4f314f531d957c3e6d9d7ea302d89
|
refs/heads/master
| 2022-11-24T18:39:44.494288
| 2020-08-04T10:01:12
| 2020-08-04T10:01:12
| 282,804,624
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 205
|
py
|
lines = []
with open('3.txt', 'r', encoding = 'utf-8-sig') as f:
for line in f:
lines.append(line.strip())
for line in lines:
s = line.split(' ')
time = s[0][:5]
name = s[0][5:]
print(time, name)
|
[
"howard104023060@icloud.com"
] |
howard104023060@icloud.com
|
340940bfba998d55ccd22765aa38f5b6c5ac0912
|
763c50a9f4f996d42c4b1078b9cc7c369fdb8584
|
/radio-box.py
|
ddf6964449a2d880f20d4b51d1635111618d1a60
|
[] |
no_license
|
clementthomas/radio-box
|
68ad0ff2bd1163512f786c772eea03d6269c9dbb
|
7c38e542052aaa7c6a48b192930cd3eed0a70f4f
|
refs/heads/master
| 2020-05-07T19:47:37.326804
| 2019-06-12T14:40:14
| 2019-06-12T14:40:14
| 180,828,647
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,382
|
py
|
# -*- coding: utf-8 -*-
import vlc
import time, sched
from gpiozero import LED, Button, DigitalOutputDevice
from signal import pause
from datetime import datetime
from grove_rgb_lcd import *
import os,signal,sys
from vlc import EventType
import random
import pdb
import threading
#inputs
button1 = Button(26, bounce_time=0.2)
button2 = Button(13, bounce_time=0.2)
button3 = Button(16, bounce_time=0.2)
button4 = Button(5, bounce_time=0.2)
button5 = Button(6, bounce_time=0.2)
buttonOnOff = Button(25, hold_time=5)
#outputs
led = LED(24)
display = DigitalOutputDevice(23)
#funny sentences to say hello and good bye
hellos = ['Coucou !', 'Salut a toi !', 'Toujours en vie ?', 'Comment vas-tu ?', 'Besoin de rien\nEnvie de toi...', 'BlBblblBLbblb !!', 'Pourquoi pas ?', 'Salamalekoum', ':-)']
byes = ['Tchao l\'ami', 'Hasta la vista,\nBaby !', 'Hasta la victoria !', 'A plus \ndans l\'bus !', 'Bisous !']
context = {
'playerList':vlc.MediaListPlayer(),
'stations': [
('Sing Sing','http://stream.sing-sing-bis.org:8000/singsingaac256'),
('Radio Sympa','http://radio2.pro-fhi.net:9095/stream2'),
('mega','http://live.francra.org:8000/Radio-Mega'),
('France Inter','http://direct.franceinter.fr/live/franceinter-midfi.mp3'),
('FIP','http://direct.fipradio.fr/live/fipnantes-midfi.mp3'),
('France culture','http://direct.franceculture.fr/live/franceculture-midfi.mp3'),
('St Fereol','http://live.francra.org:8000/RadioSaintFerreol.m3u'),
('Grenouille','http://live.radiogrenouille.com/live.m3u'),
],
'current_station':0,
'current_lcd_text':'',
'state':'stopped' #stopped, loading, playing
}
def main():
#set volume to vlc
context['playerList'].get_media_player().audio_set_volume(60)
#Set handler when station reached
context['playerList'].get_media_player().event_manager().event_attach(EventType.MediaPlayerPlaying, stationReached)
#set function called when process stopped
signal.signal(signal.SIGTERM, on_exit)
#sequence to say hello
launch()
#set button handlers
buttonOnOff.when_held = shutdown #held after 5 seconds
buttonOnOff.when_pressed = buttonOnOffPressed
button1.when_pressed = buttonPressed
button2.when_pressed = buttonPressed
button3.when_pressed = buttonPressed
button4.when_pressed = buttonPressed
button5.when_pressed = buttonPressed
#wait
pause()
def launch():
'''
simple initializing sequence
'''
display.off()
led.on()
def shutdown():
'''
shutdown raspbian
'''
on_exit()
os.system('sudo halt')
def buttonOnOffPressed():
'''
    Toggle depending on the display state:
    if the display is on, switch the system off;
    if the display is off, switch the system on.
'''
if display.value:
context.get('playerList').stop()
print_lcd("%s"%(random.choice(byes),))
time.sleep(1)
print_lcd("")
display.off()
led.on()
context['state'] = 'stopped'
else:
display.on()
led.off()
print_lcd("%s"%(random.choice(hellos),))
time.sleep(1)
play()
def buttonPressed():
'''
handler when station button pressed
'''
if button1.is_pressed:
play(0)
elif button2.is_pressed:
play(1)
elif button3.is_pressed:
play(2)
elif button4.is_pressed:
play(3)
elif button5.is_pressed:
play(4)
def on_exit(sig=None, func=None):
'''
when process is stopped
'''
print_lcd("Adios !")
time.sleep(1)
print_lcd("")
context.get('playerList').stop()
sys.exit(1)
def print_lcd(text, no_refresh=False):
'''
display on LCD
'''
try:
if no_refresh:
setText_norefresh(text)
else:
setText(text)
context['current_lcd_text'] = text
except OSError as e:
try:
            setText(text)  # try a second time
except OSError as e:
print('OS Error : %s (text = %s)'%(str(e),text))
def play(station_id=-1):
'''
play a station and display it on LCD followed by '...'
'''
context['state'] = 'loading'
playerList = context.get('playerList')
if station_id == -1:
station_id = context.get('current_station')
else:
context['current_station'] = station_id
station = context.get('stations')[station_id]
if playerList.is_playing():
playerList.stop()
print_lcd('%s...'%(station[0],))
if 'm3u' in station[1]:
mediaList = vlc.MediaList([station[1]])
print('MediaPlayerList')
else:
mediaList = vlc.MediaList()
mediaList.add_media(station[1])
print('MediaPlayer')
playerList.set_media_list(mediaList)
playerList.play()
def stationReached(event):
'''
callback of MediaPlayerList when playing
'''
context['state'] = 'playing'
context['title_position'] = 0
station_id = context.get('current_station')
station = context.get('stations')[station_id]
print('Station Reached !')
print_lcd('%s !'%(station[0],))
displayTitle()
def getTitle():
station_id = context.get('current_station')
station = context.get('stations')[station_id]
media = context.get('playerList').get_media_player().get_media()
title = media.get_meta(12)
if not title:
title = ''
return title
def displayTitle():
title = getTitle()
print('len : %s'%(len(title),))
if len(title) <= 16:
print_lcd('\n%s'%(title.ljust(16),), no_refresh=True)
else:
#Update title every 0.5 second
threading.Timer(0.5, updateTitle, (0,)).start()
def updateTitle(position=0, old_title=None):
if context['state'] == 'playing':
title = None
if position == -1:
print_lcd('\n ', no_refresh=True)
elif position > -1:
title = getTitle()
title_mini = title[position:position+16]
            # if the title changed or we scrolled to the end of it: reset the position
if (old_title and old_title != title) or \
(position + 16 == len(title)):
position = -3
print_lcd('\n%s'%(title_mini,), no_refresh=True)
threading.Timer(0.5, updateTitle, (position+1,title)).start()
if __name__ == "__main__":
main()
|
[
"clement.thomas@pm.me"
] |
clement.thomas@pm.me
|
ee7437d8197478af5343298db3edd6cec9f47867
|
d1540b8aa3d561210fa8da5caf8efea83e3bd96f
|
/run.py
|
bb3a74a25e32e1a5fbfdc2098459c48d30f3bb27
|
[] |
no_license
|
thanh-pham2k/SECURITY-ASSIGNMENT
|
b483b2691d09aa369587469b22f6071db9924c19
|
8f8d83322eb7a4e0329aa243b350e611b8d66a86
|
refs/heads/main
| 2023-05-12T14:02:42.870139
| 2021-05-31T15:54:01
| 2021-05-31T15:54:01
| 369,404,401
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,671
|
py
|
from UtilityFunctions import *
import argparse
def caesar(text,key,alphabet="ABCDEFGHIJKLMNOPQRSTUVWXYZ"):
log_func("caesar")
text = text.upper()
validalpha(alphabet)
validptext(text,alphabet)
validkeys(key,int)
M = len(alphabet)
T = alphaToNumber(text,alphabet)
# Allow key to be specified by letter
if type(key) == str:
if key in alphabet:
key = alphabet.index(key)
out = []
for i in T:
# Shift the number by the key value
out.append( (i + key) % M )
return "".join(numberToAlpha(out,alphabet))
#-----------------------------------ATTACK-----------------------------------
ngrams4 = open('4gramScores.csv', 'r')
quadgrams = {}
for line in ngrams4:
L = line.split(" ")
quadgrams[L[0]] = int(L[2])
def quadgramScore(text):
score = 0
for i in range(len(text)-1):
if text[i:i+4] not in quadgrams.keys():
score -= 3000
else:
score += quadgrams[text[i:i+4]]
return score
def caesarCracker(ctext):
bestkey = 0
bestdecode = ""
bestscore = float("-inf")
for i in range(0,26):
dtext = caesar(ctext,i)
s = quadgramScore(dtext)
if s > bestscore:
bestkey = i
bestscore = s
bestdecode = dtext
return bestkey,bestdecode
def run_cesar():
# we check with 3 testcases
# short length
# medium length
# long length
parser = argparse.ArgumentParser()
parser.add_argument('algo_name',type=str,help="algorithm's name")
parser.add_argument('--argument',default=None,help=
'''
python run.py caesar ptext_1.txt 17 --decode=True
''')
parser.add_argument("file_raw_text", type=str, help="file_name")
parser.add_argument("key_number", type=int, help="key_number")
parser.add_argument("--decode", type=str, help="True - False",default=False)
args = parser.parse_args()
if parser.parse_args().algo_name == 'caesar':
ptext = readfile(str(args.file_raw_text))
key = args.key_number
ctext = caesar(ptext,key)
file_name_dest = "result_" + args.file_raw_text
if (args.decode=="True"):
bestKey,bestDecode = caesarCracker(ctext)
writefile(file_name_dest ,str(bestKey))
writefile(file_name_dest,"\n"+bestDecode)
else:
writefile(file_name_dest,ctext)
def main():
run_cesar()
if __name__ == '__main__':
main()
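    # A library-style sketch (assumed, bypassing the CLI): shifting forward by a
    # key and then by (26 - key) round-trips the plaintext, and caesarCracker
    # simply searches all 26 shifts for the best quadgram score, so on ordinary
    # English text it should recover the complementary shift.
    #
    #   ctext = caesar("ATTACKATDAWN", 3)   # -> "DWWDFNDWGDZQ"
    #   caesar(ctext, 23)                   # -> "ATTACKATDAWN" (23 == 26 - 3)
    #   best_key, best_decode = caesarCracker(ctext)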
|
[
"noreply@github.com"
] |
thanh-pham2k.noreply@github.com
|
ab5c2e1a5ffa93ac8af705b7899c622e9f953efe
|
a8593c0e3904c216d56d9c756518981538160a47
|
/day18.py
|
0ecb188c4ab90a96dac7b24b9016652c1c0f2560
|
[
"MIT"
] |
permissive
|
JnxF/advent-of-code-2020
|
cec423f14d18512d88a7da74f65ab3c7ab1bb07c
|
3a746d094e9ba292dd8e765ceb4be4a0ad46bb37
|
refs/heads/main
| 2023-02-03T18:54:14.985656
| 2020-12-25T13:36:21
| 2020-12-25T13:36:21
| 317,524,664
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,031
|
py
|
from aocd.models import Puzzle
from ast import parse, Constant, BinOp, Expression, Add
input: str = Puzzle(day=18, year=2020).input_data.splitlines()
def reverseEvaluator(exp):
if type(exp) == Constant:
return exp.value
elif type(exp) == Expression:
return reverseEvaluator(exp.body)
elif type(exp) == BinOp:
left = reverseEvaluator(exp.left)
right = reverseEvaluator(exp.right)
return left * right if type(exp.op) == Add else left + right
def simpleEvaluator(exp):
    # Part 1: + and * share one precedence level, so map them onto - and +
    # (which Python parses left to right at equal precedence); reverseEvaluator
    # then undoes the swap while computing.
    exp = exp.replace("+", "PLUS").replace("*", "TIMES")
    exp = exp.replace("PLUS", "-").replace("TIMES", "+")
    return reverseEvaluator(parse(exp, mode="eval"))
def part1():
return sum([simpleEvaluator(exp) for exp in input])
def advancedEvaluator(exp):
    # Part 2: + binds tighter than *, so map + onto * and * onto + before
    # parsing; reverseEvaluator swaps the operators back while computing.
    exp = exp.replace("+", "PLUS").replace("*", "TIMES")
    exp = exp.replace("PLUS", "*").replace("TIMES", "+")
    return reverseEvaluator(parse(exp, mode="eval"))
def part2():
return sum([advancedEvaluator(exp) for exp in input])
|
[
"frml@itu.dk"
] |
frml@itu.dk
|
ebd0332c76081d1ab9a2107cb20a7a76989d99bd
|
d67edf67e9ec5853b1887308c741a71b8ef6e196
|
/strategies.py
|
a3ad242243426997356ee02aa2c89e35b856e7f5
|
[] |
no_license
|
mayagbarnes/big-blind-defense
|
bf6184ce155a754562be0e2c4f9541dcb3ab9e3b
|
ce0e14e1752cbfbbb02fa78f35086ded8d67399a
|
refs/heads/master
| 2023-05-19T11:45:39.315443
| 2021-05-27T21:55:44
| 2021-05-27T21:55:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,787
|
py
|
from enum import Enum
import random
import numpy as np
class Action(Enum):
FOLD = 0
CALL = 1
RAISE = 2
class GameState(Enum):
START = 0
VS_OPEN = 1
VS_3BET = 2
VS_4BET = 3
HAND_OVER = 4
class Player(Enum):
HERO = 0
VILLAIN = 1
def _card_to_index(card):
return {
"A": 0,
"K": 1,
"Q": 2,
"J": 3,
"T": 4,
"9": 5,
"8": 6,
"7": 7,
"6": 8,
"5": 9,
"4": 10,
"3": 11,
"2": 12,
}[card]
def next_state(curr_state, action):
if action in [Action.CALL, Action.FOLD]:
return GameState.HAND_OVER
else: # action is Action.RAISE
return {
GameState.START: GameState.VS_OPEN,
GameState.VS_OPEN: GameState.VS_3BET,
GameState.VS_3BET: GameState.VS_4BET,
GameState.VS_4BET: GameState.HAND_OVER,
}[curr_state]
class DecisionPoint:
def __init__(self, range_matrix):
self.range_matrix = range_matrix
def _strategy_for_hand(self, hand):
range_matrix = self.range_matrix
# Hacky way to get cards and suits from a hand given how we format them.
c1, s1, _, c2, s2 = hand
# The convention that we follow in our range matrix is that suited hands
# correspond to entries that order the indices in increasing order, and
# offsuit hands correspond to those ordering the indices in decreasing
# order. For example, AKs is entry (0, 1) in our matrix, and AKo is entry
# (1, 0).
offsuit_hand = s1 != s2
i1, i2 = sorted([_card_to_index(c1), _card_to_index(c2)], reverse=offsuit_hand)
return self.range_matrix[i1][i2]
def is_valid_hand(self, hand):
"""Return whether the given hand is one that can be held at this decision
point if all players follow our approximations of Nash Equilibrium
strategies.
"""
        return sum(self._strategy_for_hand(hand)) > 0
def is_valid_action(self, hand, action):
raise_freq, call_freq, fold_freq = self._strategy_for_hand(hand)
if action == Action.RAISE:
return raise_freq > 0
elif action == Action.CALL:
return call_freq > 0
return fold_freq > 0
def pick_action(self, hand):
actions = (Action.RAISE, Action.CALL, Action.FOLD)
frequencies = self._strategy_for_hand(hand)
return random.choices(actions, weights=frequencies)
# fmt: off
UTG_vs_BB = {
GameState.START: DecisionPoint([
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
]),
GameState.VS_OPEN: DecisionPoint([
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
]),
GameState.VS_3BET: DecisionPoint([
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
]),
GameState.VS_4BET: DecisionPoint([
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
[ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0) ],
]),
}
# fmt: on
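# A minimal usage sketch (assumed, not part of the original module). The
# frequencies below are invented purely to illustrate the indexing convention
# documented in _strategy_for_hand: suited hands map to increasing
# (row, column) indices and offsuit hands to decreasing ones, so "As Ks" reads
# entry (0, 1) and "As Kd" reads entry (1, 0). Note that pick_action returns a
# one-element list, because random.choices does.
if __name__ == "__main__":
    _demo = DecisionPoint([[(0, 0, 1)] * 13 for _ in range(13)])
    _demo.range_matrix[0][1] = (1, 0, 0)  # hypothetical: always raise AKs
    _demo.range_matrix[1][0] = (0, 1, 0)  # hypothetical: always call AKo
    print(_demo.pick_action("As Ks"))     # [<Action.RAISE: 2>]
    print(_demo.pick_action("As Kd"))     # [<Action.CALL: 1>]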
|
[
"vincent.rdonato@gmail.com"
] |
vincent.rdonato@gmail.com
|
7fe816a59ea85a030ffa5705f7bacde34609fcf8
|
be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1
|
/DaVinci_v41r2/tuplemaking/signalmc2016/nostripping/joboptionsup.py
|
68c025c354c801bc56f4392a4754516bb2631537
|
[] |
no_license
|
Sally27/backup_cmtuser_full
|
34782102ed23c6335c48650a6eaa901137355d00
|
8924bebb935b96d438ce85b384cfc132d9af90f6
|
refs/heads/master
| 2020-05-21T09:27:04.370765
| 2018-12-12T14:41:07
| 2018-12-12T14:41:07
| 185,989,173
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,906
|
py
|
myJobName = '16NewMCtagcorrectUp'
myApplication = DaVinci(version="v41r2")
myApplication.optsfile = ['/home/hep/ss4314/cmtuser/DaVinci_v41r2/tuplemaking/signalmc2016/nostripping/B23MuNuSignalSelectionMagUp.py']
#Inputdata
#bk_down = BKQuery("/MC/2012/Beam4000GeV-2012-MagDown-Nu2.5-Pythia6/Sim08h/Digi13/Trig0x409f0045/Reco14c/Stripping20NoPrescalingFlagged/12513070/ALLSTREAMS.DST")
#data = bk_down.getDataset()
bk_up = BKQuery("/MC/2016/Beam6500GeV-2016-MagUp-Nu1.6-25ns-Pythia8/Sim09b/Trig0x6138160F/Reco16/Turbo03/Stripping26NoPrescalingFlagged/12513070/ALLSTREAMS.DST")
data = bk_up.getDataset()
#data.extend(bk_up.getDataset())
#bk_down2 = BKQuery("/MC/2012/Beam4000GeV-2012-MagDown-Nu2.5-Pythia8/Sim08h/Digi13/Trig0x409f0045/Reco14c/Stripping20NoPrescalingFlagged/12513070/ALLSTREAMS.DST")
#data.extend(bk_down2.getDataset())
#bk_up2 = BKQuery("/MC/2012/Beam4000GeV-2012-MagUp-Nu2.5-Pythia8/Sim08h/Digi13/Trig0x409f0045/Reco14c/Stripping20NoPrescalingFlagged/12513070/ALLSTREAMS.DST")
#data.extend(bk_up2.getDataset())
myOutputdata =['*.root']
#TupleFile = "Bplus23munu.root"
#extraOptsString = "DaVinci().DDDBtag = 'dddb-20130929-1';""DaVinci().CondDBtag = 'sim-20130522-1-vc-md100';""DaVinci().TupleFile = '"+TupleFile+"';"
myBackend = Dirac() # for test only!!
mySplitter = SplitByFiles()
mySplitter.filesPerJob = 1
### create and submit job
j = Job (
name = myJobName,
application = myApplication,
splitter = mySplitter,
# postprocessors = myMerger,
outputfiles = myOutputdata,
backend = myBackend,
inputdata = data
# extraopts = extraOptsString
)
j.inputfiles = [ LocalFile("/home/hep/ss4314/cmtuser/DaVinci_v41r2/tuplemaking/signalmc2016/nostripping/weights_110614_Lc_pX.xml")]
#j.inpudata=j.inputdata[:1]
print "Ok, please read the job options for job number %i then submit if it looks ok." % j.id
|
[
"slavomirastefkova@b2pcx39016.desy.de"
] |
slavomirastefkova@b2pcx39016.desy.de
|
de28198edff167e9255f5010f57a2cb06e486a20
|
6a041b116e307f86bada68202546597d8afd8003
|
/adress/migrations/0001_initial.py
|
f7340398afc6d68362167e49fbef9fb2d58b93c2
|
[] |
no_license
|
eilst/simpleDjango
|
abf791278a4f5ffbb74de42bfcd619c22849ac2c
|
123468e5e8f5edf2eaa5b902ce4de510aec39889
|
refs/heads/master
| 2021-07-04T22:08:09.230941
| 2017-09-27T16:02:54
| 2017-09-27T16:02:54
| 105,039,352
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 805
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-09-27 13:49
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Adress',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('adress', models.CharField(max_length=250)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"a01337755@gmail.com"
] |
a01337755@gmail.com
|
071723436dce13e92152d5823f054a4d5da28c33
|
81147b637857afe78aa8ca5cdd25001bcc058512
|
/opencv/MOG Bg.py
|
58f8e80eac826e5e7e6b1f0db85bc85a31ac177b
|
[] |
no_license
|
Shani0045/Machine-Learning
|
ccb0e4a22bc0baaedb92b3374fb42301e4bdbadd
|
9e60f6f45d8d170b256bb924689033e6dcde3fec
|
refs/heads/master
| 2022-04-09T06:01:03.705225
| 2020-02-29T01:11:02
| 2020-02-29T01:11:02
| 197,973,031
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 417
|
py
|
import cv2
cap=cv2.VideoCapture("D:/videos/Kinna_Sona_Full_Video___Marjaavaan___Sidharth_M%2C_Tara_S___Meet_Bros%2CJubin_N%2C_Dhvani_Bhanushali(1080p).mp4")
fgbg=cv2.createBackgroundSubtractorMOG2()
while True:
ret,frame=cap.read()
fgmask=fgbg.apply(frame)
cv2.imshow("original",frame)
cv2.imshow("fg",fgmask)
if cv2.waitKey(1)==13:
break
cap.release()
cv2.destroyAllWindows()
|
[
"noreply@github.com"
] |
Shani0045.noreply@github.com
|
3b898c31353d70e4e746ca2d0f3ec6210e14337d
|
c28c05ffabe58a5ed903534c7d1e6832c155db97
|
/evolution.py
|
f7380ebb322f38a1e90b66a7dc46fbb2f06cfafb
|
[] |
no_license
|
novikmeister/Labs
|
7d4dd4496cf41ec907cd75be0e40993bcfd80493
|
05273db11fe9166b6197391f70dc7b8c69df3486
|
refs/heads/main
| 2023-01-30T04:34:32.421635
| 2020-12-15T19:26:21
| 2020-12-15T19:26:21
| 321,743,582
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 703
|
py
|
import matplotlib.pyplot as plt # TASK 2
import numpy as np
f = open("frames.txt")
frames = []
N = int(sum(1 for i in f))
f.seek(0)
for _ in range(int(N/2)):
x = [float(x) for x in f.readline().split()]
y = [float(y) for y in f.readline().split()]
frames.append(np.array([x, y]))
frames = np.array(frames)
fig, axs = plt.subplots(3, 2, figsize=(15, 12))
for _ in range(0, 3):
axs[_, 0].plot(frames[_, 0], frames[_, 1])
axs[_, 0].title.set_text("Frame "+str(_))
axs[_, 0].grid()
axs[_, 1].plot(frames[_+3, 0], frames[_+3, 1])
axs[_, 1].title.set_text("Frame "+str(_+3))
axs[_, 1].grid()
plt.setp(axs, xlim=(0, 17), ylim=(-10, 12))
plt.savefig("res.png")
plt.show()
|
[
"noreply@github.com"
] |
novikmeister.noreply@github.com
|
220eafb29768411d5c23c12f2657fe453879a4a4
|
f935d9d2f3ce54bbe4242533d1bdb79e33d96b76
|
/rrmpg/utils/metrics.py
|
ea1073d54cda9136ad282679f3545f6edae75d2e
|
[
"MIT"
] |
permissive
|
nduqueg/RRMPG
|
b1f92adfd7e2d0d8eab86b96b20779e6984dbf6d
|
6f3e4d694d97f3f92df5380b5ca88bcc123199e9
|
refs/heads/master
| 2020-04-12T06:11:39.674666
| 2017-11-28T20:42:59
| 2017-11-28T20:42:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,577
|
py
|
# -*- coding: utf-8 -*-
# This file is part of RRMPG.
#
# RRMPG is free software with the aim to provide a playground for experiments
# with hydrological rainfall-runoff-models while achieving competitive
# performance results.
#
# You should have received a copy of the MIT License along with RRMPG. If not,
# see <https://opensource.org/licenses/MIT>
"""
Implementation of evaluation metrics for e.g. hydrological model simulations.
Implemented functions:
nse: Calculate the Nash-Sutcliffe model efficiency coefficient.
rmse: Calculate the root mean squared error.
mse: Calculate the mean squared error.
"""
import numpy as np
from .array_checks import validate_array_input
def nse(obs, sim):
"""Calculate the Nash-Sutcliffe model efficiency coefficient.
Original Publication:
        Nash, J. Eamonn, and John V. Sutcliffe. "River flow forecasting through
conceptual models part I—A discussion of principles." Journal of
hydrology 10.3 (1970): 282-290.
Args:
obs: Array of the observed values
sim: Array of the simulated values
Returns:
The NSE value for the simulation, compared to the observation.
Raises:
ValueError: If the arrays are not of equal size or have non-numeric
values.
        TypeError: If the arrays are not of a supported datatype.
        RuntimeError: If all values in obs are equal. The NSE is not defined
            for this case.
"""
# Validation check on the input arrays
obs = validate_array_input(obs, np.float64, 'obs')
sim = validate_array_input(sim, np.float64, 'sim')
# denominator of the fraction term
denominator = np.sum((obs-np.mean(obs))**2)
# this would lead to a division by zero error and nse is defined as -inf
if denominator == 0:
msg = ["The Nash-Sutcliffe-Efficiency coefficient is not defined ",
"for the case, that all values in the observations are equal.",
" Maybe you should use the Mean-Squared-Error instead."]
raise RuntimeError("".join(msg))
# numerator of the fraction term
numerator = np.sum((sim-obs)**2)
# calculate the NSE
nse = 1 - numerator/denominator
return nse
def rmse(obs, sim):
"""Calculate the root mean squared error.
Args:
obs: Array of the observed values
sim: Array of the simulated values
Returns:
The RMSE value for the simulation, compared to the observation.
Raises:
ValueError: If the arrays are not of equal size or have non-numeric
values.
        TypeError: If the arrays are not of a supported datatype.
"""
# Validation check on the input arrays
obs = validate_array_input(obs, np.float64, 'obs')
sim = validate_array_input(sim, np.float64, 'sim')
# Calculate the rmse value
rmse = np.sqrt(np.mean((obs-sim)**2))
return rmse
def mse(obs, sim):
"""Calculate the mean squared error.
Args:
obs: Array of the observed values
sim: Array of the simulated values
Returns:
The MSE value for the simulation, compared to the observation.
Raises:
ValueError: If the arrays are not of equal size or have non-numeric
values.
        TypeError: If the arrays are not of a supported datatype.
"""
# Validation check on the input arrays
obs = validate_array_input(obs, np.float64, 'obs')
sim = validate_array_input(sim, np.float64, 'sim')
    # Calculate the mse value
mse = np.mean((obs-sim)**2)
return mse
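if __name__ == "__main__":
    # A minimal usage sketch (assumed, not part of the original module); run it
    # as `python -m rrmpg.utils.metrics` so the relative import above resolves.
    # A perfect simulation gives NSE = 1 and RMSE = MSE = 0.
    _obs = np.array([1.0, 2.0, 3.0, 4.0])
    _sim = np.array([1.0, 2.0, 3.0, 4.0])
    print(nse(_obs, _sim))   # 1.0
    print(rmse(_obs, _sim))  # 0.0
    print(mse(_obs, _sim))   # 0.0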
|
[
"f.kratzert@gmail.com"
] |
f.kratzert@gmail.com
|
9cacf59e819a340c50995cbc4a5438b065e24504
|
2a4fb2c75546a8752ab83a7a3808c9f689677d33
|
/2019_12_16/FunkSVD.py
|
270c7f455cef6589274f2b0e09a53e3123fbd341
|
[
"MIT"
] |
permissive
|
qing0991/ML_Practice
|
ae6d55275c98a223a3bbb05442f5557f6290e0a3
|
03dba80b425843ce8d7003a85dbc2a3ad8aae13b
|
refs/heads/master
| 2022-12-06T05:36:03.868371
| 2020-08-23T14:30:10
| 2020-08-23T14:30:10
| 292,324,698
| 1
| 0
|
MIT
| 2020-09-02T15:41:02
| 2020-09-02T15:41:01
| null |
UTF-8
|
Python
| false
| false
| 3,170
|
py
|
import numpy as np
import pandas as pd
import os
import matplotlib.pyplot as plt
from datetime import date,datetime
import logging
import torch
from torch import nn
from torch import optim
from torch.autograd import Variable
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
device
# ----------------------------------------
# Three ways of computing the loss, below
# ----------------------------------------
def PredictR(P, Q, R):
"""
最基本的FunkSVD
"""
R_pred = torch.mm(P, Q.t()) # 矩阵相乘
N,M = R.shape
loss = 0
for i in range(N):
for j in range(M):
if R[i][j] > 0:
loss = loss + (R_pred[i][j]-R[i][j])**2
return loss
def PredictRegularizationR(P, Q, R):
"""
FunkSVD+Regularization
"""
    B = 0.02  # regularization coefficient
    R_pred = torch.mm(P, Q.t())  # matrix multiplication
N,M = R.shape
loss = 0
for i in range(N):
for j in range(M):
if R[i][j] > 0:
loss = loss + (R_pred[i][j]-R[i][j])**2
    loss = loss + B*((P**2).sum() + (Q**2).sum())  # add the regularization term
return loss
def PredictRegularizationConstrainR(P, Q, R):
"""
    FunkSVD + regularization + constraints on R (predicted values limited to 0-5) and on P, Q (non-negative)
    """
    B = 0.1  # regularization coefficient
    R_pred = torch.mm(P, Q.t())  # matrix multiplication
N,M = R.shape
loss = 0
for i in range(N):
for j in range(M):
if R[i][j] > 0:
loss = loss + (R_pred[i][j]-R[i][j])**2
            elif R[i][j] == 0:  # constrain the range of predictions for unobserved entries
if R_pred[i][j] > 5:
loss = loss + (R_pred[i][j]-5)**2
elif R_pred[i][j] < 0:
loss = loss + (R_pred[i][j]-0)**2
    loss = loss + B*((P**2).sum() + (Q**2).sum())  # add the regularization term
    # constrain the range of P and Q (penalize negative entries)
loss = loss + ((P[P<0]**2).sum() + (Q[Q<0]**2).sum())
return loss
if __name__ == "__main__":
    # the original rating matrix R (0 = unobserved entry)
R = np.array([[5.0, 3.0, 0.0, 1.0],
[4.0, 0.0 ,0.0 ,1.0],
[1.0, 1.0, 0.0, 5.0],
[1.0, 0.0, 0.0, 4.0],
[0.0, 1.0, 5.0, 4.0]])
N,M = R.shape
K = 2
R = torch.from_numpy(R).float()
    # initialize the matrices P and Q
P = Variable(torch.randn(N, K), requires_grad=True)
Q = Variable(torch.randn(M, K), requires_grad=True)
    # --------------------
    # Define the optimizer
    # --------------------
learning_rate = 0.02
optimizer = torch.optim.Adam([P,Q], lr=learning_rate)
    lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1000, gamma=0.9)  # decay the learning rate to 90% of its value every 1000 epochs
    # --------------
    # Start training
    # --------------
num_epochs = 5000
for epoch in range(num_epochs):
lr_scheduler.step()
        # compute the loss
loss = PredictRegularizationConstrainR(P, Q, R)
        # backpropagate and update the matrices P and Q
        optimizer.zero_grad()  # clear the optimizer's gradients
        loss.backward()        # backpropagation
        optimizer.step()       # take an optimization step
if epoch % 20 ==0:
print(epoch,loss)
    # compute the final matrices P and Q, and their product P*Q
R_pred = torch.mm(P, Q.t())
print(R_pred)
print('-'*10)
print(P)
print('-'*10)
print(Q)
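    # A brief follow-up sketch (assumed, not in the original script): entries of
    # R that were 0 are unobserved ratings, and the corresponding entries of
    # R_pred can be read as the model's estimates for them, e.g. user 0, item 2:
    print(R_pred[0][2].item())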
|
[
"18811371255@163.com"
] |
18811371255@163.com
|
f17310c367e7e12cbab44d5a7348dc0f92e9f598
|
0c78499de3117281780d4b0f1c80cfb9b4d29d76
|
/src/htmlparser/fsm_config.py
|
ff27c896686a2b0c4bf352e79868d6180f3f7ff2
|
[
"BSD-3-Clause",
"FSFAP"
] |
permissive
|
readdle/ctemplate
|
8b290501836c51d4dcc019a1efdf163db7aeaee3
|
d064089b9f9ba785951236388f291a992ea2bcd0
|
refs/heads/master
| 2023-06-29T14:50:17.687832
| 2021-07-30T10:05:37
| 2021-07-30T10:05:37
| 103,137,183
| 0
| 1
|
BSD-3-Clause
| 2020-11-17T14:37:27
| 2017-09-11T13:08:19
|
C++
|
UTF-8
|
Python
| false
| false
| 6,993
|
py
|
#!/usr/bin/env python
#
# Copyright (c) 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ---
#
# Create a state machine object based on a definition file.
#
__author__ = 'falmeida@google.com (Filipe Almeida)'
class OrderedDict:
"""Ordered dictionary implementation."""
# Define the minimum functionality we need for our application.
# Easiser would be to subclass from UserDict.DictMixin, and only
# define __getitem__, __setitem__, __delitem__, and keys, but that's
# not as portable. We don't need to define much more, so we just do.
def __init__(self):
self._dict = {}
self._keys = []
def __getitem__(self, key):
return self._dict[key]
def __setitem__(self, key, value):
if key not in self._keys:
self._keys.append(key)
self._dict[key] = value
def __delitem__(self, key):
self._keys.remove(key)
del self._dict[key]
def keys(self):
return self._keys
# Below are all we have to define in addition to what DictMixin would need
def __len__(self):
return len(self.keys())
def __contains__(self, key):
    return key in self._dict
def __iter__(self):
# It's not as portable -- though it would be more space-efficient -- to do
# for k in self.keys(): yield k
return iter(self.keys())
class State(object):
"""Contains information about a specific state."""
def __init__(self):
pass
name = None
external_name = None
transitions = []
class Transition(object):
"""Contains information about a specific transition."""
def __init__(self, condition, source, destination):
self.condition = condition
self.source = source
self.destination = destination
class FSMConfig(object):
"""Container for the statemachine definition."""
sm = {} # dictionary that contains the finite state machine definition
# loaded from a config file.
transitions = [] # List of transitions.
conditions = {} # Mapping between the condition name and the bracket
# expression.
states = OrderedDict() # Ordered dictionary of states.
name = None
comment = None
def AddState(self, **dic):
"""Called from the definition file with the description of the state.
Receives a dictionary and populates internal structures based on it. The
dictionary is in the following format:
{'name': state_name,
'external': exposed state name,
'transitions': [
[condition, destination_state ],
[condition, destination_state ]
]
}
"""
state = State()
state.name = dic['name']
state.external_name = dic['external']
state_transitions = []
for (condition, destination) in dic['transitions']:
transition = Transition(condition, state.name, destination)
state_transitions.append(transition)
self.transitions.extend(state_transitions)
state.transitions = state_transitions
self.states[state.name] = state
def AddCondition(self, name, expression):
"""Called from the definition file with the definition of a condition.
    Receives the name of the condition and its expression.
"""
self.conditions[name] = expression
def Load(self, filename):
"""Load the state machine definition file.
In the definition file, which is based on the python syntax, the following
variables and functions are defined.
name: Name of the state machine
comment: Comment line on the generated file.
condition(): A mapping between condition names and bracket expressions.
state(): Defines a state and it's transitions. It accepts the following
attributes:
name: name of the state
external: exported name of the state. The exported name can be used
multiple times in order to create a super state.
transitions: List of pairs containing the condition for the transition
and the destination state. Transitions are ordered so if
a default rule is used, it must be the last one in the list.
Example:
name = 'c comment parser'
condition('/', '/')
condition('*', '*')
condition('linefeed', '\\n')
condition('default', '[:default:]')
state(name = 'text',
external = 'comment',
transitions = [
[ '/', 'comment_start' ],
[ 'default', 'text' ]
])
state(name = 'comment_start',
external = 'comment',
transitions = [
[ '/', 'comment_line' ],
[ '*', 'comment_multiline' ],
[ 'default', 'text' ]
])
state(name = 'comment_line',
external = 'comment',
transitions = [
[ 'linefeed', 'text' ],
[ 'default', 'comment_line' ]
])
state(name = 'comment_multiline',
external = 'comment',
transitions = [
[ '*', 'comment_multiline_close' ],
[ 'default', 'comment_multiline' ]
])
state(name = 'comment_multiline_close',
external = 'comment',
transitions = [
[ '/', 'text' ],
[ 'default', 'comment_multiline' ]
])
"""
self.sm['state'] = self.AddState
self.sm['condition'] = self.AddCondition
execfile(filename, self.sm)
self.name = self.sm['name']
if not self.name.isalnum():
raise Exception("State machine name must consist of only alphanumeric"
"characters.")
self.comment = self.sm['comment']
def __init__(self):
pass
|
[
"csilvers@cb39b542-eb2e-2a05-867b-f1791dd5f601"
] |
csilvers@cb39b542-eb2e-2a05-867b-f1791dd5f601
|
d660b8f6f725d3cd5c10ca886c02fb972c6e269d
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2948/8246/310215.py
|
b758e25e47b223df14d6b4ad25555750e6dc585b
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011
| 2020-07-28T16:21:24
| 2020-07-28T16:21:24
| 259,576,640
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 596
|
py
|
def toNum(str, ST):
a = []
for i in str:
a.append(ord(i) + ST - ord('A'))
s = ''
for i in a:
b = repr(i)
s = s + b
return s
def cal(num):
yuan = 0
tem = 0
end = 0
num1 = ''
for i, n in enumerate(num):
if i < len(num) - 1:
tem = (int(num[i + 1]) + int(num[i])) % 10
num1 = num1 + repr(tem)
yuan += (tem * pow(10, (len(num) - 2 - i)))
if yuan <= 100:
print(yuan,end="")
end = 1
if end == 0:
cal(num1)
a = input()
b = input()
num = toNum(a, int(b))
cal(num)
|
[
"1069583789@qq.com"
] |
1069583789@qq.com
|
81edb177b636d7282ab003407f5fa089d31d0e6d
|
c66955c6fc178955c2024e0318ec7a91a8386c2d
|
/testframework/excise/utils/capturepic.py
|
ff5b329a4d6139ca203f1c8101ee25e3b8be21c2
|
[] |
no_license
|
duheng18/python-study
|
a98642d6ee1b0043837c3e7c5b91bf1e28dfa588
|
13c0571ac5d1690bb9e615340482bdb2134ecf0e
|
refs/heads/master
| 2022-11-30T17:36:57.060130
| 2019-11-18T07:31:40
| 2019-11-18T07:31:40
| 147,268,053
| 1
| 0
| null | 2022-11-22T03:36:51
| 2018-09-04T00:49:42
|
Python
|
UTF-8
|
Python
| false
| false
| 180
|
py
|
#!/usr/bin/env python
# encoding: utf-8
from PIL import ImageGrab
def capture_screen(name):
pic = ImageGrab.grab()
rgb_im = pic.convert('RGB')
rgb_im.save(name+'.jpg')
|
[
"emaildh@163.com"
] |
emaildh@163.com
|
25e3abd88e5495b1d169ae76202aff290c4c0f12
|
837e3c702227e315add14614c13b6bd8e26751f4
|
/packages/python3/predefined.py
|
10a12a4d4d5e2a0a240295eaa6e4f224918779de
|
[] |
no_license
|
Somewater/raic2019
|
22887670f5db92f4712d249626460240711af199
|
8b8496c4e07ec3d76c616390d322cc674f3b15cf
|
refs/heads/main
| 2023-02-09T02:07:10.342847
| 2019-01-24T21:30:56
| 2019-01-24T21:32:57
| 325,128,711
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,908
|
py
|
from typing import Tuple
from model import *
from vector import Vector3D
def dan_to_plane(point: Vector3D, point_on_plane: Vector3D, plane_normal: Vector3D) -> Tuple[float, Vector3D]:
return (point - point_on_plane).dot(plane_normal), plane_normal
def dan_to_sphere_inner(point: Vector3D, sphere_center: Vector3D, sphere_radius: float) -> Tuple[float, Vector3D]:
return sphere_radius - (point - sphere_center).len(), (sphere_center - point).normalize()
def dan_to_sphere_outer(point: Vector3D, sphere_center: Vector3D, sphere_radius: float):
return (point - sphere_center).len() - sphere_radius, (point - sphere_center).normalize()
def clamp_float(value: float, minumum: float, maximum: float) -> float:
return max(min(value, maximum), minumum)
def clamp_vector(vector: Vector3D, length: float) -> Vector3D:
if vector.len() <= length:
return vector
return vector.normalize() * length
def min_dan(v1: Tuple[float, Vector3D], v2: Tuple[float, Vector3D]) -> Tuple[float, Vector3D]:
if v1[0] < -1.0:
        raise RuntimeError('distance too big: %f' % v1[0])
if v2[0] < -1.0:
        raise RuntimeError('distance too big: %f' % v2[0])
if v1[0] < v2[0]:
return v1
else:
return v2
def dan_to_arena_quarter(arena: Arena, point: Vector3D) -> Tuple[float, Vector3D]:
# Ground
dan = dan_to_plane(point, Vector3D(0, 0, 0), Vector3D(0, 1, 0))
# Ceiling
dan = min_dan(dan, dan_to_plane(point, Vector3D(0, arena.height, 0), Vector3D(0, -1, 0)))
# Side x
dan = min_dan(dan, dan_to_plane(point, Vector3D(arena.width / 2, 0, 0), Vector3D(-1, 0, 0)))
# Side z (goal)
dan = min_dan(dan, dan_to_plane(
point,
Vector3D(0, 0, (arena.depth / 2) + arena.goal_depth),
Vector3D(0, 0, -1)))
# Side z
v = Vector3D(point.x, point.y, 0) - \
Vector3D(
(arena.goal_width / 2) - arena.goal_top_radius,
arena.goal_height - arena.goal_top_radius,
0)
if point.x >= (arena.goal_width / 2) + arena.goal_side_radius \
or point.y >= arena.goal_height + arena.goal_side_radius \
or (v.x > 0 and v.y > 0 and v.len() >= arena.goal_top_radius + arena.goal_side_radius):
dan = min_dan(dan, dan_to_plane(point, Vector3D(0, 0, arena.depth / 2), Vector3D(0, 0, -1)))
# Side x & ceiling (goal)
if point.z >= (arena.depth / 2) + arena.goal_side_radius:
# x
dan = min_dan(dan, dan_to_plane(
point,
Vector3D(arena.goal_width / 2, 0, 0),
Vector3D(-1, 0, 0)))
# y
dan = min_dan(dan, dan_to_plane(point, Vector3D(0, arena.goal_height, 0), Vector3D(0, -1, 0)))
# Goal back corners
assert arena.bottom_radius == arena.goal_top_radius
if point.z > (arena.depth / 2) + arena.goal_depth - arena.bottom_radius:
dan = min_dan(dan, dan_to_sphere_inner(
point,
Vector3D(
clamp_float(
point.x,
arena.bottom_radius - (arena.goal_width / 2),
(arena.goal_width / 2) - arena.bottom_radius,
),
clamp_float(
point.y,
arena.bottom_radius,
arena.goal_height - arena.goal_top_radius,
),
(arena.depth / 2) + arena.goal_depth - arena.bottom_radius),
arena.bottom_radius))
# Corner
if point.x > (arena.width / 2) - arena.corner_radius \
and point.z > (arena.depth / 2) - arena.corner_radius:
dan = min_dan(dan, dan_to_sphere_inner(
point,
Vector3D(
(arena.width / 2) - arena.corner_radius,
point.y,
(arena.depth / 2) - arena.corner_radius
),
arena.corner_radius))
# Goal outer corner
if point.z < (arena.depth / 2) + arena.goal_side_radius:
# Side x
if point.x < (arena.goal_width / 2) + arena.goal_side_radius:
dan = min_dan(dan, dan_to_sphere_outer(
point,
Vector3D(
(arena.goal_width / 2) + arena.goal_side_radius,
point.y,
(arena.depth / 2) + arena.goal_side_radius
),
arena.goal_side_radius))
# Ceiling
if point.y < arena.goal_height + arena.goal_side_radius:
dan = min_dan(dan, dan_to_sphere_outer(
point,
Vector3D(
point.x,
arena.goal_height + arena.goal_side_radius,
(arena.depth / 2) + arena.goal_side_radius
),
arena.goal_side_radius))
# Top corner
o = Vector3D(
(arena.goal_width / 2) - arena.goal_top_radius,
arena.goal_height - arena.goal_top_radius,
0)
v = point - o
if v.x > 0 and v.y > 0:
o = o + v.normalize() * (arena.goal_top_radius + arena.goal_side_radius)
dan = min_dan(dan, dan_to_sphere_outer(
point,
Vector3D(o.x, o.y, (arena.depth / 2) + arena.goal_side_radius),
arena.goal_side_radius))
# Goal inside top corners
if point.z > (arena.depth / 2) + arena.goal_side_radius \
and point.y > arena.goal_height - arena.goal_top_radius:
# Side x
if point.x > (arena.goal_width / 2) - arena.goal_top_radius:
dan = min_dan(dan, dan_to_sphere_inner(
point,
Vector3D(
(arena.goal_width / 2) - arena.goal_top_radius,
arena.goal_height - arena.goal_top_radius,
point.z
),
arena.goal_top_radius))
# Side z
if point.z > (arena.depth / 2) + arena.goal_depth - arena.goal_top_radius:
dan = min_dan(dan, dan_to_sphere_inner(
point,
Vector3D(
point.x,
arena.goal_height - arena.goal_top_radius,
(arena.depth / 2) + arena.goal_depth - arena.goal_top_radius
),
arena.goal_top_radius))
# Bottom corners
if point.y < arena.bottom_radius:
# Side x
if point.x > (arena.width / 2) - arena.bottom_radius:
dan = min_dan(dan, dan_to_sphere_inner(
point,
Vector3D(
(arena.width / 2) - arena.bottom_radius,
arena.bottom_radius,
point.z
),
arena.bottom_radius))
# Side z
if point.z > (arena.depth / 2) - arena.bottom_radius \
and point.x >= (arena.goal_width / 2) + arena.goal_side_radius:
dan = min_dan(dan, dan_to_sphere_inner(
point,
Vector3D(
point.x,
arena.bottom_radius,
(arena.depth / 2) - arena.bottom_radius
),
arena.bottom_radius))
# Side z (goal)
if point.z > (arena.depth / 2) + arena.goal_depth - arena.bottom_radius:
dan = min_dan(dan, dan_to_sphere_inner(
point,
Vector3D(
point.x,
arena.bottom_radius,
(arena.depth / 2) + arena.goal_depth - arena.bottom_radius
),
arena.bottom_radius))
# Goal outer corner
o = Vector3D(
(arena.goal_width / 2) + arena.goal_side_radius,
(arena.depth / 2) + arena.goal_side_radius,
0)
v = Vector3D(point.x, point.z, 0) - o
if v.x < 0 and v.y < 0 \
and v.len() < arena.goal_side_radius + arena.bottom_radius:
o = o + v.normalize() * (arena.goal_side_radius + arena.bottom_radius)
dan = min_dan(dan, dan_to_sphere_inner(
point,
Vector3D(o.x, arena.bottom_radius, o.y),
arena.bottom_radius))
# Side x (goal)
if point.z >= (arena.depth / 2) + arena.goal_side_radius \
and point.x > (arena.goal_width / 2) - arena.bottom_radius:
dan = min_dan(dan, dan_to_sphere_inner(
point,
Vector3D(
(arena.goal_width / 2) - arena.bottom_radius,
arena.bottom_radius,
point.z
),
arena.bottom_radius))
# Corner
if point.x > (arena.width / 2) - arena.corner_radius \
and point.z > (arena.depth / 2) - arena.corner_radius:
corner_o = Vector3D(
(arena.width / 2) - arena.corner_radius,
(arena.depth / 2) - arena.corner_radius,
0
)
n = Vector3D(point.x, point.z, 0) - corner_o
dist = n.len()
if dist > arena.corner_radius - arena.bottom_radius:
n = n * (1/dist)
o2 = corner_o + n * (arena.corner_radius - arena.bottom_radius)
dan = min_dan(dan, dan_to_sphere_inner(
point,
Vector3D(o2.x, arena.bottom_radius, o2.y),
arena.bottom_radius))
# Ceiling corners
if point.y > arena.height - arena.top_radius:
# Side x
if point.x > (arena.width / 2) - arena.top_radius:
dan = min_dan(dan, dan_to_sphere_inner(
point,
Vector3D(
(arena.width / 2) - arena.top_radius,
arena.height - arena.top_radius,
point.z,
),
arena.top_radius))
# Side z
if point.z > (arena.depth / 2) - arena.top_radius:
dan = min_dan(dan, dan_to_sphere_inner(
point,
Vector3D(
point.x,
arena.height - arena.top_radius,
(arena.depth / 2) - arena.top_radius,
),
arena.top_radius))
# Corner
if point.x > (arena.width / 2) - arena.corner_radius \
and point.z > (arena.depth / 2) - arena.corner_radius:
corner_o = Vector3D(
(arena.width / 2) - arena.corner_radius,
(arena.depth / 2) - arena.corner_radius,
0
)
dv = Vector3D(point.x, point.z, 0) - corner_o
if dv.len() > arena.corner_radius - arena.top_radius:
n = dv.normalize()
o2 = corner_o + n * (arena.corner_radius - arena.top_radius)
dan = min_dan(dan, dan_to_sphere_inner(
point,
Vector3D(o2.x, arena.height - arena.top_radius, o2.y),
arena.top_radius))
return dan
def dan_to_arena(arena: Arena, point: Vector3D):
point = Vector3D(point.get_x(), point.get_y(), point.get_z())
negate_x = point.x < 0
negate_z = point.z < 0
if negate_x:
point.x = -point.x
if negate_z:
point.z = -point.z
result_distance, result_normal = dan_to_arena_quarter(arena, point)
if negate_x:
result_normal.x = -result_normal.x
if negate_z:
result_normal.z = -result_normal.z
return (result_distance, result_normal)
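# A small, self-contained illustration (hypothetical helper, not part of this project)
# of the same quarter-mirroring trick used by dan_to_arena above: compute the distance
# and normal for |x|, |z| only, then flip the normal components back for mirrored points.
def dist_to_wall_2d(x, z, half_width=30.0, half_depth=40.0):
    negate_x, negate_z = x < 0, z < 0
    qx, qz = abs(x), abs(z)
    dx, dz = half_width - qx, half_depth - qz  # wall distances in the positive quarter
    d, (nx, nz) = (dx, (-1.0, 0.0)) if dx < dz else (dz, (0.0, -1.0))
    if negate_x:
        nx = -nx
    if negate_z:
        nz = -nz
    return d, (nx, nz)
print(dist_to_wall_2d(-25.0, 10.0))  # (5.0, (1.0, 0.0)): nearest wall is x = -30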
|
[
"naydenov.p.v@gmail.com"
] |
naydenov.p.v@gmail.com
|
336571ac6ea33727134c2ca4fa831e2edf29be84
|
bf79223acb138bfe077ca34ef72d44579b3efcbe
|
/CrawlerProject/middlewares.py
|
76793c862fabf0dedd745777223a2ead9e9127a8
|
[] |
no_license
|
trinhvanson1997/Crawler
|
e5d9c599d61349ac1f52ce325bf2502ceb8e13b8
|
638014c7ea1cdd096f45ec5350c96ed6fc41c87c
|
refs/heads/master
| 2020-06-21T16:50:28.285596
| 2019-12-17T03:24:46
| 2019-12-17T03:24:46
| 197,506,847
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,605
|
py
|
# -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
class NewprojectSpiderMiddleware(object):
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the spider middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_spider_input(self, response, spider):
# Called for each response that goes through the spider
# middleware and into the spider.
# Should return None or raise an exception.
return None
def process_spider_output(self, response, result, spider):
# Called with the results returned from the Spider, after
# it has processed the response.
# Must return an iterable of Request, dict or Item objects.
for i in result:
yield i
def process_spider_exception(self, response, exception, spider):
# Called when a spider or process_spider_input() method
# (from other spider middleware) raises an exception.
# Should return either None or an iterable of Response, dict
# or Item objects.
pass
def process_start_requests(self, start_requests, spider):
# Called with the start requests of the spider, and works
# similarly to the process_spider_output() method, except
# that it doesn’t have a response associated.
# Must return only requests (not items).
for r in start_requests:
yield r
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
class NewprojectDownloaderMiddleware(object):
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the downloader middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_request(self, request, spider):
# Called for each request that goes through the downloader
# middleware.
# Must either:
# - return None: continue processing this request
# - or return a Response object
# - or return a Request object
# - or raise IgnoreRequest: process_exception() methods of
# installed downloader middleware will be called
return None
def process_response(self, request, response, spider):
# Called with the response returned from the downloader.
# Must either;
# - return a Response object
# - return a Request object
# - or raise IgnoreRequest
return response
def process_exception(self, request, exception, spider):
# Called when a download handler or a process_request()
# (from other downloader middleware) raises an exception.
# Must either:
# - return None: continue processing this exception
# - return a Response object: stops process_exception() chain
# - return a Request object: stops process_exception() chain
pass
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
|
[
"trinhvanson1997@gmail.com"
] |
trinhvanson1997@gmail.com
|
4ecf64346a9186c32e333be143fcd0a82394f5b1
|
78f5fc0518ea7569df872940743772dbd1ca96ab
|
/4_python&database/function_to_insert.py
|
c6995cfe71152e1e9ba2b555bdeda02a08ecd985
|
[] |
no_license
|
Gosiak87/python_training
|
7443974520bd0a3c3335011daed5c2038662859f
|
676a417b62d6d3985d84d469ca11016dda9e6156
|
refs/heads/master
| 2021-04-12T11:45:33.484314
| 2018-04-10T11:00:13
| 2018-04-10T11:00:13
| 126,743,024
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,132
|
py
|
from psycopg2 import connect
USER = 'postgres'
PASSWORD = 'coderslab'
HOST = 'localhost'
DB_NAME = 'exercises_db'
def insert_product(cursor, product):
sql = '''
INSERT INTO product VALUES (DEFAULT, '{}', '{}', {});
'''.format(product['name'], product['description'], product['price'])
cursor.execute(sql)
def insert_order(cursor, order):
sql = '''
INSERT INTO orderx VALUES (DEFAULT, '{}');
'''.format(order['description'])
cursor.execute(sql)
def insert_client(cursor, client):
sql = '''
INSERT INTO client VALUES (DEFAULT, '{}', '{}');
'''.format(client['name'], client['surname'])
cursor.execute(sql)
cnx = connect(
user=USER,
password=PASSWORD,
host=HOST,
database=DB_NAME
)
cnx.autocommit = True
cursor = cnx.cursor()
insert_product(cursor, {'name': 'Mleko', 'description': 'mleko z krowy 2%', 'price': 2.5})
insert_product(cursor, {'name': 'Mleko', 'description': 'mleko z krowy 5%', 'price': 2.7})
insert_order(cursor, {'description': 'no siema'})
insert_client(cursor, {'name': 'Tomy', 'surname': 'Lee Jones'})
cursor.close()
cnx.close()
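# A hedged alternative sketch (same table layout assumed): psycopg2's parameter
# binding with %s placeholders avoids the manual quoting and SQL-injection issues
# that the str.format() calls above are exposed to.
def insert_product_safe(cursor, product):
    sql = "INSERT INTO product VALUES (DEFAULT, %s, %s, %s);"
    cursor.execute(sql, (product['name'], product['description'], product['price']))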
|
[
"miziolekgosia@gmail.com"
] |
miziolekgosia@gmail.com
|
619a9f0fd967e1b2b3ac08f18ca81153d942dfd4
|
256040ad2e34c042611b526fd1c9c2d39b28afed
|
/cv2test.py
|
29ca4774324a3d3ebf5d85f510dc61b0329c64df
|
[] |
no_license
|
BabaJaguska/unseemlyPythonStuff
|
282b34e5ae912984ba4ed3aa82b18d4fdf2ffe07
|
f159d4eebbca0ee706168b986d53e7ae560c90bb
|
refs/heads/master
| 2022-03-09T20:36:42.940906
| 2022-02-15T21:26:56
| 2022-02-15T21:26:56
| 102,620,779
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,033
|
py
|
# import numpy
# import os
from PyQt4.QtGui import * # GUI, for file dialog
import sys
import cv2 # OpenCV
from matplotlib import pyplot as plt
###########################################################
plt.close("all")
# so this doesn't seem to close previous session
# print(os.getcwd())
# os.chdir("C:/Users/Mini/My Documents")
###########################################################
#### Choose Image to Analyze - DIALOG ####
app = QApplication(sys.argv)
w = QWidget()
dialog = QFileDialog()
dialog.exec_()
file = dialog.selectedFiles()
# Load the chosen image
img = cv2.imread(file[0])
print("Chosen image is of shape: ", img.shape)
##########################################################
#### Calculate COLOR CHANNEL HISTOGRAMS ####
# default OpenCV color scheme is BGR, rather than RGB.
# So if you are displaying color images in matplotlib but read them in open cv,
# they will have inverted channels!! plt: rgb, cv2: bgr
# plt.figure(4)
# plt.imshow(img)
# see? but you can do this:
imgRGB = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
# plt.figure(5)
# plt.imshow(imgRGB)
hists = [cv2.calcHist([imgRGB], [x], None, [256], [0, 255]) for x in range(0, 3)]
# all these arguments need to be in square brackets!!
# the args are: image, channels, mask, histSize, ranges
# For color image, you can pass [0], [1] or [2] to calculate histogram of blue,
# green or red channel respectively
# for grayscale, there is only [0]
colors = ('r', 'g', 'b')
# tuples are iterable just like lists
for i,col in enumerate(colors):
plt.plot(hists[i],color=col)
plt.xlim([0,255])
#plt.ylim([0,5000])
plt.xlabel("dark <----- Intensity -----> light")
plt.title("Color histogram: " + col)
###############################################################
##### Calculate gray levels histogram #########
grayImage = cv2.cvtColor(imgRGB, cv2.COLOR_RGB2GRAY)  # imgRGB is already RGB, so use the RGB2GRAY code
# Plot the grayscale version of the image
plt.figure(2)
plt.imshow(grayImage, cmap='gray')
# Calculate grayscale histogram
grayHist = cv2.calcHist([grayImage], [0], None, [256], [0, 255])
plt.figure(1)
plt.plot(grayHist, color='k')
plt.xlabel("dark <----- Intensity -----> light")
plt.title("Gray levels histogram")
################################################################
########## THRESHOLDING #############
thresh = 100
a, b = cv2.threshold(grayImage, thresh, 255, cv2.THRESH_BINARY)
# the first number is the threshold, the second is the value assigned to the thresholded-out parts
# but cv2 will display this while plt will not :/
# the output is 2 numbers, a-retval and b-image
# retval has to do with otsu thresholding
# plt.figure(3)
windowName = 'Image thresholded at intensity level: ' + str(thresh);
cv2.imshow(windowName, b)
# cv2.resizeWindow(windowName, 800, 600)
# This crops the image!!! I don't want that, I want to zoom out
# if you are displaying color images in matplotlib but read them in open cv,
# they will have inverted channels!! plt: rgb, cv2: bgr
plt.show()
|
[
"noreply@github.com"
] |
BabaJaguska.noreply@github.com
|
75f0c2838a6a100e1decfa52f011bc638d32fc23
|
d1e7f86903565dc7aed5e29e14b962fb9f133392
|
/Python_Scripts/colonANDprobchar_count.py
|
f78f43a555349e7a06f77cac5cfe664c1d1dbf8f
|
[] |
no_license
|
brentnixon/WrangleOSM
|
092e8f97fd14955af00366e24cbdacf95363ff17
|
7bdf1dfbdc6f3692e7923131ecf00df9539e8b7d
|
refs/heads/master
| 2021-05-03T21:10:07.024427
| 2018-02-06T16:27:59
| 2018-02-06T16:27:59
| 120,377,397
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,202
|
py
|
## code block to identify and count attribute keys with colon structure or problematic characters
import xml.etree.cElementTree as ET
import pprint
import re
# define regular expressions for compiling
lower = re.compile(r'^([a-z]|_)*$')
lower_colon = re.compile(r'^([a-z]|_)*:([a-z]|_)*$')
problemchars = re.compile(r'[=\+/&<>;\'"\?%#$@\,\. \t\r\n]')
# function to give a count of each of the four tag categories
def count_key_type(element, keys):
if element.tag == "tag":
if lower.search(element.get('k')):
keys["lower"] += 1
elif lower_colon.search(element.get('k')):
keys["lower_colon"] += 1
elif problemchars.search(element.get('k')):
keys["problemchars"] += 1
else:
keys["other"] += 1
pass
return keys
# function to iteratively process lines in XML file and yield dictionary with count of key types
def process_map(filename):
keys = {"lower": 0, "lower_colon": 0, "problemchars": 0, "other": 0}
for _, element in ET.iterparse(filename):
keys = count_key_type(element, keys)
return keys
# execute process_map on XML file
keys = process_map('Charlotte_AOI.xml')
pprint.pprint(keys)
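# A few hypothetical keys and the bucket the regexes above put each of them in:
# 'highway' -> lower
# 'addr:street' -> lower_colon
# 'name 1' -> problemchars (contains a space)
# 'FIXME' -> other (uppercase letters match none of the first three patterns)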
|
[
"noreply@github.com"
] |
brentnixon.noreply@github.com
|
56a7c862a6a90c22e304b8ad1a9597dbfa0bce28
|
e31104db88bf3bc7c93bf2c45b85dbbddefb03c6
|
/partition-array-disjoint-int.py
|
9d76ea7a5290b3ac5b9358785280e409765c030d
|
[] |
no_license
|
ravitej5226/Algorithms
|
13eab6ded0c8bf7ca67d7075cfbb585325b5faff
|
ba2b51ff9f3e25780a3d62573e9c7fc4445d8ef8
|
refs/heads/master
| 2020-05-30T05:50:35.126086
| 2019-04-08T21:16:13
| 2019-04-08T21:16:13
| 42,272,599
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,438
|
py
|
# Given an array A, partition it into two (contiguous) subarrays left and right so that:
# Every element in left is less than or equal to every element in right.
# left and right are non-empty.
# left has the smallest possible size.
# Return the length of left after such a partitioning. It is guaranteed that such a partitioning exists.
# Example 1:
# Input: [5,0,3,8,6]
# Output: 3
# Explanation: left = [5,0,3], right = [8,6]
# Example 2:
# Input: [1,1,1,0,6,12]
# Output: 4
# Explanation: left = [1,1,1,0], right = [6,12]
# Note:
# 2 <= A.length <= 30000
# 0 <= A[i] <= 10^6
# It is guaranteed there is at least one way to partition A as described.
class Solution(object):
def partitionDisjoint(self, A):
"""
:type A: List[int]
:rtype: int
"""
if(len(A)==2):
return 1
i=0
j=len(A)-1
left_max=0
while(j>0):
if(A[j]>=A[0]):
j=j-1
else:
break
# Compute left_max
for x in range(j):
left_max=max(left_max,A[x])
#print(left_max)
#print(j)
if(j==0):
return 1
# From j to max, include all elements which are less than left max
t=False
while(j<len(A)-1):
if(left_max>=A[j]):
t=True
j=j+1
else:
break
#print(left_max)
return j+1 if not t else j-1 if left_max==A[j-1] else j
s=Solution()
print(s.partitionDisjoint([0,7,11,1,12,2,13,96,34,18,82,37,98,68,90,61,20,94,21,86]))
# [26,51,40,58,42,76,30,48,79,91]
# [63,18,8,31,0,21,40,81,100,88,72,82,68,99,73]
# [6,0,8,30,37,6,75,98,39,90,63,74,52,92,64]
# [3,1,8,4,9,7,12,0,0,12,6,12,6,19,24,90,87,54,92,60,31,59,75,90,20,38,52,51,74,70,86,20,27,91,55,47,54,86,15,16,74,32,68,27,19,54,13,22,34,74,76,50,74,97,87,42,58,95,17,93,39,33,22,87,96,90,71,22,48,46,37,18,17,65,54,82,54,29,27,68,53,89,23,12,90,98,42,87,91,23,72,35,14,58,62,79,30,67,44,48]
# [12,75,26,38,56,59,83,55,49,52,27,48,77,21,27,79,54,15,59,22,34,35,81,67,2,41,40,0,73,61,75,8,86,42,49,83,43,16,2,54,26,35,15,63,31,24,51,86,6,35,42,37,83,51,34,21,71,57,61,76,50,1,43,32,19,13,67,87,3,33,38,34,34,84,38,76,52,7,27,49,2,78,56,28,70,6,64,87,100,97,99,97,97,100,100,100,97,89,98,100]
#[0,7,11,1,12,2,13,96,34,18,82,37,98,68,90,61,20,94,21,86]
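# A hedged alternative sketch (not the submitted solution above): the standard
# single-pass approach keeps the maximum of the current left part and the maximum
# seen so far; whenever an element falls below the left maximum, the left part
# has to grow to absorb it.
def partition_disjoint_simple(A):
    left_max = cur_max = A[0]
    length = 1
    for i in range(1, len(A)):
        cur_max = max(cur_max, A[i])
        if A[i] < left_max:      # A[i] must end up in the left part
            left_max = cur_max
            length = i + 1
    return length
print(partition_disjoint_simple([5, 0, 3, 8, 6]))        # 3
print(partition_disjoint_simple([1, 1, 1, 0, 6, 12]))    # 4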
|
[
"ravitej5226@gmail.com"
] |
ravitej5226@gmail.com
|
123fc126d6045a0528d010006dbe280fb70dea9a
|
004524db0f6c5b5ba2c7ca19250bbfa6c13f2007
|
/cli.py
|
8a15da9d320520a2021568ffb5726753d62b72b1
|
[] |
no_license
|
mrkzncv/FlaskApp-HW1
|
36569542c4e51c6c0c38276166eb93a898e7655c
|
c3375ac2e710952c4fd1f596c94d7100cae69032
|
refs/heads/main
| 2023-09-03T11:13:54.997625
| 2021-10-29T17:45:57
| 2021-10-29T17:45:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 230
|
py
|
from config import HOST, PORT
import sys
import os
def start():
from app import app
app.run(debug=True, host=HOST, port=PORT)
if __name__ == '__main__':
sys.path.insert(0, os.path.abspath(os.getcwd()))
start()
|
[
"noreply@github.com"
] |
mrkzncv.noreply@github.com
|
5ef0715b59faa6086a64e87c379247a2517cbbdb
|
c4bd9a742f9b08fb07bd5c2665884b18623fefba
|
/Web_Scraping/scrape_product_info_and_reviews.py
|
7488f2eec99020106244486fba378f9d6c1c1bd0
|
[] |
no_license
|
maianhtrnguyen/sephora-ratings-nlp
|
8bc792b38062fe0c3a622307843621371c4be88a
|
9828fccdded0e85e2971d025fa21d736218a2215
|
refs/heads/main
| 2023-07-12T04:02:36.702902
| 2021-08-19T15:45:20
| 2021-08-19T15:45:20
| 394,835,104
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,320
|
py
|
import sys
from urllib.request import urlopen
from urllib.request import FancyURLopener
import pandas as pd
import numpy as np
import time
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import ElementNotVisibleException
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
browser = webdriver.Chrome('/Applications/BrynMawr/chromedriver')
def get_product_info(product_url):
global browser
browser.get(product_url)
# close pop-up window
try:
login_window = browser.find_element_by_class_name('css-fslzaf').click()
except (NoSuchElementException, ElementNotVisibleException) as exceptions:
pass
# scroll the page with Keys.PAGE_DOWN
elements = browser.find_element_by_tag_name('body')
for i in range(30):
elements.send_keys(Keys.PAGE_DOWN)
time.sleep(0.3)
# def get_page_number(brand_url):
# global browser
# browser.get(brand_url)
# time.sleep(1)
# page_number = 1
# pages = browser.find_element_by_class_name('')
# def get_product_list(product_type, base_url):
# global browser
# products_page_count = get_page_count(base_url)
# product_list = []
# for i in range(1, products_page_count + 1):
# page_url = base_url + "?currentPage=" + str(i)
# browser.get(page_url)
# time.sleep(1)
# elem = browser.find_element_by_tag_name("body")
# no_of_pagedowns = 20
# while no_of_pagedowns:
# elem.send_keys(Keys.PAGE_DOWN)
# time.sleep(0.2)
# no_of_pagedowns -= 1
# post_elems = browser.find_elements_by_class_name("css-ix8km1")
# for post in post_elems:
# product_name = post.get_attribute('aria-label')
# product_link = post.get_attribute('href')
# product_list.append([product_name, product_link])
# df = pd.DataFrame(product_list, columns=['product_name', 'product_link'])
# df.to_csv('tmp/'+product_type+'_product_list.csv', index=True)
|
[
"maianhtrnguyen@gmail.com"
] |
maianhtrnguyen@gmail.com
|
b07ca38131dbcdbe5dd6074e87a1e31ffdf2d973
|
5ff4d30b93543becc357ff3b660cfc07c72a4def
|
/algorithm_and_data_structure/初级算法/链表/删除链表的倒数第N个节点.py
|
6d93af2cc08dd4def5ca81f8299fd742de3eafdd
|
[] |
no_license
|
isaac-wang0913/Python
|
6f3e0c903ae0d9912dcdd12ab82397f046bd21b3
|
c0eb74b08e4be7234ba739390e4ddb08ba34ca38
|
refs/heads/master
| 2023-03-27T23:37:30.208992
| 2021-03-23T03:35:07
| 2021-03-23T03:35:07
| 326,688,810
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,020
|
py
|
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
def removeNthFromEnd(self, head: ListNode, n: int) -> ListNode:
        # # Two-pass approach
        # # first pass: compute the length
# def getlength(head):
# length = 0
# while head:
# length += 1
# head = head.next
# return length
# dummy = ListNode(0, head)
# l = getlength(head)
# cur = dummy
        # # second pass: remove the target node
# for i in range(1, l-n+1):
# cur = cur.next
# cur.next = cur.next.next
# return dummy.next
        # One pass, two pointers (fast/slow)
dummy = ListNode(0,head)
fast = head
slow = dummy
for i in range(n):
fast = fast.next
while fast:
fast = fast.next
slow = slow.next
slow.next = slow.next.next
return dummy.next
|
[
"noreply@github.com"
] |
isaac-wang0913.noreply@github.com
|
7eb7093ea418cc48f979a964195cdfae48e83f05
|
ba55589c4810d01f0a63b70c23cd3ab4448750dc
|
/Python/thenewboston Tutorial/Python GUI with Tkinter/GUI Tutorial 8.py
|
31da5a9fd4e1f6aac53854eba2d8317f0893b016
|
[] |
no_license
|
kedvall/projects
|
b4769c1829293407656032dbc285739f22ba05e7
|
3cab1933b17a3d5c5ec1d93a0203ceef581bb43d
|
refs/heads/master
| 2021-03-14T05:51:51.703038
| 2020-03-12T04:47:10
| 2020-03-12T04:47:10
| 246,742,836
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 466
|
py
|
from tkinter import *
class Buttons:
    def __init__(self, master):  # master means the root or main window
frame = Frame(master)
frame.pack()
self.printButton = Button(frame, text='Print Message', command=self.printMessage)
self.printButton.pack(side=LEFT)
self.quitButton = Button(frame, text='Quit', command=frame.quit)
self.quitButton.pack(side=LEFT)
def printMessage(self):
print('Wow, it worked!')
root = Tk()
b = Buttons(root)
root.mainloop()
|
[
"kedvall123@gmail.com"
] |
kedvall123@gmail.com
|
155add56dfc5537f619d9a94e12bbad28052f370
|
a806f84ad61f22e07f190d0a8c4152aa31621873
|
/parts/migrations/0024_auto_20191211_2019.py
|
a310ce53c532e0b481e65ce1500d879f03300049
|
[] |
no_license
|
merkibek/GymFit
|
d78ee4c7e29a8b7856eb0946d25d45c06f3ed6f6
|
bf31dcf5b142812d5e84b0e520b2df8605735462
|
refs/heads/master
| 2020-10-01T13:42:28.789417
| 2019-12-12T08:25:09
| 2019-12-12T08:25:09
| 227,550,022
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 400
|
py
|
# Generated by Django 3.0 on 2019-12-11 14:19
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('parts', '0023_auto_20191211_2015'),
]
operations = [
migrations.AlterField(
model_name='client',
name='image',
field=models.ImageField(upload_to='parts/client_images'),
),
]
|
[
"32525150+pexes@users.noreply.github.com"
] |
32525150+pexes@users.noreply.github.com
|
fc3eab265f2836bf309fd4f2573cefd47fdc1ddc
|
7a8b0e76200fd484a36e3e3f5f014d1658bfa2ec
|
/gb_preprocessing.py
|
7afaa8d3605138b1aa02577fce3f82103f633a28
|
[
"MIT"
] |
permissive
|
prashantig25/gaborbandit_analysis
|
524215ff45694f572aa2a2a8d8d00c416743fce2
|
c208be61d44b9e99d9fdc9c1469be1103198ea9a
|
refs/heads/master
| 2022-12-16T01:21:14.551411
| 2020-09-18T14:05:51
| 2020-09-18T14:05:51
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,369
|
py
|
""" This script runs the preprocessing steps
1. Load BIDS formatted data
2. Create preprocessed data frames for each experiment
3. Save data
"""
import sys
import os
from fnmatch import fnmatch
from gb_loaddata import loaddata
from gb_compute_misses import compute_misses
# ---------------------------
# 1. Load BIDS-formatted data
# ---------------------------
# Set path and directory
my_path = '/Users/rasmus/Dropbox/gabor_bandit/code/gaborbandit_analysis/gb_data'
os.chdir(my_path)
filenames = []
for path, subdirs, files in os.walk(my_path + '/BIDS/behav/'):
for name in files:
if fnmatch(name, "*.tsv"):
filenames.append(os.path.join(path, name))
# Put pseudonomized BIDS data in a single data frame
data_pn = loaddata(filenames)
# Number of subjects
N = len(list(set(data_pn['participant'])))
# ------------------------------------------------------
# 2. Create preprocessed data frames for each experiment
# ------------------------------------------------------
# Experiment 1 - Perceptual decision making
exp1_data = data_pn[(data_pn['d_t'].notna()) & (data_pn['trial_type'] == 'patches') & (data_pn['complete'])].copy()
# Compute number of missed trials
n_misses_1 = 100 - exp1_data['id'].value_counts()
n_misses_min_1, n_misses_max_1, n_misses_mean_1, n_misses_sem_1 = compute_misses(n_misses_1, N)
# Experiment 1 - Perceptual decision making
exp1_data_recov = data_pn[(data_pn['trial_type'] == 'patches') & (data_pn['complete'])].copy()
exp1_data_recov.loc[:, 'b_t'] = 0
# Experiment 2 - Economic decisions making without perceptual uncertainty
exp2_raw = data_pn[(data_pn['trial_type'] == 'main_safe')].copy() # raw data to compute misses
exp2_data = data_pn[(data_pn['missIndex'] == 0) & (data_pn['decision2.corr'].notna()) &
(data_pn['trial_type'] == 'main_safe')].copy() # preprocessed data for analysis
# Extract number of blocks
exp2_nb = len(set(list(exp2_data['b_t'])))
# Compute number of missed trials
n_misses_2 = exp2_raw['id'].value_counts() - 150
n_misses_min_2, n_misses_max_2, n_misses_mean_2, n_misses_sem_2 = compute_misses(n_misses_2, N)
# Experiment 3 - Economic decision making with perceptual uncertainty
exp3_raw = data_pn[(data_pn['trial_type'] == 'main_unc')].copy() # raw data to compute misses
exp3_data = data_pn[(data_pn['missIndex'] == 0) & (data_pn['decision2.corr'].notna()) &
(data_pn['trial_type'] == 'main_unc')].copy() # preprocessed data for analysis
# Compute number of missed trials
n_misses_3 = exp3_raw['id'].value_counts() - 300
n_misses_min_3, n_misses_max_3, n_misses_mean_3, n_misses_sem_3 = compute_misses(n_misses_3, N)
# Extract number of blocks
exp3_nb = len(set(list(exp3_data['blockNumber'])))
# Check if we have the same number of participants for every experiment
# ---------------------------------------------------------------------
n_subj_1 = len(list(set(exp1_data['participant'])))
n_subj_2 = len(list(set(exp2_data['participant'])))
n_subj_3 = len(list(set(exp3_data['participant'])))
if not n_subj_1 == n_subj_2 == n_subj_3:
sys.exit("Unequal number of participants")
# -------------
# 3. Save data
# -------------
exp1_data.to_pickle('gb_exp1_data.pkl')
exp1_data_recov.to_pickle('gb_exp1_data_recov.pkl')
exp2_data.to_pickle('gb_exp2_data.pkl')
exp3_data.to_pickle('gb_exp3_data.pkl')
|
[
"rasmusbruckner@gmail.com"
] |
rasmusbruckner@gmail.com
|
c90a5d1bc09ac04da6827108e557c822bdb48437
|
f654b31d89552504130a59282a6dd14204ebdd04
|
/pages/urls.py
|
535e84d57739f50f42d10a88745c23a570fb9f5b
|
[] |
no_license
|
Tuz-And/Eshopper
|
82c085ef8075932645f7f3e8a9401cae4a681dcb
|
2209186b504000f91671a5ee1ae9bf77ae023ebc
|
refs/heads/master
| 2023-02-12T04:38:10.684225
| 2021-01-11T00:37:59
| 2021-01-11T00:37:59
| 315,384,557
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 487
|
py
|
from django.urls import path, include
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('error', views.error, name='error'),
path('contact-us', views.contact_us, name='contact_us'),
path('login', views.login, name='login'),
path('logout', views.logout, name='logout'),
path('register', views.register, name='register'),
path('dashboard', views.dashboard, name='dashboard'),
path('cart', views.cart, name='cart'),
]
|
[
"kozurnuj89@gmail.com"
] |
kozurnuj89@gmail.com
|
194608dca9ab1c00fceed10af6ca359ee1408963
|
4ed184a588440d39cff1db54970cece84232d406
|
/tts.py
|
0d6c8ad60d6113d6f695d537292f899d4363e81b
|
[] |
no_license
|
jkjh8/lotteplayer
|
56f9c5687eb1be608c2df2946e82ad027982ec47
|
b0e3a2823efd091b266fbc8b6b1544a44033ca57
|
refs/heads/main
| 2023-02-17T11:30:25.595463
| 2021-01-19T11:14:07
| 2021-01-19T11:14:07
| 330,953,612
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 795
|
py
|
import os
import sys
import urllib.request
client_id = "bh51u91nlo"
client_secret = "0zRDttwNbszDNeBMgBrzJsSmmOPtZDRzr6du9Hst"
encText = urllib.parse.quote("반갑습니다 네이버")  # text to synthesize ("Hello, Naver")
data = "speaker=nara&volume=0&speed=0&pitch=0&emotion=0&format=mp3&text=" + encText;
url = "https://naveropenapi.apigw.ntruss.com/voice-premium/v1/tts"
request = urllib.request.Request(url)
request.add_header("X-NCP-APIGW-API-KEY-ID",client_id)
request.add_header("X-NCP-APIGW-API-KEY",client_secret)
response = urllib.request.urlopen(request, data=data.encode('utf-8'))
rescode = response.getcode()
if(rescode==200):
    print("TTS mp3 saved")
response_body = response.read()
with open('1111.mp3', 'wb') as f:
f.write(response_body)
else:
    print("Error Code:" + str(rescode))
|
[
"jkjh82@naver.com"
] |
jkjh82@naver.com
|
f8b20bd8f0839c437f4559d5875b8a161264755f
|
eaed2f40fcf58ece2ba0372991dcff11e681989d
|
/bot/chains/admin_panel/state.py
|
50d7d6c2cb10db1757acb677163a4e395007fa91
|
[] |
no_license
|
vapelavsky/khaba-telegram-bot
|
7efd96cde75c3a24c0d6a274a362e5e3e7c9188b
|
b577875f94f65263e7530ffae4631fb01a72d671
|
refs/heads/master
| 2023-06-09T06:40:29.684635
| 2021-04-23T11:29:30
| 2021-04-23T11:29:30
| 357,331,669
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 241
|
py
|
from aiogram.dispatcher.filters.state import StatesGroup, State
class AdminPanel(StatesGroup):
wait_name = State()
wait_faculty = State()
wait_password = State()
wait_faculty_report = State()
wait_name_delete = State()
|
[
"mscn.brtlmo@gmail.com"
] |
mscn.brtlmo@gmail.com
|
4ca15cc7e0db65987926b19da6b356aad1f95f82
|
0e764783c2c60038b55d88fde20b88d5aad9ef03
|
/ShellyMQTT.indigoPlugin/Contents/Server Plugin/Devices/tests/test_Shelly_EM_Relay.py
|
255561b89713bdf3eb635f1642bb3cc90c698fac
|
[
"MIT"
] |
permissive
|
AaronLionsheep/ShellyMQTT
|
1211c49c0c038cf45ac97d82ad211a94dfa3689a
|
d0c6659acc40f33cd2e629f88bdb7a9ec0d28123
|
refs/heads/master
| 2023-03-22T16:17:17.308577
| 2023-02-27T01:40:55
| 2023-02-27T01:40:55
| 243,353,135
| 0
| 0
|
MIT
| 2023-02-27T01:40:56
| 2020-02-26T19:56:56
|
Python
|
UTF-8
|
Python
| false
| false
| 6,337
|
py
|
# coding=utf-8
import unittest
from unittest.mock import patch
import sys
import logging
from Devices.tests.mocking.IndigoDevice import IndigoDevice
from Devices.tests.mocking.IndigoServer import Indigo
from Devices.tests.mocking.IndigoAction import IndigoAction
indigo = Indigo()
sys.modules['indigo'] = indigo
from Devices.Relays.Shelly_EM_Relay import Shelly_EM_Relay
class Test_Shelly_EM_Relay(unittest.TestCase):
def setUp(self):
indigo.__init__()
self.device = IndigoDevice(id=123456, name="New Device")
self.shelly = Shelly_EM_Relay(self.device)
logging.getLogger('Plugin.ShellyMQTT').addHandler(logging.NullHandler())
self.device.pluginProps['address'] = "shellies/shelly-em-test"
self.device.updateStateOnServer("sw-input", False)
self.device.updateStateOnServer("longpush", False)
self.device.updateStateOnServer("ip-address", None)
self.device.updateStateOnServer("mac-address", None)
self.device.updateStateOnServer("online", False)
def test_getSubscriptions_no_address(self):
self.device.pluginProps['address'] = None
self.assertListEqual([], self.shelly.getSubscriptions())
def test_getSubscriptions(self):
topics = [
"shellies/announce",
"shellies/shelly-em-test/online",
"shellies/shelly-em-test/relay/0"
]
self.assertListEqual(topics, self.shelly.getSubscriptions())
def test_handleMessage_relay_on(self):
self.assertTrue(self.shelly.isOff())
self.shelly.handleMessage("shellies/shelly-em-test/relay/0", "on")
self.assertTrue(self.shelly.isOn())
def test_handleMessage_relay_off(self):
self.shelly.turnOn()
self.assertTrue(self.shelly.isOn())
self.shelly.handleMessage("shellies/shelly-em-test/relay/0", "off")
self.assertTrue(self.shelly.isOff())
def test_handleMessage_announce(self):
announcement = '{"id": "shelly-em-test", "mac": "aa:bb:cc:dd", "ip": "192.168.1.100", "fw_ver": "0.0.0", "new_fw": false}'
self.shelly.handleMessage("shellies/announce", announcement)
self.assertEqual("aa:bb:cc:dd", self.shelly.device.states['mac-address'])
self.assertEqual("192.168.1.100", self.shelly.getIpAddress())
self.assertEqual("0.0.0", self.shelly.getFirmware())
self.assertFalse(self.shelly.updateAvailable())
def test_handleMessage_online_true(self):
self.shelly.device.states['online'] = False
self.assertFalse(self.shelly.device.states['online'])
self.shelly.handleMessage("shellies/shelly-em-test/online", "true")
self.assertTrue(self.shelly.device.states['online'])
def test_handleMessage_online_false(self):
self.shelly.device.states['online'] = True
self.assertTrue(self.shelly.device.states['online'])
self.shelly.handleMessage("shellies/shelly-em-test/online", "false")
self.assertFalse(self.shelly.device.states['online'])
@patch('Devices.Shelly.Shelly.publish')
def test_handleAction_turn_on(self, publish):
self.shelly.turnOff()
self.assertTrue(self.shelly.isOff())
turnOn = IndigoAction(indigo.kDeviceAction.TurnOn)
self.shelly.handleAction(turnOn)
self.assertTrue(self.shelly.isOn())
publish.assert_called_with("shellies/shelly-em-test/relay/0/command", "on")
@patch('Devices.Shelly.Shelly.publish')
def test_handleAction_turn_off(self, publish):
self.shelly.turnOn()
self.assertTrue(self.shelly.isOn())
turnOff = IndigoAction(indigo.kDeviceAction.TurnOff)
self.shelly.handleAction(turnOff)
self.assertTrue(self.shelly.isOff())
publish.assert_called_with("shellies/shelly-em-test/relay/0/command", "off")
@patch('Devices.Shelly.Shelly.publish')
def test_handleAction_status_request(self, publish):
statusRequest = IndigoAction(indigo.kDeviceAction.RequestStatus)
self.shelly.handleAction(statusRequest)
publish.assert_called_with("shellies/shelly-em-test/command", "update")
@patch('Devices.Shelly.Shelly.publish')
def test_handleAction_toggle_off_to_on(self, publish):
self.shelly.turnOff()
self.assertTrue(self.shelly.isOff())
toggle = IndigoAction(indigo.kDeviceAction.Toggle)
self.shelly.handleAction(toggle)
self.assertTrue(self.shelly.isOn())
publish.assert_called_with("shellies/shelly-em-test/relay/0/command", "on")
@patch('Devices.Shelly.Shelly.publish')
def test_handleAction_toggle_on_to_off(self, publish):
self.shelly.turnOn()
self.assertTrue(self.shelly.isOn())
toggle = IndigoAction(indigo.kDeviceAction.Toggle)
self.shelly.handleAction(toggle)
self.assertTrue(self.shelly.isOff())
publish.assert_called_with("shellies/shelly-em-test/relay/0/command", "off")
def test_validateConfigUI(self):
values = {
"broker-id": "12345",
"address": "some/address",
"message-type": "a-type",
"announce-message-type-same-as-message-type": True
}
isValid, valuesDict, errors = Shelly_EM_Relay.validateConfigUI(values, None, None)
self.assertTrue(isValid)
def test_validateConfigUI_announce_message_type(self):
values = {
"broker-id": "12345",
"address": "some/address",
"message-type": "a-type",
"announce-message-type-same-as-message-type": False,
"announce-message-type": "another-type"
}
isValid, valuesDict, errors = Shelly_EM_Relay.validateConfigUI(values, None, None)
self.assertTrue(isValid)
def test_validateConfigUI_invalid(self):
values = {
"broker-id": "",
"address": "",
"message-type": "",
"announce-message-type-same-as-message-type": False,
"announce-message-type": ""
}
isValid, valuesDict, errors = Shelly_EM_Relay.validateConfigUI(values, None, None)
self.assertFalse(isValid)
self.assertTrue("broker-id" in errors)
self.assertTrue("address" in errors)
self.assertTrue("message-type" in errors)
self.assertTrue("announce-message-type" in errors)
|
[
"aaronr@vt.edu"
] |
aaronr@vt.edu
|
14d3c676148de7b649fe5236285e0bed7a7a2b96
|
1a2e477c5beee5ec1fc42bc537f6b68b14e4a522
|
/main.py
|
5665624cf652e268e8673d0768eeba1728d9f4f4
|
[] |
no_license
|
alvendarthy/ast-lib-test
|
3ea25188b188122be55b4677d0f540f91bed53ec
|
9919525d66702b175c7e1a501f991a76665a57b0
|
refs/heads/master
| 2022-12-22T23:58:41.225765
| 2020-09-23T10:18:35
| 2020-09-23T10:18:35
| 297,928,816
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,387
|
py
|
import ast
add_arg = "test()"
with open("target.py") as f:
tree = ast.parse(f.read())
import astpretty
astpretty.pprint(tree)
class Visitor(ast.NodeVisitor):
def visit_Import(self, node):
for alias in node.names:
print("package {} imported.".format(alias.name))
def visit_Call(self, node):
print("calling func: ", node.func.id)
if node.func.id != "print":
return
print("origin args:")
args = node.args
for arg in args:
if isinstance(arg, ast.Constant):
print("type:", type(arg), arg.value)
else:
print("None static arg.")
print("set arg to 'hello world'")
args[0] = ast.Constant(s = "hello world", kind=None)
ast.fix_missing_locations(args[0])
print("add another arg")
last_index = len(args)
print("last index: ", last_index)
args.insert(last_index, ast.Constant(s = "!!!!!", kind=None))
ast.fix_missing_locations(args[last_index])
print("add test call one more time.")
node = ast.parse(add_arg).body[0].value
args.insert(last_index + 1, node)
ast.fix_missing_locations(args[last_index + 1])
v = Visitor()
v.visit(tree)
co = compile(tree, "log.log", "exec")
exec(co)
import astunparse
print(astunparse.unparse(tree))
|
[
"alvendarthy@gmail.com"
] |
alvendarthy@gmail.com
|
98846862a2f458e43bcc033b9ff40f5a7d1aecc2
|
21b02268b85e4be78a6c4e5c425f8950f3a8c982
|
/samples/adsense/get_all_ad_clients.py
|
ae76aa1235928967695657ebd68bff68915c55fd
|
[
"LicenseRef-scancode-warranty-disclaimer",
"MIT",
"Apache-2.0"
] |
permissive
|
skygate/GoogleApiPython3x
|
cd1148366d4d544c3e2215da25485ee73a272fff
|
c21bd6ae630f967ef65dc45e35ad69789991c819
|
refs/heads/master
| 2020-04-08T20:16:29.841694
| 2018-11-05T01:33:23
| 2018-11-05T01:33:23
| 31,593,687
| 3
| 2
|
NOASSERTION
| 2018-11-05T01:33:24
| 2015-03-03T10:52:40
|
Python
|
UTF-8
|
Python
| false
| false
| 1,889
|
py
|
#!/usr/bin/python
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all ad clients for the logged in user's default account.
Tags: adclients.list
"""
__author__ = 'jalc@google.com (Jose Alcerreca)'
import sys
from apiclient import sample_tools
from oauth2client import client
MAX_PAGE_SIZE = 50
def main(argv):
# Authenticate and construct service.
service, flags = sample_tools.init(
argv, 'adsense', 'v1.3', __doc__, __file__, parents=[],
scope='https://www.googleapis.com/auth/adsense.readonly')
try:
# Retrieve ad client list in pages and display data as we receive it.
request = service.adclients().list(maxResults=MAX_PAGE_SIZE)
while request is not None:
result = request.execute()
ad_clients = result['items']
for ad_client in ad_clients:
print ('Ad client for product "%s" with ID "%s" was found. '
% (ad_client['productCode'], ad_client['id']))
print ('\tSupports reporting: %s' %
(ad_client['supportsReporting'] and 'Yes' or 'No'))
request = service.adclients().list_next(request, result)
except client.AccessTokenRefreshError:
print ('The credentials have been revoked or expired, please re-run the '
'application to re-authorize')
if __name__ == '__main__':
main(sys.argv)
|
[
"fredrik.blomqvist.95@gmail.com"
] |
fredrik.blomqvist.95@gmail.com
|
d8df2cdf271b21d6f5cb826a38c36653ee3a0281
|
898d2e3d2b0b46818a8cb939407ae79e43446389
|
/gen/se/ttypes.py
|
2c5842bee6b465051d9758685e4863d58cf38c40
|
[] |
no_license
|
yinting123/Tools
|
158cfaddd577034c2955c18c37d406362f7dd812
|
150003ac3765246b7e7b1fdb8e98d0bc79d68b85
|
refs/heads/master
| 2021-01-19T10:08:02.910607
| 2017-07-26T12:13:23
| 2017-07-26T12:13:23
| 87,827,752
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| true
| 114,881
|
py
|
#
# Autogenerated by Thrift Compiler (0.9.2)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
import sys
sys.path.append('../')
sys.path.append('../gen/gen.cm/')
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
import gen.cm.ttypes
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class InnerSearchType:
kRegionSearch = 1
kNearBySearch = 2
kBoundSearch = 3
kDetailSearch = 4
kStaticDigest = 5
_VALUES_TO_NAMES = {
1: "kRegionSearch",
2: "kNearBySearch",
3: "kBoundSearch",
4: "kDetailSearch",
5: "kStaticDigest",
}
_NAMES_TO_VALUES = {
"kRegionSearch": 1,
"kNearBySearch": 2,
"kBoundSearch": 3,
"kDetailSearch": 4,
"kStaticDigest": 5,
}
class FilterSearchType:
kFilterSearch = 1
_VALUES_TO_NAMES = {
1: "kFilterSearch",
}
_NAMES_TO_VALUES = {
"kFilterSearch": 1,
}
class StrategyMode:
kBoundTraitPriority = 1
kInvalidStrategyMode = 999
_VALUES_TO_NAMES = {
1: "kBoundTraitPriority",
999: "kInvalidStrategyMode",
}
_NAMES_TO_VALUES = {
"kBoundTraitPriority": 1,
"kInvalidStrategyMode": 999,
}
class TraitType:
kCommendTrait = 1
kInvalidTraitType = 999
_VALUES_TO_NAMES = {
1: "kCommendTrait",
999: "kInvalidTraitType",
}
_NAMES_TO_VALUES = {
"kCommendTrait": 1,
"kInvalidTraitType": 999,
}
class ListRequest:
"""
Attributes:
- region_id
- check_in_date
- check_out_date
- poi_id
- poi_str
- hotel_num
- hotel_name
- hotel_price_range
- hotel_star
- hotel_brand
- hotel_type
- hotel_facilitie
- rank_type
- rank_distance
- page_info
- hotel_distance
- session_id
- user_info
- filter_ota
- room_info
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'region_id', None, None, ), # 1
(2, TType.I64, 'check_in_date', None, None, ), # 2
(3, TType.I64, 'check_out_date', None, None, ), # 3
(4, TType.I32, 'poi_id', None, None, ), # 4
(5, TType.STRING, 'poi_str', None, None, ), # 5
(6, TType.I32, 'hotel_num', None, -1, ), # 6
(7, TType.STRING, 'hotel_name', None, None, ), # 7
(8, TType.LIST, 'hotel_price_range', (TType.STRUCT,(gen.cm.ttypes.PriceRange,gen.cm.ttypes.PriceRange.thrift_spec)), None, ), # 8
(9, TType.LIST, 'hotel_star', (TType.I32,None), None, ), # 9
(10, TType.LIST, 'hotel_brand', (TType.I32,None), None, ), # 10
(11, TType.LIST, 'hotel_type', (TType.I32,None), None, ), # 11
(12, TType.LIST, 'hotel_facilitie', (TType.I32,None), None, ), # 12
(13, TType.I32, 'rank_type', None, None, ), # 13
(14, TType.I32, 'rank_distance', None, None, ), # 14
None, # 15
(16, TType.STRUCT, 'page_info', (gen.cm.ttypes.PageInfo, gen.cm.ttypes.PageInfo.thrift_spec), None, ), # 16
(17, TType.I32, 'hotel_distance', None, None, ), # 17
(18, TType.STRING, 'session_id', None, None, ), # 18
(19, TType.STRUCT, 'user_info', (gen.cm.ttypes.UserInfo, gen.cm.ttypes.UserInfo.thrift_spec), None, ), # 19
(20, TType.I64, 'filter_ota', None, None, ), # 20
(21, TType.LIST, 'room_info', (TType.STRUCT,(gen.cm.ttypes.RoomInfo, gen.cm.ttypes.RoomInfo.thrift_spec)), None, ), # 21
)
def __init__(self, region_id=None, check_in_date=None, check_out_date=None, poi_id=None, poi_str=None, hotel_num=thrift_spec[6][4], hotel_name=None, hotel_price_range=None, hotel_star=None, hotel_brand=None, hotel_type=None, hotel_facilitie=None, rank_type=None, rank_distance=None, page_info=None, hotel_distance=None, session_id=None, user_info=None, filter_ota=None, room_info=None,):
self.region_id = region_id
self.check_in_date = check_in_date
self.check_out_date = check_out_date
self.poi_id = poi_id
self.poi_str = poi_str
self.hotel_num = hotel_num
self.hotel_name = hotel_name
self.hotel_price_range = hotel_price_range
self.hotel_star = hotel_star
self.hotel_brand = hotel_brand
self.hotel_type = hotel_type
self.hotel_facilitie = hotel_facilitie
self.rank_type = rank_type
self.rank_distance = rank_distance
self.page_info = page_info
self.hotel_distance = hotel_distance
self.session_id = session_id
self.user_info = user_info
self.filter_ota = filter_ota
self.room_info = room_info
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.region_id = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.check_in_date = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.check_out_date = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I32:
self.poi_id = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRING:
self.poi_str = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.I32:
self.hotel_num = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRING:
self.hotel_name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.LIST:
self.hotel_price_range = []
(_etype3, _size0) = iprot.readListBegin()
for _i4 in xrange(_size0):
_elem5 = gen.cm.ttypes.PriceRange()
_elem5.read(iprot)
self.hotel_price_range.append(_elem5)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.LIST:
self.hotel_star = []
(_etype9, _size6) = iprot.readListBegin()
for _i10 in xrange(_size6):
_elem11 = iprot.readI32();
self.hotel_star.append(_elem11)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.LIST:
self.hotel_brand = []
(_etype15, _size12) = iprot.readListBegin()
for _i16 in xrange(_size12):
_elem17 = iprot.readI32();
self.hotel_brand.append(_elem17)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.LIST:
self.hotel_type = []
(_etype21, _size18) = iprot.readListBegin()
for _i22 in xrange(_size18):
_elem23 = iprot.readI32();
self.hotel_type.append(_elem23)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.LIST:
self.hotel_facilitie = []
(_etype27, _size24) = iprot.readListBegin()
for _i28 in xrange(_size24):
_elem29 = iprot.readI32();
self.hotel_facilitie.append(_elem29)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.I32:
self.rank_type = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 14:
if ftype == TType.I32:
self.rank_distance = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 16:
if ftype == TType.STRUCT:
self.page_info = gen.cm.ttypes.PageInfo()
self.page_info.read(iprot)
else:
iprot.skip(ftype)
elif fid == 17:
if ftype == TType.I32:
self.hotel_distance = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 18:
if ftype == TType.STRING:
self.session_id = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 19:
if ftype == TType.STRUCT:
self.user_info = gen.cm.ttypes.UserInfo()
self.user_info.read(iprot)
else:
iprot.skip(ftype)
elif fid == 20:
if ftype == TType.I64:
self.filter_ota = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 21:
if ftype == TType.LIST:
self.room_info = []
(_etype33, _size30) = iprot.readListBegin()
for _i34 in xrange(_size30):
_elem35 = gen.cm.ttypes.RoomInfo()
_elem35.read(iprot)
self.room_info.append(_elem35)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ListRequest')
if self.region_id is not None:
oprot.writeFieldBegin('region_id', TType.I32, 1)
oprot.writeI32(self.region_id)
oprot.writeFieldEnd()
if self.check_in_date is not None:
oprot.writeFieldBegin('check_in_date', TType.I64, 2)
oprot.writeI64(self.check_in_date)
oprot.writeFieldEnd()
if self.check_out_date is not None:
oprot.writeFieldBegin('check_out_date', TType.I64, 3)
oprot.writeI64(self.check_out_date)
oprot.writeFieldEnd()
if self.poi_id is not None:
oprot.writeFieldBegin('poi_id', TType.I32, 4)
oprot.writeI32(self.poi_id)
oprot.writeFieldEnd()
if self.poi_str is not None:
oprot.writeFieldBegin('poi_str', TType.STRING, 5)
oprot.writeString(self.poi_str)
oprot.writeFieldEnd()
if self.hotel_num is not None:
oprot.writeFieldBegin('hotel_num', TType.I32, 6)
oprot.writeI32(self.hotel_num)
oprot.writeFieldEnd()
if self.hotel_name is not None:
oprot.writeFieldBegin('hotel_name', TType.STRING, 7)
oprot.writeString(self.hotel_name)
oprot.writeFieldEnd()
if self.hotel_price_range is not None:
oprot.writeFieldBegin('hotel_price_range', TType.LIST, 8)
oprot.writeListBegin(TType.STRUCT, len(self.hotel_price_range))
for iter36 in self.hotel_price_range:
iter36.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.hotel_star is not None:
oprot.writeFieldBegin('hotel_star', TType.LIST, 9)
oprot.writeListBegin(TType.I32, len(self.hotel_star))
for iter37 in self.hotel_star:
oprot.writeI32(iter37)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.hotel_brand is not None:
oprot.writeFieldBegin('hotel_brand', TType.LIST, 10)
oprot.writeListBegin(TType.I32, len(self.hotel_brand))
for iter38 in self.hotel_brand:
oprot.writeI32(iter38)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.hotel_type is not None:
oprot.writeFieldBegin('hotel_type', TType.LIST, 11)
oprot.writeListBegin(TType.I32, len(self.hotel_type))
for iter39 in self.hotel_type:
oprot.writeI32(iter39)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.hotel_facilitie is not None:
oprot.writeFieldBegin('hotel_facilitie', TType.LIST, 12)
oprot.writeListBegin(TType.I32, len(self.hotel_facilitie))
for iter40 in self.hotel_facilitie:
oprot.writeI32(iter40)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.rank_type is not None:
oprot.writeFieldBegin('rank_type', TType.I32, 13)
oprot.writeI32(self.rank_type)
oprot.writeFieldEnd()
if self.rank_distance is not None:
oprot.writeFieldBegin('rank_distance', TType.I32, 14)
oprot.writeI32(self.rank_distance)
oprot.writeFieldEnd()
if self.page_info is not None:
oprot.writeFieldBegin('page_info', TType.STRUCT, 16)
self.page_info.write(oprot)
oprot.writeFieldEnd()
if self.hotel_distance is not None:
oprot.writeFieldBegin('hotel_distance', TType.I32, 17)
oprot.writeI32(self.hotel_distance)
oprot.writeFieldEnd()
if self.session_id is not None:
oprot.writeFieldBegin('session_id', TType.STRING, 18)
oprot.writeString(self.session_id)
oprot.writeFieldEnd()
if self.user_info is not None:
oprot.writeFieldBegin('user_info', TType.STRUCT, 19)
self.user_info.write(oprot)
oprot.writeFieldEnd()
if self.filter_ota is not None:
oprot.writeFieldBegin('filter_ota', TType.I64, 20)
oprot.writeI64(self.filter_ota)
oprot.writeFieldEnd()
if self.room_info is not None:
oprot.writeFieldBegin('room_info', TType.LIST, 21)
oprot.writeListBegin(TType.STRUCT, len(self.room_info))
for iter41 in self.room_info:
iter41.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.region_id)
value = (value * 31) ^ hash(self.check_in_date)
value = (value * 31) ^ hash(self.check_out_date)
value = (value * 31) ^ hash(self.poi_id)
value = (value * 31) ^ hash(self.poi_str)
value = (value * 31) ^ hash(self.hotel_num)
value = (value * 31) ^ hash(self.hotel_name)
value = (value * 31) ^ hash(self.hotel_price_range)
value = (value * 31) ^ hash(self.hotel_star)
value = (value * 31) ^ hash(self.hotel_brand)
value = (value * 31) ^ hash(self.hotel_type)
value = (value * 31) ^ hash(self.hotel_facilitie)
value = (value * 31) ^ hash(self.rank_type)
value = (value * 31) ^ hash(self.rank_distance)
value = (value * 31) ^ hash(self.page_info)
value = (value * 31) ^ hash(self.hotel_distance)
value = (value * 31) ^ hash(self.session_id)
value = (value * 31) ^ hash(self.user_info)
value = (value * 31) ^ hash(self.filter_ota)
value = (value * 31) ^ hash(self.room_info)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ListResponse:
"""
Attributes:
- search_id
- lowest_price
- result_hotel_num
- page_num
- hotel_list
- hotel_filter
- center_coordinate
- region_polygon
- region_info
- poi_info
- rank_type
- poi_lang
- status
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'search_id', None, None, ), # 1
(2, TType.I32, 'lowest_price', None, None, ), # 2
(3, TType.I32, 'result_hotel_num', None, None, ), # 3
(4, TType.I32, 'page_num', None, None, ), # 4
(5, TType.LIST, 'hotel_list', (TType.STRUCT,(gen.cm.ttypes.HotelInfoList, gen.cm.ttypes.HotelInfoList.thrift_spec)), None, ), # 5
(6, TType.STRUCT, 'hotel_filter', (gen.cm.ttypes.HotelFilter, gen.cm.ttypes.HotelFilter.thrift_spec), None, ), # 6
(7, TType.STRUCT, 'center_coordinate', (gen.cm.ttypes.GeoInfo, gen.cm.ttypes.GeoInfo.thrift_spec), None, ), # 7
(8, TType.LIST, 'region_polygon', (TType.STRUCT,(gen.cm.ttypes.GeoInfo, gen.cm.ttypes.GeoInfo.thrift_spec)), None, ), # 8
(9, TType.STRUCT, 'region_info', (gen.cm.ttypes.RegionInfo, gen.cm.ttypes.RegionInfo.thrift_spec), None, ), # 9
(10, TType.STRUCT, 'poi_info', (gen.cm.ttypes.POIData, gen.cm.ttypes.POIData.thrift_spec), None, ), # 10
(11, TType.I32, 'rank_type', None, None, ), # 11
(12, TType.I32, 'poi_lang', None, None, ), # 12
(13, TType.STRUCT, 'status', (gen.cm.ttypes.ServerStatus, gen.cm.ttypes.ServerStatus.thrift_spec), None, ), # 13
)
def __init__(self, search_id=None, lowest_price=None, result_hotel_num=None, page_num=None, hotel_list=None, hotel_filter=None, center_coordinate=None, region_polygon=None, region_info=None, poi_info=None, rank_type=None, poi_lang=None, status=None,):
self.search_id = search_id
self.lowest_price = lowest_price
self.result_hotel_num = result_hotel_num
self.page_num = page_num
self.hotel_list = hotel_list
self.hotel_filter = hotel_filter
self.center_coordinate = center_coordinate
self.region_polygon = region_polygon
self.region_info = region_info
self.poi_info = poi_info
self.rank_type = rank_type
self.poi_lang = poi_lang
self.status = status
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.search_id = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.lowest_price = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.result_hotel_num = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I32:
self.page_num = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.LIST:
self.hotel_list = []
(_etype45, _size42) = iprot.readListBegin()
for _i46 in xrange(_size42):
_elem47 = gen.cm.ttypes.HotelInfoList()
_elem47.read(iprot)
self.hotel_list.append(_elem47)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRUCT:
self.hotel_filter = gen.cm.ttypes.HotelFilter()
self.hotel_filter.read(iprot)
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRUCT:
self.center_coordinate = gen.cm.ttypes.GeoInfo()
self.center_coordinate.read(iprot)
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.LIST:
self.region_polygon = []
(_etype51, _size48) = iprot.readListBegin()
for _i52 in xrange(_size48):
_elem53 = gen.cm.ttypes.GeoInfo()
_elem53.read(iprot)
self.region_polygon.append(_elem53)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.STRUCT:
self.region_info = gen.cm.ttypes.RegionInfo()
self.region_info.read(iprot)
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.STRUCT:
self.poi_info = gen.cm.ttypes.POIData()
self.poi_info.read(iprot)
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.I32:
self.rank_type = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.I32:
self.poi_lang = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.STRUCT:
self.status = gen.cm.ttypes.ServerStatus()
self.status.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ListResponse')
if self.search_id is not None:
oprot.writeFieldBegin('search_id', TType.I64, 1)
oprot.writeI64(self.search_id)
oprot.writeFieldEnd()
if self.lowest_price is not None:
oprot.writeFieldBegin('lowest_price', TType.I32, 2)
oprot.writeI32(self.lowest_price)
oprot.writeFieldEnd()
if self.result_hotel_num is not None:
oprot.writeFieldBegin('result_hotel_num', TType.I32, 3)
oprot.writeI32(self.result_hotel_num)
oprot.writeFieldEnd()
if self.page_num is not None:
oprot.writeFieldBegin('page_num', TType.I32, 4)
oprot.writeI32(self.page_num)
oprot.writeFieldEnd()
if self.hotel_list is not None:
oprot.writeFieldBegin('hotel_list', TType.LIST, 5)
oprot.writeListBegin(TType.STRUCT, len(self.hotel_list))
for iter54 in self.hotel_list:
iter54.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.hotel_filter is not None:
oprot.writeFieldBegin('hotel_filter', TType.STRUCT, 6)
self.hotel_filter.write(oprot)
oprot.writeFieldEnd()
if self.center_coordinate is not None:
oprot.writeFieldBegin('center_coordinate', TType.STRUCT, 7)
self.center_coordinate.write(oprot)
oprot.writeFieldEnd()
if self.region_polygon is not None:
oprot.writeFieldBegin('region_polygon', TType.LIST, 8)
oprot.writeListBegin(TType.STRUCT, len(self.region_polygon))
for iter55 in self.region_polygon:
iter55.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.region_info is not None:
oprot.writeFieldBegin('region_info', TType.STRUCT, 9)
self.region_info.write(oprot)
oprot.writeFieldEnd()
if self.poi_info is not None:
oprot.writeFieldBegin('poi_info', TType.STRUCT, 10)
self.poi_info.write(oprot)
oprot.writeFieldEnd()
if self.rank_type is not None:
oprot.writeFieldBegin('rank_type', TType.I32, 11)
oprot.writeI32(self.rank_type)
oprot.writeFieldEnd()
if self.poi_lang is not None:
oprot.writeFieldBegin('poi_lang', TType.I32, 12)
oprot.writeI32(self.poi_lang)
oprot.writeFieldEnd()
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 13)
self.status.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.search_id)
value = (value * 31) ^ hash(self.lowest_price)
value = (value * 31) ^ hash(self.result_hotel_num)
value = (value * 31) ^ hash(self.page_num)
value = (value * 31) ^ hash(self.hotel_list)
value = (value * 31) ^ hash(self.hotel_filter)
value = (value * 31) ^ hash(self.center_coordinate)
value = (value * 31) ^ hash(self.region_polygon)
value = (value * 31) ^ hash(self.region_info)
value = (value * 31) ^ hash(self.poi_info)
value = (value * 31) ^ hash(self.rank_type)
value = (value * 31) ^ hash(self.poi_lang)
value = (value * 31) ^ hash(self.status)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
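# --- Editorial usage sketch; not emitted by the Thrift compiler. ---
# A minimal example of decoding a ListResponse from raw bytes with the plain
# binary protocol. TTransport and TBinaryProtocol are already imported at the
# top of this generated module; `raw_bytes` is assumed to hold a payload
# produced by a matching ListResponse.write() call.
def _example_read_list_response(raw_bytes):
  buf = TTransport.TMemoryBuffer(raw_bytes)      # in-memory transport over the payload
  proto = TBinaryProtocol.TBinaryProtocol(buf)   # pure-Python protocol path always works
  resp = ListResponse()
  resp.read(proto)                               # fills search_id, hotel_list, status, ...
  return resp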
class ListMapResponse:
"""
Attributes:
- search_id
- hotel_info
- status
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'search_id', None, None, ), # 1
(2, TType.LIST, 'hotel_info', (TType.STRUCT,(gen.cm.ttypes.HotelInfoMap, gen.cm.ttypes.HotelInfoMap.thrift_spec)), None, ), # 2
None, # 3
None, # 4
None, # 5
None, # 6
None, # 7
None, # 8
None, # 9
None, # 10
None, # 11
None, # 12
(13, TType.STRUCT, 'status', (gen.cm.ttypes.ServerStatus, gen.cm.ttypes.ServerStatus.thrift_spec), None, ), # 13
)
def __init__(self, search_id=None, hotel_info=None, status=None,):
self.search_id = search_id
self.hotel_info = hotel_info
self.status = status
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.search_id = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.hotel_info = []
(_etype59, _size56) = iprot.readListBegin()
for _i60 in xrange(_size56):
_elem61 = gen.cm.ttypes.HotelInfoMap()
_elem61.read(iprot)
self.hotel_info.append(_elem61)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.STRUCT:
self.status = gen.cm.ttypes.ServerStatus()
self.status.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ListMapResponse')
if self.search_id is not None:
oprot.writeFieldBegin('search_id', TType.I64, 1)
oprot.writeI64(self.search_id)
oprot.writeFieldEnd()
if self.hotel_info is not None:
oprot.writeFieldBegin('hotel_info', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.hotel_info))
for iter62 in self.hotel_info:
iter62.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 13)
self.status.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.search_id)
value = (value * 31) ^ hash(self.hotel_info)
value = (value * 31) ^ hash(self.status)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ListRtsRequest:
"""
Attributes:
- hotel_list
- check_in_date
- check_out_date
- search_id
- session_id
- user_info
- filter_ota
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'hotel_list', (TType.I64,None), None, ), # 1
(2, TType.I64, 'check_in_date', None, None, ), # 2
(3, TType.I64, 'check_out_date', None, None, ), # 3
(4, TType.I64, 'search_id', None, None, ), # 4
None, # 5
None, # 6
(7, TType.STRING, 'session_id', None, None, ), # 7
None, # 8
(9, TType.STRUCT, 'user_info', (gen.cm.ttypes.UserInfo, gen.cm.ttypes.UserInfo.thrift_spec), None, ), # 9
(10, TType.I64, 'filter_ota', None, 0, ), # 10
)
def __init__(self, hotel_list=None, check_in_date=None, check_out_date=None, search_id=None, session_id=None, user_info=None, filter_ota=thrift_spec[10][4],):
self.hotel_list = hotel_list
self.check_in_date = check_in_date
self.check_out_date = check_out_date
self.search_id = search_id
self.session_id = session_id
self.user_info = user_info
self.filter_ota = filter_ota
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.hotel_list = []
(_etype66, _size63) = iprot.readListBegin()
for _i67 in xrange(_size63):
_elem68 = iprot.readI64();
self.hotel_list.append(_elem68)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.check_in_date = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.check_out_date = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I64:
self.search_id = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRING:
self.session_id = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.STRUCT:
self.user_info = gen.cm.ttypes.UserInfo()
self.user_info.read(iprot)
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.I64:
self.filter_ota = iprot.readI64();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ListRtsRequest')
if self.hotel_list is not None:
oprot.writeFieldBegin('hotel_list', TType.LIST, 1)
oprot.writeListBegin(TType.I64, len(self.hotel_list))
for iter69 in self.hotel_list:
oprot.writeI64(iter69)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.check_in_date is not None:
oprot.writeFieldBegin('check_in_date', TType.I64, 2)
oprot.writeI64(self.check_in_date)
oprot.writeFieldEnd()
if self.check_out_date is not None:
oprot.writeFieldBegin('check_out_date', TType.I64, 3)
oprot.writeI64(self.check_out_date)
oprot.writeFieldEnd()
if self.search_id is not None:
oprot.writeFieldBegin('search_id', TType.I64, 4)
oprot.writeI64(self.search_id)
oprot.writeFieldEnd()
if self.session_id is not None:
oprot.writeFieldBegin('session_id', TType.STRING, 7)
oprot.writeString(self.session_id)
oprot.writeFieldEnd()
if self.user_info is not None:
oprot.writeFieldBegin('user_info', TType.STRUCT, 9)
self.user_info.write(oprot)
oprot.writeFieldEnd()
if self.filter_ota is not None:
oprot.writeFieldBegin('filter_ota', TType.I64, 10)
oprot.writeI64(self.filter_ota)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.hotel_list)
value = (value * 31) ^ hash(self.check_in_date)
value = (value * 31) ^ hash(self.check_out_date)
value = (value * 31) ^ hash(self.search_id)
value = (value * 31) ^ hash(self.session_id)
value = (value * 31) ^ hash(self.user_info)
value = (value * 31) ^ hash(self.filter_ota)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
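# --- Editorial usage sketch; not emitted by the Thrift compiler. ---
# Building and serializing a ListRtsRequest. The hotel ids and epoch dates are
# illustrative values only; filter_ota is left out so it falls back to the
# default 0 taken from thrift_spec[10][4].
def _example_write_list_rts_request():
  req = ListRtsRequest(
      hotel_list=[101, 102],         # hypothetical hotel ids
      check_in_date=1393632000,      # epoch seconds, illustrative
      check_out_date=1393718400,
      search_id=1,
      session_id='example-session',
  )
  buf = TTransport.TMemoryBuffer()
  req.write(TBinaryProtocol.TBinaryProtocol(buf))
  return buf.getvalue()              # binary-protocol encoding of the struct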
class ListRtsResponse:
"""
Attributes:
- search_id
- hotel_rts_info
- status
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'search_id', None, None, ), # 1
(2, TType.LIST, 'hotel_rts_info', (TType.STRUCT,(gen.cm.ttypes.HotelRtsInfo, gen.cm.ttypes.HotelRtsInfo.thrift_spec)), None, ), # 2
None, # 3
None, # 4
None, # 5
None, # 6
None, # 7
None, # 8
None, # 9
None, # 10
None, # 11
None, # 12
(13, TType.STRUCT, 'status', (gen.cm.ttypes.ServerStatus, gen.cm.ttypes.ServerStatus.thrift_spec), None, ), # 13
)
def __init__(self, search_id=None, hotel_rts_info=None, status=None,):
self.search_id = search_id
self.hotel_rts_info = hotel_rts_info
self.status = status
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.search_id = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.hotel_rts_info = []
(_etype73, _size70) = iprot.readListBegin()
for _i74 in xrange(_size70):
_elem75 = gen.cm.ttypes.HotelRtsInfo()
_elem75.read(iprot)
self.hotel_rts_info.append(_elem75)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.STRUCT:
self.status = gen.cm.ttypes.ServerStatus()
self.status.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ListRtsResponse')
if self.search_id is not None:
oprot.writeFieldBegin('search_id', TType.I64, 1)
oprot.writeI64(self.search_id)
oprot.writeFieldEnd()
if self.hotel_rts_info is not None:
oprot.writeFieldBegin('hotel_rts_info', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.hotel_rts_info))
for iter76 in self.hotel_rts_info:
iter76.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 13)
self.status.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.search_id)
value = (value * 31) ^ hash(self.hotel_rts_info)
value = (value * 31) ^ hash(self.status)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class DetailRequest:
"""
Attributes:
- hotel_id
- check_in_date
- check_out_date
- region_str
- region_id
- session_id
- user_info
- filter_ota
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'hotel_id', None, None, ), # 1
(2, TType.I64, 'check_in_date', None, None, ), # 2
(3, TType.I64, 'check_out_date', None, None, ), # 3
(4, TType.STRING, 'region_str', None, None, ), # 4
(5, TType.I32, 'region_id', None, 0, ), # 5
None, # 6
(7, TType.STRING, 'session_id', None, None, ), # 7
None, # 8
(9, TType.STRUCT, 'user_info', (gen.cm.ttypes.UserInfo, gen.cm.ttypes.UserInfo.thrift_spec), None, ), # 9
(10, TType.I64, 'filter_ota', None, 0, ), # 10
)
def __init__(self, hotel_id=None, check_in_date=None, check_out_date=None, region_str=None, region_id=thrift_spec[5][4], session_id=None, user_info=None, filter_ota=thrift_spec[10][4],):
self.hotel_id = hotel_id
self.check_in_date = check_in_date
self.check_out_date = check_out_date
self.region_str = region_str
self.region_id = region_id
self.session_id = session_id
self.user_info = user_info
self.filter_ota = filter_ota
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.hotel_id = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.check_in_date = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.check_out_date = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.region_str = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I32:
self.region_id = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRING:
self.session_id = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.STRUCT:
self.user_info = gen.cm.ttypes.UserInfo()
self.user_info.read(iprot)
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.I64:
self.filter_ota = iprot.readI64();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('DetailRequest')
if self.hotel_id is not None:
oprot.writeFieldBegin('hotel_id', TType.I32, 1)
oprot.writeI32(self.hotel_id)
oprot.writeFieldEnd()
if self.check_in_date is not None:
oprot.writeFieldBegin('check_in_date', TType.I64, 2)
oprot.writeI64(self.check_in_date)
oprot.writeFieldEnd()
if self.check_out_date is not None:
oprot.writeFieldBegin('check_out_date', TType.I64, 3)
oprot.writeI64(self.check_out_date)
oprot.writeFieldEnd()
if self.region_str is not None:
oprot.writeFieldBegin('region_str', TType.STRING, 4)
oprot.writeString(self.region_str)
oprot.writeFieldEnd()
if self.region_id is not None:
oprot.writeFieldBegin('region_id', TType.I32, 5)
oprot.writeI32(self.region_id)
oprot.writeFieldEnd()
if self.session_id is not None:
oprot.writeFieldBegin('session_id', TType.STRING, 7)
oprot.writeString(self.session_id)
oprot.writeFieldEnd()
if self.user_info is not None:
oprot.writeFieldBegin('user_info', TType.STRUCT, 9)
self.user_info.write(oprot)
oprot.writeFieldEnd()
if self.filter_ota is not None:
oprot.writeFieldBegin('filter_ota', TType.I64, 10)
oprot.writeI64(self.filter_ota)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.hotel_id)
value = (value * 31) ^ hash(self.check_in_date)
value = (value * 31) ^ hash(self.check_out_date)
value = (value * 31) ^ hash(self.region_str)
value = (value * 31) ^ hash(self.region_id)
value = (value * 31) ^ hash(self.session_id)
value = (value * 31) ^ hash(self.user_info)
value = (value * 31) ^ hash(self.filter_ota)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class DetailResponse:
"""
Attributes:
- search_id
- hotel_info
- provider_detail
- region_info
- status
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'search_id', None, None, ), # 1
(2, TType.STRUCT, 'hotel_info', (gen.cm.ttypes.HotelInfoDetail, gen.cm.ttypes.HotelInfoDetail.thrift_spec), None, ), # 2
(3, TType.LIST, 'provider_detail', (TType.STRUCT,(gen.cm.ttypes.ProviderDetail, gen.cm.ttypes.ProviderDetail.thrift_spec)), None, ), # 3
(4, TType.STRUCT, 'region_info', (gen.cm.ttypes.RegionInfo, gen.cm.ttypes.RegionInfo.thrift_spec), None, ), # 4
None, # 5
None, # 6
None, # 7
None, # 8
None, # 9
None, # 10
None, # 11
None, # 12
(13, TType.STRUCT, 'status', (gen.cm.ttypes.ServerStatus, gen.cm.ttypes.ServerStatus.thrift_spec), None, ), # 13
)
def __init__(self, search_id=None, hotel_info=None, provider_detail=None, region_info=None, status=None,):
self.search_id = search_id
self.hotel_info = hotel_info
self.provider_detail = provider_detail
self.region_info = region_info
self.status = status
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.search_id = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.hotel_info = gen.cm.ttypes.HotelInfoDetail()
self.hotel_info.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.provider_detail = []
(_etype80, _size77) = iprot.readListBegin()
for _i81 in xrange(_size77):
_elem82 = gen.cm.ttypes.ProviderDetail()
_elem82.read(iprot)
self.provider_detail.append(_elem82)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.region_info = gen.cm.ttypes.RegionInfo()
self.region_info.read(iprot)
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.STRUCT:
self.status = gen.cm.ttypes.ServerStatus()
self.status.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('DetailResponse')
if self.search_id is not None:
oprot.writeFieldBegin('search_id', TType.I64, 1)
oprot.writeI64(self.search_id)
oprot.writeFieldEnd()
if self.hotel_info is not None:
oprot.writeFieldBegin('hotel_info', TType.STRUCT, 2)
self.hotel_info.write(oprot)
oprot.writeFieldEnd()
if self.provider_detail is not None:
oprot.writeFieldBegin('provider_detail', TType.LIST, 3)
oprot.writeListBegin(TType.STRUCT, len(self.provider_detail))
for iter83 in self.provider_detail:
iter83.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.region_info is not None:
oprot.writeFieldBegin('region_info', TType.STRUCT, 4)
self.region_info.write(oprot)
oprot.writeFieldEnd()
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 13)
self.status.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.search_id)
value = (value * 31) ^ hash(self.hotel_info)
value = (value * 31) ^ hash(self.provider_detail)
value = (value * 31) ^ hash(self.region_info)
value = (value * 31) ^ hash(self.status)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class HotHotelResponse:
"""
Attributes:
- search_id
- hot_hotel
- status
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'search_id', None, None, ), # 1
(2, TType.LIST, 'hot_hotel', (TType.STRUCT,(gen.cm.ttypes.HotHotelInfo, gen.cm.ttypes.HotHotelInfo.thrift_spec)), None, ), # 2
None, # 3
None, # 4
None, # 5
None, # 6
None, # 7
None, # 8
None, # 9
None, # 10
None, # 11
None, # 12
(13, TType.STRUCT, 'status', (gen.cm.ttypes.ServerStatus, gen.cm.ttypes.ServerStatus.thrift_spec), None, ), # 13
)
def __init__(self, search_id=None, hot_hotel=None, status=None,):
self.search_id = search_id
self.hot_hotel = hot_hotel
self.status = status
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.search_id = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.hot_hotel = []
(_etype87, _size84) = iprot.readListBegin()
for _i88 in xrange(_size84):
_elem89 = gen.cm.ttypes.HotHotelInfo()
_elem89.read(iprot)
self.hot_hotel.append(_elem89)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.STRUCT:
self.status = gen.cm.ttypes.ServerStatus()
self.status.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('HotHotelResponse')
if self.search_id is not None:
oprot.writeFieldBegin('search_id', TType.I64, 1)
oprot.writeI64(self.search_id)
oprot.writeFieldEnd()
if self.hot_hotel is not None:
oprot.writeFieldBegin('hot_hotel', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.hot_hotel))
for iter90 in self.hot_hotel:
iter90.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 13)
self.status.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.search_id)
value = (value * 31) ^ hash(self.hot_hotel)
value = (value * 31) ^ hash(self.status)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
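# --- Editorial usage sketch; not emitted by the Thrift compiler. ---
# Every read()/write() above first checks for TBinaryProtocolAccelerated plus
# the optional C extension `fastbinary` and otherwise falls back to the
# hand-written field-by-field code. Both paths are expected to produce the
# same binary-protocol bytes, so callers may opt in opportunistically, as
# sketched here for any of the structs defined in this module.
def _example_write_with_best_protocol(struct_value):
  buf = TTransport.TMemoryBuffer()
  if fastbinary is not None:
    proto = TBinaryProtocol.TBinaryProtocolAccelerated(buf)  # C fast path
  else:
    proto = TBinaryProtocol.TBinaryProtocol(buf)             # pure-Python fallback
  struct_value.write(proto)
  return buf.getvalue()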
class DetailRtsRequest:
"""
Attributes:
- hotel_id
- check_in_date
- check_out_date
- search_id
- provider_list
- session_id
- user_info
- filter_ota
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'hotel_id', None, None, ), # 1
(2, TType.I64, 'check_in_date', None, None, ), # 2
(3, TType.I64, 'check_out_date', None, None, ), # 3
(4, TType.I64, 'search_id', None, None, ), # 4
(5, TType.LIST, 'provider_list', (TType.I64,None), None, ), # 5
None, # 6
None, # 7
(8, TType.STRING, 'session_id', None, None, ), # 8
(9, TType.STRUCT, 'user_info', (gen.cm.ttypes.UserInfo, gen.cm.ttypes.UserInfo.thrift_spec), None, ), # 9
(10, TType.I64, 'filter_ota', None, 0, ), # 10
)
def __init__(self, hotel_id=None, check_in_date=None, check_out_date=None, search_id=None, provider_list=None, session_id=None, user_info=None, filter_ota=thrift_spec[10][4],):
self.hotel_id = hotel_id
self.check_in_date = check_in_date
self.check_out_date = check_out_date
self.search_id = search_id
self.provider_list = provider_list
self.session_id = session_id
self.user_info = user_info
self.filter_ota = filter_ota
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.hotel_id = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.check_in_date = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.check_out_date = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I64:
self.search_id = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.LIST:
self.provider_list = []
(_etype94, _size91) = iprot.readListBegin()
for _i95 in xrange(_size91):
_elem96 = iprot.readI64();
self.provider_list.append(_elem96)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.STRING:
self.session_id = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.STRUCT:
self.user_info = gen.cm.ttypes.UserInfo()
self.user_info.read(iprot)
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.I64:
self.filter_ota = iprot.readI64();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('DetailRtsRequest')
if self.hotel_id is not None:
oprot.writeFieldBegin('hotel_id', TType.I32, 1)
oprot.writeI32(self.hotel_id)
oprot.writeFieldEnd()
if self.check_in_date is not None:
oprot.writeFieldBegin('check_in_date', TType.I64, 2)
oprot.writeI64(self.check_in_date)
oprot.writeFieldEnd()
if self.check_out_date is not None:
oprot.writeFieldBegin('check_out_date', TType.I64, 3)
oprot.writeI64(self.check_out_date)
oprot.writeFieldEnd()
if self.search_id is not None:
oprot.writeFieldBegin('search_id', TType.I64, 4)
oprot.writeI64(self.search_id)
oprot.writeFieldEnd()
if self.provider_list is not None:
oprot.writeFieldBegin('provider_list', TType.LIST, 5)
oprot.writeListBegin(TType.I64, len(self.provider_list))
for iter97 in self.provider_list:
oprot.writeI64(iter97)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.session_id is not None:
oprot.writeFieldBegin('session_id', TType.STRING, 8)
oprot.writeString(self.session_id)
oprot.writeFieldEnd()
if self.user_info is not None:
oprot.writeFieldBegin('user_info', TType.STRUCT, 9)
self.user_info.write(oprot)
oprot.writeFieldEnd()
if self.filter_ota is not None:
oprot.writeFieldBegin('filter_ota', TType.I64, 10)
oprot.writeI64(self.filter_ota)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.hotel_id)
value = (value * 31) ^ hash(self.check_in_date)
value = (value * 31) ^ hash(self.check_out_date)
value = (value * 31) ^ hash(self.search_id)
value = (value * 31) ^ hash(self.provider_list)
value = (value * 31) ^ hash(self.session_id)
value = (value * 31) ^ hash(self.user_info)
value = (value * 31) ^ hash(self.filter_ota)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class DetailRtsResponse:
"""
Attributes:
- search_id
- provider_detail
- session_id
- status
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'search_id', None, None, ), # 1
(2, TType.LIST, 'provider_detail', (TType.STRUCT,(gen.cm.ttypes.ProviderDetailInc, gen.cm.ttypes.ProviderDetailInc.thrift_spec)), None, ), # 2
None, # 3
None, # 4
None, # 5
None, # 6
(7, TType.STRING, 'session_id', None, None, ), # 7
None, # 8
None, # 9
None, # 10
None, # 11
None, # 12
(13, TType.STRUCT, 'status', (gen.cm.ttypes.ServerStatus, gen.cm.ttypes.ServerStatus.thrift_spec), None, ), # 13
)
def __init__(self, search_id=None, provider_detail=None, session_id=None, status=None,):
self.search_id = search_id
self.provider_detail = provider_detail
self.session_id = session_id
self.status = status
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.search_id = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.provider_detail = []
(_etype101, _size98) = iprot.readListBegin()
for _i102 in xrange(_size98):
_elem103 = gen.cm.ttypes.ProviderDetailInc()
_elem103.read(iprot)
self.provider_detail.append(_elem103)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRING:
self.session_id = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.STRUCT:
self.status = gen.cm.ttypes.ServerStatus()
self.status.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('DetailRtsResponse')
if self.search_id is not None:
oprot.writeFieldBegin('search_id', TType.I64, 1)
oprot.writeI64(self.search_id)
oprot.writeFieldEnd()
if self.provider_detail is not None:
oprot.writeFieldBegin('provider_detail', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.provider_detail))
for iter104 in self.provider_detail:
iter104.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.session_id is not None:
oprot.writeFieldBegin('session_id', TType.STRING, 7)
oprot.writeString(self.session_id)
oprot.writeFieldEnd()
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 13)
self.status.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.search_id)
value = (value * 31) ^ hash(self.provider_detail)
value = (value * 31) ^ hash(self.session_id)
value = (value * 31) ^ hash(self.status)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class FastFilterInfo:
"""
Attributes:
- type_id
- keyword_cn
- keyword_en
- count
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'type_id', None, None, ), # 1
(2, TType.STRING, 'keyword_cn', None, None, ), # 2
(3, TType.STRING, 'keyword_en', None, None, ), # 3
(4, TType.I32, 'count', None, None, ), # 4
)
def __init__(self, type_id=None, keyword_cn=None, keyword_en=None, count=None,):
self.type_id = type_id
self.keyword_cn = keyword_cn
self.keyword_en = keyword_en
self.count = count
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.type_id = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.keyword_cn = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.keyword_en = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I32:
self.count = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('FastFilterInfo')
if self.type_id is not None:
oprot.writeFieldBegin('type_id', TType.I32, 1)
oprot.writeI32(self.type_id)
oprot.writeFieldEnd()
if self.keyword_cn is not None:
oprot.writeFieldBegin('keyword_cn', TType.STRING, 2)
oprot.writeString(self.keyword_cn)
oprot.writeFieldEnd()
if self.keyword_en is not None:
oprot.writeFieldBegin('keyword_en', TType.STRING, 3)
oprot.writeString(self.keyword_en)
oprot.writeFieldEnd()
if self.count is not None:
oprot.writeFieldBegin('count', TType.I32, 4)
oprot.writeI32(self.count)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.type_id)
value = (value * 31) ^ hash(self.keyword_cn)
value = (value * 31) ^ hash(self.keyword_en)
value = (value * 31) ^ hash(self.count)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
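# --- Editorial usage sketch; not emitted by the Thrift compiler. ---
# Round-tripping a FastFilterInfo through the binary protocol; the generated
# __eq__ compares __dict__, so the decoded copy should compare equal to the
# original. Field values are illustrative.
def _example_roundtrip_fast_filter_info():
  original = FastFilterInfo(type_id=1, keyword_cn='wifi', keyword_en='wifi', count=3)
  buf = TTransport.TMemoryBuffer()
  original.write(TBinaryProtocol.TBinaryProtocol(buf))
  decoded = FastFilterInfo()
  decoded.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(buf.getvalue())))
  assert decoded == original           # relies on the generated __eq__/__ne__
  return decoded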
class HotHotelRequest:
"""
Attributes:
- region_id
- user_info
- session_id
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'region_id', (TType.I32,None), None, ), # 1
(2, TType.STRUCT, 'user_info', (gen.cm.ttypes.UserInfo, gen.cm.ttypes.UserInfo.thrift_spec), None, ), # 2
None, # 3
None, # 4
None, # 5
None, # 6
(7, TType.STRING, 'session_id', None, None, ), # 7
)
def __init__(self, region_id=None, user_info=None, session_id=None,):
self.region_id = region_id
self.user_info = user_info
self.session_id = session_id
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.region_id = []
(_etype108, _size105) = iprot.readListBegin()
for _i109 in xrange(_size105):
_elem110 = iprot.readI32();
self.region_id.append(_elem110)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.user_info = gen.cm.ttypes.UserInfo()
self.user_info.read(iprot)
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRING:
self.session_id = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('HotHotelRequest')
if self.region_id is not None:
oprot.writeFieldBegin('region_id', TType.LIST, 1)
oprot.writeListBegin(TType.I32, len(self.region_id))
for iter111 in self.region_id:
oprot.writeI32(iter111)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.user_info is not None:
oprot.writeFieldBegin('user_info', TType.STRUCT, 2)
self.user_info.write(oprot)
oprot.writeFieldEnd()
if self.session_id is not None:
oprot.writeFieldBegin('session_id', TType.STRING, 7)
oprot.writeString(self.session_id)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.region_id)
value = (value * 31) ^ hash(self.user_info)
value = (value * 31) ^ hash(self.session_id)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class NearbyRequest:
"""
Attributes:
- hotel_id
- region_id
- session_id
- user_info
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'hotel_id', None, None, ), # 1
(2, TType.I32, 'region_id', None, None, ), # 2
None, # 3
None, # 4
None, # 5
None, # 6
(7, TType.STRING, 'session_id', None, None, ), # 7
None, # 8
(9, TType.STRUCT, 'user_info', (gen.cm.ttypes.UserInfo, gen.cm.ttypes.UserInfo.thrift_spec), None, ), # 9
)
def __init__(self, hotel_id=None, region_id=None, session_id=None, user_info=None,):
self.hotel_id = hotel_id
self.region_id = region_id
self.session_id = session_id
self.user_info = user_info
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.hotel_id = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.region_id = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRING:
self.session_id = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.STRUCT:
self.user_info = gen.cm.ttypes.UserInfo()
self.user_info.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('NearbyRequest')
if self.hotel_id is not None:
oprot.writeFieldBegin('hotel_id', TType.I32, 1)
oprot.writeI32(self.hotel_id)
oprot.writeFieldEnd()
if self.region_id is not None:
oprot.writeFieldBegin('region_id', TType.I32, 2)
oprot.writeI32(self.region_id)
oprot.writeFieldEnd()
if self.session_id is not None:
oprot.writeFieldBegin('session_id', TType.STRING, 7)
oprot.writeString(self.session_id)
oprot.writeFieldEnd()
if self.user_info is not None:
oprot.writeFieldBegin('user_info', TType.STRUCT, 9)
self.user_info.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.hotel_id)
value = (value * 31) ^ hash(self.region_id)
value = (value * 31) ^ hash(self.session_id)
value = (value * 31) ^ hash(self.user_info)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class NearbyResponse:
"""
Attributes:
- search_id
- hotel_name_cn
- hotel_name_en
- poi
- status
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'search_id', None, None, ), # 1
(2, TType.STRING, 'hotel_name_cn', None, None, ), # 2
(3, TType.STRING, 'hotel_name_en', None, None, ), # 3
(4, TType.LIST, 'poi', (TType.STRUCT,(gen.cm.ttypes.NearbyPOI, gen.cm.ttypes.NearbyPOI.thrift_spec)), None, ), # 4
None, # 5
None, # 6
None, # 7
None, # 8
None, # 9
None, # 10
None, # 11
None, # 12
(13, TType.STRUCT, 'status', (gen.cm.ttypes.ServerStatus, gen.cm.ttypes.ServerStatus.thrift_spec), None, ), # 13
)
def __init__(self, search_id=None, hotel_name_cn=None, hotel_name_en=None, poi=None, status=None,):
self.search_id = search_id
self.hotel_name_cn = hotel_name_cn
self.hotel_name_en = hotel_name_en
self.poi = poi
self.status = status
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.search_id = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.hotel_name_cn = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.hotel_name_en = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.LIST:
self.poi = []
(_etype115, _size112) = iprot.readListBegin()
for _i116 in xrange(_size112):
_elem117 = gen.cm.ttypes.NearbyPOI()
_elem117.read(iprot)
self.poi.append(_elem117)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.STRUCT:
self.status = gen.cm.ttypes.ServerStatus()
self.status.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('NearbyResponse')
if self.search_id is not None:
oprot.writeFieldBegin('search_id', TType.I64, 1)
oprot.writeI64(self.search_id)
oprot.writeFieldEnd()
if self.hotel_name_cn is not None:
oprot.writeFieldBegin('hotel_name_cn', TType.STRING, 2)
oprot.writeString(self.hotel_name_cn)
oprot.writeFieldEnd()
if self.hotel_name_en is not None:
oprot.writeFieldBegin('hotel_name_en', TType.STRING, 3)
oprot.writeString(self.hotel_name_en)
oprot.writeFieldEnd()
if self.poi is not None:
oprot.writeFieldBegin('poi', TType.LIST, 4)
oprot.writeListBegin(TType.STRUCT, len(self.poi))
for iter118 in self.poi:
iter118.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 13)
self.status.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.search_id)
value = (value * 31) ^ hash(self.hotel_name_cn)
value = (value * 31) ^ hash(self.hotel_name_en)
value = (value * 31) ^ hash(self.poi)
value = (value * 31) ^ hash(self.status)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class InnerSearchRequest:
"""
Attributes:
- inner_search_type
- hotel_attr
- room_attr
- product_attr
- geo_attr
- customer_attr
- page_rank_attr
- caller_attr
- return_attr
- user_info
- filter_attr
- rec_attr
- fastfilter_attr
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'inner_search_type', None, None, ), # 1
(2, TType.STRUCT, 'hotel_attr', (gen.cm.ttypes.HotelAttribute, gen.cm.ttypes.HotelAttribute.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'room_attr', (gen.cm.ttypes.RoomAttribute, gen.cm.ttypes.RoomAttribute.thrift_spec), None, ), # 3
(4, TType.STRUCT, 'product_attr', (gen.cm.ttypes.ProductAttribute, gen.cm.ttypes.ProductAttribute.thrift_spec), None, ), # 4
(5, TType.STRUCT, 'geo_attr', (gen.cm.ttypes.GeoAttribute, gen.cm.ttypes.GeoAttribute.thrift_spec), None, ), # 5
(6, TType.STRUCT, 'customer_attr', (gen.cm.ttypes.CustomerAttribute, gen.cm.ttypes.CustomerAttribute.thrift_spec), None, ), # 6
(7, TType.STRUCT, 'page_rank_attr', (gen.cm.ttypes.PageRankAttribute, gen.cm.ttypes.PageRankAttribute.thrift_spec), None, ), # 7
(8, TType.STRUCT, 'caller_attr', (gen.cm.ttypes.CallerAttribute, gen.cm.ttypes.CallerAttribute.thrift_spec), None, ), # 8
(9, TType.STRUCT, 'return_attr', (gen.cm.ttypes.ReturnAttribute, gen.cm.ttypes.ReturnAttribute.thrift_spec), None, ), # 9
(10, TType.STRUCT, 'user_info', (gen.cm.ttypes.UserInfo, gen.cm.ttypes.UserInfo.thrift_spec), None, ), # 10
(11, TType.STRUCT, 'filter_attr', (gen.cm.ttypes.FilterAttribute, gen.cm.ttypes.FilterAttribute.thrift_spec), None, ), # 11
(12, TType.STRUCT, 'rec_attr', (gen.cm.ttypes.RecommendAttribute, gen.cm.ttypes.RecommendAttribute.thrift_spec), None, ), # 12
(13, TType.STRUCT, 'fastfilter_attr', (gen.cm.ttypes.FastFilterAttribute, gen.cm.ttypes.FastFilterAttribute.thrift_spec), None, ), # 13
)
def __init__(self, inner_search_type=None, hotel_attr=None, room_attr=None, product_attr=None, geo_attr=None, customer_attr=None, page_rank_attr=None, caller_attr=None, return_attr=None, user_info=None, filter_attr=None, rec_attr=None, fastfilter_attr=None,):
self.inner_search_type = inner_search_type
self.hotel_attr = hotel_attr
self.room_attr = room_attr
self.product_attr = product_attr
self.geo_attr = geo_attr
self.customer_attr = customer_attr
self.page_rank_attr = page_rank_attr
self.caller_attr = caller_attr
self.return_attr = return_attr
self.user_info = user_info
self.filter_attr = filter_attr
self.rec_attr = rec_attr
self.fastfilter_attr = fastfilter_attr
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.inner_search_type = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.hotel_attr = gen.cm.ttypes.HotelAttribute()
self.hotel_attr.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.room_attr = gen.cm.ttypes.RoomAttribute()
self.room_attr.read(iprot)
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.product_attr = gen.cm.ttypes.ProductAttribute()
self.product_attr.read(iprot)
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRUCT:
self.geo_attr = gen.cm.ttypes.GeoAttribute()
self.geo_attr.read(iprot)
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRUCT:
self.customer_attr = gen.cm.ttypes.CustomerAttribute()
self.customer_attr.read(iprot)
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRUCT:
self.page_rank_attr = gen.cm.ttypes.PageRankAttribute()
self.page_rank_attr.read(iprot)
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.STRUCT:
self.caller_attr = gen.cm.ttypes.CallerAttribute()
self.caller_attr.read(iprot)
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.STRUCT:
self.return_attr = gen.cm.ttypes.ReturnAttribute()
self.return_attr.read(iprot)
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.STRUCT:
self.user_info = gen.cm.ttypes.UserInfo()
self.user_info.read(iprot)
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.STRUCT:
self.filter_attr = gen.cm.ttypes.FilterAttribute()
self.filter_attr.read(iprot)
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.STRUCT:
self.rec_attr = gen.cm.ttypes.RecommendAttribute()
self.rec_attr.read(iprot)
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.STRUCT:
self.fastfilter_attr = gen.cm.ttypes.FastFilterAttribute()
self.fastfilter_attr.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('InnerSearchRequest')
if self.inner_search_type is not None:
oprot.writeFieldBegin('inner_search_type', TType.I32, 1)
oprot.writeI32(self.inner_search_type)
oprot.writeFieldEnd()
if self.hotel_attr is not None:
oprot.writeFieldBegin('hotel_attr', TType.STRUCT, 2)
self.hotel_attr.write(oprot)
oprot.writeFieldEnd()
if self.room_attr is not None:
oprot.writeFieldBegin('room_attr', TType.STRUCT, 3)
self.room_attr.write(oprot)
oprot.writeFieldEnd()
if self.product_attr is not None:
oprot.writeFieldBegin('product_attr', TType.STRUCT, 4)
self.product_attr.write(oprot)
oprot.writeFieldEnd()
if self.geo_attr is not None:
oprot.writeFieldBegin('geo_attr', TType.STRUCT, 5)
self.geo_attr.write(oprot)
oprot.writeFieldEnd()
if self.customer_attr is not None:
oprot.writeFieldBegin('customer_attr', TType.STRUCT, 6)
self.customer_attr.write(oprot)
oprot.writeFieldEnd()
if self.page_rank_attr is not None:
oprot.writeFieldBegin('page_rank_attr', TType.STRUCT, 7)
self.page_rank_attr.write(oprot)
oprot.writeFieldEnd()
if self.caller_attr is not None:
oprot.writeFieldBegin('caller_attr', TType.STRUCT, 8)
self.caller_attr.write(oprot)
oprot.writeFieldEnd()
if self.return_attr is not None:
oprot.writeFieldBegin('return_attr', TType.STRUCT, 9)
self.return_attr.write(oprot)
oprot.writeFieldEnd()
if self.user_info is not None:
oprot.writeFieldBegin('user_info', TType.STRUCT, 10)
self.user_info.write(oprot)
oprot.writeFieldEnd()
if self.filter_attr is not None:
oprot.writeFieldBegin('filter_attr', TType.STRUCT, 11)
self.filter_attr.write(oprot)
oprot.writeFieldEnd()
if self.rec_attr is not None:
oprot.writeFieldBegin('rec_attr', TType.STRUCT, 12)
self.rec_attr.write(oprot)
oprot.writeFieldEnd()
if self.fastfilter_attr is not None:
oprot.writeFieldBegin('fastfilter_attr', TType.STRUCT, 13)
self.fastfilter_attr.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.inner_search_type)
value = (value * 31) ^ hash(self.hotel_attr)
value = (value * 31) ^ hash(self.room_attr)
value = (value * 31) ^ hash(self.product_attr)
value = (value * 31) ^ hash(self.geo_attr)
value = (value * 31) ^ hash(self.customer_attr)
value = (value * 31) ^ hash(self.page_rank_attr)
value = (value * 31) ^ hash(self.caller_attr)
value = (value * 31) ^ hash(self.return_attr)
value = (value * 31) ^ hash(self.user_info)
value = (value * 31) ^ hash(self.filter_attr)
value = (value * 31) ^ hash(self.rec_attr)
value = (value * 31) ^ hash(self.fastfilter_attr)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class InnerSearchResponse:
"""
Attributes:
- status
- total
- count
- page_size
- page_index
- hotels_details
- statistics
- query_parse_result
- min_weifang_price
- promotion_group
- debug_info
- filter_result
- fast_filter_info
- debug_response
- filter_list
- rec_response
- common_conf
- user_track
- grandson
- discounts
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'status', (gen.cm.ttypes.ServerStatus, gen.cm.ttypes.ServerStatus.thrift_spec), None, ), # 1
(2, TType.I32, 'total', None, None, ), # 2
(3, TType.I32, 'count', None, None, ), # 3
(4, TType.I32, 'page_size', None, None, ), # 4
(5, TType.I32, 'page_index', None, None, ), # 5
(6, TType.LIST, 'hotels_details', (TType.STRUCT,(gen.cm.ttypes.HotelDetail, gen.cm.ttypes.HotelDetail.thrift_spec)), None, ), # 6
(7, TType.STRUCT, 'statistics', (gen.cm.ttypes.Statistics, gen.cm.ttypes.Statistics.thrift_spec), None, ), # 7
(8, TType.STRUCT, 'query_parse_result', (gen.cm.ttypes.QueryParseResult, gen.cm.ttypes.QueryParseResult.thrift_spec), None, ), # 8
(9, TType.I32, 'min_weifang_price', None, None, ), # 9
(10, TType.LIST, 'promotion_group', (TType.STRUCT,(gen.cm.ttypes.PromotionGroup, gen.cm.ttypes.PromotionGroup.thrift_spec)), None, ), # 10
None, # 11
(12, TType.STRUCT, 'filter_result', (gen.cm.ttypes.FilterResult, gen.cm.ttypes.FilterResult.thrift_spec), None, ), # 12
(13, TType.LIST, 'fast_filter_info', (TType.STRUCT,(FastFilterInfo, FastFilterInfo.thrift_spec)), None, ), # 13
None, # 14
None, # 15
None, # 16
None, # 17
None, # 18
None, # 19
None, # 20
(21, TType.STRING, 'debug_info', None, None, ), # 21
(22, TType.LIST, 'debug_response', (TType.STRUCT,(gen.cm.ttypes.DebugResponse, gen.cm.ttypes.DebugResponse.thrift_spec)), None, ), # 22
(23, TType.LIST, 'filter_list', (TType.STRUCT,(gen.cm.ttypes.FilterList, gen.cm.ttypes.FilterList.thrift_spec)), None, ), # 23
(24, TType.STRUCT, 'rec_response', (gen.cm.ttypes.Recommend, gen.cm.ttypes.Recommend.thrift_spec), None, ), # 24
(25, TType.STRUCT, 'common_conf', (gen.cm.ttypes.CommonConf, gen.cm.ttypes.CommonConf.thrift_spec), None, ), # 25
(26, TType.STRUCT, 'user_track', (gen.cm.ttypes.UserTrack, gen.cm.ttypes.UserTrack.thrift_spec), None, ), # 26
(27, TType.I32, 'grandson', None, None, ), # 27
(28, TType.LIST, 'discounts', (TType.DOUBLE,None), None, ), # 28
)
def __init__(self, status=None, total=None, count=None, page_size=None, page_index=None, hotels_details=None, statistics=None, query_parse_result=None, min_weifang_price=None, promotion_group=None, debug_info=None, filter_result=None, fast_filter_info=None, debug_response=None, filter_list=None, rec_response=None, common_conf=None, user_track=None, grandson=None, discounts=None,):
self.status = status
self.total = total
self.count = count
self.page_size = page_size
self.page_index = page_index
self.hotels_details = hotels_details
self.statistics = statistics
self.query_parse_result = query_parse_result
self.min_weifang_price = min_weifang_price
self.promotion_group = promotion_group
self.debug_info = debug_info
self.filter_result = filter_result
self.fast_filter_info = fast_filter_info
self.debug_response = debug_response
self.filter_list = filter_list
self.rec_response = rec_response
self.common_conf = common_conf
self.user_track = user_track
self.grandson = grandson
self.discounts = discounts
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.status = gen.cm.ttypes.ServerStatus()
self.status.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.total = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.count = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I32:
self.page_size = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I32:
self.page_index = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.LIST:
self.hotels_details = []
(_etype122, _size119) = iprot.readListBegin()
for _i123 in xrange(_size119):
_elem124 = gen.cm.ttypes.HotelDetail()
_elem124.read(iprot)
self.hotels_details.append(_elem124)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRUCT:
self.statistics = gen.cm.ttypes.Statistics()
self.statistics.read(iprot)
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.STRUCT:
self.query_parse_result = gen.cm.ttypes.QueryParseResult()
self.query_parse_result.read(iprot)
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.I32:
self.min_weifang_price = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.LIST:
self.promotion_group = []
(_etype128, _size125) = iprot.readListBegin()
for _i129 in xrange(_size125):
_elem130 = gen.cm.ttypes.PromotionGroup()
_elem130.read(iprot)
self.promotion_group.append(_elem130)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 21:
if ftype == TType.STRING:
self.debug_info = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.STRUCT:
self.filter_result = gen.cm.ttypes.FilterResult()
self.filter_result.read(iprot)
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.LIST:
self.fast_filter_info = []
(_etype134, _size131) = iprot.readListBegin()
for _i135 in xrange(_size131):
_elem136 = FastFilterInfo()
_elem136.read(iprot)
self.fast_filter_info.append(_elem136)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 22:
if ftype == TType.LIST:
self.debug_response = []
(_etype140, _size137) = iprot.readListBegin()
for _i141 in xrange(_size137):
_elem142 = gen.cm.ttypes.DebugResponse()
_elem142.read(iprot)
self.debug_response.append(_elem142)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 23:
if ftype == TType.LIST:
self.filter_list = []
(_etype146, _size143) = iprot.readListBegin()
for _i147 in xrange(_size143):
_elem148 = gen.cm.ttypes.FilterList()
_elem148.read(iprot)
self.filter_list.append(_elem148)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 24:
if ftype == TType.STRUCT:
self.rec_response = gen.cm.ttypes.Recommend()
self.rec_response.read(iprot)
else:
iprot.skip(ftype)
elif fid == 25:
if ftype == TType.STRUCT:
self.common_conf = gen.cm.ttypes.CommonConf()
self.common_conf.read(iprot)
else:
iprot.skip(ftype)
elif fid == 26:
if ftype == TType.STRUCT:
self.user_track = gen.cm.ttypes.UserTrack()
self.user_track.read(iprot)
else:
iprot.skip(ftype)
elif fid == 27:
if ftype == TType.I32:
self.grandson = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 28:
if ftype == TType.LIST:
self.discounts = []
(_etype152, _size149) = iprot.readListBegin()
for _i153 in xrange(_size149):
_elem154 = iprot.readDouble();
self.discounts.append(_elem154)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('InnerSearchResponse')
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 1)
self.status.write(oprot)
oprot.writeFieldEnd()
if self.total is not None:
oprot.writeFieldBegin('total', TType.I32, 2)
oprot.writeI32(self.total)
oprot.writeFieldEnd()
if self.count is not None:
oprot.writeFieldBegin('count', TType.I32, 3)
oprot.writeI32(self.count)
oprot.writeFieldEnd()
if self.page_size is not None:
oprot.writeFieldBegin('page_size', TType.I32, 4)
oprot.writeI32(self.page_size)
oprot.writeFieldEnd()
if self.page_index is not None:
oprot.writeFieldBegin('page_index', TType.I32, 5)
oprot.writeI32(self.page_index)
oprot.writeFieldEnd()
if self.hotels_details is not None:
oprot.writeFieldBegin('hotels_details', TType.LIST, 6)
oprot.writeListBegin(TType.STRUCT, len(self.hotels_details))
for iter155 in self.hotels_details:
iter155.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.statistics is not None:
oprot.writeFieldBegin('statistics', TType.STRUCT, 7)
self.statistics.write(oprot)
oprot.writeFieldEnd()
if self.query_parse_result is not None:
oprot.writeFieldBegin('query_parse_result', TType.STRUCT, 8)
self.query_parse_result.write(oprot)
oprot.writeFieldEnd()
if self.min_weifang_price is not None:
oprot.writeFieldBegin('min_weifang_price', TType.I32, 9)
oprot.writeI32(self.min_weifang_price)
oprot.writeFieldEnd()
if self.promotion_group is not None:
oprot.writeFieldBegin('promotion_group', TType.LIST, 10)
oprot.writeListBegin(TType.STRUCT, len(self.promotion_group))
for iter156 in self.promotion_group:
iter156.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.filter_result is not None:
oprot.writeFieldBegin('filter_result', TType.STRUCT, 12)
self.filter_result.write(oprot)
oprot.writeFieldEnd()
if self.fast_filter_info is not None:
oprot.writeFieldBegin('fast_filter_info', TType.LIST, 13)
oprot.writeListBegin(TType.STRUCT, len(self.fast_filter_info))
for iter157 in self.fast_filter_info:
iter157.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.debug_info is not None:
oprot.writeFieldBegin('debug_info', TType.STRING, 21)
oprot.writeString(self.debug_info)
oprot.writeFieldEnd()
if self.debug_response is not None:
oprot.writeFieldBegin('debug_response', TType.LIST, 22)
oprot.writeListBegin(TType.STRUCT, len(self.debug_response))
for iter158 in self.debug_response:
iter158.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.filter_list is not None:
oprot.writeFieldBegin('filter_list', TType.LIST, 23)
oprot.writeListBegin(TType.STRUCT, len(self.filter_list))
for iter159 in self.filter_list:
iter159.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.rec_response is not None:
oprot.writeFieldBegin('rec_response', TType.STRUCT, 24)
self.rec_response.write(oprot)
oprot.writeFieldEnd()
if self.common_conf is not None:
oprot.writeFieldBegin('common_conf', TType.STRUCT, 25)
self.common_conf.write(oprot)
oprot.writeFieldEnd()
if self.user_track is not None:
oprot.writeFieldBegin('user_track', TType.STRUCT, 26)
self.user_track.write(oprot)
oprot.writeFieldEnd()
if self.grandson is not None:
oprot.writeFieldBegin('grandson', TType.I32, 27)
oprot.writeI32(self.grandson)
oprot.writeFieldEnd()
if self.discounts is not None:
oprot.writeFieldBegin('discounts', TType.LIST, 28)
oprot.writeListBegin(TType.DOUBLE, len(self.discounts))
for iter160 in self.discounts:
oprot.writeDouble(iter160)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.status)
value = (value * 31) ^ hash(self.total)
value = (value * 31) ^ hash(self.count)
value = (value * 31) ^ hash(self.page_size)
value = (value * 31) ^ hash(self.page_index)
value = (value * 31) ^ hash(self.hotels_details)
value = (value * 31) ^ hash(self.statistics)
value = (value * 31) ^ hash(self.query_parse_result)
value = (value * 31) ^ hash(self.min_weifang_price)
value = (value * 31) ^ hash(self.promotion_group)
value = (value * 31) ^ hash(self.debug_info)
value = (value * 31) ^ hash(self.filter_result)
value = (value * 31) ^ hash(self.fast_filter_info)
value = (value * 31) ^ hash(self.debug_response)
value = (value * 31) ^ hash(self.filter_list)
value = (value * 31) ^ hash(self.rec_response)
value = (value * 31) ^ hash(self.common_conf)
value = (value * 31) ^ hash(self.user_track)
value = (value * 31) ^ hash(self.grandson)
value = (value * 31) ^ hash(self.discounts)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class FilterSearchRequest:
"""
Attributes:
- filter_search_type
- geo_attr
- caller_attr
- filter_attr
- user_info
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'filter_search_type', None, None, ), # 1
(2, TType.STRUCT, 'geo_attr', (gen.cm.ttypes.GeoAttribute, gen.cm.ttypes.GeoAttribute.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'caller_attr', (gen.cm.ttypes.CallerAttribute, gen.cm.ttypes.CallerAttribute.thrift_spec), None, ), # 3
(4, TType.STRUCT, 'filter_attr', (gen.cm.ttypes.FilterAttribute, gen.cm.ttypes.FilterAttribute.thrift_spec), None, ), # 4
(5, TType.STRUCT, 'user_info', (gen.cm.ttypes.UserInfo, gen.cm.ttypes.UserInfo.thrift_spec), None, ), # 5
)
def __init__(self, filter_search_type=None, geo_attr=None, caller_attr=None, filter_attr=None, user_info=None,):
self.filter_search_type = filter_search_type
self.geo_attr = geo_attr
self.caller_attr = caller_attr
self.filter_attr = filter_attr
self.user_info = user_info
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.filter_search_type = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.geo_attr = gen.cm.ttypes.GeoAttribute()
self.geo_attr.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.caller_attr = gen.cm.ttypes.CallerAttribute()
self.caller_attr.read(iprot)
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.filter_attr = gen.cm.ttypes.FilterAttribute()
self.filter_attr.read(iprot)
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRUCT:
self.user_info = gen.cm.ttypes.UserInfo()
self.user_info.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('FilterSearchRequest')
if self.filter_search_type is not None:
oprot.writeFieldBegin('filter_search_type', TType.I32, 1)
oprot.writeI32(self.filter_search_type)
oprot.writeFieldEnd()
if self.geo_attr is not None:
oprot.writeFieldBegin('geo_attr', TType.STRUCT, 2)
self.geo_attr.write(oprot)
oprot.writeFieldEnd()
if self.caller_attr is not None:
oprot.writeFieldBegin('caller_attr', TType.STRUCT, 3)
self.caller_attr.write(oprot)
oprot.writeFieldEnd()
if self.filter_attr is not None:
oprot.writeFieldBegin('filter_attr', TType.STRUCT, 4)
self.filter_attr.write(oprot)
oprot.writeFieldEnd()
if self.user_info is not None:
oprot.writeFieldBegin('user_info', TType.STRUCT, 5)
self.user_info.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.filter_search_type)
value = (value * 31) ^ hash(self.geo_attr)
value = (value * 31) ^ hash(self.caller_attr)
value = (value * 31) ^ hash(self.filter_attr)
value = (value * 31) ^ hash(self.user_info)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class FilterSearchResponse:
"""
Attributes:
- status
- filter_result
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'status', (gen.cm.ttypes.ServerStatus, gen.cm.ttypes.ServerStatus.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'filter_result', (gen.cm.ttypes.FilterResult, gen.cm.ttypes.FilterResult.thrift_spec), None, ), # 2
)
def __init__(self, status=None, filter_result=None,):
self.status = status
self.filter_result = filter_result
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.status = gen.cm.ttypes.ServerStatus()
self.status.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.filter_result = gen.cm.ttypes.FilterResult()
self.filter_result.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('FilterSearchResponse')
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRUCT, 1)
self.status.write(oprot)
oprot.writeFieldEnd()
if self.filter_result is not None:
oprot.writeFieldBegin('filter_result', TType.STRUCT, 2)
self.filter_result.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.status)
value = (value * 31) ^ hash(self.filter_result)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class NearByInfoRequest:
"""
Attributes:
- nearbyitem_list
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'nearbyitem_list', (TType.STRUCT,(gen.cm.ttypes.NearBySearchInfo, gen.cm.ttypes.NearBySearchInfo.thrift_spec)), None, ), # 1
)
def __init__(self, nearbyitem_list=None,):
self.nearbyitem_list = nearbyitem_list
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.nearbyitem_list = []
(_etype164, _size161) = iprot.readListBegin()
for _i165 in xrange(_size161):
_elem166 = gen.cm.ttypes.NearBySearchInfo()
_elem166.read(iprot)
self.nearbyitem_list.append(_elem166)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('NearByInfoRequest')
if self.nearbyitem_list is not None:
oprot.writeFieldBegin('nearbyitem_list', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.nearbyitem_list))
for iter167 in self.nearbyitem_list:
iter167.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.nearbyitem_list)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class NearByInfoResponse:
"""
Attributes:
- nearbyitem_list
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'nearbyitem_list', (TType.STRUCT,(gen.cm.ttypes.NearBySearchResult, gen.cm.ttypes.NearBySearchResult.thrift_spec)), None, ), # 1
)
def __init__(self, nearbyitem_list=None,):
self.nearbyitem_list = nearbyitem_list
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.nearbyitem_list = []
(_etype171, _size168) = iprot.readListBegin()
for _i172 in xrange(_size168):
_elem173 = gen.cm.ttypes.NearBySearchResult()
_elem173.read(iprot)
self.nearbyitem_list.append(_elem173)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('NearByInfoResponse')
if self.nearbyitem_list is not None:
oprot.writeFieldBegin('nearbyitem_list', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.nearbyitem_list))
for iter174 in self.nearbyitem_list:
iter174.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.nearbyitem_list)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class PersonalTraitRequest:
"""
Attributes:
- personal_info
- trait_type
- strategy_mode
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'personal_info', (gen.cm.ttypes.PersonalInfo, gen.cm.ttypes.PersonalInfo.thrift_spec), None, ), # 1
(2, TType.I32, 'trait_type', None, None, ), # 2
(3, TType.I32, 'strategy_mode', None, None, ), # 3
)
def __init__(self, personal_info=None, trait_type=None, strategy_mode=None,):
self.personal_info = personal_info
self.trait_type = trait_type
self.strategy_mode = strategy_mode
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.personal_info = gen.cm.ttypes.PersonalInfo()
self.personal_info.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.trait_type = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.strategy_mode = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('PersonalTraitRequest')
if self.personal_info is not None:
oprot.writeFieldBegin('personal_info', TType.STRUCT, 1)
self.personal_info.write(oprot)
oprot.writeFieldEnd()
if self.trait_type is not None:
oprot.writeFieldBegin('trait_type', TType.I32, 2)
oprot.writeI32(self.trait_type)
oprot.writeFieldEnd()
if self.strategy_mode is not None:
oprot.writeFieldBegin('strategy_mode', TType.I32, 3)
oprot.writeI32(self.strategy_mode)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.personal_info)
value = (value * 31) ^ hash(self.trait_type)
value = (value * 31) ^ hash(self.strategy_mode)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class PersonalTraitResponse:
"""
Attributes:
- personaltrait_list
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'personaltrait_list', (TType.STRUCT,(gen.cm.ttypes.PersonalTraitResult, gen.cm.ttypes.PersonalTraitResult.thrift_spec)), None, ), # 1
)
def __init__(self, personaltrait_list=None,):
self.personaltrait_list = personaltrait_list
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.personaltrait_list = []
(_etype178, _size175) = iprot.readListBegin()
for _i179 in xrange(_size175):
_elem180 = gen.cm.ttypes.PersonalTraitResult()
_elem180.read(iprot)
self.personaltrait_list.append(_elem180)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('PersonalTraitResponse')
if self.personaltrait_list is not None:
oprot.writeFieldBegin('personaltrait_list', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.personaltrait_list))
for iter181 in self.personaltrait_list:
iter181.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.personaltrait_list)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
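# Example usage (a minimal sketch, not part of the Thrift-generated code above): round-trip
# one of the request structs through the plain (non-accelerated) binary protocol, using the
# TTransport and TBinaryProtocol modules already imported by this generated module. The
# field value is an arbitrary placeholder.
if __name__ == '__main__':
  buf = TTransport.TMemoryBuffer()
  FilterSearchRequest(filter_search_type=1).write(TBinaryProtocol.TBinaryProtocol(buf))
  decoded = FilterSearchRequest()
  decoded.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(buf.getvalue())))
  assert decoded.filter_search_type == 1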
|
[
"2496500330@qq.com"
] |
2496500330@qq.com
|
1314315381b32f297c8b5ebb1101568888603507
|
87928277a8ce68d305930930ea2e5ac68321f21b
|
/Vegan_Cosmetics.py
|
821bcd05307d7f11285c2d8d63760a7a9ebe42a7
|
[] |
no_license
|
gzone2000/Weather_Bigdata_contest
|
8e3857a2adb06b0ec2d2969b522b8c4861a16b37
|
1bbfd0d93f19dadffca6f3ef6c3bd1e677711c09
|
refs/heads/master
| 2023-07-18T10:34:13.077843
| 2021-09-03T08:43:25
| 2021-09-03T08:43:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,170
|
py
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
plt.rcParams['font.family'] = 'Malgun Gothic'
import seaborn as sns
from datetime import datetime
from bs4 import BeautifulSoup
import requests
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.ensemble import ExtraTreesRegressor
import pickle
import joblib
import re
import warnings
warnings.filterwarnings('ignore')
class Vegan_Cosmetics:
def __init__(self):
        self.gender, self.age = input('Enter gender and age group: ').split()
self.beauty = pd.read_csv('./data/total_beauty.csv')
self.봄 = pd.read_csv('./data/total_봄.csv')
self.여름 = pd.read_csv('./data/total_여름.csv')
self.가을 = pd.read_csv('./data/total_가을.csv')
self.겨울 = pd.read_csv('./data/total_겨울.csv')
self.cosmetics = pd.read_csv('./cosmetic/total_cosmetic.csv')
    # Collect today's weather data
def weather_i(self):
req = requests.get('https://www.weatheri.co.kr/forecast/forecast10.php')
        # Decode explicitly to keep the Korean text from getting garbled
soup = BeautifulSoup(req.content.decode('utf-8','replace'), 'html.parser')
        # Month, day, weekday
date = soup.find('font', color='#124d79').text
year, month, day = date[:4], date[6:8], date[10:12]
ymd = year + '-' + month + '-' + day
ymd = pd.to_datetime(ymd)
week = ymd.weekday()
        # Public holiday flag
holiday = pd.read_excel('./data/국가공휴일2.xlsx')
holi_idx = holiday[(holiday['년'] == int(year)) & (holiday['월'] == int(month)) & (holiday['일'] == int(day))].index
if len(holi_idx) == 0:
            holi = 0  # 1 if public holiday, else 0
else:
holi = 1
data = []
        # Precipitation, wind speed, apparent temperature (Seoul)
table = pd.read_html(str(soup.select('table')[10]))[0]
table = table.rename(columns=table.iloc[0]).drop(table.index[0])
table.reset_index(drop=True, inplace=True)
table['풍 속(m/s)'] = table['풍 속(m/s)'].apply(lambda x: float(x)*3.6)
table = table.rename(columns={'풍 속(m/s)':'풍 속(km/h)'})
        try:
            # Keep only numeric precipitation values; '-', blanks or NaN fall back to 0.
            rain = float(table.loc[0]['강수량(mm)'])
            if np.isnan(rain):
                rain = 0
        except (TypeError, ValueError):
            rain = 0
wind = table.loc[0]['풍 속(km/h)']
temp = float(table.loc[0]['현재기온(℃)'])
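        # Standard wind-chill ("apparent temperature") formula; it expects the
        # temperature in degrees Celsius and the wind speed in km/h (converted above).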
a_temp = 13.12 + 0.6215*temp - 11.37*(wind**0.16) + 0.3965*(wind**0.16)*temp
        # Fine dust (PM10)
req2 = requests.get('https://www.weatheri.co.kr/special/special05_1.php?a=1')
soup2 = BeautifulSoup(req2.content.decode('utf-8','replace'), 'html.parser')
table2 = pd.read_html(str(soup2.select('table')[11]))[1]
table2 = table2.rename(columns=table2.iloc[0]).drop(table2.index[0])
table2.reset_index(drop=True, inplace=True)
pm10 = table2.iloc[0,2]
data.append([month, day, week, holi, self.gender, self.age, rain, wind, a_temp, pm10])
df = np.transpose(pd.DataFrame(data[0]))
df.columns = ['월','일','요일','공휴일','성별','연령대','평균일강수량(mm)','평균풍속(km/h)','체감온도(℃)','일 미세먼지 농도(㎍/㎥)']
return df
    # Build a test set that includes today's weather
def weather_testset(self):
df2 = self.weather_i()
for i in range(self.beauty.소분류명.nunique()-1):
df2 = df2.append(pd.Series(df2[0:1].values[0], index=df2.columns), ignore_index=True)
df2['소분류명'] = self.beauty.소분류명.unique()
df2 = df2[['월','일','요일','공휴일','성별','연령대','소분류명','평균일강수량(mm)','평균풍속(km/h)','체감온도(℃)','일 미세먼지 농도(㎍/㎥)']]
return df2
    # Test-set preprocessing for the counts-per-100,000 model
def preprocessing(self, data):
        # Label-encode the categorical variables
le = LabelEncoder()
le = le.fit(data['성별'])
le2 = LabelEncoder()
le2 = le2.fit(data['소분류명'])
        # Scale the continuous variables
nu = data.drop(['계절','성별','소분류명','10만건당 건수'], axis=1)
scaler = StandardScaler()
scaler = scaler.fit(nu)
return le, le2, scaler
    # Apply the season-specific preprocessing to the test set
def testset_preprocessing(self):
df2 = self.weather_testset()
        if int(df2['월'][0]) in [3,4,5]:  # spring
            le, le2, scaler = self.preprocessing(self.봄)
        elif int(df2['월'][0]) in [6,7,8]:  # summer
            le, le2, scaler = self.preprocessing(self.여름)
        elif int(df2['월'][0]) in [9,10,11]:  # autumn
            le, le2, scaler = self.preprocessing(self.가을)
        else:  # winter
            le, le2, scaler = self.preprocessing(self.겨울)
        # Label-encode the categorical variables
df2['성별'] = le.transform(df2['성별'])
df2['소분류명'] = le2.transform(df2['소분류명'])
label_df2 = df2[['성별','소분류명']]
        # Scale the continuous variables
nu = df2.drop(['성별','소분류명'], axis=1)
scaled = scaler.transform(nu)
scaled_df2 = pd.DataFrame(scaled, columns=nu.columns)
df3 = pd.concat([scaled_df2, label_df2], axis=1)
return le2, df3
    # Predict counts per 100,000 by season (ExtraTreesRegressor)
def extratrees_social(self):
df = self.weather_i()
le2, df3 = self.testset_preprocessing()
        if int(df['월'][0]) in [3,4,5]:  # spring
            et_model = joblib.load('./model/ExtraTreesRegressor(봄)')
            pred = et_model.predict(df3)
        elif int(df['월'][0]) in [6,7,8]:  # summer
            et_model = joblib.load('./model/ExtraTreesRegressor(여름)')
            pred = et_model.predict(df3)
        elif int(df['월'][0]) in [9,10,11]:  # autumn
            et_model = joblib.load('./model/ExtraTreesRegressor(가을)')
            pred = et_model.predict(df3)
        else:  # winter
            et_model = joblib.load('./model/ExtraTreesRegressor(겨울)')
            pred = et_model.predict(df3)
df3['10만건당 건수'] = np.expm1(pred)
df3['소분류명'] = le2.inverse_transform(df3['소분류명'])
return df3[['소분류명','10만건당 건수']]
    # Test-set preprocessing for the purchase-count model
def preprocessing2(self, data):
        # Label-encode the categorical variables
le = LabelEncoder()
le = le.fit(data['소분류명'])
return le
    # Apply preprocessing by season to the test set (including the predicted counts per 100,000)
def total_testset_preprocessing(self):
df2 = self.weather_testset()
df3 = self.extratrees_social()
df4 = pd.merge(df2, df3, on='소분류명')
        if int(df4['월'][0]) in [3,4,5]:  # spring
            df4['계절'] = '봄'
        elif int(df4['월'][0]) in [6,7,8]:  # summer
            df4['계절'] = '여름'
        elif int(df4['월'][0]) in [9,10,11]:  # autumn
            df4['계절'] = '가을'
        else:  # winter
            df4['계절'] = '겨울'
le = self.preprocessing2(self.beauty)
        # Label-encode the categorical variables
df4['계절'] = df4['계절'].replace(['봄','여름','가을','겨울'],[0,1,2,3])
df4['성별'] = df4['성별'].replace(['F','M'],[0,1])
df4['소분류명'] = le.transform(df4['소분류명'])
return le, df4
    # Predict purchase counts by season, using the predicted counts per 100,000 (RandomForest)
def randomforest_buy(self):
df = self.weather_i()
le3, df4 = self.total_testset_preprocessing()
        if int(df['월'][0]) in [3,4,5]:  # spring
            rf_model = joblib.load('./model/RandomForest(봄).pkl')
            pred = rf_model.predict(df4)
        elif int(df['월'][0]) in [6,7,8]:  # summer
            rf_model = joblib.load('./model/RandomForest(여름).pkl')
            pred = rf_model.predict(df4)
        elif int(df['월'][0]) in [9,10,11]:  # autumn
            rf_model = joblib.load('./model/RandomForest(가을).pkl')
            pred = rf_model.predict(df4)
        else:  # winter
            rf_model = joblib.load('./model/RandomForest(겨울).pkl')
            pred = rf_model.predict(df4)
df4['구매건수'] = np.expm1(pred)
df4['소분류명'] = le3.inverse_transform(df4['소분류명'])
df4 = df4.loc[df4.구매건수.sort_values(ascending=False).index].reset_index(drop=True)
return df4[['소분류명','구매건수']]
    # Output the cosmetics recommendation list
def cosmetics_recomm(self):
data = self.randomforest_buy()
cosmetics = self.cosmetics
data['중분류명'] = np.nan
for i in range(len(data)):
if '기능성' in data.소분류명[i]:
data.중분류명[i] = '기능성 화장품'
elif ('기초' in data.소분류명[i]) | ('남성 로션' in data.소분류명[i]) | ('남성 스킨' in data.소분류명[i]) | ('남성 에센스' in data.소분류명[i]) | ('남성 크림' in data.소분류명[i]):
data.중분류명[i] = '기초 화장품'
elif ('남성 메이크업' in data.소분류명[i]) | ('남성 세트' in data.소분류명[i]) | ('남성 쉐이빙' in data.소분류명[i]):
data.중분류명[i] = '남성 기타'
elif ('선' in data.소분류명[i]) | ('남성 선케어' in data.소분류명[i]):
data.중분류명[i] = '선 제품'
elif ('클렌징' in data.소분류명[i]) | ('립앤아이 리무버' in data.소분류명[i]) | ('화장 비누' in data.소분류명[i]):
data.중분류명[i] = '클렌징 용품'
elif ('향수' in data.소분류명[i]) | ('샤워코롱' in data.소분류명[i]):
data.중분류명[i] = '향수'
elif '네일' in data.소분류명[i]:
data.중분류명[i] = '네일 제품'
elif ('바디' in data.소분류명[i]) | ('데오드란트' in data.소분류명[i]) | ('애프터선' in data.소분류명[i]):
data.중분류명[i] = '바디 케어'
elif ('메이크업 박스' in data.소분류명[i]) | ('메이크업 브러쉬' in data.소분류명[i]):
data.중분류명[i] = '메이크업 도구'
elif ('뷰티' in data.소분류명[i]) | ('미용가위' in data.소분류명[i]) | ('도구' in data.소분류명[i]) | ('헤어 브러쉬' in data.소분류명[i]) | ('화장 퍼프' in data.소분류명[i]):
data.중분류명[i] = '뷰티 도구'
elif ('샴푸' in data.소분류명[i]) | ('린스' in data.소분류명[i]) | ('트리트먼트' in data.소분류명[i]) | ('헤어에센스' in data.소분류명[i]) | ('헤어젤' in data.소분류명[i]) | ('헤어케어' in data.소분류명[i]):
data.중분류명[i] = '헤어 케어'
elif '헤어' in data.소분류명[i]:
data.중분류명[i] = '헤어 미용'
elif '베이스 메이크업' in data.소분류명[i]:
data.중분류명[i] = '베이스 메이크업 제품'
elif '색조 메이크업' in data.소분류명[i]:
data.중분류명[i] = '색조 메이크업 기타'
if '립' in data.소분류명[i]: data.중분류명[i] = '색조 메이크업 립'
elif ('마스카라' in data.소분류명[i]) | ('속눈썹' in data.소분류명[i]) | ('아이' in data.소분류명[i]):
data.중분류명[i] = '색조 메이크업 아이'
elif ('스킨케어' in data.소분류명[i]) | ('스크럽/필링크림' in data.소분류명[i]):
data.중분류명[i] = '스킨 케어'
elif '입욕제' in data.소분류명[i]:
data.중분류명[i] = '입욕제'
elif '풋' in data.소분류명[i]:
data.중분류명[i] = '풋 제품'
elif '핸드' in data.소분류명[i]:
data.중분류명[i] = '핸드 제품'
max_val = data[data.구매건수 == data.구매건수.max()]['소분류명'][0]
max_val2 = data[data.구매건수 == data.구매건수.max()]['중분류명'][0]
if max_val in cosmetics.소분류.unique():
            print(f'{max_val} products have been recommended.')
cos_idx = cosmetics[cosmetics.소분류 == max_val].index
elif max_val2 in cosmetics.중분류.unique():
            print(f'{max_val2} products related to {max_val} have been recommended.')
cos_idx = cosmetics[cosmetics.중분류 == max_val2].index
return cosmetics.loc[cos_idx].sort_values(by='정가').reset_index(drop=True)
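# Example usage (a sketch): constructing the class prompts for gender and age group on
# stdin; cosmetics_recomm() then returns the recommended products as a DataFrame sorted
# by list price.
#   recommender = Vegan_Cosmetics()
#   print(recommender.cosmetics_recomm().head())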
|
[
"dalgoon02121@naver.com"
] |
dalgoon02121@naver.com
|
805aab6c2c2b50a743625a7437f049bcd223240d
|
341c5d8177e06b8668d7d2f81a0a0c022344e3a6
|
/Graphs/graph_kruskal_alog.py
|
d076317d523e2e8b6b351391d3fc64d1e896a5ef
|
[] |
no_license
|
vsunilc/Python-Datastructures
|
e060e378eb8dd2324981039bf4d22a712a58926d
|
37fb0c1a9b02ae449f484d0bdb33db2dfc0cdcea
|
refs/heads/master
| 2021-10-25T21:11:26.498573
| 2019-04-07T19:30:43
| 2019-04-07T19:30:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,395
|
py
|
def make_sets(vertice):
return set(vertice)
def list_iterator(vertices):
lst=[]
for vertice in vertices:
s=make_sets(vertice)
lst.append(s)
return lst
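# kruskal_algo keeps a list of disjoint vertex sets (a minimal union-find): an edge is
# accepted when its endpoints lie in different sets, and those two sets are then merged.
# Note that the edges below carry no weights, so they are simply processed in the given
# order; a full Kruskal implementation would first sort the edges by weight.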
def kruskal_algo(edges, lst):
    print(lst)
    for ed in edges:
        # Start with empty markers; if an endpoint is not found in any set,
        # the union step at the bottom is skipped.
        x = ''
        y = ''
        for j in range(len(lst)):
            if ed[0] in lst[j] and ed[1] in lst[j]:
                # Both endpoints already belong to the same set: this edge
                # would close a cycle, so skip it.
                pass
            elif ed[0] in lst[j]:
                # Remove the set containing the first endpoint as soon as it is
                # found, so that it can be merged (union) with the other set.
                x = lst[j]
                lst.remove(lst[j])
                break
        for k in range(len(lst)):
            if ed[0] in lst[k] and ed[1] in lst[k]:
                pass
            elif ed[1] in lst[k]:
                y = lst[k]
                lst.remove(lst[k])
                break
        if x == y or x == '' or y == '':
            pass
        else:
            # The edge joins two different sets: keep it and merge the sets.
            print(ed[0] + ed[1])
            lst.append(x.union(y))
vertices= ['A', 'B', 'C', 'D', 'E', 'F']
lst=list_iterator(vertices)
edges=[('A','D'),('B','C'),('C','D'),('E','F'),('B','D'),('A','B'),('C','F'),('C','E'),('D','E')]
kruskal_algo(edges,lst)
|
[
"noreply@github.com"
] |
vsunilc.noreply@github.com
|
a0208ba3bab7f7287535e12b32f544e67232e5da
|
0213636db1ff1d4085b15976b5c2cfc21b8cc36b
|
/onlinesale/orders/models.py
|
31284f223b2fd1a2def254809f50228e5a0d9bfd
|
[] |
no_license
|
sneha-lohana/django-wd111nov
|
a28500098420112bf54baa1364a3fa61dc06f99c
|
b854529de5248442ecc2f81651a5f4876c064ec1
|
refs/heads/master
| 2020-11-29T09:39:43.259831
| 2020-01-09T07:55:41
| 2020-01-09T07:55:41
| 230,082,431
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,040
|
py
|
from django.db import models
from billing.models import BillingProfile
from addresses.models import Address
from carts.models import Cart
from django.db.models.signals import pre_save
from products.utils import unique_orderid_generator
from decimal import Decimal
ORDER_STATUS_CHOICES = (('created', 'Order is created'),
('paid','You have paid for order'))
class OrderManager(models.Manager):
def get_or_new(self, cart_obj, bill_obj):
order_obj = self.get_queryset().filter(cart=cart_obj, billingProfile=bill_obj, status='created').first() or None
if order_obj is None:
order_obj = self.get_queryset().create(cart=cart_obj, billingProfile=bill_obj)
if order_obj.order_total != cart_obj.total:
order_obj.update_order(cart_obj)
return order_obj
class Order(models.Model):
order_id = models.CharField(max_length=120, blank=True) #ASFGHH3345678SDFGHJK
billingProfile = models.ForeignKey(BillingProfile, on_delete=models.PROTECT)
address = models.ForeignKey(Address, null=True, blank=True, on_delete=models.CASCADE)
cart = models.ForeignKey(Cart, on_delete=models.PROTECT)
status = models.CharField(max_length=20, default='created', choices=ORDER_STATUS_CHOICES)
order_total = models.DecimalField(max_digits=8, decimal_places=2, default=0.0)
total = models.DecimalField(max_digits=8, decimal_places=2,default=0.0) #GST
razor_pay_id = models.CharField(max_length=120, null=True, blank=True)
objects = OrderManager()
def __str__(self):
return self.order_id
def update_order(self, cart_obj):
if self.order_total != cart_obj.total:
self.order_total = cart_obj.total
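            # Grand total = cart total plus 18% GST, rounded to 2 decimal places.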
self.total = round(self.order_total * Decimal(1.18), 2)
self.save()
def orderid_pre_save_receiver(sender, instance, *args, **kwargs):
if instance.order_id is None or instance.order_id=="":
instance.order_id = unique_orderid_generator(instance)
pre_save.connect(orderid_pre_save_receiver, sender=Order)
|
[
"sneha.lohana16@gmail.com"
] |
sneha.lohana16@gmail.com
|
11defba8402b89281604401f48104e46d8ac8d01
|
68e48a65069a1929afebe47e731d5e788ac2a024
|
/test.py
|
b06b4f8b75b0c5398c7abf2f0c752edb0f8fb24b
|
[] |
no_license
|
James0618/RL_algorithms
|
da8f9e93cb8aa3904b14de0c06a3476cdc33e8e4
|
c6f845fb68dafea670a315166628fff2243fe47e
|
refs/heads/master
| 2020-09-30T11:26:41.769505
| 2020-02-19T07:55:39
| 2020-02-19T07:55:39
| 227,278,798
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,122
|
py
|
import sources.PPO as PPO
import torch
import torch.nn as nn
import multiprocessing as mp
import torchvision.transforms as transforms
from torch.distributions import Categorical
import gym
import time
import cv2
import numpy as np
import common.atari_wrappers as wrappers
from PIL import Image
class AtariNet(nn.Module):
def __init__(self, output_shape):
super(AtariNet, self).__init__()
self.discrete = True
self.conv = nn.Sequential( # input_shape: 4*84*84
nn.Conv2d(
                in_channels=4,  # grayscale: 1 channel x 4 stacked frames
out_channels=16, # filters' number
kernel_size=8, # kernel's size
stride=4
),
nn.ReLU(),
nn.Conv2d(
in_channels=16,
out_channels=32,
kernel_size=4,
stride=2
),
nn.ReLU()
)
self.feature_layer = nn.Sequential(
nn.Linear(2592, 1024),
nn.ReLU(),
nn.Linear(1024, 256),
nn.ReLU()
)
self.state_value = nn.Linear(256, 1)
self.policy = nn.Linear(256, output_shape)
def forward(self, state):
output = self.conv(state)
features = output.view(output.size(0), -1)
features = self.feature_layer(features)
action_values = self.state_value(features)
actions = self.policy(features)
action_prob = torch.nn.functional.softmax(actions, dim=-1)
# print('action_prob: ', action_prob)
distribution = Categorical(action_prob)
return distribution, action_values
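# Note: AtariNet above matches the (commented-out) Breakout/Atari setup; the __main__
# block at the bottom actually trains PPO.Net on CartPole-v1 state vectors.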
def preprocess(obs):
return torch.from_numpy(obs).float()
class Worker:
def __init__(self, worker_id, agent):
super(Worker, self).__init__()
self.worker_id = worker_id
self.horizon = 64
# self.agent = PPO.Model(net=PPO.Net, agent_id=worker_id, learn=LEARN, device=device, n_state=n_state,
# n_action=n_action, discrete=discrete, learning_rate=0.000025, epsilon=0.1)
self.agent = agent
def cal_adv(self, states, rewards, dones, final_state, gamma):
"""
:param states: Tensor[T, state_features]
:param rewards: Tensor[T]
:param dones: Tensor[T+1]
:param gamma: float
:return: advantages -> Tensor[T] & value_target -> Tensor[T]
"""
T = states.shape[0]
advantages = torch.zeros(T)
last_adv = 0
value_pred = torch.zeros(self.horizon + 1)
# calculate value_predict
for i in range(self.horizon):
value_pred[i] = self.agent.net.forward(states[i].unsqueeze(0).to(device))[1]
value_pred[self.horizon] = self.agent.net.forward(final_state.unsqueeze(0).to(device))[1]
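        # Backward pass over the rollout: delta_t = r_t + gamma*V(s_{t+1})*(1 - done) - V(s_t),
        # accumulated into a generalized advantage estimate with lambda = 1 here (the
        # commented-out 0.95 factor below would give GAE(lambda = 0.95)).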
for t in reversed(range(T)):
non_terminal = 1 - dones[t + 1]
delta = rewards[t] + gamma * value_pred[t + 1] * non_terminal - value_pred[t]
last_adv = delta + gamma * non_terminal * last_adv # * 0.95
advantages[t] = last_adv
value_target = advantages + value_pred[:T]
# advantages = (advantages - torch.mean(advantages)) / torch.std(advantages)
return advantages, value_target
def work(self, discrete, barrier, queue):
# env = wrappers.make_atari("BreakoutNoFrameskip-v4")
# env = wrappers.wrap_deepmind(env, frame_stack=True)
env = gym.make('CartPole-v1')
LEARN = True
# device = torch.device("cuda:0")
device = torch.device("cpu")
global episode
t, T, total_reward, horizon = 0, 0, 0, self.horizon
done = True
# reward_array = np.array([])
observation = env.reset()
state = preprocess(observation)
# state_collections = torch.zeros(horizon, 4, 84, 84)
state_collections = torch.zeros(horizon, env.observation_space.shape[0])
action_collections = torch.zeros(horizon)
# action_collections = torch.zeros(horizon, env.action_space.shape[0])
reward_collections = torch.zeros(horizon)
done_collections = torch.zeros(horizon + 1)
while True:
if done:
observation = env.reset()
state = preprocess(observation)
print("Episode {}: finished after {} steps".format(episode.value, t - T))
print(" total_reward -> {}".format(total_reward))
T = t
with mp.Lock():
episode.value += 1
total_reward = 0
if LEARN is False:
env.render()
action = self.agent.choose_action(state.unsqueeze(0).to(device))
# print('action: {}'.format(action))
if discrete:
observation, reward, done, info = env.step(int(action))
else:
observation, reward, done, info = env.step([float(action)])
# reward = (reward + 8) / 8
# img = Image.fromarray(255 * state.numpy()[0])
# img.show()
if t > 0 and t % horizon == 0:
done_collections[horizon] = done
queue.put([state_collections, action_collections, done_collections, reward_collections, state])
barrier.wait()
# state_collections = torch.zeros(horizon, 4, 84, 84)
state_collections = torch.zeros(horizon, env.observation_space.shape[0])
action_collections = torch.zeros(horizon)
# action_collections = torch.zeros(horizon, env.action_space.shape[0])
reward_collections = torch.zeros(horizon)
done_collections = torch.zeros(horizon + 1)
state_collections[t % horizon] = state
action_collections[t % horizon] = action
reward_collections[t % horizon] = reward
done_collections[t % horizon] = done
state = preprocess(observation) # next state
t += 1
total_reward += reward
if __name__ == '__main__':
# env = wrappers.make_atari("BreakoutNoFrameskip-v4")
# env = wrappers.wrap_deepmind(env, frame_stack=True)
THREAD_NUMBER = 3
env = gym.make('CartPole-v1')
# device = torch.device("cuda:0")
device = torch.device('cpu')
barrier = mp.Barrier(THREAD_NUMBER + 1)
queue = mp.Queue()
LEARN = True
i = 0
episode = mp.Value('i', 0)
n_state = env.observation_space.shape[0]
if isinstance(env.action_space, gym.spaces.Box):
discrete = False
n_action = env.action_space.shape[0]
else:
discrete = True
n_action = env.action_space.n
net = PPO.Net(n_state, n_action, discrete).to(device)
net.share_memory()
old_net = PPO.Net(n_state, n_action, discrete).to(device)
old_net.share_memory()
ppo = PPO.Model(net=net, old_net=old_net, learn=LEARN, device=device, n_state=n_state, n_action=n_action,
discrete=discrete, learning_rate=0.0001, epsilon=0.1)
workers = [Worker(worker_id=i, agent=ppo) for i in range(THREAD_NUMBER)]
process_list = []
for worker in workers:
process = mp.Process(target=worker.work, args=(discrete, barrier, queue))
process_list.append(process)
process.start()
while i < 10000:
for worker in workers:
results = queue.get()
s, a, d_collect, r_collect, final_s = results[0], results[1], results[2], results[3], results[4]
ad, vt = worker.cal_adv(states=s, rewards=r_collect, dones=d_collect, final_state=final_s, gamma=0.99)
agent_id, loss = worker.agent.learn(state_collections=s, action_collections=a, advantage_collections=ad,
value_target=vt)
# ppo.set_net(worker.agent.net)
# print(agent_id, loss)
#
# for worker in workers:
# worker.agent.set_net(ppo.net)
barrier.wait()
i += 1
for process in process_list:
process.join()
|
[
"cjj_xjtu@163.com"
] |
cjj_xjtu@163.com
|
4b7c633de1f50bdc2f4783c6aae3dcaeb6d661c2
|
dac1dd134013bced08e9c16123d9398d5413b71e
|
/Chapter07_prak1.py
|
c4219ea50c0adc1048153a179e67034b7ec1607f
|
[] |
no_license
|
yeremiwesly/Python-Preject-Chapter7
|
b9e99e94baf57b88a6096ac870707851f53186c2
|
ed050e51b0b8c523a2a60738e7c9158eab36550b
|
refs/heads/main
| 2023-01-23T05:42:50.136137
| 2020-12-06T14:45:18
| 2020-12-06T14:45:18
| 319,056,314
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 158
|
py
|
try:
    # A context manager closes the file automatically.
    with open("h:/myfile.txt", "r") as file:
        print(file.read())
except FileNotFoundError:
    print('File not found')
# I am using the h:\ directory
|
[
"yeremiwesly@users.noreply.github.com"
] |
yeremiwesly@users.noreply.github.com
|
4d9996e799785094a1593a0af661a2d1c183b849
|
2979c56cdb830c7accf62981e77b43c1f6299e22
|
/app/utils/__init__.py
|
70f9d00470c1a96b46e7f23d08fed5cda43d976c
|
[
"Apache-2.0"
] |
permissive
|
ZhouRR/quotations-gateway-api
|
5e4964bea510529a975888f1eb80bdd6e9aab752
|
ef433fe8e461344a6c59e5edec206ad4ba7eeff6
|
refs/heads/main
| 2023-07-08T17:40:23.092810
| 2021-08-10T08:10:42
| 2021-08-10T08:10:42
| 394,570,577
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 259
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2020/6/5 17:37
# @Author : CoderCharm
# @File : __init__.py.py
# @Software: PyCharm
# @Desc :
"""
"""
import app.utils.config_utils
import app.utils.response_code
import app.utils.custom_exc
|
[
"zhourr_jp@yahoo.co.jp"
] |
zhourr_jp@yahoo.co.jp
|
b32a19c6c31163b91cb4222cfe4d33fcfd77ca1e
|
f52b99e4c490a0cffc3548c78c8c22607c3a3aec
|
/model/strategy.py
|
b4ab00371c3ba6eba2faf5ebaad43a7729063ded
|
[] |
no_license
|
915-Muscalagiu-AncaIoana/ConnectFour
|
93cd50da0570923fde6c71696bc1e8b7f1090c78
|
45f3985e1d8ced9e9c8891f838fb5acc98138818
|
refs/heads/main
| 2023-08-04T17:23:59.656600
| 2021-10-01T07:31:05
| 2021-10-01T07:31:05
| 412,356,117
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,489
|
py
|
import random
import copy
class StrategyAI:
"""
This class represents the Strategy used by the Computer(the AI) to play the Game
(using the minmax algorithm for finding the best move)
"""
def minimax(self, board, depth, alpha, beta, maximizing_player, computer, human):
"""
This function tries to find the best move that the maximizing player (the Computer) can make
by anticipating a few moves ahead in the game, assuming that both the opponent (the Human) and the Computer
play in an optimal manner.
:param board: the board on which the minimax algorithm tries to find the best possible move
:param depth: the depth of the tree for the minimax algorithm -> the number of moves the AI tries to foresee
:param alpha: the best score that the maximizing player (the Computer) can guarantee at a certain level
:param beta: the best score that the minimizing player (the Human) can guarantee at a certain level
:param maximizing_player: True - we generate a move for the computer
False - we generate a move for the human
:param computer: the score the Computer plays with
:return: column - the column on which a piece is about to be dropped to get the score
score - heuristic score of a player at a certain point after making certain moves
"""
valid_moves = board.get_valid_moves()
# if the computer has one after one move then he must definitely make that move
if depth == 3 and board.game_won() == computer:
return (None, 3000000000000000000000)
# if the computer can win in two moves then the score has to be high but lower than the previous case
if depth == 1 and board.game_won() == computer:
return (None, 30000000000)
is_terminal = self.is_terminal_node(board)
if depth == 0 or is_terminal:
if is_terminal:
# the game gets won by the human in the future moves
if board.game_won() == human:
return (None, float("-inf"))
# the game stops because there are no more possible moves on the board (it is full)
else:
return (None, 0)
else:
# We have reached the full depth of the tree -> we return the heuristic score of the leaf nodes
# by calculating the score of the participant at that point in the game considering his pieces on the board
return (None, board.score_position(computer))
# ALPHA BETA PRUNING FOR OPTIMIZATION
        # the maximizing player = the Computer
if maximizing_player:
score = float('-inf')
column = random.choice(valid_moves)
for col in valid_moves:
ai_board = copy.deepcopy(board)
ai_board.move_on_board(col, computer)
new_score = self.minimax(ai_board, depth - 1, alpha, beta, False, computer, human)[1]
if new_score > score:
score = new_score
column = col
alpha = max(alpha, score)
if alpha >= beta:
break
return column, score
        # the minimizing player = the Human
else:
score = float('inf')
column = random.choice(valid_moves)
for col in valid_moves:
ai_board = copy.deepcopy(board)
                ai_board.move_on_board(col, human)  # simulate the Human's move with the human piece
new_score = self.minimax(ai_board, depth - 1, alpha, beta, True, computer, human)[1]
if new_score < score:
score = new_score
column = col
beta = min(beta, score)
if alpha >= beta:
break
return column, score
def is_terminal_node(self, board):
"""
This function checks if we have reached a terminal node in the tree generated by the minimax algorithm.
A certain node is a terminal node if one of the players has won or if there are no more possible moves to be made.
:param board: the board of the current game anticipated by AI
:return: True - it is a terminal node
False - otherwise
"""
return board.winning_move(1) or board.winning_move(2) or len(board.get_valid_moves()) == 0
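# Example usage (a sketch; `board` and its piece ids are assumptions based on the methods
# called above -- the project is expected to supply a board exposing get_valid_moves,
# move_on_board, game_won, winning_move and score_position):
#
#   strategy = StrategyAI()
#   column, _score = strategy.minimax(board, depth=5, alpha=float('-inf'),
#                                     beta=float('inf'), maximizing_player=True,
#                                     computer=2, human=1)
#   board.move_on_board(column, 2)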
|
[
"anca.muscalagiu@stud.ubbcluj.ro"
] |
anca.muscalagiu@stud.ubbcluj.ro
|
19d11f4ab63e8ff41b52f2947c8322cd5064a755
|
5825d45f497b879b9c5bd2b140b2ebcbbd537138
|
/poo_abstracao_encapsulamento.py
|
b00b044c0e3eac3caf658915c1982cd5883aaad2
|
[] |
no_license
|
isabelaaug/Python
|
5fa696b77d1aa67026478fdd4182789df8cbc76e
|
ea3ab15ac7db75b77ad48fc11009640a569c0cab
|
refs/heads/master
| 2021-04-24T06:27:24.426902
| 2020-06-10T03:09:58
| 2020-06-10T03:09:58
| 250,092,962
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,343
|
py
|
"""
Abstracao e Encapsulamento
Abstracao eh o ato de expor apenas os dados relevantes de uma classe, escondendo atributos e metodos privados.
"""
class Conta:
contador = 4999
def __init__(self, titular, limite, saldo):
self.__numero = Conta.contador + 1
self.__titular = titular
self.__limite = limite
self.__saldo = saldo
Conta.contador = self.__numero
def extrato(self):
        print(f'Balance of {self.__saldo} for account holder {self.__titular} with a limit of {self.__limite}')
def depositar(self, valor):
if valor > 0:
self.__saldo += valor
else:
            print('the value must be positive')
def sacar(self, valor):
if valor > 0:
if valor < self.__saldo:
self.__saldo -= valor
else:
                print('insufficient balance')
        else:
            print('the value must be positive')
def transferencia(self, valor, conta_destino):
self.__saldo -= valor
conta_destino.__saldo += valor
conta1 = Conta('isa', 1000, 8800)
conta2 = Conta('joao', 5260, 10000)
print(conta1.__dict__)
print(conta2.__dict__)
conta1.transferencia(100, conta2)
print(conta1.__dict__)
print(conta2.__dict__)
conta1.sacar(500)
conta2.depositar(500)
print(conta1.__dict__)
print(conta2.__dict__)
|
[
"isabela.augusta@hotmail.com"
] |
isabela.augusta@hotmail.com
|
d9e2b3d4a24d04dffdc4fd9f234fca2e048457e0
|
934107eaba17b352bf7bf3a9c0a45af4f263fd54
|
/tests/integration/tests.py
|
7411d70ab3d5ec27f1b21641cd3f467b3285d47e
|
[] |
no_license
|
quentin338/Purebeurre-p8
|
d0324f0fbc7e96a1418b367ea41ef3f4f51cc437
|
15bb4192df331d790ef28140e65213ac604cf96e
|
refs/heads/master
| 2023-08-07T22:25:10.975870
| 2023-07-26T10:21:04
| 2023-07-26T10:21:04
| 207,123,046
| 0
| 0
| null | 2023-07-26T10:21:06
| 2019-09-08T14:17:16
|
CSS
|
UTF-8
|
Python
| false
| false
| 1,278
|
py
|
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from django.test import LiveServerTestCase
from users.models import User
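# Run Chrome in headless mode so the Selenium tests can execute without a display
# (for example on a CI server).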
chrome_options = Options()
chrome_options.add_argument("--headless")
class AccountTestCase(LiveServerTestCase):
def setUp(self) -> None:
self.selenium = webdriver.Chrome(options=chrome_options)
self.selenium.implicitly_wait(5)
super(AccountTestCase, self).setUp()
User.objects.create_user(email="test@test.com", password="Test#2020")
def tearDown(self) -> None:
self.selenium.quit()
super(AccountTestCase, self).tearDown()
def test_existing_user_can_login(self):
self.selenium.get(f"{self.live_server_url}/users/login")
email = self.selenium.find_element_by_id("id_email")
password = self.selenium.find_element_by_id("id_password")
self.assertIn("Se connecter", self.selenium.title)
email.send_keys("test@test.com")
password.send_keys("Test#2020")
submit_button = self.selenium.find_element_by_css_selector("input.btn-light")
submit_button.click()
self.selenium.find_element_by_class_name("fa-sign-out-alt")
self.assertIn("users/logout/", self.selenium.page_source)
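# Note: the find_element_by_* helpers used above were removed in Selenium 4. A hedged sketch of
# the equivalent By-locator calls (illustrative helper, not part of the original test):
from selenium.webdriver.common.by import By
def fill_login_form(driver):
    # Same locators as test_existing_user_can_login, written against the Selenium 4 API.
    driver.find_element(By.ID, "id_email").send_keys("test@test.com")
    driver.find_element(By.ID, "id_password").send_keys("Test#2020")
    driver.find_element(By.CSS_SELECTOR, "input.btn-light").click()
    driver.find_element(By.CLASS_NAME, "fa-sign-out-alt")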
|
[
"quentin.bertrand@yahoo.fr"
] |
quentin.bertrand@yahoo.fr
|
5ccab8e21c11052fa3ffacd6198ff234d83f4db5
|
4bc8e370e557a8670dee8d5cc4253fc779c6f95a
|
/pelicanconf.py
|
c75fdb5811da0268cae8ce5a3287b011771b55ab
|
[] |
no_license
|
aurielfournier/wrightaprilm.github.io
|
9f505e5dce5210beba567d6e9700a8c055138029
|
826577dac100b822a581c99a71a0247d61fe4451
|
refs/heads/master
| 2020-12-29T18:48:32.670007
| 2015-11-05T17:26:11
| 2015-11-05T17:26:11
| 46,506,712
| 0
| 0
| null | 2015-11-19T16:53:35
| 2015-11-19T16:53:34
| null |
UTF-8
|
Python
| false
| false
| 794
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'April Wright'
SITENAME = u'Wrighting April'
SITEURL = 'http://wrightaprilm.github.io/'
FEED_ALL_RSS = 'indexrss.xml'
FEED_ALL_ATOM = 'index.xml'
TIMEZONE = 'Europe/Paris'
DEFAULT_LANG = u'en'
# Feed generation is usually not desired when developing
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
# Blogroll
GITHUB_LINK='https://github.com/wrightaprilm'
DISQUS_SITENAME='wrightaprilm'
THEME = '/home/april/pelican-themes/nmnlist/'
# Social widget
SOCIAL = (('Twitter', 'twitter.com/wrightingapril'),
('Github', 'https://github.com/wrightaprilm'),)
DEFAULT_PAGINATION = 3
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
|
[
"wright.aprilm@gmail.com"
] |
wright.aprilm@gmail.com
|
dcd9c77e063619bfe0d0197970794ce7a9ee4a80
|
4aaac54b8d02d93e7055740ccafdfe673fb42aaf
|
/g01/graph/graph_operators.py
|
90ee31ed08e074a6ca631f030bc5f0859d1cda56
|
[
"MIT"
] |
permissive
|
tor4z/G01
|
aff941b4e522901aa9d9030658962496cd4a72fc
|
54e69d5ef0650d8903d215bb0514feb762c6446f
|
refs/heads/main
| 2023-07-17T21:39:41.384943
| 2021-09-13T01:47:51
| 2021-09-13T01:47:51
| 405,638,407
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 825
|
py
|
from typing import List
from .node import Node
from .utils import Queue
def top_sort(node: Node) -> List[Node]:
dfs_seq: List[Node] = dfs_sort(node)
    return list(reversed(dfs_seq))  # materialise the iterator so the annotated List[Node] is actually returned
def dfs_sort(node: Node) -> List[Node]:
result: List[Node] = []
def _dfs_sort(node: Node) -> None:
        if node not in result:  # result holds Node objects, so compare nodes rather than names
result.append(node)
for child in node.children_iter():
_dfs_sort(child)
_dfs_sort(node)
return result
def bfs_sort(node: Node) -> List[Node]:
result: List[Node] = []
queue = Queue[Node]()
queue.en_queue(node)
while not queue.empty:
node = queue.de_queue()
result.append(node)
for child in node.children_iter():
if child not in result:
queue.en_queue(child)
return result
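# Usage sketch for the traversals above. FakeNode is an assumed stand-in for the real Node class;
# dfs_sort/top_sort only need a `name` attribute and a children_iter() method, and the expected
# output below assumes the node-membership check in _dfs_sort (see the comment there).
class FakeNode:
    def __init__(self, name, children=None):
        self.name = name
        self._children = children or []
    def children_iter(self):
        return iter(self._children)
if __name__ == '__main__':
    c = FakeNode('c')
    b = FakeNode('b', [c])
    a = FakeNode('a', [b, c])
    print([n.name for n in dfs_sort(a)])   # ['a', 'b', 'c']
    print([n.name for n in top_sort(a)])   # ['c', 'b', 'a'] (reverse of the DFS order)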
|
[
"vwenjie@hotmail.com"
] |
vwenjie@hotmail.com
|
50c5146f88a14bc391b83e2fa3e936f044ca17a5
|
c0f4104194a7989e44d7f0161b2425c5a5bc3a98
|
/freezer_api/tests/unit/test_jobs.py
|
78c2ba4b5f9155cc4248434cf8a48cf76fd1adb7
|
[] |
no_license
|
bopopescu/Openstack-2
|
f65470bdd0ee4736c45b6f869f0453cb8eb446c8
|
6f06133562e3dfd490695a92c9ddf1a322675104
|
refs/heads/master
| 2022-11-28T09:19:21.633850
| 2016-06-23T07:55:32
| 2016-06-23T07:55:32
| 282,095,817
| 0
| 0
| null | 2020-07-24T01:44:49
| 2020-07-24T01:44:48
| null |
UTF-8
|
Python
| false
| false
| 18,356
|
py
|
"""Freezer swift.py related tests
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mock import Mock, patch
import random
import json
from .common import *
from freezer_api.common.exceptions import *
from freezer_api.api.v1 import jobs as v1_jobs
class TestJobsBaseResource(unittest.TestCase):
def setUp(self):
self.mock_db = Mock()
self.resource = v1_jobs.JobsBaseResource(self.mock_db)
def test_get_action_returns_found_action(self):
self.mock_db.get_action.return_value = 'awesome_result'
result = self.resource.get_action('user-id', 'action-id')
self.assertEquals(result, 'awesome_result')
def test_get_action_returns_none_when_action_not_found(self):
self.mock_db.get_action.side_effect = DocumentNotFound('regular test failure')
result = self.resource.get_action('user-id', 'action-id')
self.assertIsNone(result)
def test_update_actions_in_job_no_action_id(self):
self.resource.get_action = Mock()
self.resource.get_action.return_value = None
action_doc = {
#"action_id": "ottonero",
"freezer_action": {
"mode" : "mysql",
"container": "freezer_backup_test"
},
"max_retries": 3
}
job_doc = {"job_actions": [action_doc.copy()],
"description": "three actions backup"
}
self.resource.update_actions_in_job('duder', job_doc=job_doc)
self.mock_db.add_action.assert_called_with(user_id='duder',
doc=action_doc)
def test_update_actions_in_job_action_id_not_found(self):
self.resource.get_action = Mock()
self.resource.get_action.return_value = None
action_doc = {
"action_id": "ottonero",
"freezer_action": {
"mode" : "mysql",
"container": "freezer_backup_test"
},
"max_retries": 3
}
job_doc = {"job_actions": [action_doc.copy()],
"description": "three actions backup"
}
self.resource.update_actions_in_job('duder', job_doc=job_doc)
self.mock_db.add_action.assert_called_with(user_id='duder',
doc=action_doc)
def test_update_actions_in_job_action_id_found_and_same_action(self):
self.resource.get_action = Mock()
action_doc = {
"action_id": "ottonero",
"freezer_action": {
"mode" : "mysql",
"container": "freezer_backup_test"
},
"max_retries": 3
}
job_doc = {"job_actions": [action_doc.copy()],
"description": "three actions backup"
}
self.resource.get_action.return_value = action_doc.copy()
self.resource.update_actions_in_job('duder', job_doc=job_doc)
self.mock_db.add_action.assert_not_called()
def test_update_actions_in_job_action_id_found_and_different_action(self):
self.resource.get_action = Mock()
action_doc = {
"action_id": "ottonero",
"freezer_action": {
"mode" : "mysql",
"container": "freezer_backup_test"
},
"max_retries": 3
}
job_doc = {"job_actions": [action_doc.copy()],
"description": "three actions backup"
}
found_action = {
"action_id": "ottonero",
"freezer_action": {
"mode" : "mysql",
"container": "different_drum"
},
"max_retries": 4
}
new_doc = action_doc.copy()
new_doc['action_id'] = ''
self.resource.get_action.return_value = found_action
self.resource.update_actions_in_job('duder', job_doc=job_doc)
self.mock_db.add_action.assert_called_with(user_id='duder',
doc=new_doc)
class TestJobsCollectionResource(unittest.TestCase):
def setUp(self):
self.mock_json_body = Mock()
self.mock_json_body.return_value = {}
self.mock_db = Mock()
self.mock_req = Mock()
self.mock_req.get_header.return_value = fake_job_0_user_id
self.mock_req.status = falcon.HTTP_200
self.resource = v1_jobs.JobsCollectionResource(self.mock_db)
self.resource.json_body = self.mock_json_body
def test_on_get_return_empty_list(self):
self.mock_db.search_job.return_value = []
expected_result = {'jobs': []}
self.resource.on_get(self.mock_req, self.mock_req)
result = self.mock_req.body
self.assertEqual(result, expected_result)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_get_return_correct_list(self):
self.mock_db.search_job.return_value = [get_fake_job_0(), get_fake_job_1()]
expected_result = {'jobs': [get_fake_job_0(), get_fake_job_1()]}
self.resource.on_get(self.mock_req, self.mock_req)
result = self.mock_req.body
self.assertEqual(result, expected_result)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_post_inserts_correct_data(self):
job = get_fake_job_0()
self.mock_json_body.return_value = job
self.mock_db.add_job.return_value = 'pjiofrdslaikfunr'
expected_result = {'job_id': 'pjiofrdslaikfunr'}
self.resource.on_post(self.mock_req, self.mock_req)
self.assertEqual(self.mock_req.status, falcon.HTTP_201)
self.assertEqual(self.mock_req.body, expected_result)
class TestJobsResource(unittest.TestCase):
def setUp(self):
self.mock_db = Mock()
self.mock_req = Mock()
self.mock_req.stream.read.return_value = {}
self.mock_req.get_header.return_value = fake_job_0_user_id
self.mock_req.status = falcon.HTTP_200
self.resource = v1_jobs.JobsResource(self.mock_db)
def test_create_resource(self):
self.assertIsInstance(self.resource, v1_jobs.JobsResource)
def test_on_get_return_no_result_and_404_when_not_found(self):
self.mock_req.body = None
self.mock_db.get_job.return_value = None
self.resource.on_get(self.mock_req, self.mock_req, fake_job_0_job_id)
self.assertIsNone(self.mock_req.body)
self.assertEqual(self.mock_req.status, falcon.HTTP_404)
def test_on_get_return_correct_data(self):
self.mock_db.get_job.return_value = get_fake_job_0()
self.resource.on_get(self.mock_req, self.mock_req, fake_job_0_job_id)
result = self.mock_req.body
self.assertEqual(result, get_fake_job_0())
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_delete_removes_proper_data(self):
self.resource.on_delete(self.mock_req, self.mock_req, fake_job_0_job_id)
result = self.mock_req.body
expected_result = {'job_id': fake_job_0_job_id}
self.assertEquals(self.mock_req.status, falcon.HTTP_204)
self.assertEqual(result, expected_result)
def test_on_patch_ok_with_some_fields(self):
new_version = random.randint(0, 99)
self.mock_db.update_job.return_value = new_version
patch_doc = {'some_field': 'some_value',
'because': 'size_matters',
'job_schedule': {}}
self.mock_req.stream.read.return_value = json.dumps(patch_doc)
expected_result = {'job_id': fake_job_0_job_id,
'version': new_version}
self.resource.update_actions_in_job = Mock()
self.resource.on_patch(self.mock_req, self.mock_req, fake_job_0_job_id)
self.mock_db.update_job.assert_called_with(
user_id=fake_job_0_user_id,
job_id=fake_job_0_job_id,
patch_doc=patch_doc)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
result = self.mock_req.body
self.assertEqual(result, expected_result)
def test_on_post_ok(self):
new_version = random.randint(0, 99)
self.mock_db.replace_job.return_value = new_version
job = get_fake_job_0()
self.mock_req.stream.read.return_value = json.dumps(job)
expected_result = {'job_id': fake_job_0_job_id,
'version': new_version}
self.resource.on_post(self.mock_req, self.mock_req, fake_job_0_job_id)
self.assertEqual(self.mock_req.status, falcon.HTTP_201)
self.assertEqual(self.mock_req.body, expected_result)
def test_on_post_raises_when_db_replace_job_raises(self):
self.mock_db.replace_job.side_effect = AccessForbidden('regular test failure')
job = get_fake_job_0()
self.mock_req.stream.read.return_value = json.dumps(job)
self.assertRaises(AccessForbidden, self.resource.on_post,
self.mock_req,
self.mock_req,
fake_job_0_job_id)
class TestJobsEvent(unittest.TestCase):
def setUp(self):
self.mock_db = Mock()
self.mock_req = Mock()
self.mock_req.get_header.return_value = fake_session_0['user_id']
self.mock_req.status = falcon.HTTP_200
self.resource = v1_jobs.JobsEvent(self.mock_db)
self.mock_json_body = Mock()
self.mock_json_body.return_value = {}
self.resource.json_body = self.mock_json_body
def test_create_resource(self):
self.assertIsInstance(self.resource, v1_jobs.JobsEvent)
def test_on_post_raises_when_unable_to_read_event_from_body(self):
self.mock_json_body.return_value = {}
self.assertRaises(BadDataFormat, self.resource.on_post,
self.mock_req,
self.mock_req,
'my_job_id')
def test_on_post_start_event_ok(self):
new_version = random.randint(0, 99)
self.mock_db.get_job.return_value = {
'job_schedule': {
'status': 'stop'
}
}
self.mock_db.replace_job.return_value = new_version
event = {"start": None}
self.mock_json_body.return_value = event
expected_result = {'result': 'success'}
self.resource.on_post(self.mock_req, self.mock_req, 'my_job_id')
self.assertEqual(self.mock_req.status, falcon.HTTP_202)
self.assertEqual(self.mock_req.body, expected_result)
class TestJobs(unittest.TestCase):
def test_start_raises_BadDataFormat_when_jobstatus_unexpected(self):
job_doc = {'job_schedule':
{
'status': 'complicated',
'event': 'boost'
}
}
job = v1_jobs.Job(job_doc)
self.assertRaises(BadDataFormat, job.start)
def test_start_scheduled_job(self):
job_doc = {'job_schedule':
{
'status': 'scheduled'
}
}
job = v1_jobs.Job(job_doc)
res = job.start()
self.assertEquals(res, 'already active')
self.assertFalse(job.need_update)
def test_start_running_job(self):
job_doc = {'job_schedule':
{
'status': 'running'
}
}
job = v1_jobs.Job(job_doc)
res = job.start()
self.assertEquals(res, 'already active')
self.assertFalse(job.need_update)
def test_start_stopped_job(self):
job_doc = {'job_schedule':
{
'status': 'stop'
}
}
job = v1_jobs.Job(job_doc)
res = job.start()
self.assertEquals(res, 'success')
self.assertEqual(job.doc['job_schedule']['status'], 'stop')
self.assertEqual(job.doc['job_schedule']['event'], 'start')
self.assertTrue(job.need_update)
def test_start_completed_job(self):
job_doc = {'job_schedule':
{
'status': 'completed'
}
}
job = v1_jobs.Job(job_doc)
res = job.start()
self.assertEquals(res, 'success')
self.assertEqual(job.doc['job_schedule']['status'], 'stop')
self.assertEqual(job.doc['job_schedule']['event'], 'start')
self.assertTrue(job.need_update)
def test_start_job_with_no_status(self):
job_doc = {'job_schedule':
{
'status': ''
}
}
job = v1_jobs.Job(job_doc)
res = job.start()
self.assertEquals(res, 'success')
self.assertEqual(job.doc['job_schedule']['status'], 'stop')
self.assertEqual(job.doc['job_schedule']['event'], 'start')
self.assertTrue(job.need_update)
def test_stop_scheduled_job(self):
job_doc = {'job_schedule':
{
'status': 'scheduled'
}
}
job = v1_jobs.Job(job_doc)
res = job.stop()
self.assertEquals(res, 'success')
self.assertEqual(job.doc['job_schedule']['status'], 'scheduled')
self.assertEqual(job.doc['job_schedule']['event'], 'stop')
self.assertTrue(job.need_update)
def test_stop_running_job(self):
job_doc = {'job_schedule':
{
'status': 'running'
}
}
job = v1_jobs.Job(job_doc)
res = job.stop()
self.assertEquals(res, 'success')
self.assertEqual(job.doc['job_schedule']['status'], 'running')
self.assertEqual(job.doc['job_schedule']['event'], 'stop')
self.assertTrue(job.need_update)
def test_stop_job_with_no_status(self):
job_doc = {'job_schedule':
{
'status': ''
}
}
job = v1_jobs.Job(job_doc)
res = job.stop()
self.assertEquals(res, 'success')
self.assertEqual(job.doc['job_schedule']['status'], '')
self.assertEqual(job.doc['job_schedule']['event'], 'stop')
self.assertTrue(job.need_update)
def test_stop_not_active_job(self):
job_doc = {'job_schedule':
{
'status': 'whatever'
}
}
job = v1_jobs.Job(job_doc)
res = job.stop()
self.assertEquals(res, 'already stopped')
self.assertFalse(job.need_update)
def test_abort_scheduled_job(self):
job_doc = {'job_schedule':
{
'status': 'scheduled'
}
}
job = v1_jobs.Job(job_doc)
res = job.abort()
self.assertEquals(res, 'success')
self.assertEqual(job.doc['job_schedule']['status'], 'scheduled')
self.assertEqual(job.doc['job_schedule']['event'], 'abort')
self.assertTrue(job.need_update)
def test_abort_running_job(self):
job_doc = {'job_schedule':
{
'status': 'running'
}
}
job = v1_jobs.Job(job_doc)
res = job.abort()
self.assertEquals(res, 'success')
self.assertEqual(job.doc['job_schedule']['status'], 'running')
self.assertEqual(job.doc['job_schedule']['event'], 'abort')
self.assertTrue(job.need_update)
def test_abort_job_with_no_status(self):
job_doc = {'job_schedule':
{
'status': ''
}
}
job = v1_jobs.Job(job_doc)
res = job.abort()
self.assertEquals(res, 'success')
self.assertEqual(job.doc['job_schedule']['status'], '')
self.assertEqual(job.doc['job_schedule']['event'], 'abort')
self.assertTrue(job.need_update)
def test_abort_not_active_job(self):
job_doc = {'job_schedule':
{
'status': 'whatever'
}
}
job = v1_jobs.Job(job_doc)
res = job.abort()
self.assertEquals(res, 'already stopped')
self.assertFalse(job.need_update)
@patch.object(v1_jobs.Job, 'start')
def test_execute_start_event(self, mock_start):
job = v1_jobs.Job({})
res = job.execute_event('start', 'my_params')
mock_start.assert_called_once_with('my_params')
@patch.object(v1_jobs.Job, 'stop')
def test_execute_stop_event(self, mock_stop):
job = v1_jobs.Job({})
res = job.execute_event('stop', 'my_params')
mock_stop.assert_called_once_with('my_params')
@patch.object(v1_jobs.Job, 'abort')
def test_execute_abort_event(self, mock_abort):
job = v1_jobs.Job({})
res = job.execute_event('abort', 'my_params')
mock_abort.assert_called_once_with('my_params')
def test_execute_raises_BadDataFormat_when_event_not_implemented(self):
job = v1_jobs.Job({})
self.assertRaises(BadDataFormat, job.execute_event, 'smile', 'my_params')
def test_expand_action_defaults(self):
job_doc = {
'action_defaults': {'that_field': 'that_value'},
'job_actions': [
{'freezer_action': {'not_that_field': 'some_value'}},
{'freezer_action': {'that_field': 'another_value'}}
]
}
expected_job_doc = {
'job_actions': [
{'freezer_action': {'not_that_field': 'some_value',
'that_field': 'that_value'}},
{'freezer_action': {'that_field': 'another_value'}}
],
'job_schedule': {}
}
job = v1_jobs.Job(job_doc)
self.assertEqual(job.doc, expected_job_doc)
|
[
"egonmin@CN00119199"
] |
egonmin@CN00119199
|
5c9bd3ddf914af0796960636999e26503d3eeb2b
|
04a643a77927bc56ab58c7df91d4733321e61e51
|
/tools/parse_browser_sheet.py
|
43271b6ddad6eb78f6006ba483ba31cf0609e196
|
[] |
no_license
|
dcollins4096/p19_newscripts
|
d2fae1807170a4d70cf4c87222a6258211f993ff
|
23c780dd15b60944ed354406706de85282d0bee6
|
refs/heads/master
| 2023-07-21T11:53:55.188383
| 2023-07-18T17:38:21
| 2023-07-18T17:38:21
| 215,159,839
| 0
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,071
|
py
|
from starter2 import *
# numpy and h5py are used below; they may already come in via the star import, but the
# explicit imports keep the script self-contained.
import numpy as np
import h5py
import pandas as pd
sim_list=['u501','u502','u503']
for sim in sim_list:
core_ids = []
modes = []
if 0:
D1 = pd.read_excel("browser_data/core_browser_plots.xlsx", sheet_name=sim)
for nc,core_name in enumerate(D1['Core']):
core_id = int( core_name.split("c")[1])
core_ids.append(core_id)
modes.append( D1['Mode'][nc])
print(modes)
else:
fname = "browser_data/Core Browser Plots - %s.tsv"%sim
fptr = open(fname)
lines = fptr.readlines()
fptr.close()
for nc,line in enumerate(lines[1:]):
stuff = line.split('\t')
core_name = stuff[0]
core_id = int( core_name.split("c")[1])
core_ids.append(core_id)
modes.append( stuff[1])
fptr = h5py.File('browser_data/core_formation_mode_%s.h5'%sim,'w')
try:
argsort = np.argsort(core_ids)
fptr['core_ids'] = core_ids
fptr['modes'] = modes
except:
raise
finally:
fptr.close()
|
[
"dccollins@fsu.edu"
] |
dccollins@fsu.edu
|
a11552e4daab8cd3d2cce65ce4ad1f287cbe3e3f
|
ce11e10d32fd2c4a6fcd6a499e89ee8adce112ba
|
/Arnaque/urls.py
|
f27a97b2f796c54a47edd6f26e87edde1a7295a2
|
[] |
no_license
|
bendaouda/TPAL
|
e400a666061cf796bc1ee5ea774e2b5987e7cc2f
|
ed49af5bc141612a9785a4dea5e50a5d2302bd12
|
refs/heads/main
| 2023-03-11T01:32:47.685024
| 2021-02-25T00:35:49
| 2021-02-25T00:35:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 133
|
py
|
from django.contrib import admin
from django.urls import path
from . import views
urlpatterns = [
path('',views.list_Arnaque),
]
|
[
"ben.juniors.bd@gmail.com"
] |
ben.juniors.bd@gmail.com
|
403301db71aeb6b6c984de6a02169e6b671f554a
|
5e00df3056e94c6ca5775f478f2d64adfecf3406
|
/primeraPruebaASI/GYM/views.py
|
64f3d5ea6966501369eaa6fbbf462b0af6ef08f4
|
[] |
no_license
|
diegooa116/proyectoDjango
|
91cc06fca89e94ed3acc56412b5b59529354d59b
|
fe83bcb622ee0be5f27bc10417a9f06e59fadfad
|
refs/heads/master
| 2022-12-24T23:14:43.979351
| 2020-09-08T17:19:13
| 2020-09-08T17:19:13
| 293,879,150
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 979
|
py
|
from django.shortcuts import render
from django.http import HttpResponse
#Api rest imports
from django.contrib.auth.models import User, Group
from rest_framework import viewsets
from rest_framework import permissions
from GYM.serializers import UserSerializer, GroupSerializer
# Create your views here.
def saludo(request):
return HttpResponse("Hola mundo en Django...")
def despedida(request):
return HttpResponse("Adios a todos...")
class UserViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows users to be viewed or edited.
"""
queryset = User.objects.all().order_by('-date_joined')
serializer_class = UserSerializer
permission_classes = [permissions.IsAuthenticated]
class GroupViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows groups to be viewed or edited.
"""
queryset = Group.objects.all()
serializer_class = GroupSerializer
permission_classes = [permissions.IsAuthenticated]
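# Sketch of how these viewsets are typically exposed. In a real project this router setup lives in
# the project's urls.py rather than in views.py; it is shown inline here only for illustration.
from django.urls import include, path
from rest_framework import routers
router = routers.DefaultRouter()
router.register(r'users', UserViewSet)
router.register(r'groups', GroupViewSet)
# urlpatterns entry the hypothetical urls.py would contain:
api_urlpatterns = [
    path('api/', include(router.urls)),
]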
|
[
"61633319+diegooa116@users.noreply.github.com"
] |
61633319+diegooa116@users.noreply.github.com
|
12e8e2dbd1b2e32655d0f8dd63debf4b7d3c2b74
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03495/s406265129.py
|
ca2b3992da240d611688382742c8a72462e030fc
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 175
|
py
|
from collections import Counter as CC
N, K = map(int, input().split())
A = list(map(int, input().split()))
B = CC(A)
C = sorted(B.values())
print(sum(C[:max(0, len(C) - K)]))
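# A short worked example of what this snippet computes, assuming the task is "rewrite the fewest
# balls so that at most K distinct values remain":
# N=5, K=2, A = [1, 1, 2, 2, 3] -> Counter {1: 2, 2: 2, 3: 1}, sorted counts C = [1, 2, 2]
# len(C) - K = 1, so only the cheapest group (the single 3) must be rewritten:
# sum(C[:1]) = 1, which is the printed answer.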
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
13998877f896e1e07a169bfdc4b7a2681f79f1cb
|
0b21ee33231d4548594c4e173e48f55d15915244
|
/practise2.py
|
a764dfe3ac240d02d9a79646f2095a6de160301a
|
[] |
no_license
|
karendi/python
|
366163f8253cd36a36f46e09253e2ae0090583bf
|
50028ab1f4323ec2d61a8780e68cfa5cd82429b5
|
refs/heads/master
| 2022-08-30T09:28:18.347593
| 2022-08-12T17:39:25
| 2022-08-12T17:39:25
| 63,315,631
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 616
|
py
|
def break_words(stuff):  # renamed to match the call sites in sort_sentence/printlastandfirst below
words = stuff.split(' ')
return words
def sort_words(words):
return sorted(words)
def print_first_words(words):
word = words.pop(0)
print(word)
def print_last_words(words):
word = words.pop(-1)
print(word)
def sort_sentence(sentence):
words = break_words(sentence)
return sort_words(words)
def printlastandfirst(sentence):
words = break_words(sentence)
print_first_words(words)
print_last_words(words)
def print_first_and_last_sorted(sentence):
words = sort_sentence(sentence)
print_first_words(words)
print_last_words(words)
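# Quick usage sketch for the helpers above (relies on the break_words name fixed in the definition).
if __name__ == '__main__':
    sentence = "wait until the right moment"
    printlastandfirst(sentence)             # prints "wait" then "moment"
    print_first_and_last_sorted(sentence)   # prints "moment" then "wait" (alphabetical order)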
|
[
"sharonkarendi16@gmail.com"
] |
sharonkarendi16@gmail.com
|
2fc190c8d1e0b64caf51a0f5376f297d4cdc40b6
|
aea8b2ab005fed48b0fc1b458c3efbd77cdf0dad
|
/research/_tests/test_selenium.py
|
d73153ee9c818b0220d41393d36381e1f638e636
|
[] |
no_license
|
vcrl/Projet11
|
4dff4d1ba3986a7ab93aa4cc4b2d13cb0bb03330
|
694581003d0f21e17a48d4987bcad64d22f36ec6
|
refs/heads/master
| 2023-04-10T23:58:04.860332
| 2021-04-13T14:48:27
| 2021-04-13T14:48:27
| 357,165,979
| 0
| 0
| null | 2021-04-12T21:37:34
| 2021-04-12T11:23:18
|
CSS
|
UTF-8
|
Python
| false
| false
| 906
|
py
|
from django.test import TestCase, Client, RequestFactory
from django.urls import reverse, resolve
from django.core.paginator import Page
from django.contrib.auth.models import User
from django.contrib import auth
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class Test_Selenium():
def testform(self):
selenium = webdriver.Chrome()
selenium.get('http://127.0.0.1:8000')
search = selenium.find_element_by_id('searchinput')
submit = selenium.find_element_by_id('searchsubmit')
search.send_keys('coca')
submit.send_keys(Keys.RETURN)
assert 'Coca-Cola' in selenium.page_source
search.send_keys('biscuits')
submit.send_keys(Keys.RETURN)
assert 'Biscuits' in selenium.page_source
search.send_keys('eau')
submit.send_keys(Keys.RETURN)
assert 'Eau' in selenium.page_source
|
[
"vincentruelle.pro@gmail.com"
] |
vincentruelle.pro@gmail.com
|
e8055d8d0a43d18614f567570810602aa0601cc2
|
81db87bc6a6e6ebc82f382df3841a61ce5a2270f
|
/match_column_names.py
|
4b98ac3696cc3df1f096c75737439739cd34f207
|
[] |
no_license
|
EMAT31530/ai-group-project-team-football
|
01b5d385bf4ae86625703940238524bd8afe2b4d
|
357aadeb2c492ccd9ff29266e88f139ca0797c58
|
refs/heads/master
| 2023-05-07T21:38:35.002996
| 2021-05-18T23:25:23
| 2021-05-18T23:25:23
| 316,462,349
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 261
|
py
|
import sqlite3
con = sqlite3.connect(r'C:\Users\Luca\PycharmProjects\IntroToAI-Group5-TeamB(football)\database.sqlite')
cur = con.cursor()
cur.execute("SELECT * from Match")
names = [description[0] for description in cur.description]
print(names)
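# Alternative: SQLite's table_info pragma returns the schema without selecting any rows;
# index 1 of each returned tuple is the column name.
cur.execute("PRAGMA table_info('Match')")
pragma_names = [row[1] for row in cur.fetchall()]
print(pragma_names)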
|
[
"zy18811@bristol.ac.uk"
] |
zy18811@bristol.ac.uk
|
c8833f9fc9622b7c3d19b6abee8fc0c5db3ddeab
|
979a9918f317ec4ab8326500e6057fae9b935f9f
|
/migrations/versions/e88462d015da_add_standard_run.py
|
651e1abc0d56c1d53d032b9fcd15760c5819b3ad
|
[
"MIT"
] |
permissive
|
lparsons/metabolite_database
|
08f213b75b11835d528f8e7d150c2bafbb2189f8
|
f1a5aa3e31d00e13ba4862e5cbb666b44dc67ce0
|
refs/heads/master
| 2020-04-06T07:24:11.120729
| 2019-03-08T20:01:44
| 2019-03-08T20:01:44
| 157,271,013
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,915
|
py
|
"""add standard run
Revision ID: e88462d015da
Revises:
Create Date: 2018-06-21 16:54:46.692617
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e88462d015da'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('chromatography_method',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=128), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_chromatography_method')),
sa.UniqueConstraint('name', name=op.f('uq_chromatography_method_name'))
)
op.create_table('compound',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=256), nullable=True),
sa.Column('molecular_formula', sa.String(length=128), nullable=True),
sa.Column('molecular_weight', sa.Float(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_compound'))
)
with op.batch_alter_table('compound', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_compound_molecular_formula'), ['molecular_formula'], unique=False)
batch_op.create_index(batch_op.f('ix_compound_molecular_weight'), ['molecular_weight'], unique=False)
batch_op.create_index(batch_op.f('ix_compound_name'), ['name'], unique=True)
op.create_table('external_database',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=256), nullable=True),
sa.Column('url', sa.String(length=256), nullable=True),
sa.Column('compound_url', sa.String(length=256), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_external_database')),
sa.UniqueConstraint('name', name=op.f('uq_external_database_name'))
)
op.create_table('dbxref',
sa.Column('compound_id', sa.Integer(), nullable=False),
sa.Column('external_database_id', sa.Integer(), nullable=False),
sa.Column('external_compound_id', sa.String(length=128), nullable=True),
sa.ForeignKeyConstraint(['compound_id'], ['compound.id'], name=op.f('fk_dbxref_compound_id_compound')),
sa.ForeignKeyConstraint(['external_database_id'], ['external_database.id'], name=op.f('fk_dbxref_external_database_id_external_database')),
sa.PrimaryKeyConstraint('compound_id', 'external_database_id', name=op.f('pk_dbxref'))
)
op.create_table('standard_run',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('date', sa.DateTime(), nullable=True),
sa.Column('operator', sa.String(length=256), nullable=True),
sa.Column('mzxml_file', sa.String(length=256), nullable=True),
sa.Column('chromatography_method_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['chromatography_method_id'], ['chromatography_method.id'], name=op.f('fk_standard_run_chromatography_method_id_chromatography_method')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_standard_run'))
)
with op.batch_alter_table('standard_run', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_standard_run_date'), ['date'], unique=False)
batch_op.create_index(batch_op.f('ix_standard_run_mzxml_file'), ['mzxml_file'], unique=False)
batch_op.create_index(batch_op.f('ix_standard_run_operator'), ['operator'], unique=False)
op.create_table('retention_time',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('compound_id', sa.Integer(), nullable=True),
sa.Column('standard_run_id', sa.Integer(), nullable=True),
sa.Column('retention_time', sa.Float(), nullable=True),
sa.ForeignKeyConstraint(['compound_id'], ['compound.id'], name=op.f('fk_retention_time_compound_id_compound')),
sa.ForeignKeyConstraint(['standard_run_id'], ['standard_run.id'], name=op.f('fk_retention_time_standard_run_id_standard_run')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_retention_time'))
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('retention_time')
with op.batch_alter_table('standard_run', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_standard_run_operator'))
batch_op.drop_index(batch_op.f('ix_standard_run_mzxml_file'))
batch_op.drop_index(batch_op.f('ix_standard_run_date'))
op.drop_table('standard_run')
op.drop_table('dbxref')
op.drop_table('external_database')
with op.batch_alter_table('compound', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_compound_name'))
batch_op.drop_index(batch_op.f('ix_compound_molecular_weight'))
batch_op.drop_index(batch_op.f('ix_compound_molecular_formula'))
op.drop_table('compound')
op.drop_table('chromatography_method')
# ### end Alembic commands ###
|
[
"lparsons@princeton.edu"
] |
lparsons@princeton.edu
|
82c18a4f0c8dbd74d2185d2e99b1081d973c5004
|
5cf715950102f59fcb71d7a76d1fc8517012ff4c
|
/main.py
|
a44ac4ae8f331fe72089767899e61d9c9d7eb0b6
|
[] |
no_license
|
vivanks/System-Vault-And-It-s-Monitoring-using-Python
|
eb03063047932c40cb5d9f957f508a89a904170b
|
a87a23c544ba592b08045a57086f7885ba0e68d9
|
refs/heads/master
| 2020-05-01T08:26:01.049018
| 2019-09-22T19:10:41
| 2019-09-22T19:10:41
| 177,378,819
| 21
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,428
|
py
|
import readline # arrow keys
import os # This imports your OS.
from loginmail import login_mailfunc
from mail import un_mailfunc
import pyrebase
i=0
config = {
"apiKey": "AIzaSyCk-zz4Tm9YYxyhX2C4NGTpbNO-zFDplkk",
"authDomain": "pythonlock-e5319.firebaseapp.com",
"databaseURL": "https://pythonlock-e5319.firebaseio.com",
"projectId": "pythonlock-e5319",
"storageBucket": "pythonlock-e5319.appspot.com",
"messagingSenderId": "97710168175"
}
firebase = pyrebase.initialize_app(config)
db = firebase.database()
color = {
'RED' : '\033[1;91m',
'UNDERLINE_PURPLE' : '\033[4;34m',
'GREEN' : '\033[1;92m',
'YELLOW' : '\033[1;33m',
'CYAN' : '\033[0;36m',
'PURPLE' : '\033[0;34m',
'MAGENTA' : '\033[0;35m',
'DEFAULT' : '\033[0m',
'TWITTER_BLUE' : '\033[38;5;33m',
}
alias = {
'idiot' : ["password", "12345", "synthx", "qwerty"],
'help' : ["help", "?", "/help"],
'exit' : ["exit", "quit", "stop"],
'info' : ["info", "credits"],
'cancel': ["cancel", "return", "back"]
}
incorrect = 'Wrong password! ' # You can change this to say whatever you like!
exitSentence = color['RED'] + '[Now Exiting!] ' + color['DEFAULT'] +'Folder Locker\n'
exit = False
idiot = 'You must be an idiot, you really think I would set that as my password? -_-\n' + exitSentence # You can change this to say whatever you like!
noRun = 'You are not allowed to enter that command here!'
credits = '©2019 Vivank\n' + ' Mansi'
os.system("clear") # This will clear the screen for a nice and clean interface!
# make sure this file is in same directory that you are running this script from.
# make sure this file is in same directory that you are running this script from.
secretPassword = "vivank"
user = ['vivank','mansi']
pwd = ['','']
header = color['RED'] + '''
_____ _ _ _ _
| ___|__ | | __| | ___ _ __ | | ___ ___| | _____ _ __
| |_ / _ \| |/ _` |/ _ \ '__| | | / _ \ / __| |/ / _ \ '__|
| _| (_) | | (_| | __/ | | |__| (_) | (__| < __/ |
|_| \___/|_|\__,_|\___|_| |_____\___/ \___|_|\_\___|_|
''' + color['DEFAULT'] + ''' +-----------------------+\n | Created by ''' + color['TWITTER_BLUE'] + '''AccessDenied''' + color['DEFAULT'] + ''' |\n +-----------------------+
''' + color['DEFAULT']
print(header + 'Type "help" to begin!\n')
def Main():
global exit
user = input("Enter User Id : ")
password = input("Enter you password : ")
a = db.child("user").child(user).get()
if(password == a.val()):
login_mailfunc(user)
maininput = input(color['TWITTER_BLUE'] + 'Folder Locker> ' + color['DEFAULT']).lower() # You can change this to whatever you like! Dont forget the space after it.
while not maininput:
maininput = input(color['TWITTER_BLUE'] + 'Folder Locker> ' + color['DEFAULT']).lower() # You can change this to whatever you like! Dont forget the space after it.
if maininput in alias['help']:
helpme()
elif maininput == 'unlock':
unlockst()
elif maininput == 'lock':
lockFolder()
elif maininput in alias['exit']:
os.system('clear')
print(exitSentence)
exit = True
elif maininput == 'clear':
os.system('clear')
print(header)
Main()
elif maininput in alias['info']:
os.system('clear')
print(credits)
Main()
elif maininput in alias['cancel']:
os.system('clear')
print(header + '\n' + color['RED'] + noRun + color['DEFAULT'] + '\nThis command is only valid when trying to unlock a folder!')
Main()
else:
print(color['RED'] + '[ERROR] COMMAND "' + maininput + '" NOT FOUND\n' + color['DEFAULT'] + 'PLEASE USE A PROPER COMMAND!\n')
def helpme():
global exit
os.system('clear')
print(color['YELLOW'] + '''
+--------+-------------------------------------+
| help | shows this dialog |
+--------+-------------------------------------+
| unlock | lets you unlock a specified folder |
+--------+-------------------------------------+
| lock | lets you lock a specified folder |
+--------+-------------------------------------+
| exit | exits folder locker :/ |
+--------+-------------------------------------+
| clear | clears the screen |
+--------+-------------------------------------+
| info | shows credits for folder locker |
+--------+-------------------------------------+
''')
Main()
def unlockst():
os.system('clear')
print(header + 'What folder would you like to unlock? Please type a Directory!')
folderSelect = input(color['TWITTER_BLUE'] + "(e.x. ~/Desktop/FOLDER_NAME)> " + color['DEFAULT'])
if "help" in folderSelect.lower() or "unlock" in folderSelect.lower() or "lock" in folderSelect.lower() or "exit" in folderSelect.lower() or "clear" in folderSelect.lower() or "info" in folderSelect.lower():
unlockst()
else:
unlockMain(folderSelect)
def unlockMain(folder_select):
global exit
os.system('clear')
print(header + 'Please enter in password to unlock the folder!')
typedPassword = input(color['TWITTER_BLUE'] + "Folder Locker> " + color['DEFAULT']) # You can change this to whatever you like! Dont forget the space after it.
typedPasswordLow = typedPassword.lower()
if typedPassword == secretPassword:
os.system('clear;' + 'chflags nohidden ' + folder_select + ';clear')
print('Folder Unlocked!')
theExit()
elif typedPasswordLow in alias['idiot'] and not secretPassword in alias['idiot']:
os.system('clear')
print(idiot)
exit = True
elif typedPasswordLow in alias['exit']:
os.system('clear')
print(exitSentence)
exit = True
elif typedPasswordLow == 'lock':
os.system('clear')
print(color['RED'] + 'You cannot use this command as the folder is already locked :/' + color['DEFAULT'])
elif typedPasswordLow == 'clear':
os.system('clear')
elif typedPasswordLow in alias['info']:
os.system('clear')
print(noRun + '\nIf you would like to see credits, please type "cancel"!\nOtherwise, Please enter in password to unlock the folder!')
        unlockMain(folder_select)  # re-prompt for the same folder; the argument was missing originally
elif typedPasswordLow in alias['cancel']:
os.system('clear')
print(header)
Main()
else:
os.system("clear")
print(incorrect + exitSentence)
un_mailfunc()
def lockFolder():
os.system('clear')
print(header + 'What folder would you like to lock? Please type a Directory!')
lockInput = input(color['TWITTER_BLUE'] + "(e.x. ~/Desktop/FOLDER_NAME)> " + color['DEFAULT'])
os.system('clear;' + 'chflags hidden ' + lockInput + ';clear')
print('Folder Locked!')
theExit()
def theExit():
global exit
print(header + 'Would you like to exit Folder Locker? Yes/no or Y/n')
exitInput = input(color['TWITTER_BLUE'] + "Folder Locker> " + color['DEFAULT'])
if "yes" in exitInput.lower() or "y" in exitInput.lower():
os.system('clear')
print(exitSentence)
exit = True
elif "no" in exitInput.lower() or "n" in exitInput.lower():
os.system('clear')
print(header + 'Please type "help" to begin!')
Main()
elif exitInput.lower() == 'exit':
os.system('clear')
print(exitSentence)
exit = True
else:
os.system('clear')
print('You may only say yes or no!')
theExit()
while exit == False:
Main()
|
[
"vivanksharma@ymail.com"
] |
vivanksharma@ymail.com
|
4b043375ed3f0e4bf1e5f7e7266d0f5bd02a5ed0
|
fbd04a6ef0a3f5b20df93b789b7464716e2f88c1
|
/sql_queries.py
|
8e0cfc41ab03fb1564a5bbde05a0eb22be519662
|
[] |
no_license
|
yuanliu1210/Data_Warehouse_Project
|
33bb719ea6bc497095588f9e2388fb9ce8176213
|
f819988ac3948b8a32bb75777711373eaf297790
|
refs/heads/master
| 2022-07-04T03:02:19.546065
| 2020-05-14T13:43:25
| 2020-05-14T13:43:25
| 263,924,926
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,080
|
py
|
import configparser
# CONFIG
config = configparser.ConfigParser()
config.read('dwh.cfg')
# DROP TABLES
staging_events_table_drop = "DROP TABLE IF EXISTS staging_events;"
staging_songs_table_drop = "DROP TABLE IF EXISTS staging_songs;"
songplay_table_drop = "DROP TABLE IF EXISTS songplay"
user_table_drop = "DROP TABLE IF EXISTS users;"
song_table_drop = "DROP TABLE IF EXISTS song;"
artist_table_drop = "DROP TABLE IF EXISTS artist;"
time_table_drop = "DROP TABLE IF EXISTS time;"
# CREATE TABLES
staging_events_table_create= ("""
CREATE TABLE IF NOT EXISTS staging_events(
artist VARCHAR,
auth VARCHAR,
firstName VARCHAR,
gender VARCHAR,
itemInSession INTEGER,
lastName VARCHAR,
length FLOAT,
level VARCHAR,
location VARCHAR,
method VARCHAR,
page VARCHAR,
registration FLOAT,
sessionId INTEGER,
song VARCHAR,
status INTEGER,
ts TIMESTAMP,
userAgent VARCHAR,
userId INTEGER
)
""")
staging_songs_table_create = ("""
CREATE TABLE IF NOT EXISTS staging_songs (
num_songs INTEGER,
artist_id VARCHAR,
artist_latitude FLOAT,
artist_longitude FLOAT,
artist_location VARCHAR,
artist_name VARCHAR,
song_id VARCHAR,
title VARCHAR,
duration FLOAT,
year INTEGER
)
""")
songplay_table_create = ("""
CREATE TABLE IF NOT EXISTS songplay(
songplay_id INTEGER IDENTITY(0,1) PRIMARY KEY,
start_time TIMESTAMP NOT NULL SORTKEY DISTKEY,
user_id INTEGER NOT NULL,
level VARCHAR,
song_id VARCHAR NOT NULL,
artist_id VARCHAR NOT NULL,
session_id INTEGER,
location VARCHAR,
user_agent VARCHAR
)
""")
user_table_create = ("""
CREATE TABLE IF NOT EXISTS users (
user_id INTEGER NOT NULL SORTKEY PRIMARY KEY,
first_name VARCHAR NOT NULL,
last_name VARCHAR NOT NULL,
gender VARCHAR NOT NULL,
level VARCHAR NOT NULL
)
""")
song_table_create = ("""
CREATE TABLE IF NOT EXISTS song (
song_id VARCHAR NOT NULL SORTKEY PRIMARY KEY,
title VARCHAR NOT NULL,
artist_id VARCHAR NOT NULL,
year INTEGER NOT NULL,
duration FLOAT
)
""")
artist_table_create = ("""
CREATE TABLE IF NOT EXISTS artist (
artist_id VARCHAR NOT NULL SORTKEY PRIMARY KEY,
name VARCHAR NOT NULL,
location VARCHAR,
latitude FLOAT,
longitude FLOAT
)
""")
time_table_create = ("""
CREATE TABLE IF NOT EXISTS time (
start_time TIMESTAMP NOT NULL DISTKEY SORTKEY PRIMARY KEY,
hour INTEGER NOT NULL,
day INTEGER NOT NULL,
week INTEGER NOT NULL,
month INTEGER NOT NULL,
year INTEGER NOT NULL,
weekday VARCHAR(20) NOT NULL
)
""")
# STAGING TABLES
# Copy existing tables to DWH
staging_events_copy = ("""
copy staging_events from {data_bucket}
credentials 'aws_iam_role={role_arn}'
region 'us-west-2' format as JSON {log_json_path}
timeformat as 'epochmillisecs';
""").format(data_bucket=config['S3']['LOG_DATA'], role_arn=config['IAM_ROLE']['ARN'], log_json_path=config['S3']['LOG_JSONPATH'])
staging_songs_copy = ("""
copy staging_songs from {data_bucket}
credentials 'aws_iam_role={role_arn}'
region 'us-west-2' format as JSON 'auto';
""").format(data_bucket=config['S3']['SONG_DATA'], role_arn=config['IAM_ROLE']['ARN'])
# INSERT VALUES TO NEW TABLES WITH Transformation
songplay_table_insert = ("""
INSERT INTO songplay (start_time, user_id, level, song_id, artist_id, session_id, location, user_agent)
SELECT DISTINCT e.ts AS start_time,
e.userId AS user_id,
e.level AS level,
s.song_id AS song_id,
s.artist_id AS artist_id,
e.sessionId AS session_id,
e.location AS location,
e.userAgent AS user_agent
FROM staging_events e
JOIN staging_songs s ON (e.song = s.title AND e.artist = s.artist_name)
AND e.page ='NextSong';
""")
user_table_insert = ("""
INSERT INTO users (user_id, first_name, last_name, gender, level)
SELECT DISTINCT userId AS user_id,
firstName AS first_name,
lastName AS last_name,
gender,
level
FROM staging_events
WHERE user_id IS NOT NULL
AND page ='NextSong';
""")
song_table_insert = ("""
INSERT INTO song (song_id, title, artist_id, year, duration)
SELECT DISTINCT(song_id) AS song_id,
title,
artist_id,
year,
duration
FROM staging_songs
WHERE song_id IS NOT NULL;
""")
artist_table_insert = ("""
INSERT INTO artist (artist_id, name, location, latitude, longitude)
SELECT DISTINCT(artist_id) AS artist_id,
artist_name AS name,
artist_location AS location,
artist_latitude AS latitude,
artist_longitude AS longitude
FROM staging_songs
WHERE artist_id IS NOT NULL;
""")
time_table_insert = ("""
INSERT INTO time (start_time, hour, day, week, month, year, weekday)
SELECT DISTINCT(start_time) AS start_time,
EXTRACT(hour FROM start_time) AS hour,
EXTRACT(day FROM start_time) AS day,
EXTRACT(week FROM start_time) AS week,
EXTRACT(month FROM start_time) AS month,
EXTRACT(year FROM start_time) AS year,
EXTRACT(dayofweek FROM start_time) as weekday
FROM songplay;
""")
# QUERY LISTS
create_table_queries = [staging_events_table_create, staging_songs_table_create, songplay_table_create, user_table_create, song_table_create, artist_table_create, time_table_create]
drop_table_queries = [staging_events_table_drop, staging_songs_table_drop, songplay_table_drop, user_table_drop, song_table_drop, artist_table_drop, time_table_drop]
copy_table_queries = [staging_events_copy, staging_songs_copy]
insert_table_queries = [songplay_table_insert, user_table_insert, song_table_insert, artist_table_insert, time_table_insert]
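# Minimal sketch of how these query lists are usually executed (normally a separate
# create_tables/etl script does this). The [CLUSTER] keys below are assumptions about dwh.cfg,
# and psycopg2 is an assumed driver for Redshift.
def run_all_queries():
    import psycopg2
    conn = psycopg2.connect(
        host=config['CLUSTER']['HOST'],
        dbname=config['CLUSTER']['DB_NAME'],
        user=config['CLUSTER']['DB_USER'],
        password=config['CLUSTER']['DB_PASSWORD'],
        port=config['CLUSTER']['DB_PORT'],
    )
    cur = conn.cursor()
    for query in create_table_queries + copy_table_queries + insert_table_queries:
        cur.execute(query)
        conn.commit()
    conn.close()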
|
[
"yuanliu1210@gmail.com"
] |
yuanliu1210@gmail.com
|
b975fcd774e1ff2c1195bcab830037bc445b1f82
|
6a1b0be3380d239de26807d165d0c318b3ca9ab2
|
/dlmslib/utils.py
|
a326485eb002f487cd20489e88df8a796f835c3f
|
[
"MIT"
] |
permissive
|
KeyiT/dlmslib
|
c1ab4fd63d34cae1f105344c6e5c78b3bf28b058
|
b20f5e361548a17ef264603cf6cef464c75ccc4a
|
refs/heads/master
| 2023-04-01T21:10:25.415148
| 2019-02-22T17:41:04
| 2019-02-22T17:41:04
| 167,739,288
| 0
| 0
|
MIT
| 2023-03-24T22:40:50
| 2019-01-26T21:10:35
|
Python
|
UTF-8
|
Python
| false
| false
| 583
|
py
|
import numpy as np
def is_integer(val):
return isinstance(val, (int, np.int_))
def is_float(val):
return isinstance(val, (float, np.float_))
def is_numeric(val):
return is_float(val) or is_integer(val)
def is_list_of_float(val):
if not isinstance(val, list):
return False
for ele in val:
if not is_float(ele):
return False
return True
def is_list_of_string(val):
if not isinstance(val, list):
return False
for ele in val:
if not isinstance(ele, str):
return False
return True
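# A few quick usage examples for the helpers above (run only when the module is executed directly).
if __name__ == '__main__':
    assert is_integer(3) and is_integer(np.int_(3))
    assert is_float(2.5) and not is_float(3)
    assert is_numeric(3) and is_numeric(2.5)
    assert is_list_of_float([1.0, 2.5]) and not is_list_of_float([1.0, 'x'])
    assert is_list_of_string(['a', 'b']) and not is_list_of_string('ab')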
|
[
"keyit92@gmail.com"
] |
keyit92@gmail.com
|
d7c65f3f93dd2eee93fc6db45f5f1a09d14678dc
|
7df296a1515d85b15c4f6a1070853ae357012741
|
/EnteroBase/enterobase.py
|
2303df338161512a0310a8e735b3872c725c1edd
|
[] |
no_license
|
gharari1/SalML
|
2e5a2a6410a668a24156e4d95cbb25e5eb8fa416
|
13df5f4f24c95ed84a2548ff3782369db486129d
|
refs/heads/master
| 2020-08-01T03:33:52.670982
| 2019-09-26T06:57:03
| 2019-09-26T06:57:03
| 210,847,359
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,071
|
py
|
'''
*************************
Thanks to Dana Rapoport
for contributing to
the code.
*************************
'''
import os
import urllib.request
import json
import base64
import sys
from urllib.request import HTTPError
import logging
import pandas as pd
# You must have a valid API Token
API_TOKEN =''
SERVER_ADDRESS = 'http://enterobase.warwick.ac.uk'
SEROTYPE = 'Typhi'
DATABASE = 'senterica'
filename_out= "Enterobase.xlsx"
def __create_request(request_str):
request = urllib.request.Request(request_str)
    # base64.encodestring was removed in Python 3.9; encodebytes is the drop-in replacement.
    base64string = base64.encodebytes(('%s:%s' % (API_TOKEN,'')).encode('UTF-8')).decode('ascii').replace('\n', '')
request.add_header("Authorization", "Basic %s" % base64string)
return request
address_human= SERVER_ADDRESS+'/api/v2.0/%s/straindata?serotype=%s&only_fields=strain_name&only_fields=source_type&only_fields=source_details&only_fields=download_fasta_link&assembly_status=Assembled&source_type=Human&limit=%d' %(DATABASE, SEROTYPE, 224)
response = urllib.request.urlopen(__create_request(address_human))
data = json.load(response)
enterobase_human = pd.DataFrame.from_dict(data['straindata'],orient='index')
address_poultry= SERVER_ADDRESS+'/api/v2.0/senterica/straindata?offset=0&only_fields=strain_name&only_fields=source_type&only_fields=source_details&only_fields=download_fasta_link&source_details=poultry%2Cchicken%2Cpoultry%20tissue%3B%20Gallus%20gallus%2CComminuted%20poultry%2CGallus%20gallus%3B%20poultry&sortorder=asc&orderby=source_details&assembly_status=Assembled&source_type=Avian&limit=2500'
response = urllib.request.urlopen(__create_request(address_poultry))
data = json.load(response)
enterobase_poultry = pd.DataFrame.from_dict(data['straindata'],orient='index')
enterobase = pd.concat([enterobase_poultry, enterobase_human])
cols = enterobase.columns.tolist()
cols = [cols[4] , cols[3] , cols[2] , cols[1]]
enterobase = enterobase[cols]
enterobase.index = list(range(len(enterobase.index.tolist())+1))[1:]
enterobase.to_excel(filename_out)
|
[
"noreply@github.com"
] |
gharari1.noreply@github.com
|
90c0619fa04ea2469add5d28c5ef7a650924cdd4
|
2a094ffb34eb8f8a7ddb8dc6e7dd01b617281ba6
|
/demo_latent_space.py
|
23134a03cab9a5402c1255a1fd7af6b2f3696a43
|
[] |
no_license
|
ricklentz/shapegan
|
5cfadce637ae7901962419c331346282541e45ea
|
090a6a8f2d2ee1c005b19acd831a7a7f943ab64d
|
refs/heads/master
| 2022-11-14T10:57:00.931958
| 2020-07-14T23:09:08
| 2020-07-14T23:09:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,902
|
py
|
from util import device, ensure_directory
import scipy.interpolate
import numpy as np
from rendering import MeshRenderer
import torch
from tqdm import tqdm
import cv2
import random
import matplotlib.pyplot as plt
from sklearn.manifold import TSNE
from matplotlib.offsetbox import Bbox
from sklearn.cluster import KMeans
SAMPLE_COUNT = 30 # Number of distinct objects to generate and interpolate between
TRANSITION_FRAMES = 60
USE_VAE = False
SURFACE_LEVEL = 0.011
FRAMES = SAMPLE_COUNT * TRANSITION_FRAMES
progress = np.arange(FRAMES, dtype=float) / TRANSITION_FRAMES
if USE_VAE:
from model.autoencoder import Autoencoder, LATENT_CODE_SIZE
vae = Autoencoder()
vae.load()
vae.eval()
print("Calculating latent codes...")
from datasets import VoxelDataset
from torch.utils.data import DataLoader
dataset = VoxelDataset.glob('data/chairs/voxels_32/**.npy')
dataloader = DataLoader(dataset, batch_size=1000, num_workers=8)
latent_codes = torch.zeros((len(dataset), LATENT_CODE_SIZE))
with torch.no_grad():
position = 0
for batch in tqdm(dataloader):
latent_codes[position:position + batch.shape[0], :] = vae.encode(batch.to(device)).detach().cpu()
latent_codes = latent_codes.numpy()
else:
from model.sdf_net import SDFNet, LATENT_CODES_FILENAME
latent_codes = torch.load(LATENT_CODES_FILENAME).detach().cpu().numpy()
sdf_net = SDFNet()
sdf_net.load()
sdf_net.eval()
from shapenet_metadata import shapenet
raise NotImplementedError('A labels tensor needs to be supplied here.')
labels = None
print("Calculating embedding...")
tsne = TSNE(n_components=2)
latent_codes_embedded = tsne.fit_transform(latent_codes)
print("Calculating clusters...")
kmeans = KMeans(n_clusters=SAMPLE_COUNT)
indices = np.zeros(SAMPLE_COUNT, dtype=int)
kmeans_clusters = kmeans.fit_predict(latent_codes_embedded)
for i in range(SAMPLE_COUNT):
center = kmeans.cluster_centers_[i, :]
cluster_classes = labels[kmeans_clusters == i]
cluster_class = np.bincount(cluster_classes).argmax()
dist = np.linalg.norm(latent_codes_embedded - center[np.newaxis, :], axis=1)
dist[labels != cluster_class] = float('inf')
indices[i] = np.argmin(dist)
def try_find_shortest_roundtrip(indices):
best_order = indices
best_distance = None
for _ in range(5000):
candiate = best_order.copy()
a = random.randint(0, SAMPLE_COUNT-1)
b = random.randint(0, SAMPLE_COUNT-1)
candiate[a] = best_order[b]
candiate[b] = best_order[a]
dist = np.sum(np.linalg.norm(latent_codes_embedded[candiate, :] - latent_codes_embedded[np.roll(candiate, 1), :], axis=1)).item()
if best_distance is None or dist < best_distance:
best_distance = dist
best_order = candiate
return best_order, best_distance
def find_shortest_roundtrip(indices):
best_order, best_distance = try_find_shortest_roundtrip(indices)
for _ in tqdm(range(100)):
np.random.shuffle(indices)
order, distance = try_find_shortest_roundtrip(indices)
        if distance < best_distance:
            best_distance = distance  # track the best distance too, not just the best order
            best_order = order
return best_order
print("Calculating trip...")
indices = find_shortest_roundtrip(indices)
indices = np.concatenate((indices, indices[0][np.newaxis]))
SIZE = latent_codes.shape[0]
stop_latent_codes = latent_codes[indices, :]
colors = np.zeros((labels.shape[0], 3))
for i in range(labels.shape[0]):
colors[i, :] = shapenet.get_color(labels[i])
spline = scipy.interpolate.CubicSpline(np.arange(SAMPLE_COUNT + 1), stop_latent_codes, axis=0, bc_type='periodic')
frame_latent_codes = spline(progress)
color_spline = scipy.interpolate.CubicSpline(np.arange(SAMPLE_COUNT + 1), colors[indices, :], axis=0, bc_type='periodic')
frame_colors = color_spline(progress)
frame_colors = np.clip(frame_colors, 0, 1)
frame_colors = np.zeros((progress.shape[0], 3))
for i in range(SAMPLE_COUNT):
frame_colors[i*TRANSITION_FRAMES:(i+1)*TRANSITION_FRAMES, :] = np.linspace(colors[indices[i]], colors[indices[i+1]], num=TRANSITION_FRAMES)
embedded_spline = scipy.interpolate.CubicSpline(np.arange(SAMPLE_COUNT + 1), latent_codes_embedded[indices, :], axis=0, bc_type='periodic')
frame_latent_codes_embedded = embedded_spline(progress)
frame_latent_codes_embedded[0, :] = frame_latent_codes_embedded[-1, :]
width, height = 40, 40
PLOT_FILE_NAME = 'tsne.png'
ensure_directory('images')
margin = 2
range_x = (latent_codes_embedded[:, 0].min() - margin, latent_codes_embedded[:, 0].max() + margin)
range_y = (latent_codes_embedded[:, 1].min() - margin, latent_codes_embedded[:, 1].max() + margin)
plt.ioff()
def create_plot(index, resolution=1080, filename=PLOT_FILE_NAME, dpi=100):
frame_color = frame_colors[index, :]
frame_color = (frame_color[0], frame_color[1], frame_color[2], 1.0)
size_inches = resolution / dpi
fig, ax = plt.subplots(1, figsize=(size_inches, size_inches), dpi=dpi)
ax.set_position([0, 0, 1, 1])
plt.axis('off')
ax.set_xlim(range_x)
ax.set_ylim(range_y)
ax.plot(frame_latent_codes_embedded[:, 0], frame_latent_codes_embedded[:, 1], c=(0.2, 0.2, 0.2, 1.0), zorder=1, linewidth=2)
ax.scatter(latent_codes_embedded[:, 0], latent_codes_embedded[:, 1], c=colors[:SIZE], s = 10, zorder=0)
ax.scatter(frame_latent_codes_embedded[index, 0], frame_latent_codes_embedded[index, 1], facecolors=frame_color, s = 200, linewidths=2, edgecolors=(0.1, 0.1, 0.1, 1.0), zorder=2)
ax.scatter(latent_codes_embedded[indices, 0], latent_codes_embedded[indices, 1], facecolors=colors[indices, :], s = 140, linewidths=1, edgecolors=(0.1, 0.1, 0.1, 1.0), zorder=3)
fig.savefig(filename, bbox_inches=Bbox([[0, 0], [size_inches, size_inches]]), dpi=dpi)
plt.close(fig)
frame_latent_codes = torch.tensor(frame_latent_codes, dtype=torch.float32, device=device)
print("Rendering...")
viewer = MeshRenderer(size=1080, start_thread=False)
def render_frame(frame_index):
viewer.model_color = frame_colors[frame_index, :]
with torch.no_grad():
if USE_VAE:
viewer.set_voxels(vae.decode(frame_latent_codes[frame_index, :]))
else:
viewer.set_mesh(sdf_net.get_mesh(frame_latent_codes[frame_index, :], voxel_resolution=128, sphere_only=True, level=SURFACE_LEVEL))
image_mesh = viewer.get_image(flip_red_blue=True)
create_plot(frame_index)
image_tsne = plt.imread(PLOT_FILE_NAME)[:, :, [2, 1, 0]] * 255
image = np.concatenate((image_mesh, image_tsne), axis=1)
cv2.imwrite("images/frame-{:05d}.png".format(frame_index), image)
for frame_index in tqdm(range(SAMPLE_COUNT * TRANSITION_FRAMES)):
render_frame(frame_index)
frame_index += 1
print("\n\nUse this command to create a video:\n")
print('ffmpeg -framerate 30 -i images/frame-%05d.png -c:v libx264 -profile:v high -crf 19 -pix_fmt yuv420p video.mp4')
|
[
"mail@marian42.de"
] |
mail@marian42.de
|
4dfb6b07cfc11aa0b89927d5daefb34f24fd2a4a
|
380b9992ddff952346c39fc4a2d9190cc5cf6d1b
|
/cistats.py
|
d71ceb5ad67f2e0b548d85e610d071e20ab8dd68
|
[] |
no_license
|
aplavin/cistats
|
61fd6fae659b9ef0e96ebbf8a1d81be03988259f
|
8688c39f076a7d08c04df678594f249bd9a7dc0e
|
refs/heads/master
| 2020-05-02T14:10:49.257098
| 2013-07-19T14:37:51
| 2013-07-19T14:37:51
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,364
|
py
|
from flask import Flask, render_template, request
import hglib
from datetime import datetime
import xmlrpclib
import re
app = Flask(__name__)
hglib.HGPATH = '/usr/local/bin/hg'
def timesince(value, default="just now"):
now = datetime.now()
diff = now - value
periods = (
(diff.days / 365, "year", "years"),
(diff.days / 30, "month", "months"),
(diff.days / 7, "week", "weeks"),
(diff.days, "day", "days"),
(diff.seconds / 3600, "hour", "hours"),
(diff.seconds / 60, "minute", "minutes"),
(diff.seconds, "second", "seconds"),
)
for period, singular, plural in periods:
if period:
return "%d %s ago" % (period, singular if period == 1 else plural)
return default
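# Illustration of the filter above, under the Python 2 integer division this module assumes
# (it imports xmlrpclib):
# timesince(datetime.now() - timedelta(hours=3))  -> "3 hours ago"
# timesince(datetime.now() - timedelta(days=2))   -> "2 days ago"
# timesince(datetime.now())                       -> "just now"
# On Python 3 the `/` divisions would need to be `//` to keep the same behaviour.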
class Repo(object):
def __init__(self, path, url):
self.path = path
self.url = url
def get_commits(repo, user):
with hglib.open(repo.path) as client:
revs = client.log(user=user)
active_rev_hash = client.summary()['parent'][0][1]
return [
{
'desc': r[5],
'dt': r[6],
'hash': r[1],
'active': r[1].startswith(active_rev_hash),
}
for r in revs
]
def get_patchbombed(user):
rpc = xmlrpclib.Server('http://patchwork.serpentine.com/xmlrpc/')
person_id = rpc.person_list(user, 0)[0]['id']
state_id = rpc.state_list('New', 0)[0]['id']
patches = rpc.patch_list({'submitter_id': person_id, 'state_id': state_id})
return [
{
'desc': re.sub(r'^\[.*?\]\s*', '', p['name'])
}
for p in patches
]
def commit_in(ci, cis):
lst = [
c
for c in cis
if c['desc'].splitlines()[0] == ci['desc'].splitlines()[0]
]
if lst:
if 'hash' in lst[0]:
return lst[0]['hash']
else:
return True
else:
return False
@app.route('/')
def index():
user = 'me@aplavin.ru'
commits = {rid: get_commits(repos[rid], user) for rid in repos}
commits['pbomb'] = [
ci
for ci in get_patchbombed(user)
if commit_in(ci, commits['mine'])
]
my_commits = [
(
ci,
[
rid
for rid in commits
if commit_in(ci, commits[rid])
],
['active'] if ci['active'] else [],
)
for ci in commits['mine']
]
commit_cnts = {rid: len(commits[rid]) for rid in commits}
commit_cnts['crew'] -= commit_cnts['main']
commit_cnts['not_accepted'] = len([c for c, crepos, flags in my_commits if len(crepos) == 1])
return render_template(
'index.html',
all_commits=commits,
commits=my_commits,
commit_cnts=commit_cnts,
repos=repos,
debug='debug' in request.args
)
repos = {
'main': Repo('/home/alexander/hg_related/hg', 'http://selenic.com/hg'),
'crew': Repo('/home/alexander/hg_related/hg-crew', 'http://hg.intevation.org/mercurial/crew'),
'mine': Repo('/home/alexander/hg_related/hg_fork', 'http://hg.aplavin.ru/hg_fork'),
}
app.jinja_env.trim_blocks = True
app.jinja_env.filters['timedelta'] = timesince
app.jinja_env.filters['firstline'] = lambda s: (s.splitlines() + ['(none)'])[0]
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0')
|
[
"me@aplavin.ru"
] |
me@aplavin.ru
|
957ed1d0e0118f4d8f4e687cf64cac3716929ae3
|
77cd2fbaab036c8f2a19b2f2492895b5d7ac20bf
|
/calendar/main.py
|
a3cca0644cc15015c46399613ab10a4a0ad3eae2
|
[] |
no_license
|
mrh929/uestc_calendar_bot
|
9f72caf1dac1a58ad325c84d2afd9771271911c6
|
7d70fe9bd0f6b6ab3599bd9b3f15530d83653ed3
|
refs/heads/master
| 2020-06-14T03:42:53.939625
| 2019-09-09T03:20:18
| 2019-09-09T03:20:18
| 194,885,459
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,925
|
py
|
import telegram
import google
import logging
import base64
import io
from requests_html import HTMLSession
from google.cloud import firestore
from bs4 import BeautifulSoup
from PIL import Image
from time import sleep
from Spider import get_all_course
from UESTC_Login import _login, get_captcha
def __Bot_token():
f = open("token.txt","r")
token = f.read()
f.close()
return token
def course_print(mycourse, update):  # build a readable string and send it to the user
chat_id = update.message.chat_id
week={
"0":"Monday",
"1":"Tuesday",
"2":"Wednsday",
"3":"Thirsday",
"4":"Friday",
"5":"Saturday",
"6":"Sunday",
}
for course in mycourse:
info = course[0]
time = course[1]
out = "{}\n{} {}\nweek:".format(info[0],info[1],info[2])
i = 0
while(i <= 20):
if(info[3][i] == 1):
out = out + " {}".format(i)
i += 1
out = out + '\n' + week[str(time[0][0])] + " "
out = out + "class no."
for classes in time:
out = out + " {}".format(classes[1]+1)
#print(out)
_bot_send_text(chat_id, out)
"""
print(info)
#out = out + str(info) + "\n"
out = out + str(info[0]) + '\n' + str(info[1]) + str(info[2]) +'\n' + str(info[3]) + "\n"
for t in time:
out = out + str(t) + "\n"
out = out + "\n"
"""
_bot_send_text(chat_id, "Demo version. To be continued....")
#bot.send_message(chat_id=update.message.chat_id, text=out)
def _bot_send_text(chat_id, text):
global bot
bot.send_message(chat_id = chat_id, text = text)
def _firestore_update(chat_id ,dict):
    # write the dict contents to Google Firestore
global db
doc_ref = db.collection(u'uestc_calendar_bot').document(str(chat_id))
doc_ref.set(dict, merge=True)
def _firestore_read(chat_id):
global db
doc_ref = db.collection(u'uestc_calendar_bot').document(str(chat_id))
doc = doc_ref.get().to_dict()
return doc
def _Process_Start(update):
    # show the welcome screen
chat_id = update.message.chat_id
dicts = {
'user_id': chat_id,
'status': 0
}
_firestore_update(chat_id, dicts)
_bot_send_text(chat_id,
text=""" Welcome to YouESTC alarm clock!
This bot is used to query your timetable and alarm you before class.
Commands:
/login : to login into uestc""")
def _Process_Login(update):
chat_id = update.message.chat_id
dicts = {'status': 1}
_firestore_update(chat_id, dicts)
_bot_send_text(chat_id, "please input your UESTC student number:")
def _Process_Account(update):
    # handle the account (student number) the user entered
chat_id = update.message.chat_id
dicts = {
'status': 2,
'account': update.message.text
}
_firestore_update(chat_id, dicts)
_bot_send_text(chat_id, "please input your password:")
def _Process_Password(update):
    # handle the password the user entered
chat_id = update.message.chat_id
doc = _firestore_read(chat_id)
account = doc['account']
passwd = update.message.text
dicts = {'passwd': base64.b64encode(passwd.encode('utf-8'))}
_firestore_update(chat_id, dicts)
bot.send_message(chat_id=update.message.chat_id, text="please input your captcha below:")
bot.send_message(chat_id=update.message.chat_id, text="Pulling captcha photo...")
    form, img, new_session = get_captcha(account, passwd)  # request the captcha image
#f = open("captcha.png", "wb")
#f.write(img)
#f.close()
img_b64encode = base64.b64encode(img.encode('utf-8')) # base64编码
img_b64decode = base64.b64decode(img_b64encode) # base64解码
image = io.BytesIO(img_b64decode)
#f = open("captcha.png", "rb")
bot.send_photo(chat_id=chat_id, photo=image)
    # send the captcha image to the user
dicts = {
'form': form,
'cookies': new_session.cookies.get_dict(),
'status': 3
}
_firestore_update(chat_id, dicts)
def _Process_Captcha(update):
    # handle the captcha the user entered
chat_id = update.message.chat_id
_bot_send_text(chat_id, "Attempting to login...")
doc = _firestore_read(chat_id)
cookies = doc['cookies']
form = doc['form']
captcha = update.message.text
new_session, res = _login(form, captcha, cookies)
if(res == 0):
_bot_send_text(chat_id, "Login success! Pulling data...")
mycourse = get_all_course(new_session)
course_print(mycourse, update)
elif(res == 1):
_bot_send_text(chat_id, "Password wrong!")
elif(res == 2):
_bot_send_text(chat_id, "Captcha wrong!")
else:
_bot_send_text(chat_id, "Student number wrong!")
dicts = {'status': 0}
_firestore_update(chat_id, dicts)
def Text_Process(update):
doc_ref = db.collection(u'uestc_calendar_bot').document(str(update.message.chat_id))
    try:  # if no previous record exists, jump straight to the start menu
doc = doc_ref.get().to_dict()
except google.cloud.exceptions.NotFound:
_Process_Start(update)
return
status = doc['status']
if(status == 0):
_Process_Start(update)
elif(status == 1):
_Process_Account(update)
elif(status == 2):
_Process_Password(update)
elif(status == 3):
_Process_Captcha(update)
    _bot_send_text(update.message.chat_id, "Got it!")
def Command_Process(update):  # handle bot commands
command = update.message.text
command_list = {
'/start': _Process_Start,
'/login': _Process_Login
}
if(command in command_list):
command_list[command](update)
elif(command[0] == '/'):
_Process_Start(update)
else:
Text_Process(update)
        # not a command, fall through to the plain-text handler
if(__name__ == "__main__"):
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',level=logging.INFO)
    # configure logging
global bot
global db
    # global variables shared by all handler functions
token = __Bot_token()
bot = telegram.Bot(token = token)#bot
print(bot.get_me())
    # connect to Telegram
db = firestore.Client()
    # connect to Google Firestore
while(1):
updates = bot.get_updates()
if(updates != []):
for update in updates:
Command_Process(update)
bot.get_updates(limit = 1, offset = update.update_id+1)
print(update.message.text, " ", update.message.chat_id)
else:
sleep(0.01)
#$env:GOOGLE_APPLICATION_CREDENTIALS="G:\github\telebot\key\My First Project-2035ff2d3024.json"
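# A minimal alternative sketch (not part of the original file): the while-loop
# above implements long polling by hand with bot.get_updates() plus an offset.
# Assuming python-telegram-bot v12/13, the same dispatch loop could roughly be:
#
#     from telegram.ext import Updater, MessageHandler, Filters
#     updater = Updater(token=token, use_context=True)
#     updater.dispatcher.add_handler(MessageHandler(Filters.all, lambda u, c: Command_Process(u)))
#     updater.start_polling()
#     updater.idle()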
|
[
"809999463@qq.com"
] |
809999463@qq.com
|
9dde286444abdb781a63cf46ff1c010dd1be1bf2
|
b94a62a7e8bb0106f9ff8b7c933fb3818305288b
|
/src/tasks/api/controller/Products.py
|
df383d274136cbb5482be1b9453284b8c1d5099f
|
[] |
no_license
|
rijulg/q_coding_challenge
|
1c2958a6f7af65657e9526381e5fc02f9437b5ce
|
7e78f568c68e9ec02088fd7e23cc17abbf9a6634
|
refs/heads/develop
| 2023-08-21T14:40:38.169931
| 2021-09-28T13:51:30
| 2021-09-28T13:51:30
| 411,288,258
| 0
| 0
| null | 2021-09-28T14:01:35
| 2021-09-28T13:14:54
|
Python
|
UTF-8
|
Python
| false
| false
| 447
|
py
|
from flask import Blueprint
from ..datamapper import DataMapper
from ..model.Products import Products as ProductsModel
products_bp = Blueprint('products_bp', __name__)
@products_bp.route('/top_selling/<int:num_items>', methods=['GET'])
def top_selling(num_items):
items = ProductsModel(DataMapper()).top_selling(num_items)
top = {}
for (product_id, total_value) in items:
top[product_id] = float(total_value)
return top
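# A minimal usage sketch (the app wiring lives elsewhere in this repo; the
# '/products' prefix below is an assumption):
#
#     from flask import Flask
#     app = Flask(__name__)
#     app.register_blueprint(products_bp, url_prefix='/products')
#     # GET /products/top_selling/5 -> {"<product_id>": <total_value>, ...}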
|
[
"rijulg@gmail.com"
] |
rijulg@gmail.com
|
16ea09b1b607856af7749a50505668de699710cc
|
ed2b5193ae98c0f435a31b9ff10ba38688758582
|
/wazo_appgateway_client/models/channel_talking_started.py
|
a693f453ad71d177c622b2e7850701bc6863ff82
|
[] |
no_license
|
wazo-platform/wazo-asyncio-ari-client
|
c1cbbf09cee887663cf6dd0aedb39b4ef32922c5
|
ec777425f445021f41586d892085ca8574cb22ec
|
refs/heads/main
| 2023-01-10T11:51:25.677649
| 2020-10-15T18:16:00
| 2020-10-15T18:16:00
| 304,405,528
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,579
|
py
|
# coding: utf-8
"""
localhost:8088
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 5.1.1
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from wazo_appgateway_client.configuration import Configuration
class ChannelTalkingStarted(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'channel': 'Channel'
}
attribute_map = {
'channel': 'channel'
}
def __init__(self, channel=None, local_vars_configuration=None): # noqa: E501
"""ChannelTalkingStarted - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._channel = None
self.discriminator = None
self.channel = channel
@property
def channel(self):
"""Gets the channel of this ChannelTalkingStarted. # noqa: E501
:return: The channel of this ChannelTalkingStarted. # noqa: E501
:rtype: Channel
"""
return self._channel
@channel.setter
def channel(self, channel):
"""Sets the channel of this ChannelTalkingStarted.
:param channel: The channel of this ChannelTalkingStarted. # noqa: E501
:type channel: Channel
"""
if self.local_vars_configuration.client_side_validation and channel is None: # noqa: E501
raise ValueError("Invalid value for `channel`, must not be `None`") # noqa: E501
self._channel = channel
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ChannelTalkingStarted):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, ChannelTalkingStarted):
return True
return self.to_dict() != other.to_dict()
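# A minimal usage sketch (the module path and constructor of the generated
# Channel model are assumptions):
#
#     from wazo_appgateway_client.models.channel import Channel
#     event = ChannelTalkingStarted(channel=Channel())
#     print(event.to_dict())   # {'channel': {...}}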
|
[
"sylvain@wazo.io"
] |
sylvain@wazo.io
|
ea26f99cb03f513d7a6a1cc852bc1911b07eb2e2
|
daab57b794c7f3fc59ece13b516e18f99b40372d
|
/virtualenv/lib/python3.4/site-packages/prospector/__pkginfo__.py
|
84b807e861588253bc9e4ce73b9a0d8de75ede3b
|
[] |
no_license
|
JakeCooper/AdmitMe
|
272fd80cc868d4b6764a738843533d731d305497
|
e28ebe538e68639c031d240bc38362f7fac483c7
|
refs/heads/master
| 2020-09-14T19:27:01.088620
| 2016-09-11T07:49:06
| 2016-09-11T07:49:06
| 67,902,114
| 1
| 1
| null | 2018-12-09T20:29:56
| 2016-09-11T00:48:41
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 105
|
py
|
# -*- coding: utf-8 -*-
__version_info__ = (0, 12, 2)
__version__ = '.'.join(map(str, __version_info__))
|
[
"jake.elijah.cooper@gmail.com"
] |
jake.elijah.cooper@gmail.com
|
0d4a6722460d4e7f1f42de735ae46e4666d6dad0
|
03676c74e4cc8d321b2a4d0ad0decbf81b43e294
|
/three_digit_reverse/main.py
|
ed97808e9bb3f952c781ad21f1b9c1b23f7cf300
|
[] |
no_license
|
AbishekNanjappa/MyPythonCode
|
16c2948f083f36ebd6d849b105af4a1046c43501
|
0dea5f9146f2f3db7499c72304d50b9c9c1e963b
|
refs/heads/main
| 2023-01-02T18:53:04.360025
| 2020-10-28T05:22:39
| 2020-10-28T05:22:39
| 302,643,587
| 0
| 0
| null | 2020-10-28T05:09:39
| 2020-10-09T13:05:50
|
Python
|
UTF-8
|
Python
| false
| false
| 205
|
py
|
import utility
num = int(input("Enter a three digit number:\n"))
return_val = utility.three_digit_reverse(num)
if return_val == -1:
print("Enter a three digit number")
else:
print(return_val)
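# A minimal sketch (the `utility` module is not shown in this file): an
# implementation of three_digit_reverse() consistent with the calls above
# might look like
#
#     def three_digit_reverse(num):
#         if num < 100 or num > 999:
#             return -1               # caller prints an error message for -1
#         return int(str(num)[::-1])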
|
[
"noreply@github.com"
] |
AbishekNanjappa.noreply@github.com
|
3038995c915fe17ce7ccc604ee1c93ab4558af56
|
5be5f49f289babb6d206174036ee96192bd340df
|
/dmsw/blog/migrations/0016_auto_20200510_1849.py
|
3b44be50f720fdc0ec88f95f49730a429a6e7ec5
|
[] |
no_license
|
ogglin/DiscoverMoscow
|
7ccd24481c31855b4d674bda213139a575fcb68c
|
9634637107625b1e538a18533b25422d79869051
|
refs/heads/master
| 2022-12-04T11:12:26.131403
| 2020-09-01T10:19:11
| 2020-09-01T10:19:11
| 258,610,397
| 0
| 0
| null | 2020-05-27T12:05:32
| 2020-04-24T19:55:32
|
Python
|
UTF-8
|
Python
| false
| false
| 9,492
|
py
|
# Generated by Django 3.0.5 on 2020-05-10 15:49
from django.db import migrations
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.embeds.blocks
import wagtail.images.blocks
class Migration(migrations.Migration):
dependencies = [
('blog', '0015_auto_20200510_1849'),
]
operations = [
migrations.AlterField(
model_name='blogpage',
name='content_body',
field=wagtail.core.fields.StreamField([('container', wagtail.core.blocks.StructBlock([('onecol', wagtail.core.blocks.StreamBlock([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('video', wagtail.core.blocks.StreamBlock([('video', wagtail.embeds.blocks.EmbedBlock())], blank=True, icon='placeholder', label='Видео блок', null=True, required=False)), ('yt_video', wagtail.core.blocks.StreamBlock([('element', wagtail.core.blocks.StreamBlock([], blank=True, icon='video', label='Добавить видео', null=True, required=False))], blank=True, icon='placeholder', label='Видео галерея', null=True, required=False)), ('html', wagtail.core.blocks.RawHTMLBlock()), ('gallery', wagtail.core.blocks.StreamBlock([('image', wagtail.images.blocks.ImageChooserBlock())], blank=True, icon='image', label='Галерея', null=True, required=False))], blank=True, icon='cog', label='Одна колонка', null=True, required=False)), ('twocol', wagtail.core.blocks.StructBlock([('left_column', wagtail.core.blocks.StreamBlock([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('video', wagtail.core.blocks.StreamBlock([('video', wagtail.embeds.blocks.EmbedBlock())], blank=True, icon='placeholder', label='Видео блок', null=True, required=False)), ('yt_video', wagtail.core.blocks.StreamBlock([('element', wagtail.core.blocks.StreamBlock([], blank=True, icon='video', label='Добавить видео', null=True, required=False))], blank=True, icon='placeholder', label='Видео галерея', null=True, required=False)), ('html', wagtail.core.blocks.RawHTMLBlock()), ('gallery', wagtail.core.blocks.StreamBlock([('image', wagtail.images.blocks.ImageChooserBlock())], blank=True, icon='image', label='Галерея', null=True, required=False))], blank=True, icon='arrow-right', label='Left column content', null=True, required=False)), ('right_column', wagtail.core.blocks.StreamBlock([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('video', wagtail.core.blocks.StreamBlock([('video', wagtail.embeds.blocks.EmbedBlock())], blank=True, icon='placeholder', label='Видео блок', null=True, required=False)), ('yt_video', wagtail.core.blocks.StreamBlock([('element', wagtail.core.blocks.StreamBlock([], blank=True, icon='video', label='Добавить видео', null=True, required=False))], blank=True, icon='placeholder', label='Видео галерея', null=True, required=False)), ('html', wagtail.core.blocks.RawHTMLBlock()), ('gallery', wagtail.core.blocks.StreamBlock([('image', wagtail.images.blocks.ImageChooserBlock())], blank=True, icon='image', label='Галерея', null=True, required=False))], blank=True, icon='arrow-right', label='Right column content', null=True, required=False))], blank=True, icon='cog', label='Две колонки', null=True, required=False))])), ('container_narrow', wagtail.core.blocks.StructBlock([('onecol', wagtail.core.blocks.StreamBlock([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('video', wagtail.core.blocks.StreamBlock([('video', wagtail.embeds.blocks.EmbedBlock())], blank=True, icon='placeholder', label='Видео блок', null=True, required=False)), ('yt_video', wagtail.core.blocks.StreamBlock([('element', wagtail.core.blocks.StreamBlock([], blank=True, icon='video', label='Добавить видео', null=True, required=False))], blank=True, icon='placeholder', label='Видео галерея', null=True, required=False)), ('html', wagtail.core.blocks.RawHTMLBlock()), 
('gallery', wagtail.core.blocks.StreamBlock([('image', wagtail.images.blocks.ImageChooserBlock())], blank=True, icon='image', label='Галерея', null=True, required=False))], blank=True, icon='cog', label='Одна колонка', null=True, required=False)), ('twocol', wagtail.core.blocks.StructBlock([('left_column', wagtail.core.blocks.StreamBlock([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('video', wagtail.core.blocks.StreamBlock([('video', wagtail.embeds.blocks.EmbedBlock())], blank=True, icon='placeholder', label='Видео блок', null=True, required=False)), ('yt_video', wagtail.core.blocks.StreamBlock([('element', wagtail.core.blocks.StreamBlock([], blank=True, icon='video', label='Добавить видео', null=True, required=False))], blank=True, icon='placeholder', label='Видео галерея', null=True, required=False)), ('html', wagtail.core.blocks.RawHTMLBlock()), ('gallery', wagtail.core.blocks.StreamBlock([('image', wagtail.images.blocks.ImageChooserBlock())], blank=True, icon='image', label='Галерея', null=True, required=False))], blank=True, icon='arrow-right', label='Left column content', null=True, required=False)), ('right_column', wagtail.core.blocks.StreamBlock([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('video', wagtail.core.blocks.StreamBlock([('video', wagtail.embeds.blocks.EmbedBlock())], blank=True, icon='placeholder', label='Видео блок', null=True, required=False)), ('yt_video', wagtail.core.blocks.StreamBlock([('element', wagtail.core.blocks.StreamBlock([], blank=True, icon='video', label='Добавить видео', null=True, required=False))], blank=True, icon='placeholder', label='Видео галерея', null=True, required=False)), ('html', wagtail.core.blocks.RawHTMLBlock()), ('gallery', wagtail.core.blocks.StreamBlock([('image', wagtail.images.blocks.ImageChooserBlock())], blank=True, icon='image', label='Галерея', null=True, required=False))], blank=True, icon='arrow-right', label='Right column content', null=True, required=False))], blank=True, icon='cog', label='Две колонки', null=True, required=False))])), ('container_wide', wagtail.core.blocks.StructBlock([('onecol', wagtail.core.blocks.StreamBlock([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('video', wagtail.core.blocks.StreamBlock([('video', wagtail.embeds.blocks.EmbedBlock())], blank=True, icon='placeholder', label='Видео блок', null=True, required=False)), ('yt_video', wagtail.core.blocks.StreamBlock([('element', wagtail.core.blocks.StreamBlock([], blank=True, icon='video', label='Добавить видео', null=True, required=False))], blank=True, icon='placeholder', label='Видео галерея', null=True, required=False)), ('html', wagtail.core.blocks.RawHTMLBlock()), ('gallery', wagtail.core.blocks.StreamBlock([('image', wagtail.images.blocks.ImageChooserBlock())], blank=True, icon='image', label='Галерея', null=True, required=False))], blank=True, icon='cog', label='Одна колонка', null=True, required=False)), ('twocol', wagtail.core.blocks.StructBlock([('left_column', wagtail.core.blocks.StreamBlock([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('video', wagtail.core.blocks.StreamBlock([('video', wagtail.embeds.blocks.EmbedBlock())], blank=True, icon='placeholder', label='Видео блок', null=True, required=False)), ('yt_video', wagtail.core.blocks.StreamBlock([('element', wagtail.core.blocks.StreamBlock([], 
blank=True, icon='video', label='Добавить видео', null=True, required=False))], blank=True, icon='placeholder', label='Видео галерея', null=True, required=False)), ('html', wagtail.core.blocks.RawHTMLBlock()), ('gallery', wagtail.core.blocks.StreamBlock([('image', wagtail.images.blocks.ImageChooserBlock())], blank=True, icon='image', label='Галерея', null=True, required=False))], blank=True, icon='arrow-right', label='Left column content', null=True, required=False)), ('right_column', wagtail.core.blocks.StreamBlock([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('video', wagtail.core.blocks.StreamBlock([('video', wagtail.embeds.blocks.EmbedBlock())], blank=True, icon='placeholder', label='Видео блок', null=True, required=False)), ('yt_video', wagtail.core.blocks.StreamBlock([('element', wagtail.core.blocks.StreamBlock([], blank=True, icon='video', label='Добавить видео', null=True, required=False))], blank=True, icon='placeholder', label='Видео галерея', null=True, required=False)), ('html', wagtail.core.blocks.RawHTMLBlock()), ('gallery', wagtail.core.blocks.StreamBlock([('image', wagtail.images.blocks.ImageChooserBlock())], blank=True, icon='image', label='Галерея', null=True, required=False))], blank=True, icon='arrow-right', label='Right column content', null=True, required=False))], blank=True, icon='cog', label='Две колонки', null=True, required=False))]))], blank=True, null=True, verbose_name='Статья'),
),
]
|
[
"server.ares@gmail.com"
] |
server.ares@gmail.com
|
da03445e886f095bca1890224c388f7e5cca65aa
|
fe72a7d2e770ed20e06aae3b62af6c810834a8f6
|
/venv/Scripts/pip3.7-script.py
|
1ae5e1a43c5e8296a1afdef84bec63e65884c3a5
|
[] |
no_license
|
HuYu211/finaltest
|
b026485b97628c2d022e8c89524a32cbf09e16dd
|
70962131b18148b577bb5cea07afdbd01511cee6
|
refs/heads/master
| 2020-05-14T19:01:34.943478
| 2019-04-25T03:23:25
| 2019-04-25T03:23:25
| 181,920,995
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 406
|
py
|
#!d:\PycharmProjects\notebook\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3.7'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==10.0.1', 'console_scripts', 'pip3.7')()
)
|
[
"645889908@qq.com"
] |
645889908@qq.com
|
ad84ffe31bed796c048e75367da22f1b3c7f1ec0
|
d1170f1674ea2e4cbbfc8c77832c879a2a62021b
|
/intpro1.py
|
625c690f0cefefd496e628f3b33a449fdc815849
|
[] |
no_license
|
adnaksbhat/ChatBot
|
23f67678e87faae9feed7b1aeebd18d019c88caa
|
c755ae4b6c56f9bde1b641aca689918961c23b3e
|
refs/heads/main
| 2023-05-14T13:50:52.017337
| 2021-06-07T15:41:28
| 2021-06-07T15:41:28
| 374,717,952
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,083
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 7 11:28:21 2021
@author: Skanda
"""
# This is a sample Python script.
# Press Shift+F10 to execute it or replace it with your code.
# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.
# -*- coding: utf-8 -*-
import speech_recognition as s
from chatterbot import ChatBot
from chatterbot.trainers import ChatterBotCorpusTrainer
import pyttsx3 as pp #for audio
import threading
from tkinter import *
root = Tk()
engine=pp.init()
voices=engine.getProperty('voices')
engine.setProperty('voice',voices[1].id)
#function for the bot to speak
def speak(word):
engine.say(word)
engine.runAndWait()
# takes audio as input and converts it to string
def take_audio():
sr = s.Recognizer()
sr.pause_threshold = 1
print('Your bot is listening...')
with s.Microphone() as m:
try:
audio = sr.listen(m)
            query = sr.recognize_google(audio, language='en-IN')
print(query)
e.delete(0, END)
e.insert(0, query)
sendq()
except Exception as ex:
print(ex)
def sendq():
send = e.get() # message from input entry given by the user
txt.insert(END, "\nYou: " + send) # message printed to the text area
if (send == 'Bye' or send == 'bye'):
reply = 'Nice Talking to you Bye'
txt.insert(END, '\n{}: {}'.format(bot.name, reply))
speak(reply)
e.delete(0, END) # delete the input given by user after send button is pressed so that next input can be given.
    if (send != 'Bye' and send != 'bye'):
reply = bot.get_response(send) # response from the dataset fed
txt.insert(END, '\n{}: {}'.format(bot.name, reply))
speak(reply)
e.delete(0, END)
txt.yview(END)
# Define Chatbot with a name
bot = ChatBot('BLAHbot')
# set the trainer algorithm
bot.set_trainer(ChatterBotCorpusTrainer)
# training the chatbot on the data
# data : chatterbot/corpus/english
bot.train('chatterbot.corpus.english')
# title of the interface
root.title('CHATBOT')
# text area where the display is shown
txt = Text(root, bg='light blue')
txt.grid(columnspan=2)
scrollbar = Scrollbar(root, command=txt.yview)
scrollbar.place(x=630, y=4, height=385)
# input area where the user gives input
e = Entry(root, width=100)
e.grid(row=1, column=0)
# button to send the input.
send = Button(root, text="SEND", width=5, command=sendq, bg='light pink')
send.grid(row=1, column=1)
#function for invoking the button if enter key is pressed
def enter_function(event):
send.invoke()
#bind root window with enter key
root.bind('<Return>', enter_function)
# to listen to audio continuously
def repeatl():
while True:
take_audio()
t = threading.Thread(target=repeatl) # defined a thread so that both speech recognition and UI is shown simultaneously.
t.start()
root.mainloop()
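# A note on the training call above: bot.set_trainer(...) matches pre-1.0
# ChatterBot releases. On ChatterBot 1.0+ the equivalent step would roughly be:
#
#     from chatterbot.trainers import ChatterBotCorpusTrainer
#     trainer = ChatterBotCorpusTrainer(bot)
#     trainer.train('chatterbot.corpus.english')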
|
[
"noreply@github.com"
] |
adnaksbhat.noreply@github.com
|
a2a54b5891d436d18c0b43cc21f8d68cf31ed107
|
a65622c6b3d8c570f113af3ee49b4c4f2ec4995c
|
/PY_files/DB_Tkin.py
|
bdb20b4cc356bb53565fafd621b17d254a5e9b10
|
[] |
no_license
|
muthu255/python
|
62a88d32450d71a9297b65ae1865c6125a716a8d
|
4c1135388b2ba36ac4c23e952e064496db9cab82
|
refs/heads/master
| 2022-07-03T14:49:35.008669
| 2020-05-13T19:22:40
| 2020-05-13T19:22:40
| 263,722,427
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,808
|
py
|
import cx_Oracle
import tkinter
import pandas as pd
from tkinter import filedialog
from tkinter import messagebox
from tkinter import *
root=tkinter.Tk()
root.title('DataBase')
e1=Entry(root,width=20,borderwidth=5,)
e1.grid(row=2,column=4,columnspan=3,padx=10,pady=10)
e2=Entry(root,width=20,borderwidth=5,show='*')
e2.grid(row=3,column=4,columnspan=5,padx=10,pady=10)
lb1=Label(root,text='USER NAME :').grid(row=2,column=3)
lb2=Label(root,text='PASSWORD :').grid(row=3,column=3)
def check():
user_name=e1.get()
Password=e2.get()
    # select the source query file
    file_path = filedialog.askopenfilename(initialdir=r'D:\Studies\Python\practice', title="Select the Source query", filetypes=(('txt file', '*.txt'), ('all files', '*.*')))
    # select the target query file
    file_path1 = filedialog.askopenfilename(initialdir=r'D:\Studies\Python\practice', title="Select the Target query", filetypes=(('SQL file', '*.sql'), ('all files', '*.*')))
#print(file_path)
#print(file_path1)
source=open(file_path).read()
print("============SRC_QUERY============")
print(source)
target=open(file_path1).read()
print("============TGT_QUERY============")
print(target)
con_string=user_name+'/'+Password+'@localhost/xe'
print(con_string)
con = cx_Oracle.connect(con_string)
cur1 = con.cursor()
cur2 = con.cursor()
cur1.execute(str(source))
cur2.execute(str(target))
print("============SRC_QUERY============")
for i in cur1:
print(i)
print("============SRC_QUERY============")
for i in cur2:
print(i)
df1=pd.DataFrame(cur1)
df2=pd.DataFrame(cur2)
cur1.close()
cur2.close()
con.close()
but = Button(root, text='OK', command=check)
but.grid(row=4, column=5)
root.mainloop()  # start the Tk event loop so the window stays open
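# A minimal sketch (not in the original): df1/df2 are built inside check() but
# never compared; a simple source-vs-target diff using pandas could be
#
#     diff = pd.concat([df1, df2]).drop_duplicates(keep=False)
#     print("Rows present in only one result set:", len(diff))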
|
[
"noreply@github.com"
] |
muthu255.noreply@github.com
|
d32fa3c7ad808770d849fd474057658e82c74b40
|
aed8348fac95ceab26ed3a7099138624480c8d7a
|
/meme/meme/settings.py
|
9ed727cedde1766412b7a715e80dce8f367727e3
|
[] |
no_license
|
chaithanyarlk/MemePage
|
9c73c318c91674cd340089b4931d0e5a2704563e
|
8a078d1ab67b12a638b186a996cc55df7c74e054
|
refs/heads/master
| 2023-03-04T04:37:21.487584
| 2021-02-12T04:01:09
| 2021-02-12T04:01:09
| 338,073,821
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,395
|
py
|
"""
Django settings for meme project.
Generated by 'django-admin startproject' using Django 3.1.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
from decouple import config
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'core',
'django_filters',
'corsheaders',
]
MIDDLEWARE = [
    'corsheaders.middleware.CorsMiddleware',  # must come before CommonMiddleware
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
CORS_ORIGIN_ALLOW_ALL = True # If this is used then `CORS_ORIGIN_WHITELIST` will not have any effect
CORS_ALLOW_CREDENTIALS = True
ROOT_URLCONF = 'meme.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'meme.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
DATETIME_FORMAT = 'd-m-Y H:i:s'  # Django date-format syntax
USE_L10N = False
USE_TZ = False #
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
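# A note on configuration: SECRET_KEY is read through python-decouple's config(),
# so an environment variable or a .env file at the project root is expected, e.g.
#
#     # .env
#     SECRET_KEY=replace-me-with-a-long-random-string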
|
[
"chithanyarlk@gmail.com"
] |
chithanyarlk@gmail.com
|
64f74fe56d657c97549777a57a66440633ecf9c6
|
fe57ad74daa82f3805978668caa68426787e4e3c
|
/mysite/polls/models.py
|
891928f36aeab91469ea6d4e6307c55fef31163f
|
[] |
no_license
|
Aliciarui/PycharmServer
|
d7efc02c7032127378b73af410e357cdf425564b
|
dbcc8b7a7906c3d58fca679efdbe23a12cc5d209
|
refs/heads/master
| 2023-06-03T09:07:32.920827
| 2021-06-17T08:16:03
| 2021-06-17T08:16:03
| 377,729,126
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 194
|
py
|
from django.db import models
# Create your models here.
# used for defining new model classes
class UserInfo(models.Model):
id = models.IntegerField(primary_key=True)
info = models.CharField(max_length=20)
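# A minimal usage sketch (e.g. from `python manage.py shell`, after migrations):
#
#     UserInfo.objects.create(id=1, info='hello')
#     print(UserInfo.objects.get(id=1).info)   # 'hello'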
|
[
"ruiqialicia@yeah.net"
] |
ruiqialicia@yeah.net
|
d4847f3c091bcb63756b682c82699178e1b4c83b
|
48544f9b8884af3ab86795d02ee7a4f067237377
|
/aiida/tools.codespc.vasp/query.py
|
4c36c2d8203f040b892c6267c2c3119bd5708a8a
|
[] |
no_license
|
greschd/aiida-vasp
|
ec10c930cc1207bf447395285e5ec60671e9c5b0
|
6eb7893b1e3c7cc58ca4bedca709b7dda2b72e09
|
refs/heads/master
| 2021-01-21T20:46:18.608435
| 2017-05-24T11:27:32
| 2017-05-24T11:27:32
| 92,274,671
| 0
| 0
| null | 2017-05-24T09:28:41
| 2017-05-24T09:28:41
| null |
UTF-8
|
Python
| false
| false
| 2,670
|
py
|
from aiida.orm.querytool import QueryTool
from aiida.orm.calculation.job import JobCalculation
class VaspFinder(object):
_vaspclass = ['vasp.vasp', 'vasp.asevasp', 'vasp.vasp5',
'vasp.vasp2w90', 'vasp.vasp2w90.Vasp2W90Calculation']
@classmethod
def cmp_ctime(cls, calc1, calc2):
t1 = calc1.ctime
t2 = calc2.ctime
if t1 < t2:
return -1
elif t1 > t2:
return 1
else:
return 0
@classmethod
def table_element(cls, calc):
ex = calc.get_extras()
s = ex.get('success')
if isinstance(s, (bool, int)):
ok = s and 'yes' or 'no'
else:
ok = s
element = {
'creation_time': calc.ctime.strftime(format='%Y-%m-%d %H:%M'),
'ctime': calc.ctime,
'class': calc.__class__.__name__,
'successful': ok and ok or 'N/A',
'experiment': ex.get('experiment', 'N/A'),
'exp_run': ex.get('run', 'N/A'),
'type': ex.get('type', 'N/A'),
'state': calc.get_state(),
'pk': calc.pk
}
return element
@classmethod
def str_table(cls, tab):
header = {
'creation_time': 'Creation Time',
'class': 'Class',
'successful': 'Success',
'experiment': 'Experiment',
'exp_run': 'Run Nr.',
'type': 'Tags',
'state': 'AiiDA-State',
'pk': 'PK'
}
line = '{pk:>5} {creation_time:18} {state:20} {successful:>6} '
line += '{experiment:20} {exp_run:>7} {class} {type}'
tab.insert(0, header)
return '\n'.join([line.format(**c) for c in tab])
@classmethod
def cstate(cls, calc):
if hasattr(calc, 'get_state'):
return calc.get_state()
else:
return 'N/A'
@classmethod
def history(cls, last=0, vaspclass=None):
q = QueryTool()
q.set_class(JobCalculation)
l = filter(lambda c: 'vasp' in str(c.__class__), q.run_query())
l.sort(cls.cmp_ctime)
res = l[-last:]
res.reverse()
tab = [cls.table_element(c) for c in res]
print cls.str_table(tab)
@classmethod
def status(cls, vaspclass=None):
q = QueryTool()
q.set_class(JobCalculation)
        st = ['TOSUBMIT', 'SUBMITTING', 'WITHSCHEDULER',
              'COMPUTED', 'PARSING', 'RETRIEVING']
l = filter(lambda c: cls.cstate(c) in st, q.run_query())
l.sort(cls.cmp_ctime)
l.reverse()
tab = [cls.table_element(c) for c in l]
print cls.str_table(tab)
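# A minimal usage sketch (run inside an AiiDA-enabled Python 2 shell):
#
#     VaspFinder.history(last=10)   # table of the 10 most recent VASP calculations
#     VaspFinder.status()           # calculations still queued, running or retrieving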
|
[
"r.haeuselmann@gmx.ch"
] |
r.haeuselmann@gmx.ch
|