| hexsha (string, len 40) | size (int64, 5-2.06M) | ext (string, 10 classes) | lang (string, 1 class) | max_stars_repo_path (string, len 3-248) | max_stars_repo_name (string, len 5-125) | max_stars_repo_head_hexsha (string, len 40-78) | max_stars_repo_licenses (list, len 1-10) | max_stars_count (int64, 1-191k, nullable) | max_stars_repo_stars_event_min_datetime (string, len 24, nullable) | max_stars_repo_stars_event_max_datetime (string, len 24, nullable) | max_issues_repo_path (string, len 3-248) | max_issues_repo_name (string, len 5-125) | max_issues_repo_head_hexsha (string, len 40-78) | max_issues_repo_licenses (list, len 1-10) | max_issues_count (int64, 1-67k, nullable) | max_issues_repo_issues_event_min_datetime (string, len 24, nullable) | max_issues_repo_issues_event_max_datetime (string, len 24, nullable) | max_forks_repo_path (string, len 3-248) | max_forks_repo_name (string, len 5-125) | max_forks_repo_head_hexsha (string, len 40-78) | max_forks_repo_licenses (list, len 1-10) | max_forks_count (int64, 1-105k, nullable) | max_forks_repo_forks_event_min_datetime (string, len 24, nullable) | max_forks_repo_forks_event_max_datetime (string, len 24, nullable) | content (string, len 5-2.06M) | avg_line_length (float64, 1-1.02M) | max_line_length (int64, 3-1.03M) | alphanum_fraction (float64, 0-1) | count_classes (int64, 0-1.6M) | score_classes (float64, 0-1) | count_generators (int64, 0-651k) | score_generators (float64, 0-1) | count_decorators (int64, 0-990k) | score_decorators (float64, 0-1) | count_async_functions (int64, 0-235k) | score_async_functions (float64, 0-1) | count_documentation (int64, 0-1.04M) | score_documentation (float64, 0-1) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6266929bceaa00edbf464b5a4d2470f14089186d | 16,271 | py | Python | test/test_info_api.py | fattureincloud/fattureincloud-python-sdk | f3a40fac345751014ea389680efdaef90f03bac1 | ["MIT"] | 2 | 2022-02-17T08:33:17.000Z | 2022-03-22T09:27:00.000Z | test/test_info_api.py | fattureincloud/fattureincloud-python-sdk | f3a40fac345751014ea389680efdaef90f03bac1 | ["MIT"] | null | null | null | test/test_info_api.py | fattureincloud/fattureincloud-python-sdk | f3a40fac345751014ea389680efdaef90f03bac1 | ["MIT"] | null | null | null |
"""
Fatture in Cloud API v2 - API Reference
    Connect your software with Fatture in Cloud, the invoicing platform chosen by more than 400.000 businesses in Italy. The Fatture in Cloud API is based on REST and makes it possible to interact with user-related data after prior authorization via the OAuth2 protocol.  # noqa: E501
The version of the OpenAPI document: 2.0.9
Contact: info@fattureincloud.it
Generated by: https://openapi-generator.tech
"""
import unittest
import fattureincloud_python_sdk
from fattureincloud_python_sdk.rest import RESTResponse
import functions
from fattureincloud_python_sdk.api.info_api import InfoApi
from fattureincloud_python_sdk.model.city import City
from fattureincloud_python_sdk.model.currency import Currency
from fattureincloud_python_sdk.model.document_template import DocumentTemplate
from fattureincloud_python_sdk.model.language import Language
from fattureincloud_python_sdk.model.list_archive_categories_response import ListArchiveCategoriesResponse
from fattureincloud_python_sdk.model.list_cities_response import ListCitiesResponse
from fattureincloud_python_sdk.model.detailed_country import DetailedCountry
from fattureincloud_python_sdk.model.list_detailed_countries_response import ListDetailedCountriesResponse
from fattureincloud_python_sdk.model.list_cost_centers_response import ListCostCentersResponse
from fattureincloud_python_sdk.model.list_countries_response import ListCountriesResponse
from fattureincloud_python_sdk.model.list_currencies_response import ListCurrenciesResponse
from fattureincloud_python_sdk.model.list_delivery_notes_default_causals_response import ListDeliveryNotesDefaultCausalsResponse
from fattureincloud_python_sdk.model.list_languages_response import ListLanguagesResponse
from fattureincloud_python_sdk.model.list_payment_accounts_response import ListPaymentAccountsResponse
from fattureincloud_python_sdk.model.list_payment_methods_response import ListPaymentMethodsResponse
from fattureincloud_python_sdk.model.list_product_categories_response import ListProductCategoriesResponse
from fattureincloud_python_sdk.model.list_received_document_categories_response import ListReceivedDocumentCategoriesResponse
from fattureincloud_python_sdk.model.list_revenue_centers_response import ListRevenueCentersResponse
from fattureincloud_python_sdk.model.list_templates_response import ListTemplatesResponse
from fattureincloud_python_sdk.model.list_units_of_measure_response import ListUnitsOfMeasureResponse
from fattureincloud_python_sdk.model.list_vat_types_response import ListVatTypesResponse
from fattureincloud_python_sdk.model.payment_account import PaymentAccount
from fattureincloud_python_sdk.model.payment_account_type import PaymentAccountType
from fattureincloud_python_sdk.model.payment_method import PaymentMethod
from fattureincloud_python_sdk.model.payment_method_details import PaymentMethodDetails
from fattureincloud_python_sdk.model.payment_method_type import PaymentMethodType
from fattureincloud_python_sdk.model.vat_type import VatType
class TestInfoApi(unittest.TestCase):
"""InfoApi unit test stubs"""
def setUp(self):
self.api = InfoApi()
def tearDown(self):
pass
def test_list_archive_categories(self):
resp = {
'status': 200,
'data': b'{"data":["cat5", "cat6"]}',
'reason': "OK"
}
mock_resp = RESTResponse(functions.Dict2Class(resp))
mock_resp.getheader = unittest.mock.MagicMock(return_value = None)
mock_resp.getheaders = unittest.mock.MagicMock(return_value = None)
self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp)
expected = ListArchiveCategoriesResponse(data = ["cat7", "cat6"])
actual = self.api.list_archive_categories(2)
actual.data[0] = "cat7"
assert actual == expected
def test_list_cities(self):
resp = {
'status': 200,
'data': b'{"data": [{"city": "bergamo", "province": "BG", "postal_code": "24121"}]}',
'reason': "OK"
}
mock_resp = RESTResponse(functions.Dict2Class(resp))
mock_resp.getheader = unittest.mock.MagicMock(return_value = None)
mock_resp.getheaders = unittest.mock.MagicMock(return_value = None)
self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp)
expected = ListCitiesResponse(data = [City( postal_code="24121", city="BG", province="BG" )])
actual = self.api.list_cities()
actual.data[0].city = "BG"
assert actual == expected
def test_list_cost_centers(self):
resp = {
'status': 200,
'data': b'{"data":["bg", "mi"]}',
'reason': "OK"
}
mock_resp = RESTResponse(functions.Dict2Class(resp))
mock_resp.getheader = unittest.mock.MagicMock(return_value = None)
mock_resp.getheaders = unittest.mock.MagicMock(return_value = None)
self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp)
expected = ListCostCentersResponse(data = ["to", "mi"])
actual = self.api.list_cost_centers(2)
actual.data[0] = "to"
assert actual == expected
def test_list_countries(self):
resp = {
'status': 200,
'data': b'{"data":["Spagna", "Italia"]}',
'reason': "OK"
}
mock_resp = RESTResponse(functions.Dict2Class(resp))
mock_resp.getheader = unittest.mock.MagicMock(return_value = None)
mock_resp.getheaders = unittest.mock.MagicMock(return_value = None)
self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp)
expected = ListCountriesResponse(data = ["Spagna", "Albania"])
actual = self.api.list_countries()
actual.data[1] = "Albania"
assert actual == expected
def test_list_detailed_countries(self):
resp = {
'status': 200,
'data': b'{"data": [{"name": "Italia", "settings_name": "Italia", "iso": "IT", "fiscal_iso": "IT", "uic": "086"}, {"name": "Albania", "settings_name": "Albania", "iso": "AL", "fiscal_iso": "AL", "uic": "087"}]}',
'reason': "OK"
}
mock_resp = RESTResponse(functions.Dict2Class(resp))
mock_resp.getheader = unittest.mock.MagicMock(return_value = None)
mock_resp.getheaders = unittest.mock.MagicMock(return_value = None)
self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp)
expected = ListDetailedCountriesResponse( data=[ DetailedCountry( name="Italia", settings_name="Italia", iso="IT", fiscal_iso="IT", uic="086" ), DetailedCountry( name="Albania", settings_name="Albania", iso="AL", fiscal_iso="AL", uic="087" ) ] )
actual = self.api.list_detailed_countries()
actual.data[1].name = "Albania"
assert actual == expected
def test_list_currencies(self):
resp = {
'status': 200,
'data': b'{"data": [{"id": "EUR", "symbol": "e", "exchange_rate": "1"}]}',
'reason': "OK"
}
mock_resp = RESTResponse(functions.Dict2Class(resp))
mock_resp.getheader = unittest.mock.MagicMock(return_value = None)
mock_resp.getheaders = unittest.mock.MagicMock(return_value = None)
self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp)
expected = ListCurrenciesResponse(data = [Currency( id="USD", symbol="e", exchange_rate="1")])
actual = self.api.list_currencies()
actual.data[0].id = "USD"
assert actual == expected
def test_list_delivery_notes_default_causals(self):
resp = {
'status': 200,
'data': b'{"data":["causal1", "causal2"]}',
'reason': "OK"
}
mock_resp = RESTResponse(functions.Dict2Class(resp))
mock_resp.getheader = unittest.mock.MagicMock(return_value = None)
mock_resp.getheaders = unittest.mock.MagicMock(return_value = None)
self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp)
expected = ListDeliveryNotesDefaultCausalsResponse(data = ["causal3", "causal2"])
actual = self.api.list_delivery_notes_default_causals()
actual.data[0] = "causal3"
assert actual == expected
def test_list_languages(self):
resp = {
'status': 200,
'data': b'{"data":[{"code":"ITA","name":"Italiano"}]}',
'reason': "OK"
}
mock_resp = RESTResponse(functions.Dict2Class(resp))
mock_resp.getheader = unittest.mock.MagicMock(return_value = None)
mock_resp.getheaders = unittest.mock.MagicMock(return_value = None)
self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp)
expected = ListLanguagesResponse(data = [Language( code="ITA", name="Italiano" )])
actual = self.api.list_languages()
assert actual == expected
def test_list_payment_accounts(self):
resp = {
'status': 200,
'data': b'{"data": [{"id": 1, "name": "Conto Banca Intesa", "type": "standard", "iban": "iban_example", "sia": "sia_example", "cuc": "cuc_example", "virtual": true}]}',
'reason': "OK"
}
mock_resp = RESTResponse(functions.Dict2Class(resp))
mock_resp.getheader = unittest.mock.MagicMock(return_value = None)
mock_resp.getheaders = unittest.mock.MagicMock(return_value = None)
self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp)
expected = ListPaymentAccountsResponse(data = [PaymentAccount( id=2, name="Conto Banca Intesa", type=PaymentAccountType("standard"), iban="iban_example", sia="sia_example", cuc="cuc_example", virtual=True )])
actual = self.api.list_payment_accounts(2)
actual.data[0].id = 2
assert actual == expected
def test_list_payment_methods(self):
resp = {
'status': 200,
'data': b'{"data": [{"id": 1, "name": "name_example", "type": "standard", "is_default": true, "default_payment_account": {"id": 1, "name": "Conto Banca Intesa", "type": "standard", "iban": "iban_example", "sia": "sia_example", "cuc": "cuc_example", "virtual": true}, "details": [{"title": "title_example", "description": "description_example"}], "bank_iban": "bank_iban_example", "bank_name": "bank_name_example", "bank_beneficiary": "bank_beneficiary_example", "ei_payment_method": "ei_payment_method_example"}]}',
'reason': "OK"
}
mock_resp = RESTResponse(functions.Dict2Class(resp))
mock_resp.getheader = unittest.mock.MagicMock(return_value = None)
mock_resp.getheaders = unittest.mock.MagicMock(return_value = None)
self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp)
expected = ListPaymentMethodsResponse(data = [PaymentMethod( id=2, name="name_example", type=PaymentMethodType("standard"), is_default=True, default_payment_account=PaymentAccount( id=1, name="Conto Banca Intesa", type=PaymentAccountType("standard"), iban="iban_example", sia="sia_example", cuc="cuc_example", virtual=True, ), details=[ PaymentMethodDetails( title="title_example", description="description_example", ), ], bank_iban="bank_iban_example", bank_name="bank_name_example", bank_beneficiary="bank_beneficiary_example", ei_payment_method="ei_payment_method_example" )])
actual = self.api.list_payment_methods(2)
actual.data[0].id = 2
assert actual == expected
def test_list_product_categories(self):
resp = {
'status': 200,
'data': b'{"data":["cat5", "cat6"]}',
'reason': "OK"
}
mock_resp = RESTResponse(functions.Dict2Class(resp))
mock_resp.getheader = unittest.mock.MagicMock(return_value = None)
mock_resp.getheaders = unittest.mock.MagicMock(return_value = None)
self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp)
expected = ListProductCategoriesResponse(data = ["cat7", "cat6"])
actual = self.api.list_product_categories(2, "products")
actual.data[0] = "cat7"
assert actual == expected
def test_list_received_document_categories(self):
resp = {
'status': 200,
'data': b'{"data":["cat5", "cat6"]}',
'reason': "OK"
}
mock_resp = RESTResponse(functions.Dict2Class(resp))
mock_resp.getheader = unittest.mock.MagicMock(return_value = None)
mock_resp.getheaders = unittest.mock.MagicMock(return_value = None)
self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp)
expected = ListReceivedDocumentCategoriesResponse(data = ["cat7", "cat6"])
actual = self.api.list_received_document_categories(2)
actual.data[0] = "cat7"
assert actual == expected
def test_list_revenue_centers(self):
resp = {
'status': 200,
'data': b'{"data":["bg", "mi"]}',
'reason': "OK"
}
mock_resp = RESTResponse(functions.Dict2Class(resp))
mock_resp.getheader = unittest.mock.MagicMock(return_value = None)
mock_resp.getheaders = unittest.mock.MagicMock(return_value = None)
self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp)
expected = ListRevenueCentersResponse(data = ["to", "mi"])
actual = self.api.list_revenue_centers(2)
actual.data[0] = "to"
assert actual == expected
def test_list_templates(self):
resp = {
'status': 200,
'data': b'{"data":[{"id":2,"name":"Light Smoke","type":"type_example"}]}',
'reason': "OK"
}
mock_resp = RESTResponse(functions.Dict2Class(resp))
mock_resp.getheader = unittest.mock.MagicMock(return_value = None)
mock_resp.getheaders = unittest.mock.MagicMock(return_value = None)
self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp)
expected = ListTemplatesResponse(data = [DocumentTemplate( id=2, name="Light Smoke", type="type_example" )])
actual = self.api.list_templates()
assert actual == expected
def test_list_units_of_measure(self):
resp = {
'status': 200,
'data': b'{"data":["kg", "km"]}',
'reason': "OK"
}
mock_resp = RESTResponse(functions.Dict2Class(resp))
mock_resp.getheader = unittest.mock.MagicMock(return_value = None)
mock_resp.getheaders = unittest.mock.MagicMock(return_value = None)
self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp)
expected = ListUnitsOfMeasureResponse(data = ["kb", "km"])
actual = self.api.list_units_of_measure()
actual.data[0] = "kb"
assert actual == expected
def test_list_vat_types(self):
resp = {
'status': 200,
'data': b'{"data": [{"id": 1, "value": 22.0, "description": "Non imponibile art. 123", "notes": "IVA non imponibile ai sensi dell articolo 123, comma 2", "e_invoice": true, "ei_type": "2", "ei_description": "ei_description_example", "is_disabled": true}]}',
'reason': "OK"
}
mock_resp = RESTResponse(functions.Dict2Class(resp))
mock_resp.getheader = unittest.mock.MagicMock(return_value = None)
mock_resp.getheaders = unittest.mock.MagicMock(return_value = None)
self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp)
expected = ListVatTypesResponse(data = [VatType( id=2, value=22.0, description="Non imponibile art. 123", notes="IVA non imponibile ai sensi dell articolo 123, comma 2", e_invoice=True, ei_type="2", ei_description="ei_description_example", is_disabled=True )])
actual = self.api.list_vat_types(2)
actual.data[0].id = 2
assert actual == expected
if __name__ == '__main__':
unittest.main()
| 49.606707 | 587 | 0.685698 | 13,160 | 0.808801 | 0 | 0 | 0 | 0 | 0 | 0 | 3,207 | 0.197099 |
62691bca9ef85cd31b36e1e397faed73d833bd04 | 2,992 | py | Python | tests/test_year_2018.py | l0pht511/jpholiday | 083145737b61fad3420c066968c4329d17dc3baf | ["MIT"] | 179 | 2017-10-05T12:41:10.000Z | 2022-03-24T22:18:25.000Z | tests/test_year_2018.py | l0pht511/jpholiday | 083145737b61fad3420c066968c4329d17dc3baf | ["MIT"] | 17 | 2018-10-23T00:51:13.000Z | 2021-11-22T11:40:06.000Z | tests/test_year_2018.py | l0pht511/jpholiday | 083145737b61fad3420c066968c4329d17dc3baf | ["MIT"] | 17 | 2018-10-19T11:13:07.000Z | 2022-01-29T08:05:56.000Z |
# coding: utf-8
import datetime
import unittest
import jpholiday
class TestYear2018(unittest.TestCase):
def test_holiday(self):
"""
        2018 national holidays
"""
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 1, 1)), '元日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 1, 8)), '成人の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 2, 11)), '建国記念の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 2, 12)), '建国記念の日 振替休日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 3, 21)), '春分の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 4, 29)), '昭和の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 4, 30)), '昭和の日 振替休日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 5, 3)), '憲法記念日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 5, 4)), 'みどりの日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 5, 5)), 'こどもの日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 7, 16)), '海の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 8, 11)), '山の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 9, 17)), '敬老の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 9, 23)), '秋分の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 9, 24)), '秋分の日 振替休日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 10, 8)), '体育の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 11, 3)), '文化の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 11, 23)), '勤労感謝の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 12, 23)), '天皇誕生日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 12, 24)), '天皇誕生日 振替休日')
def test_count_month(self):
"""
        Number of holidays per month in 2018
"""
self.assertEqual(len(jpholiday.month_holidays(2018, 1)), 2)
self.assertEqual(len(jpholiday.month_holidays(2018, 2)), 2)
self.assertEqual(len(jpholiday.month_holidays(2018, 3)), 1)
self.assertEqual(len(jpholiday.month_holidays(2018, 4)), 2)
self.assertEqual(len(jpholiday.month_holidays(2018, 5)), 3)
self.assertEqual(len(jpholiday.month_holidays(2018, 6)), 0)
self.assertEqual(len(jpholiday.month_holidays(2018, 7)), 1)
self.assertEqual(len(jpholiday.month_holidays(2018, 8)), 1)
self.assertEqual(len(jpholiday.month_holidays(2018, 9)), 3)
self.assertEqual(len(jpholiday.month_holidays(2018, 10)), 1)
self.assertEqual(len(jpholiday.month_holidays(2018, 11)), 2)
self.assertEqual(len(jpholiday.month_holidays(2018, 12)), 2)
def test_count_year(self):
"""
        Number of holidays in 2018
"""
self.assertEqual(len(jpholiday.year_holidays(2018)), 20)
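# Minimal usage sketch (not part of the original test module), using only the
# jpholiday calls already exercised in the assertions above.
if __name__ == '__main__':
    print(jpholiday.is_holiday_name(datetime.date(2018, 1, 1)))  # -> '元日'
    print(len(jpholiday.month_holidays(2018, 5)))                # -> 3
    print(len(jpholiday.year_holidays(2018)))                    # -> 20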
| 53.428571 | 94 | 0.684492 | 3,153 | 0.978585 | 0 | 0 | 0 | 0 | 0 | 0 | 488 | 0.151459 |
6269876471cdc3a3de7a8b8ea2665c1065be9cdf | 222 | py | Python | src/server_3D/API/Rice/miscellaneous/tools.py | robertpardillo/Funnel | f45e419f55e085bbb95e17c47b4c94a7c625ba9b | ["MIT"] | 1 | 2021-05-18T16:10:49.000Z | 2021-05-18T16:10:49.000Z | src/server_3D/API/Rice/miscellaneous/tools.py | robertpardillo/Funnel | f45e419f55e085bbb95e17c47b4c94a7c625ba9b | ["MIT"] | null | null | null | src/server_3D/API/Rice/miscellaneous/tools.py | robertpardillo/Funnel | f45e419f55e085bbb95e17c47b4c94a7c625ba9b | ["MIT"] | null | null | null |
import numpy
__author__ = 'roberto'
def rad_degrees(type_to, value):
if type_to == 'rad':
value = value*numpy.pi/180
if type_to == 'deg':
value = value*180/numpy.pi
return value
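# Minimal usage sketch (not part of the original module): round-trip a value
# through the converter defined above.
if __name__ == '__main__':
    radians = rad_degrees('rad', 90)               # degrees -> radians, ~1.5708
    print(radians, rad_degrees('deg', radians))    # back to ~90.0 degrees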
| 17.076923 | 35 | 0.581081 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 19 | 0.085586 |
6269ffcac7da3b6435494d0d70dbe0aa69f6f55f | 324 | py | Python | conjur_api/__init__.py | cyberark/conjur-api-python | 7dd1819bf68042620a06f38e395c3eb2989202a9 | ["Apache-2.0"] | 1 | 2022-03-09T18:25:29.000Z | 2022-03-09T18:25:29.000Z | conjur_api/__init__.py | cyberark/conjur-api-python | 7dd1819bf68042620a06f38e395c3eb2989202a9 | ["Apache-2.0"] | null | null | null | conjur_api/__init__.py | cyberark/conjur-api-python | 7dd1819bf68042620a06f38e395c3eb2989202a9 | ["Apache-2.0"] | null | null | null |
"""
conjur_api
Package containing classes that are responsible for communicating with the Conjur server
"""
__version__ = "0.0.5"
from conjur_api.client import Client
from conjur_api.interface import CredentialsProviderInterface
from conjur_api import models
from conjur_api import errors
from conjur_api import providers
| 24.923077 | 88 | 0.83642 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 115 | 0.354938 |
626aa70e6e3d3a3eb14c59bc2e95240dc23ccc35 | 9,346 | py | Python | examples/test_BoxCutter.py | pompiduskus/pybox2d | 4393bc93df4828267d2143327abd76de6f146750 | ["Zlib"] | null | null | null | examples/test_BoxCutter.py | pompiduskus/pybox2d | 4393bc93df4828267d2143327abd76de6f146750 | ["Zlib"] | null | null | null | examples/test_BoxCutter.py | pompiduskus/pybox2d | 4393bc93df4828267d2143327abd76de6f146750 | ["Zlib"] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# C++ version Copyright (c) 2006-2007 Erin Catto http://www.box2d.org
# Python version by Ken Lauer / sirkne at gmail dot com
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
# Original C++ version by Daid
# http://www.box2d.org/forum/viewtopic.php?f=3&t=1473
# - Written for pybox2d 2.1 by Ken
from framework import *
from math import sin, cos, pi
import sys
LASER_HALF_WIDTH=2
LASER_SPLIT_SIZE=0.1
LASER_SPLIT_TAG='can_cut'
def _polygon_split(fixture, p1, p2, split_size):
polygon=fixture.shape
body=fixture.body
transform=body.transform
local_entry=body.GetLocalPoint(p1)
local_exit=body.GetLocalPoint(p2)
entry_vector=local_exit-local_entry
entry_normal=entry_vector.cross(1.0)
last_verts=None
new_vertices=[[], []]
cut_added=[-1,-1]
for vertex in polygon.vertices:
# Find out if this vertex is on the new or old shape
if entry_normal.dot(b2Vec2(vertex)-local_entry) > 0.0:
verts=new_vertices[0]
else:
verts=new_vertices[1]
if last_verts!=verts:
# if we switch from one shape to the other, add the cut vertices
if last_verts==new_vertices[0]:
if cut_added[0]!=-1:
return []
cut_added[0]=len(last_verts)
last_verts.append(b2Vec2(local_exit))
last_verts.append(b2Vec2(local_entry))
elif last_verts==new_vertices[1]:
if cut_added[1]!=-1:
return []
cut_added[1]=len(last_verts)
last_verts.append(b2Vec2(local_entry))
last_verts.append(b2Vec2(local_exit))
verts.append(b2Vec2(vertex))
last_verts=verts
# Add the cut if not added yet
if cut_added[0] < 0:
cut_added[0]=len(new_vertices[0])
new_vertices[0].append(b2Vec2(local_exit))
new_vertices[0].append(b2Vec2(local_entry))
if cut_added[1] < 0:
cut_added[1]=len(new_vertices[1])
new_vertices[1].append(b2Vec2(local_entry))
new_vertices[1].append(b2Vec2(local_exit))
# Cut based on the split size
for added, verts in zip(cut_added, new_vertices):
if added > 0:
offset=verts[added-1]-verts[added]
else:
offset=verts[-1]-verts[0]
offset.Normalize()
verts[added]+=split_size*offset
if added < len(verts)-2:
offset=verts[added+2]-verts[added+1]
else:
offset=verts[0]-verts[len(verts)-1]
offset.Normalize()
verts[added+1]+=split_size*offset
# Ensure the new shapes aren't too small
for verts in new_vertices:
for i, v1 in enumerate(verts):
for j, v2 in enumerate(verts):
if i!=j and (v1-v2).length < 0.1:
# print('Failed to split: too small')
return []
try:
return [b2PolygonShape(vertices=verts) for verts in new_vertices]
except b2AssertException:
return []
except ValueError:
return []
def _laser_cut(world, laser_body, length=30.0, laser_half_width=2, **kwargs):
p1, p2=get_laser_line(laser_body, length, laser_half_width)
callback=laser_callback()
world.RayCast(callback, p1, p2)
if not callback.hit:
return []
hits_forward=callback.hits
callback=laser_callback()
world.RayCast(callback, p2, p1)
if not callback.hit:
return []
hits_reverse=callback.hits
if len(hits_forward) != len(hits_reverse):
return []
ret=[]
for (fixture1, point1), (fixture2, point2) in zip(hits_forward, hits_reverse):
#renderer.DrawPoint(renderer.to_screen(point1), 2, b2Color(1,0,0))
#renderer.DrawPoint(renderer.to_screen(point2), 2, b2Color(0,1,0))
#renderer.DrawSegment(renderer.to_screen(point1), renderer.to_screen(point2), b2Color(0,1,1))
if fixture1 != fixture2:
continue
new_polygons=_polygon_split(fixture1, point1, point2, LASER_SPLIT_SIZE)
if new_polygons:
ret.append((fixture1, new_polygons))
return ret
def laser_cut(world, laser_body, length=30.0, laser_half_width=2, **kwargs):
cut_fixtures=_laser_cut(world, laser_body, laser_half_width=LASER_HALF_WIDTH)
remove_bodies=[]
for fixture, new_shapes in cut_fixtures:
body=fixture.body
if body in remove_bodies:
continue
new_body=world.CreateDynamicBody(
userData=LASER_SPLIT_TAG,
position=body.position,
angle=body.angle,
linearVelocity=body.linearVelocity,
angularVelocity=body.angularVelocity,
)
try:
new_body.CreateFixture(
friction=fixture.friction,
restitution=fixture.restitution,
density=fixture.density,
shape=new_shapes[1],
)
except AssertionError:
print('New body fixture failed: %s' % sys.exc_info()[1])
remove_bodies.append(new_body)
try:
new_fixture=body.CreateFixture(
friction=fixture.friction,
restitution=fixture.restitution,
density=fixture.density,
shape=new_shapes[0],
)
body.DestroyFixture(fixture)
except AssertionError:
print('New fixture/destroy failed: %s' % sys.exc_info()[1])
remove_bodies.append(body)
for body in remove_bodies:
world.DestroyBody(body)
def get_laser_line(laser_body, length, laser_half_width):
laser_start=(laser_half_width-0.1, 0.0)
laser_dir =(length, 0.0)
p1 = laser_body.GetWorldPoint(laser_start)
p2 = p1 + laser_body.GetWorldVector(laser_dir)
return (p1, p2)
def laser_display(renderer, laser_body, length=30.0, laser_color=(1,0,0), laser_half_width=2, **kwargs):
if not renderer:
return
p1, p2=get_laser_line(laser_body, length, laser_half_width)
renderer.DrawSegment(renderer.to_screen(p1), renderer.to_screen(p2), b2Color(*laser_color))
class laser_callback(b2RayCastCallback):
"""This raycast collects multiple hits."""
def __init__(self, **kwargs):
b2RayCastCallback.__init__(self, **kwargs)
self.hit=False
self.hits=[]
def ReportFixture(self, fixture, point, normal, fraction):
self.hit=True
if fixture.body.userData==LASER_SPLIT_TAG:
self.hits.append((fixture,point))
self.last_fixture=fixture
self.last_point=point
return 1.0
class BoxCutter(Framework):
name="Box Cutter"
description='Press (c) to cut'
move=0
jump=100
def __init__(self):
super(BoxCutter, self).__init__()
# The ground
self.ground=self.world.CreateStaticBody(
userData='ground',
shapes=[
b2EdgeShape(vertices=[(-50,0),( 50, 0)]),
b2EdgeShape(vertices=[(-50,0),(-50,10)]),
b2EdgeShape(vertices=[( 50,0),( 50,10)]),
]
)
self.laser_body=self.world.CreateDynamicBody(
userData='laser',
position=(0,2),
fixtures=b2FixtureDef(
density=4.0,
shape=b2PolygonShape(box=(LASER_HALF_WIDTH, 1))
)
)
for i in range(2):
self.world.CreateDynamicBody(
userData=LASER_SPLIT_TAG,
position=(3.0+i*6,8),
fixtures=b2FixtureDef(
density=5.0,
shape=b2PolygonShape(box=(3, 3))
)
)
def Keyboard(self, key):
if key==Keys.K_c:
laser_cut(self.world, self.laser_body, laser_half_width=LASER_HALF_WIDTH)
def Step(self, settings):
Framework.Step(self, settings)
laser_display(self.renderer, self.laser_body, laser_half_width=LASER_HALF_WIDTH)
if __name__=="__main__":
main(BoxCutter)
| 35.267925 | 105 | 0.591162 | 2,022 | 0.216349 | 0 | 0 | 0 | 0 | 0 | 0 | 1,796 | 0.192168 |
626b51aefb27ae8f4702b720697fa00e55d0360c | 1,309 | py | Python | robot_simulator/grid/positioning.py | darshikaf/toy-robot-simulator | 408d160033728d65e9bac376d3af7fc84c520f31 | ["MIT"] | null | null | null | robot_simulator/grid/positioning.py | darshikaf/toy-robot-simulator | 408d160033728d65e9bac376d3af7fc84c520f31 | ["MIT"] | null | null | null | robot_simulator/grid/positioning.py | darshikaf/toy-robot-simulator | 408d160033728d65e9bac376d3af7fc84c520f31 | ["MIT"] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import annotations
import math
class Point:
def __init__(self, x: int = 0, y: int = 0):
self.x = x
self.y = y
def __eq__(self, other: object) -> bool:
if isinstance(other, Point):
return (self.x == other.x) and (self.y == other.y)
return NotImplemented
def __ne__(self, other: object) -> bool:
if isinstance(other, Point):
return not (self == other)
return NotImplemented
def __add__(self, other: Point) -> Point:
x = self.x + other.x
y = self.y + other.y
return self.__class__(x, y)
class Vector(Point):
def __mul__(self, scale: int) -> Vector:
x = self.x * scale
y = self.y * scale
return self.__class__(x, y)
class Rect:
def __init__(self, point1: Point, point2: Point) -> None:
self.top = max(point1.y, point2.y)
self.right = max(point1.x, point2.x)
self.bottom = min(point1.y, point2.y)
self.left = min(point1.x, point2.x)
def contains(self, point: Point) -> bool:
contains_x = (self.left <= point.x) and (point.x <= self.right)
contains_y = (self.bottom <= point.y) and (point.y <= self.top)
return contains_x and contains_y
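# Minimal usage sketch (not part of the original module), exercising the
# classes defined above.
if __name__ == "__main__":
    step = Vector(1, 2) * 3                        # Vector(3, 6)
    moved = Point(0, 0) + step                     # Point(3, 6)
    grid = Rect(Point(0, 0), Point(5, 5))
    print(moved.x, moved.y, grid.contains(moved))  # 3 6 False (y=6 lies above the top edge)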
| 27.270833 | 71 | 0.578304 | 1,205 | 0.92055 | 0 | 0 | 0 | 0 | 0 | 0 | 44 | 0.033613 |
626ba702d88f0299279f562b39bddc29df5ddcaa | 6,128 | py | Python | src/utils/zarr_to_netcdf.py | jhkennedy/itslive | 68b89b337548fe4e86a3d066c3fb2e4c2aaeed70 | ["MIT"] | 8 | 2021-02-19T02:29:29.000Z | 2021-11-10T05:26:30.000Z | src/utils/zarr_to_netcdf.py | jhkennedy/itslive | 68b89b337548fe4e86a3d066c3fb2e4c2aaeed70 | ["MIT"] | 11 | 2021-03-29T02:15:38.000Z | 2021-11-18T23:29:33.000Z | src/utils/zarr_to_netcdf.py | jhkennedy/itslive | 68b89b337548fe4e86a3d066c3fb2e4c2aaeed70 | ["MIT"] | 3 | 2021-12-06T06:05:34.000Z | 2022-03-13T16:44:44.000Z |
"""
Script to convert Zarr store to the NetCDF format file.
Usage:
python zarr_to_netcdf.py -i ZarrStoreName -o NetCDFFileName
Convert Zarr data stored in ZarrStoreName to the NetCDF file NetCDFFileName.
"""
import argparse
import timeit
import warnings
import xarray as xr
from itscube_types import Coords, DataVars
if __name__ == '__main__':
warnings.filterwarnings('ignore')
# Command-line arguments parser
parser = argparse.ArgumentParser(epilog='\n'.join(__doc__.split('\n')[1:]),
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-i', '--input', type=str, required=True,
help="Input Zarr store directory.")
parser.add_argument('-o', '--output', type=str, required=True,
help="NetCDF filename to store data to.")
parser.add_argument('-e', '--engine', type=str, required=False, default='h5netcdf',
help="NetCDF engine to use to store NetCDF data to the file.")
args = parser.parse_args()
start_time = timeit.default_timer()
# Don't decode time delta's as it does some internal conversion based on
# provided units
ds_zarr = xr.open_zarr(args.input, decode_timedelta=False)
# print(f"mid_date: {ds_zarr.mid_date}")
# print(f"x: {ds_zarr.x.attrs}")
# print(f"y: {ds_zarr.y.attrs}")
# This is just a work around for coordinates attributes not being written
# to the Zarr store (submit xarray ticket?)
ds_zarr.mid_date.attrs = {
DataVars.STD_NAME: Coords.STD_NAME[Coords.MID_DATE],
DataVars.DESCRIPTION_ATTR: Coords.DESCRIPTION[Coords.MID_DATE]
}
ds_zarr.x.attrs = {
DataVars.STD_NAME: Coords.STD_NAME[Coords.X],
DataVars.DESCRIPTION_ATTR: Coords.DESCRIPTION[Coords.X]
}
ds_zarr.y.attrs = {
DataVars.STD_NAME: Coords.STD_NAME[Coords.Y],
DataVars.DESCRIPTION_ATTR: Coords.DESCRIPTION[Coords.Y]
}
time_delta = timeit.default_timer() - start_time
print(f"Read Zarr {args.input} (took {time_delta} seconds)")
compression = {"zlib": True, "complevel": 2, "shuffle": True}
    encoding = {
'map_scale_corrected': {'_FillValue': 0.0, 'dtype': 'byte'},
'interp_mask': {'_FillValue': 0.0, 'dtype': 'ubyte'},
'flag_stable_shift': {'dtype': 'long'},
'chip_size_height': {'_FillValue': 0.0, 'dtype': 'ushort'},
'chip_size_width': {'_FillValue': 0.0, 'dtype': 'ushort'},
'v_error': {'_FillValue': -32767.0, 'dtype': 'short'},
'v': {'_FillValue': -32767.0, 'dtype': 'short'},
'vx': {'_FillValue': -32767.0, 'dtype': 'short'},
'vx_error': {'_FillValue': -32767.0, 'dtype': 'double'},
'vx_stable_shift': {'_FillValue': -32767.0, 'dtype': 'double'},
'vy': {'_FillValue': -32767.0, 'dtype': 'short'},
'vy_error': {'_FillValue': -32767.0, 'dtype': 'double'},
'vy_stable_shift': {'_FillValue': -32767.0, 'dtype': 'double'},
'va': {'_FillValue': -32767.0, 'dtype': 'short'},
'va_error': {'_FillValue': -32767.0, 'dtype': 'double'},
'va_stable_shift': {'_FillValue': -32767.0, 'dtype': 'double'},
'vr': {'_FillValue': -32767.0, 'dtype': 'short'},
'vr_error': {'_FillValue': -32767.0, 'dtype': 'double'},
'vr_stable_shift': {'_FillValue': -32767.0, 'dtype': 'double'},
'vxp': {'_FillValue': -32767.0, 'dtype': 'short'},
'vxp_error': {'_FillValue': -32767.0, 'dtype': 'double'},
'vxp_stable_shift': {'_FillValue': -32767.0, 'dtype': 'double'},
'vyp': {'_FillValue': -32767.0, 'dtype': 'short'},
'vyp_error': {'_FillValue': -32767.0, 'dtype': 'double'},
'vyp_stable_shift': {'_FillValue': -32767.0, 'dtype': 'double'},
'vp': {'_FillValue': -32767.0, 'dtype': 'short'},
'vp_error': {'_FillValue': -32767.0, 'dtype': 'short'},
'acquisition_img1': {'units': 'days since 1970-01-01'},
'acquisition_img2': {'units': 'days since 1970-01-01'},
'date_center': {'_FillValue': None, 'units': 'days since 1970-01-01'},
'mid_date': {'_FillValue': None, 'units': 'days since 1970-01-01'},
'autoRIFT_software_version': {'_FillValue': None},
'stable_count': {'_FillValue': None},
'date_dt': {'_FillValue': None},
'x': {'_FillValue': None},
'y': {'_FillValue': None}
}
encode_data_vars = (
'v',
'v_error',
'map_scale_corrected',
'vx',
'vx_error',
'vx_stable_shift',
'flag_stable_shift',
'vy',
'vy_error',
'vy_stable_shift',
'chip_size_height',
'chip_size_width',
'interp_mask',
'va',
'va_error',
'va_stable_shift',
'vp',
'vp_error',
'vr',
'vr_error',
'vr_stable_shift',
'vxp',
'vxp_error',
'vxp_stable_shift',
'vyp',
'vyp_error',
'vyp_stable_shift',
'mission_img1',
'sensor_img1',
'satellite_img1',
'acquisition_img1',
'mission_img2',
'sensor_img2',
'satellite_img2',
'acquisition_img2',
'date_dt',
'date_center',
'roi_valid_percentage',
'autoRIFT_software_version'
)
# Set up compression for each of the data variables
for each in encode_data_vars:
encoding.setdefault(each, {}).update(compression)
start_time = timeit.default_timer()
ds_zarr.to_netcdf(
args.output,
engine=args.engine,
encoding = encoding
)
time_delta = timeit.default_timer() - start_time
print(f"Wrote dataset to NetCDF file {args.output} (took {time_delta} seconds)")
| 39.535484 | 90 | 0.557115 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,774 | 0.452676 |
626c09d5e7442d6e48e408bb35182589e7d6f723 | 87 | py | Python | tests/periodicities/Business_Day/Cycle_Business_Day_200_B_24.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | ["BSD-3-Clause"] | null | null | null | tests/periodicities/Business_Day/Cycle_Business_Day_200_B_24.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | ["BSD-3-Clause"] | 1 | 2019-11-30T23:39:38.000Z | 2019-12-01T04:34:35.000Z | tests/periodicities/Business_Day/Cycle_Business_Day_200_B_24.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | ["BSD-3-Clause"] | null | null | null |
import pyaf.tests.periodicities.period_test as per
per.buildModel((24 , 'B' , 200));
| 17.4 | 50 | 0.724138 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.034483 |
626c365969d9ff73eed75430ed790344b66ecdd6 | 932 | py | Python | conanfile.py | maurodelazeri/conan-cpp-httplib | 1a6ce1f1a79eb43071e8dc1bb6f84fba010aabd3 | ["MIT"] | null | null | null | conanfile.py | maurodelazeri/conan-cpp-httplib | 1a6ce1f1a79eb43071e8dc1bb6f84fba010aabd3 | ["MIT"] | null | null | null | conanfile.py | maurodelazeri/conan-cpp-httplib | 1a6ce1f1a79eb43071e8dc1bb6f84fba010aabd3 | ["MIT"] | 1 | 2019-12-03T19:35:48.000Z | 2019-12-03T19:35:48.000Z |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake, tools
import os
class CppHttpLibConan(ConanFile):
name = "cpp-httplib"
version = "0.2.1"
url = "https://github.com/zinnion/conan-cpp-httplib"
description = "A single file C++11 header-only HTTP/HTTPS server and client library"
license = "MIT"
no_copy_source = True
build_policy = "always"
requires = "OpenSSL/1.1.1d@zinnion/stable", "zlib/1.2.11@zinnion/stable"
def source(self):
source_url = "https://github.com/maurodelazeri/cpp-httplib"
tools.get("{0}/archive/v{1}.tar.gz".format(source_url, self.version))
extracted_dir = self.name + "-" + self.version
os.rename(extracted_dir, "sources")
def package_id(self):
self.info.header_only()
def package(self):
self.copy(pattern="LICENSE")
self.copy(pattern="*.h", dst="include", keep_path=False)
| 32.137931 | 88 | 0.651288 | 830 | 0.890558 | 0 | 0 | 0 | 0 | 0 | 0 | 358 | 0.38412 |
626ca157c2ac9db263365279311bac86dc999674 | 328 | py | Python | backmarker/api/viewsets/driver_viewset.py | jmp/backmarker | e12a094d92dec798ad10aa8890fabe84f946c303 | ["MIT"] | null | null | null | backmarker/api/viewsets/driver_viewset.py | jmp/backmarker | e12a094d92dec798ad10aa8890fabe84f946c303 | ["MIT"] | null | null | null | backmarker/api/viewsets/driver_viewset.py | jmp/backmarker | e12a094d92dec798ad10aa8890fabe84f946c303 | ["MIT"] | null | null | null |
from rest_framework.viewsets import ReadOnlyModelViewSet
from backmarker.api.serializers.driver_serializer import DriverSerializer
from backmarker.models.driver import Driver
class DriverViewSet(ReadOnlyModelViewSet):
queryset = Driver.objects.all()
serializer_class = DriverSerializer
lookup_field = "reference"
| 29.818182 | 73 | 0.829268 | 149 | 0.454268 | 0 | 0 | 0 | 0 | 0 | 0 | 11 | 0.033537 |
626cc4db6e624b921fb50a7db02432aa617a9dbd | 215 | py | Python | shell/response.py | YorkSu/deepgo | 2f22ad50d2958a4f1c7dfc0af6fcd448f5e7e18d | ["Apache-2.0"] | null | null | null | shell/response.py | YorkSu/deepgo | 2f22ad50d2958a4f1c7dfc0af6fcd448f5e7e18d | ["Apache-2.0"] | null | null | null | shell/response.py | YorkSu/deepgo | 2f22ad50d2958a4f1c7dfc0af6fcd448f5e7e18d | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
"""Response
======
Response Class
"""
from deepgo.core.kernel.popo import VO
class Response(VO):
def __init__(self):
self.code = 0
def json(self):
return self.__dict__
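# Minimal usage sketch (not part of the original module): a fresh Response carries
# only the default code, and json() exposes the instance attributes as a plain dict.
if __name__ == "__main__":
  print(Response().json())  # -> {'code': 0}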
| 11.315789 | 38 | 0.609302 | 103 | 0.47907 | 0 | 0 | 0 | 0 | 0 | 0 | 65 | 0.302326 |
626d65ee956ce1cac3af4218ef107258e83fd84e | 4,793 | py | Python | src/python/pants/option/options_fingerprinter_test.py | bastianwegge/pants | 43f0b90d41622bee0ed22249dbaffb3ff4ad2eb2 | ["Apache-2.0"] | 1,806 | 2015-01-05T07:31:00.000Z | 2022-03-31T11:35:41.000Z | src/python/pants/option/options_fingerprinter_test.py | bastianwegge/pants | 43f0b90d41622bee0ed22249dbaffb3ff4ad2eb2 | ["Apache-2.0"] | 9,565 | 2015-01-02T19:01:59.000Z | 2022-03-31T23:25:16.000Z | src/python/pants/option/options_fingerprinter_test.py | ryanking/pants | e45b00d2eb467b599966bca262405a5d74d27bdd | ["Apache-2.0"] | 443 | 2015-01-06T20:17:57.000Z | 2022-03-31T05:28:17.000Z |
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from pathlib import Path
import pytest
from pants.option.custom_types import (
DictValueComponent,
ListValueComponent,
UnsetBool,
dict_with_files_option,
dir_option,
file_option,
)
from pants.option.options_fingerprinter import OptionsFingerprinter
from pants.testutil.rule_runner import RuleRunner
@pytest.fixture
def rule_runner() -> RuleRunner:
return RuleRunner()
def test_fingerprint_dict() -> None:
d1 = {"b": 1, "a": 2}
d2 = {"a": 2, "b": 1}
d3 = {"a": 1, "b": 2}
fp1, fp2, fp3 = (
OptionsFingerprinter().fingerprint(DictValueComponent.create, d) for d in (d1, d2, d3)
)
assert fp1 == fp2
assert fp1 != fp3
def test_fingerprint_dict_with_non_string_keys() -> None:
d = {("a", 2): (3, 4)}
fp = OptionsFingerprinter().fingerprint(DictValueComponent.create, d)
assert fp == "3852a094612ce1c22c08ee2ddcdc03d09e87ad97"
def test_fingerprint_list() -> None:
l1 = [1, 2, 3]
l2 = [1, 3, 2]
fp1, fp2 = (OptionsFingerprinter().fingerprint(ListValueComponent.create, l) for l in (l1, l2))
assert fp1 != fp2
def test_fingerprint_file(rule_runner: RuleRunner) -> None:
fp1, fp2, fp3 = (
OptionsFingerprinter().fingerprint(file_option, rule_runner.write_files({f: c})[0])
for (f, c) in (
("foo/bar.config", "blah blah blah"),
("foo/bar.config", "meow meow meow"),
("spam/egg.config", "blah blah blah"),
)
)
assert fp1 != fp2
assert fp1 != fp3
assert fp2 != fp3
def test_fingerprint_file_outside_buildroot(tmp_path: Path, rule_runner: RuleRunner) -> None:
outside_buildroot = rule_runner.write_files({(tmp_path / "foobar").as_posix(): "foobar"})[0]
with pytest.raises(ValueError):
OptionsFingerprinter().fingerprint(file_option, outside_buildroot)
def test_fingerprint_file_list(rule_runner: RuleRunner) -> None:
f1, f2, f3 = (
rule_runner.write_files({f: c})[0]
for (f, c) in (
("foo/bar.config", "blah blah blah"),
("foo/bar.config", "meow meow meow"),
("spam/egg.config", "blah blah blah"),
)
)
fp1 = OptionsFingerprinter().fingerprint(file_option, [f1, f2])
fp2 = OptionsFingerprinter().fingerprint(file_option, [f2, f1])
fp3 = OptionsFingerprinter().fingerprint(file_option, [f1, f3])
assert fp1 == fp2
assert fp1 != fp3
def test_fingerprint_primitive() -> None:
fp1, fp2 = (OptionsFingerprinter().fingerprint("", v) for v in ("foo", 5))
assert fp1 != fp2
def test_fingerprint_unset_bool() -> None:
fp1 = OptionsFingerprinter().fingerprint(UnsetBool, UnsetBool)
fp2 = OptionsFingerprinter().fingerprint(UnsetBool, UnsetBool)
assert fp1 == fp2
def test_fingerprint_dir(rule_runner: RuleRunner) -> None:
d1 = rule_runner.create_dir("a")
d2 = rule_runner.create_dir("b")
d3 = rule_runner.create_dir("c")
rule_runner.write_files(
{
"a/bar/bar.config": "blah blah blah",
"a/foo/foo.config": "meow meow meow",
"b/foo/foo.config": "meow meow meow",
"b/bar/bar.config": "blah blah blah",
"c/bar/bar.config": "blah meow blah",
}
)
dp1 = OptionsFingerprinter().fingerprint(dir_option, [d1])
dp2 = OptionsFingerprinter().fingerprint(dir_option, [d1, d2])
dp3 = OptionsFingerprinter().fingerprint(dir_option, [d2, d1])
dp4 = OptionsFingerprinter().fingerprint(dir_option, [d3])
assert dp1 == dp1
assert dp2 == dp2
assert dp1 != dp3
assert dp1 != dp4
assert dp2 != dp3
def test_fingerprint_dict_with_files_order(rule_runner: RuleRunner) -> None:
f1, f2 = (
rule_runner.write_files({f: c})[0]
for (f, c) in (
("foo/bar.config", "blah blah blah"),
("foo/bar.config", "meow meow meow"),
)
)
fp1 = OptionsFingerprinter().fingerprint(dict_with_files_option, {"properties": f"{f1},{f2}"})
fp2 = OptionsFingerprinter().fingerprint(dict_with_files_option, {"properties": f"{f2},{f1}"})
assert fp1 == fp2
def test_fingerprint_dict_with_file_content_change(rule_runner: RuleRunner) -> None:
f1, f2 = (
rule_runner.write_files({f: c})[0]
for (f, c) in (
("foo/bar.config", "blah blah blah"),
("foo/bar.config", "meow meow meow"),
)
)
fp1 = OptionsFingerprinter().fingerprint(dict_with_files_option, {"properties": f"{f1},{f2}"})
with open(f1, "w") as f:
f.write("123")
fp2 = OptionsFingerprinter().fingerprint(dict_with_files_option, {"properties": f"{f1},{f2}"})
assert fp1 != fp2
| 31.741722 | 99 | 0.636345 | 0 | 0 | 0 | 0 | 72 | 0.015022 | 0 | 0 | 820 | 0.171083 |
626e4c17d238ffdd4b719fcf03cef903734ecb10 | 201 | py | Python | secondstring.py | Kokouvi/reversorder | 157e39eaf424d816715080dbce0850670836e8fd | ["MIT"] | null | null | null | secondstring.py | Kokouvi/reversorder | 157e39eaf424d816715080dbce0850670836e8fd | ["MIT"] | null | null | null | secondstring.py | Kokouvi/reversorder | 157e39eaf424d816715080dbce0850670836e8fd | ["MIT"] | null | null | null |
str = "The quick brown fox jumps over the lazy dog." # initial string
reversed = "".join(reversed(str)) #.join() method merges all of the charactera
print(reversed[0:43:2]) # print the reversed string
| 50.25 | 78 | 0.731343 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 135 | 0.671642 |
626e9153453be95674085a5a9f6b92944cbfbd68 | 1,660 | py | Python | image_extractor.py | IstoVisio/script_image_extractor | dda8c8bb96a16d1ffe5d52af198b66bd619edc4f | ["MIT"] | null | null | null | image_extractor.py | IstoVisio/script_image_extractor | dda8c8bb96a16d1ffe5d52af198b66bd619edc4f | ["MIT"] | null | null | null | image_extractor.py | IstoVisio/script_image_extractor | dda8c8bb96a16d1ffe5d52af198b66bd619edc4f | ["MIT"] | null | null | null |
import os
import sys
import syglass as sy
from syglass import pyglass
import numpy as np
import tifffile
import subprocess
def extract(projectPath):
project = sy.get_project(projectPath)
head, tail = os.path.split(projectPath)
# Get a dictionary showing the number of blocks in each level
#codebreak()
resolution_map = project.get_resolution_map()
# Calculate the index of the highest resolution level
max_resolution_level = len(resolution_map) - 1
# Determine the number of blocks in this level
block_count = resolution_map[max_resolution_level]
# get size of project
total_size = project.get_size(max_resolution_level)
xsize = total_size[1]
ysize = total_size[2]
zslices = total_size[0]
dimensions = np.asarray([1,xsize, ysize])
offset = np.asarray([0,0,0])
os.chdir(os.path.dirname(projectPath))
for slice in range(zslices):
s = str(slice).zfill(5)
offset[0] = slice
block = project.get_custom_block(0, max_resolution_level, offset, dimensions)
data = block.data
print(s + ".tiff")
tifffile.imwrite(tail + "_" + s + ".tiff", data)
subprocess.run(['explorer', head])
def main():
print("Image Extractor, by Michael Morehead")
print("Attempts to extract the original data volume from a syGlass project")
print("and write it to a series of TIFF files")
print("---------------------------------------")
print("Usage: Highlight a project and use the Script Launcher in syGlass.")
print("---------------------------------------")
print(sys.argv)
for syGlassProjectPath in sys.argv:
print("Extracting project from: " + syGlassProjectPath)
extract(syGlassProjectPath)
if __name__== "__main__":
main()
| 29.642857 | 79 | 0.704217 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 561 | 0.337952 |
627004517552f92e1e2ec8fa749130e02a42b77f | 7,531 | py | Python | scaffoldgraph/analysis/enrichment.py | trumanw/ScaffoldGraph | a594e5c5effe6c5e45c0061a235ccbeb64e416f9 | ["MIT"] | 121 | 2019-12-12T15:30:16.000Z | 2022-02-28T02:00:54.000Z | scaffoldgraph/analysis/enrichment.py | trumanw/ScaffoldGraph | a594e5c5effe6c5e45c0061a235ccbeb64e416f9 | ["MIT"] | 8 | 2020-04-04T15:37:26.000Z | 2021-11-17T07:30:31.000Z | scaffoldgraph/analysis/enrichment.py | trumanw/ScaffoldGraph | a594e5c5effe6c5e45c0061a235ccbeb64e416f9 | ["MIT"] | 28 | 2019-12-16T11:58:53.000Z | 2021-11-19T09:57:46.000Z |
"""
scaffoldgraph.analysis.enrichment
Module contains an implementation of Compound Set Enrichment from the papers:
- Compound Set Enrichment: A Novel Approach to Analysis of Primary HTS Data.
- Mining for bioactive scaffolds with scaffold networks: Improved compound set enrichment from primary screening data.
"""
from networkx import set_node_attributes
from scipy.stats import ks_2samp, binom_test
from loguru import logger
def _btp(scaffoldgraph, activity_key, alternative, pd):
"""CSE - binomial test (used in cse functions)."""
result, active, total = {}, 0, 0
for m, a in scaffoldgraph.get_molecule_nodes(activity_key):
if int(a) == 1:
active += 1
total += 1
if pd is None:
pd = active / total
logger.debug(f'(BTP) Total: {total}, Active: {active}, pd: {pd}')
for scaffold in scaffoldgraph.get_scaffold_nodes():
mols, acts = zip(*scaffoldgraph.get_molecules_for_scaffold(scaffold, activity_key))
N, K = len(mols), acts.count(1)
pval = binom_test(K, N, pd, alternative=alternative)
logger.debug(f'(BTP) {scaffold}, {K}, {N}, {pval}')
result[scaffold] = {'pval': pval, '_active': K, '_total': N}
return result
def _ksp(scaffoldgraph, activity_key, alternative):
"""CSE - Kolmogorov-Smirnov test (used in cse functions)."""
result, background = {}, []
for _, activity in scaffoldgraph.get_molecule_nodes(activity_key):
background.append(activity)
for scaffold in scaffoldgraph.get_scaffold_nodes():
mols, acts = zip(*scaffoldgraph.get_molecules_for_scaffold(scaffold, activity_key))
N = len(mols)
dmax, pval = ks_2samp(acts, background, alternative, 'auto')
logger.debug(f'(KSP) {scaffold}, {N}, {dmax}, {pval}')
result[scaffold] = {'pval': pval, 'dmax': dmax, '_total': N}
return result
def bonferroni_correction(scaffoldgraph, crit):
"""Returns bonferroni corrected significance level for each hierarchy.
Parameters
----------
scaffoldgraph : ScaffoldGraph
A ScaffoldGraph object to query.
crit : float
The critical significance value to apply bonferroni correction at
each scaffold hierarchy.
Returns
-------
dict
A dictionary containing the corrected critical significance value
at each scaffold hierarchy {hierarchy: crit}.
"""
hier = scaffoldgraph.get_hierarchy_sizes()
return {k: crit / v for k, v in hier.items()}
def calc_scaffold_enrichment(scaffoldgraph, activity, mode='ks', alternative='greater', p=None):
"""
Calculate scaffold enrichment using the Kolmogorov-Smirnov or binomal test.
Parameters
----------
scaffoldgraph : ScaffoldGraph
A ScaffoldGraph object to query.
activity : str
A scaffold node attribute key corresponding to an activity value.
If the test is binomial this value should be a binary attribute
(0 or 1 / True or False).
mode : {'ks', 'b'}, optional
A string specifying the statistical test to perform. 'ks' specifies a
Kolmogorov-Smirnov test and 'b' or 'binomial' specifies a binomial test.
The default is 'ks'.
alternative : {'two-sided', 'less', 'greater'}, optional
Defines the alternative hypothesis.
The following options are available:
* 'two-sided'
* 'less': one-sided
* 'greater': one-sided
The default is 'greater'.
p : float, None, optional
The hypothesized probability of success. 0 <= p <= 1. Used in binomial mode.
If not specified p is set automatically (number of active / total compounds).
The default is None.
Returns
-------
dict
A dict of dicts in the format {scaffold: {results}} where results is the set
of results returned by the statistical test and scaffold is a scaffold node
key corresponding to a scaffold in the ScaffoldGraph object.
See Also
--------
scaffoldgraph.analysis.enrichment.compound_set_enrichment
References
----------
.. [1] Varin, T., Schuffenhauer, A., Ertl, P., and Renner, S. (2011). Mining for bioactive scaffolds
with scaffold networks: Improved compound set enrichment from primary screening data.
Journal of Chemical Information and Modeling, 51(7), 1528–1538.
.. [2] Varin, T., Gubler, H., Parker, C., Zhang, J., Raman, P., Ertl, P. and Schuffenhauer, A. (2010)
Compound Set Enrichment: A Novel Approach to Analysis of Primary HTS Data.
Journal of Chemical Information and Modeling, 50(12), 2067-2078.
"""
if mode == 'binomial' or mode == 'b':
return _btp(scaffoldgraph, activity, alternative, p)
elif mode == 'ks' or mode == 'k':
return _ksp(scaffoldgraph, activity, alternative)
else:
raise ValueError(f'scaffold enrichment mode: {mode}, not implemented')
def compound_set_enrichment(scaffoldgraph, activity, mode='ks', alternative='greater', crit=0.01, p=None):
"""
Perform compound set enrichment (CSE), calculating scaffolds enriched for bioactivity.
Parameters
----------
scaffoldgraph : ScaffoldGraph
A ScaffoldGraph object to query.
activity : str
A scaffold node attribute key corresponding to an activity value.
If the test is binomial this value should be a binary attribute
(0 or 1 / True or False).
mode : {'ks', 'b'}, optional
A string specifying the statistical test to perform. 'ks' specifies a
Kolmogorov-Smirnov test and 'b' or 'binomial' specifies a binomial test.
The default is 'ks'.
alternative : {'two-sided', 'less', 'greater'}, optional
Defines the alternative hypothesis.
The following options are available:
* 'two-sided'
* 'less': one-sided
* 'greater': one-sided
The default is 'greater'.
crit : float, optional
The critical significance level. The default is 0.01
p : float, None, optional
The hypothesized probability of success. 0 <= p <= 1. Used in binomial mode.
If not specified p is set automatically (number of active / total compounds).
The default is None.
Returns
-------
A tuple of 'enriched' scaffold classes in the format: (scaffold, {data}) where data
is the corresponding node attributes for the returned scaffold.
Notes
-----
P-values are added as node attributes with the key 'pval'.
References
----------
.. [1] Varin, T., Schuffenhauer, A., Ertl, P., and Renner, S. (2011). Mining for bioactive scaffolds
with scaffold networks: Improved compound set enrichment from primary screening data.
Journal of Chemical Information and Modeling, 51(7), 1528–1538.
.. [2] Varin, T., Gubler, H., Parker, C., Zhang, J., Raman, P., Ertl, P. and Schuffenhauer, A. (2010)
Compound Set Enrichment: A Novel Approach to Analysis of Primary HTS Data.
Journal of Chemical Information and Modeling, 50(12), 2067-2078.
"""
set_node_attributes(scaffoldgraph, calc_scaffold_enrichment(scaffoldgraph, activity, mode, alternative, p))
bonferroni = bonferroni_correction(scaffoldgraph, crit)
result = []
for scaffold, data in scaffoldgraph.get_scaffold_nodes(True):
if data['pval'] < bonferroni[data['hierarchy']]:
result.append((scaffold, data))
return tuple(sorted(result, key=lambda x: x[1]['pval']))
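# Worked illustration (not from the source): what bonferroni_correction above computes.
# The hierarchy sizes below are hypothetical; each level's critical p-value is the
# global threshold divided by the number of scaffolds in that hierarchy.
if __name__ == '__main__':
    hier = {1: 10, 2: 100}          # hypothetical get_hierarchy_sizes() output
    crit = 0.01
    print({k: crit / v for k, v in hier.items()})  # -> {1: 0.001, 2: 0.0001}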
| 41.379121 | 118 | 0.659673 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5,314 | 0.705242 |
62711db72244e06a03957f6f565656dd9ee94885 | 803 | py | Python | ejercicio_fichero/ejercicio_fichero1/fichero.py | Ironwilly/python | f6d42c685b4026b018089edb4ae8cc0ca9614e86 | ["CC0-1.0"] | null | null | null | ejercicio_fichero/ejercicio_fichero1/fichero.py | Ironwilly/python | f6d42c685b4026b018089edb4ae8cc0ca9614e86 | ["CC0-1.0"] | null | null | null | ejercicio_fichero/ejercicio_fichero1/fichero.py | Ironwilly/python | f6d42c685b4026b018089edb4ae8cc0ca9614e86 | ["CC0-1.0"] | null | null | null |
# Read the file and process it so that it can show the maximum temperature
# for a given city. It must be able to receive that city as an input argument.
# If the city does not exist, it must be handled through an exception.
import csv
provincia = input('Diga el nombre de la ciudad: ')
with open("climatologia.csv", encoding="utf-8") as csvfile:
    reader = csv.reader(csvfile, delimiter=",")
    try:
        encontrada = False
        for row in reader:
            if provincia == row[2]:
                encontrada = True
                temperatura_maxima = row[3]
                print(f"provincia: '{provincia}' con temperatura maxima de {temperatura_maxima}")
        if not encontrada:
            # Raise only after scanning the whole file, so a city that appears
            # after non-matching rows is still found.
            raise Exception("No existe ninguna ciudad: " + provincia)
    except Exception as cityNotFound:
        print(cityNotFound)
| 29.740741 | 97 | 0.655044 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 412 | 0.509901 |
62728052af8201aa2645d7c22783e76db3275ed8 | 4,059 | py | Python | python/brunel/magics.py | Ross1503/Brunel | c6b6323fa6525c2e1b5f83dc6f97bdeb237e3b06 | ["Apache-2.0"] | 306 | 2015-09-03T18:04:21.000Z | 2022-02-12T15:15:39.000Z | python/brunel/magics.py | Ross1503/Brunel | c6b6323fa6525c2e1b5f83dc6f97bdeb237e3b06 | ["Apache-2.0"] | 313 | 2015-09-09T14:20:14.000Z | 2020-09-14T02:00:05.000Z | python/brunel/magics.py | Ross1503/Brunel | c6b6323fa6525c2e1b5f83dc6f97bdeb237e3b06 | ["Apache-2.0"] | 88 | 2015-09-11T16:45:22.000Z | 2021-11-28T12:35:48.000Z |
# Copyright (c) 2015 IBM Corporation and others.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from IPython import get_ipython
from IPython.core.magic import Magics, magics_class, line_magic, cell_magic, line_cell_magic
import pandas as pd
import brunel.brunel_main as brunel
ipy = get_ipython()
@magics_class
class BrunelMagics(Magics):
@line_cell_magic
def brunel(self, line, cell=None):
"Magic that works both as %brunel and as %%brunel"
datas = self.find_dataframes()
# print("Found dataframes", list(datas.keys()))
if cell is not None:
line = line + ' ' + cell.replace('\n',' ')
# print ("Command =", line)
data = None
height = 400
width = 500
output = 'd3'
online_js = False
parts = line.split('::')
action = parts[0].strip()
datasets_in_brunel = brunel.get_dataset_names(action)
self.cache_data(datasets_in_brunel,datas)
if len(parts) > 2:
raise ValueError("Only one ':' allowed in brunel magic. Format is 'ACTION : key=value, ...'")
if len(parts) > 1:
extras = parts[1].strip()
dataName = self.find_term('data', extras)
if dataName is not None:
try:
data = datas[dataName]
                except KeyError:
raise ValueError("Could not find pandas DataFrame named '" + dataName + "'")
width = self.find_term('width', extras, width)
height = self.find_term('height', extras, height)
online_js = self.find_term('online_js', extras, online_js)
if data is None and len(datasets_in_brunel) == 0:
data = self.best_match(self.get_vars(action), list(datas.values()))
return brunel.display(action, data, width, height, online_js)
def cache_data(self, datasets_in_brunel, dataframes):
for data_name in datasets_in_brunel:
try:
data = dataframes[data_name]
brunel.cacheData(data_name, brunel.to_csv(data))
except:
pass
def find_term(self, key, string, default=None):
for expr in string.split(','):
terms = expr.split('=')
if len(terms) != 2:
raise ValueError("Bad format for key=value pair: " + expr)
if key == terms[0].strip().lower():
return terms[1].strip()
return default
def find_dataframes(self):
result = {}
for name in list(self.shell.user_ns.keys()):
v = self.shell.user_ns[name]
if name[0] != '_' and isinstance(v, pd.DataFrame):
result[name] = v
return result
def get_vars(self, line):
"Search for the internal bits of 'x(a,b)' and return as ['a','b']"
result = []
for part in line.split('('):
p = part.find(')')
if p > 0:
inner = part[:p].split(',')
for term in inner:
result.append(term.strip())
return result
def best_match(self, variables, datas):
# print("Searching for", variables, "in", len(datas), "dataframes")
all = [[self.match(variables, v.columns.values), v] for v in datas]
all.sort(key=lambda x: x[0])
return all[0][1]
def match(self, names1, names2):
n = 0
for i in names1:
for j in names2:
if str(i).lower() == str(j).lower(): n += 1
return -n
# Register with IPython
ipy.register_magics(BrunelMagics)
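# Usage sketch (editor's addition): one way the magic registered above might be driven
# from an IPython/Jupyter session. The DataFrame and the Brunel action string below are
# illustrative assumptions, not taken from the source.
import pandas as pd
from IPython import get_ipython
df = pd.DataFrame({'city': ['a', 'b'], 'temp': [30, 25]})
# Equivalent to typing: %brunel x(city) y(temp) bar :: data=df, width=600, height=300
get_ipython().run_line_magic('brunel', 'x(city) y(temp) bar :: data=df, width=600, height=300')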
| 34.692308
| 105
| 0.584134
| 3,219
| 0.793052
| 0
| 0
| 3,233
| 0.796502
| 0
| 0
| 1,078
| 0.265583
|
62735fa3cb9b4a375ffe477b83e79ab29f0e085c
| 537
|
py
|
Python
|
plugs_newsletter/emails.py
|
solocompt/plugs-newsletter
|
57b9aa2caf9ed5bd5adf25839dbf52b85c0afa53
|
[
"MIT"
] | 1
|
2017-01-10T23:24:55.000Z
|
2017-01-10T23:24:55.000Z
|
plugs_newsletter/emails.py
|
solocompt/plugs-newsletter
|
57b9aa2caf9ed5bd5adf25839dbf52b85c0afa53
|
[
"MIT"
] | 1
|
2017-01-08T00:01:21.000Z
|
2017-01-08T00:01:21.000Z
|
plugs_newsletter/emails.py
|
solocompt/plugs-newsletter
|
57b9aa2caf9ed5bd5adf25839dbf52b85c0afa53
|
[
"MIT"
] | null | null | null |
"""
Plugs Newsletter Emails
"""
from plugs_mail.mail import PlugsMail
class NewsletterSubscribed(PlugsMail):
"""
Email sent to subscriber after newsletter subscription
"""
template = 'NEWSLETTER_SUBSCRIBED'
description = 'Email sent to subscriber after newsletter subscription'
class NewsletterUnsubscribed(PlugsMail):
"""
Email sent to subscriber after newsletter unsubscription
"""
template = 'NEWSLETTER_UNSUBSCRIBED'
description = 'Email sent to subscriber after newsletter unsubscription'
| 26.85
| 76
| 0.748603
| 462
| 0.860335
| 0
| 0
| 0
| 0
| 0
| 0
| 335
| 0.623836
|
6273ea53c245381a5adf539a8b0c5e691d335b8c
| 4,526
|
py
|
Python
|
modules/smsapi/proxy.py
|
kamilpp/iwm-project
|
d3d816b5a30047e4ec7c7e17d1f71e189858190a
|
[
"MIT"
] | null | null | null |
modules/smsapi/proxy.py
|
kamilpp/iwm-project
|
d3d816b5a30047e4ec7c7e17d1f71e189858190a
|
[
"MIT"
] | null | null | null |
modules/smsapi/proxy.py
|
kamilpp/iwm-project
|
d3d816b5a30047e4ec7c7e17d1f71e189858190a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import sys
import mimetypes
from io import BytesIO
try:
from urllib2 import Request, urlopen, URLError
from urllib import urlencode
except ImportError:
from urllib.request import Request, urlopen
from urllib.parse import urlencode
from urllib.error import URLError
try:
from mimetools import choose_boundary
except ImportError:
from uuid import uuid4
def choose_boundary():
return str(uuid4())
if sys.version_info[0] == 3:
text_type = str
else:
text_type = unicode
class ApiProxyError(Exception):
pass
class ApiProxy(object):
def __init__(self, hostname=None, data=None):
super(ApiProxy, self).__init__()
self.hostname = hostname
self.data = data or {}
self.files = []
def set_hostname(self, hostname):
self.hostname = hostname
def execute(self):
raise NotImplementedError
class ApiHttpProxy(ApiProxy):
user_agent = 'PySmsAPI'
def __init__(self, hostname=None, data=None):
super(ApiHttpProxy, self).__init__(hostname, data)
self.headers = {}
self.body = {}
def execute(self, uri=None, data=None):
if isinstance(data, dict):
self.data.update(data)
headers, body = self.prepare_request()
response = None
if isinstance(self.hostname, (list, tuple)):
for host in self.hostname:
response = self.connect(host, uri, body, headers)
if response and response.getcode() == 200:
break
else:
response = self.connect(self.hostname, uri, body, headers)
if not response:
raise ApiProxyError("Unable connect to the specified url: %s" % str(self.hostname))
return response
def connect(self, hostname, uri, body, headers):
try:
uri = uri or ''
if hostname.endswith('/'):
url = hostname + uri
else:
url = '%s/%s' % (hostname, uri)
req = Request(url, body, headers)
response = urlopen(req)
return response
except (URLError, ValueError):
return False
def add_file(self, filepath):
if os.path.isfile(filepath):
self.files.append(filepath)
else:
raise ValueError('Argument must be a file.')
def prepare_request(self):
headers = {
'User-Agent': self.user_agent,
}
if isinstance(self.data, dict):
self.data.update(self.data)
if self.files:
content_type, data = self.encode_multipart_data()
headers.update({
'Content-Type': content_type,
'Content-Length': str(len(data))
})
else:
headers.update({
'Content-type': "application/x-www-form-urlencoded; charset=utf-8"
})
data = urlencode(self.data)
return headers, data
def encode_multipart_data(self):
def encode(data):
if isinstance(data, text_type):
data = data.encode('utf-8')
return data
boundary = choose_boundary()
body = BytesIO()
for (key, value) in self.data.items():
body.write(encode('--%s\r\n' % boundary))
body.write(encode('Content-Disposition: form-data; name="%s"' % key))
body.write(encode('\r\n\r\n' + value + '\r\n'))
for _file in self.files:
body.write(encode('--%s\r\n' % boundary))
body.write(encode('Content-Disposition: form-data; name="file"; filename="%s"\r\n' % _file))
            body.write(encode('Content-Type: %s\r\n' % (mimetypes.guess_type(_file)[0] or 'application/octet-stream')))
body.write(encode('\r\n'))
try:
with open(_file, 'rb') as f:
data = f.read()
body.write(encode(data))
except IOError:
raise
body.write(encode('\r\n'))
body.write(encode('--%s--\r\n\r\n' % boundary))
content_type = 'multipart/form-data; boundary=%s' % boundary
return content_type, body.getvalue()
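# Usage sketch (editor's addition): a minimal, hypothetical use of the proxy classes
# defined above. The hostname, URI and payload are illustrative placeholders only.
if __name__ == '__main__':
    proxy = ApiHttpProxy(hostname='https://api.example.com/', data={'to': '48500000000', 'message': 'hello'})
    try:
        response = proxy.execute(uri='sms.do')
        print(response.read())
    except ApiProxyError as error:
        print(error)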
| 26.623529
| 117
| 0.532037
| 3,955
| 0.87384
| 0
| 0
| 0
| 0
| 0
| 0
| 482
| 0.106496
|
62753c5006150ce17ceda04507da80a31675516b
| 775
|
py
|
Python
|
maverick_api/modules/base/mission/util/srtm/make_dict.py
|
deodates-dev/UAV-maverick-api
|
15cf9e0bac6faf4b9361f060395f656575304097
|
[
"MIT"
] | 4
|
2018-02-10T01:00:35.000Z
|
2019-07-03T04:21:28.000Z
|
maverick_api/modules/base/mission/util/srtm/make_dict.py
|
deodates-dev/UAV-maverick-api
|
15cf9e0bac6faf4b9361f060395f656575304097
|
[
"MIT"
] | 244
|
2018-02-01T22:39:51.000Z
|
2021-07-29T05:58:48.000Z
|
maverick_api/modules/base/mission/util/srtm/make_dict.py
|
deodates-dev/UAV-maverick-api
|
15cf9e0bac6faf4b9361f060395f656575304097
|
[
"MIT"
] | 6
|
2018-02-12T10:58:05.000Z
|
2020-09-09T13:41:04.000Z
|
#!/usr/bin/python
import fileinput
import json
url_base = "https://dds.cr.usgs.gov/srtm/version2_1/SRTM3"
regions = [
"Africa",
"Australia",
"Eurasia",
"Islands",
"North_America",
"South_America",
]
srtm_dict = {}
srtm_directory = "srtm.json"
for region in regions:
print("Processing", region)
f = fileinput.input(region)
for name in f:
name = name.strip()
url = url_base + "/" + region + "/" + name
key = name.replace(".hgt.zip", "")
srtm_dict[key] = url
try:
print("Writing", srtm_directory)
f = open(srtm_directory, "w")
json.dump(srtm_dict, f, indent=2, sort_keys=True)
f.close()
except IOError as e:
print("Save srtm_dict(): I/O error({0}): {1}".format(e.errno, e.strerror))
| 20.945946
| 78
| 0.606452
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 223
| 0.287742
|
62758b3a2a4619c1b6d03498fcd2b870db5024e4
| 495
|
py
|
Python
|
gail/crowd_sim/configs/icra_benchmark/sarl.py
|
ben-milanko/PyTorch-RL
|
4d7be8a7f26f21b490c93191dca1844046a092df
|
[
"MIT"
] | null | null | null |
gail/crowd_sim/configs/icra_benchmark/sarl.py
|
ben-milanko/PyTorch-RL
|
4d7be8a7f26f21b490c93191dca1844046a092df
|
[
"MIT"
] | null | null | null |
gail/crowd_sim/configs/icra_benchmark/sarl.py
|
ben-milanko/PyTorch-RL
|
4d7be8a7f26f21b490c93191dca1844046a092df
|
[
"MIT"
] | null | null | null |
from configs.icra_benchmark.config import BaseEnvConfig, BasePolicyConfig, BaseTrainConfig, Config
class EnvConfig(BaseEnvConfig):
def __init__(self, debug=False):
super(EnvConfig, self).__init__(debug)
class PolicyConfig(BasePolicyConfig):
def __init__(self, debug=False):
super(PolicyConfig, self).__init__(debug)
self.name = 'sarl'
class TrainConfig(BaseTrainConfig):
def __init__(self, debug=False):
super(TrainConfig, self).__init__(debug)
| 27.5
| 98
| 0.733333
| 387
| 0.781818
| 0
| 0
| 0
| 0
| 0
| 0
| 6
| 0.012121
|
62781a4622485a3c3996f4345f375edf051908c6
| 83
|
py
|
Python
|
backend/bios/apps.py
|
juanrmv/torre-test
|
39c4f8928f6f51e462975ef5f89c7a9b5bb05733
|
[
"Apache-2.0"
] | null | null | null |
backend/bios/apps.py
|
juanrmv/torre-test
|
39c4f8928f6f51e462975ef5f89c7a9b5bb05733
|
[
"Apache-2.0"
] | null | null | null |
backend/bios/apps.py
|
juanrmv/torre-test
|
39c4f8928f6f51e462975ef5f89c7a9b5bb05733
|
[
"Apache-2.0"
] | null | null | null |
from django.apps import AppConfig
class BiosConfig(AppConfig):
name = 'bios'
| 13.833333
| 33
| 0.73494
| 46
| 0.554217
| 0
| 0
| 0
| 0
| 0
| 0
| 6
| 0.072289
|
62785d99c24915bf064dcffd95ccc1f5a52eab27
| 3,982
|
py
|
Python
|
tests/snuba/eventstream/test_eventstream.py
|
pierredup/sentry
|
0145e4b3bc0e775bf3482fe65f5e1a689d0dbb80
|
[
"BSD-3-Clause"
] | null | null | null |
tests/snuba/eventstream/test_eventstream.py
|
pierredup/sentry
|
0145e4b3bc0e775bf3482fe65f5e1a689d0dbb80
|
[
"BSD-3-Clause"
] | null | null | null |
tests/snuba/eventstream/test_eventstream.py
|
pierredup/sentry
|
0145e4b3bc0e775bf3482fe65f5e1a689d0dbb80
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import absolute_import
from datetime import datetime, timedelta
import six
import time
import logging
from sentry.utils.compat.mock import patch, Mock
from sentry.event_manager import EventManager
from sentry.eventstream.kafka import KafkaEventStream
from sentry.eventstream.snuba import SnubaEventStream
from sentry.testutils import SnubaTestCase, TestCase
from sentry.utils import snuba, json
class SnubaEventStreamTest(TestCase, SnubaTestCase):
def setUp(self):
super(SnubaEventStreamTest, self).setUp()
self.kafka_eventstream = KafkaEventStream()
self.kafka_eventstream.producer = Mock()
def __build_event(self, timestamp):
raw_event = {
"event_id": "a" * 32,
"message": "foo",
"timestamp": time.mktime(timestamp.timetuple()),
"level": logging.ERROR,
"logger": "default",
"tags": [],
}
manager = EventManager(raw_event)
manager.normalize()
return manager.save(self.project.id)
def __produce_event(self, *insert_args, **insert_kwargs):
# pass arguments on to Kafka EventManager
self.kafka_eventstream.insert(*insert_args, **insert_kwargs)
produce_args, produce_kwargs = list(self.kafka_eventstream.producer.produce.call_args)
assert not produce_args
assert produce_kwargs["topic"] == "events"
assert produce_kwargs["key"] == six.text_type(self.project.id)
version, type_, payload1, payload2 = json.loads(produce_kwargs["value"])
assert version == 2
assert type_ == "insert"
# insert what would have been the Kafka payload directly
# into Snuba, expect an HTTP 200 and for the event to now exist
snuba_eventstream = SnubaEventStream()
snuba_eventstream._send(self.project.id, "insert", (payload1, payload2))
@patch("sentry.eventstream.insert")
def test(self, mock_eventstream_insert):
now = datetime.utcnow()
event = self.__build_event(now)
# verify eventstream was called by EventManager
insert_args, insert_kwargs = list(mock_eventstream_insert.call_args)
assert not insert_args
assert insert_kwargs == {
"event": event,
"group": event.group,
"is_new_group_environment": True,
"is_new": True,
"is_regression": False,
"primary_hash": "acbd18db4cc2f85cedef654fccc4a4d8",
"skip_consume": False,
"received_timestamp": event.data["received"],
}
self.__produce_event(*insert_args, **insert_kwargs)
assert (
snuba.query(
start=now - timedelta(days=1),
end=now + timedelta(days=1),
groupby=["project_id"],
filter_keys={"project_id": [self.project.id]},
).get(self.project.id, 0)
== 1
)
@patch("sentry.eventstream.insert")
def test_issueless(self, mock_eventstream_insert):
now = datetime.utcnow()
event = self.__build_event(now)
event.group_id = None
insert_args = ()
insert_kwargs = {
"event": event,
"group": None,
"is_new_group_environment": True,
"is_new": True,
"is_regression": False,
"primary_hash": "acbd18db4cc2f85cedef654fccc4a4d8",
"skip_consume": False,
"received_timestamp": event.data["received"],
}
self.__produce_event(*insert_args, **insert_kwargs)
result = snuba.raw_query(
start=now - timedelta(days=1),
end=now + timedelta(days=1),
selected_columns=["event_id", "group_id"],
groupby=None,
filter_keys={"project_id": [self.project.id], "event_id": [event.event_id]},
)
assert len(result["data"]) == 1
assert result["data"][0]["group_id"] is None
| 35.238938
| 94
| 0.619789
| 3,566
| 0.89553
| 0
| 0
| 2,082
| 0.522853
| 0
| 0
| 770
| 0.19337
|
627cfb04842724bdfb5432c95eabf0e23e11ea54
| 470
|
py
|
Python
|
modulo.py
|
Alex9808/py101
|
18c585c1433e8ec6f5e4962e556a781e0c3c3cd5
|
[
"MIT"
] | 25
|
2018-08-14T22:13:13.000Z
|
2021-07-23T04:14:06.000Z
|
modulo.py
|
Alex9808/py101
|
18c585c1433e8ec6f5e4962e556a781e0c3c3cd5
|
[
"MIT"
] | 1
|
2021-05-21T23:46:42.000Z
|
2021-05-21T23:46:42.000Z
|
modulo.py
|
Alex9808/py101
|
18c585c1433e8ec6f5e4962e556a781e0c3c3cd5
|
[
"MIT"
] | 34
|
2018-07-30T20:48:17.000Z
|
2022-02-04T19:01:27.000Z
|
#!/usr/bin/env python3
'''Example of a script that can be imported as a module.'''
titulo = "Sample space"
datos = (76, 81, 75, 77, 80, 75, 76, 79, 75)
def promedio(encabezado, muestra):
    '''Displays the contents of encabezado, along with the computed average of muestra, given as a list or tuple.'''
    print("The average of %s with %d elements is %f." % (encabezado, len(muestra), sum(muestra) / len(muestra)))
promedio(titulo, datos)
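# Usage sketch (editor's addition): since the docstring says this script can be imported
# as a module, a hypothetical caller (assuming the file is saved as modulo.py) could do:
#     import modulo
#     modulo.promedio("my sample", [10, 20, 30])
# Note that importing also runs the promedio(titulo, datos) call at the bottom of the file.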
| 39.166667
| 126
| 0.678723
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 267
| 0.564482
|
627d2e10dedbd895286404f157c63ff39dd0589c
| 368
|
py
|
Python
|
experiments/duet_dataloader/input_file_generator.py
|
18praveenb/ss-vq-vae
|
89e76d69d6127b27ae4cc066a1a1f9c4147fb020
|
[
"Apache-2.0"
] | null | null | null |
experiments/duet_dataloader/input_file_generator.py
|
18praveenb/ss-vq-vae
|
89e76d69d6127b27ae4cc066a1a1f9c4147fb020
|
[
"Apache-2.0"
] | null | null | null |
experiments/duet_dataloader/input_file_generator.py
|
18praveenb/ss-vq-vae
|
89e76d69d6127b27ae4cc066a1a1f9c4147fb020
|
[
"Apache-2.0"
] | null | null | null |
genres = ['blues', 'classical', 'country', 'disco', 'hiphop', 'jazz', 'metal', 'pop', 'reggae', 'rock']
num_files = 100
with open('INPUT_FULL', 'w') as f:
for genre in genres:
for i in range(num_files):
for j in range(6):
f.write(f'/datasets/duet/genres/{genre}.{i:05d}.{j}.wav /datasets/duet/genres/{genre}.{i:05d}.{j}.wav\n')
| 52.571429
| 121
| 0.578804
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 186
| 0.505435
|
627df994c37f89a314b88935ba858af233d102af
| 549
|
py
|
Python
|
generate_nginx_config.py
|
AppScale/appscake
|
615597765e835015c1e8d8bc70921a655f8aa86a
|
[
"BSD-3-Clause"
] | null | null | null |
generate_nginx_config.py
|
AppScale/appscake
|
615597765e835015c1e8d8bc70921a655f8aa86a
|
[
"BSD-3-Clause"
] | 1
|
2021-06-08T09:51:49.000Z
|
2021-06-08T09:51:49.000Z
|
generate_nginx_config.py
|
isabella232/appscake
|
615597765e835015c1e8d8bc70921a655f8aa86a
|
[
"BSD-3-Clause"
] | 1
|
2021-06-08T09:48:33.000Z
|
2021-06-08T09:48:33.000Z
|
import jinja2
import os
import socket
my_public_ip = os.popen("curl -L http://metadata/computeMetadata/v1beta1/instance/network-interfaces/0/access-configs/0/external-ip").read()
my_private_ip = socket.gethostbyname(socket.gethostname())
template_contents = open('/root/appscake/nginx_config').read()
template = jinja2.Template(template_contents)
rendered_template = template.render(my_private_ip=my_private_ip, my_public_ip=my_public_ip)
with open('/etc/nginx/sites-available/default', 'w') as file_handle:
file_handle.write(rendered_template)
| 42.230769
| 140
| 0.812386
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 176
| 0.320583
|
627e2692036a975d4c6bc119811af70c6ad6b162
| 909
|
py
|
Python
|
VisualizedSorting/Controller.py
|
lachieggg/Misc
|
066149309e3e4634cded168687c7dfc3a3a4d6f3
|
[
"MIT"
] | null | null | null |
VisualizedSorting/Controller.py
|
lachieggg/Misc
|
066149309e3e4634cded168687c7dfc3a3a4d6f3
|
[
"MIT"
] | null | null | null |
VisualizedSorting/Controller.py
|
lachieggg/Misc
|
066149309e3e4634cded168687c7dfc3a3a4d6f3
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import sys
from Window import Window
from Constants import *
from Algorithms.MergeSort import MergeSort
from Algorithms.QuickSort import QuickSort
from Algorithms.BubbleSort import BubbleSort
from Algorithms.InsertionSort import InsertionSort
from Algorithms.SelectionSort import SelectionSort
class Controller:
def __init__(self):
# Window
self.initAlgorithm()
self.arr = self.algorithm.initArray()
self.window = Window(self.algorithm)
def main(self):
self.window.main()
def initAlgorithm(self):
try:
self.algorithm = eval(ALGORITHM)
except NameError:
print(ALGORITHM_NOT_FOUND)
exit()
if(__name__ == "__main__"):
print("Click to begin the algorithm.")
try:
c = Controller()
c.main()
except KeyboardInterrupt:
print('\nExiting.')
| 22.725
| 50
| 0.664466
| 403
| 0.443344
| 0
| 0
| 0
| 0
| 0
| 0
| 83
| 0.091309
|
627e4b8f24eb8ffa6dd2d71640b1a2b1b78cf92a
| 3,688
|
py
|
Python
|
openslides_backend/presenter/get_forwarding_meetings.py
|
MJJojo97/openslides-backend
|
af0d1edb0070e352d46f285a1ba0bbe3702d49ae
|
[
"MIT"
] | 5
|
2020-01-20T13:57:15.000Z
|
2021-03-27T14:14:44.000Z
|
openslides_backend/presenter/get_forwarding_meetings.py
|
MJJojo97/openslides-backend
|
af0d1edb0070e352d46f285a1ba0bbe3702d49ae
|
[
"MIT"
] | 859
|
2020-01-11T22:58:37.000Z
|
2022-03-30T14:54:06.000Z
|
openslides_backend/presenter/get_forwarding_meetings.py
|
MJJojo97/openslides-backend
|
af0d1edb0070e352d46f285a1ba0bbe3702d49ae
|
[
"MIT"
] | 16
|
2020-01-04T20:28:57.000Z
|
2022-02-10T12:06:54.000Z
|
from typing import Any
import fastjsonschema
from ..permissions.permission_helper import has_perm
from ..permissions.permissions import Permissions
from ..shared.exceptions import PermissionDenied, PresenterException
from ..shared.patterns import Collection, FullQualifiedId
from ..shared.schema import required_id_schema, schema_version
from .base import BasePresenter
from .presenter import register_presenter
get_forwarding_meetings_schema = fastjsonschema.compile(
{
"$schema": schema_version,
"type": "object",
"title": "get_forwarding_meetings",
"description": "get forwarding meetings",
"properties": {
"meeting_id": required_id_schema,
},
}
)
@register_presenter("get_forwarding_meetings")
class GetForwardingMeetings(BasePresenter):
"""
Get forwarded meetings.
"""
schema = get_forwarding_meetings_schema
def get_result(self) -> Any:
# check permission
if not has_perm(
self.datastore,
self.user_id,
Permissions.Motion.CAN_MANAGE,
self.data["meeting_id"],
):
msg = "You are not allowed to perform presenter get_forwarding_meetings"
msg += f" Missing permission: {Permissions.Motion.CAN_MANAGE}"
raise PermissionDenied(msg)
meeting = self.datastore.get(
FullQualifiedId(Collection("meeting"), self.data["meeting_id"]),
["committee_id", "is_active_in_organization_id", "name"],
)
if not meeting.get("committee_id"):
raise PresenterException(
f"There is no committee given for meeting/{self.data['meeting_id']} {meeting.get('name', 'nameless')}."
)
if not meeting.get("is_active_in_organization_id"):
raise PresenterException(
"Your sender meeting is an archived meeting, which can not forward motions."
)
committee = self.datastore.get(
FullQualifiedId(Collection("committee"), meeting["committee_id"]),
["forward_to_committee_ids"],
)
result = []
for forward_to_committee_id in committee.get("forward_to_committee_ids", []):
forward_to_committee = self.datastore.get(
FullQualifiedId(Collection("committee"), forward_to_committee_id),
["meeting_ids", "name", "default_meeting_id"],
)
meeting_result = []
for meeting_id2 in forward_to_committee.get("meeting_ids", []):
if not has_perm(
self.datastore,
self.user_id,
Permissions.Motion.CAN_FORWARD_INTO_THIS_MEETING,
meeting_id2,
):
continue
meeting2 = self.datastore.get(
FullQualifiedId(Collection("meeting"), meeting_id2),
["name", "is_active_in_organization_id"],
)
if meeting2.get("is_active_in_organization_id"):
meeting_result.append(
{"id": meeting_id2, "name": meeting2.get("name", "")}
)
if meeting_result:
result.append(
{
"id": forward_to_committee_id,
"name": forward_to_committee.get("name", ""),
"meetings": meeting_result,
"default_meeting_id": forward_to_committee.get(
"default_meeting_id"
),
}
)
return result
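# Editor's addition: an illustrative sketch of this presenter's input and output, as implied
# by the schema and the loop above. The ids and names are hypothetical examples.
#     payload: {"meeting_id": 42}
#     result:  [{"id": 7, "name": "Committee A",
#                "meetings": [{"id": 11, "name": "Autumn assembly"}],
#                "default_meeting_id": 11}]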
| 36.88
| 119
| 0.574837
| 2,916
| 0.790672
| 0
| 0
| 2,963
| 0.803416
| 0
| 0
| 927
| 0.251356
|
627e648e181ccec154beb32ed33085244d73a0fd
| 638
|
py
|
Python
|
settings.py
|
gyyang/olfaction_evolution
|
434baa85b91f450e1ab63c6b9eafb8d370f1df96
|
[
"MIT"
] | 9
|
2021-10-11T01:16:23.000Z
|
2022-01-13T14:07:08.000Z
|
settings.py
|
gyyang/olfaction_evolution
|
434baa85b91f450e1ab63c6b9eafb8d370f1df96
|
[
"MIT"
] | 1
|
2021-10-30T09:49:08.000Z
|
2021-10-30T09:49:08.000Z
|
settings.py
|
gyyang/olfaction_evolution
|
434baa85b91f450e1ab63c6b9eafb8d370f1df96
|
[
"MIT"
] | null | null | null |
"""User specific settings."""
import matplotlib as mpl
import matplotlib.pyplot as plt
mpl.rcParams['font.size'] = 7
mpl.rcParams['pdf.fonttype'] = 42
mpl.rcParams['ps.fonttype'] = 42
mpl.rcParams['font.family'] = 'arial'
mpl.rcParams['mathtext.fontset'] = 'stix'
seqcmap = mpl.cm.cool_r
try:
import seaborn as sns
plt.rcParams['axes.prop_cycle'] = plt.cycler(color=sns.color_palette('deep'))
# seqcmap = sns.color_palette("crest_r", as_cmap=True)
except ImportError as e:
print('Seaborn not available, default to matplotlib color scheme')
use_torch = True
cluster_path = '/share/ctn/users/gy2259/olfaction_evolution'
| 29
| 81
| 0.731975
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 292
| 0.45768
|
627f3994b2ade29fca362ec30f86cac34a2baa81
| 152
|
py
|
Python
|
resolver/mindeps/__init__.py
|
Shivansh-007/python-resolver
|
c44e93e0715d6d7a736db17122e6a606267329b2
|
[
"MIT"
] | null | null | null |
resolver/mindeps/__init__.py
|
Shivansh-007/python-resolver
|
c44e93e0715d6d7a736db17122e6a606267329b2
|
[
"MIT"
] | null | null | null |
resolver/mindeps/__init__.py
|
Shivansh-007/python-resolver
|
c44e93e0715d6d7a736db17122e6a606267329b2
|
[
"MIT"
] | null | null | null |
# SPDX-License-Identifier: MIT
from resolver.mindeps.__main__ import entrypoint, get_min_deps # noqa: F401
__all__ = ('entrypoint', 'get_min_deps')
| 21.714286
| 76
| 0.763158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 68
| 0.447368
|
6280695be38110adda77e21a75a8350fbff3df45
| 9,433
|
py
|
Python
|
fabfile/data.py
|
nprapps/linklater
|
9ba8fbefcbe9148253e5d5c47572e8b887ce9485
|
[
"FSFAP"
] | null | null | null |
fabfile/data.py
|
nprapps/linklater
|
9ba8fbefcbe9148253e5d5c47572e8b887ce9485
|
[
"FSFAP"
] | 47
|
2015-01-22T16:12:16.000Z
|
2015-01-28T18:51:58.000Z
|
fabfile/data.py
|
nprapps/linklater
|
9ba8fbefcbe9148253e5d5c47572e8b887ce9485
|
[
"FSFAP"
] | 1
|
2021-02-18T11:26:35.000Z
|
2021-02-18T11:26:35.000Z
|
#!/usr/bin/env python
"""
Commands that update or process the application data.
"""
from datetime import datetime
import json
from bs4 import BeautifulSoup
from flask import render_template
from fabric.api import task
from fabric.state import env
from facebook import GraphAPI
from twitter import Twitter, OAuth
from jinja2 import Environment, FileSystemLoader
import app_config
import copytext
import os
import requests
TWITTER_BATCH_SIZE = 200
@task(default=True)
def update():
"""
Stub function for updating app-specific data.
"""
#update_featured_social()
@task
def make_tumblr_draft_html():
links = fetch_tweets(env.twitter_handle, env.twitter_timeframe)
template = env.jinja_env.get_template('tumblr.html')
output = template.render(links=links)
return output
@task
def fetch_tweets(username, days):
"""
Get tweets of a specific user
"""
current_time = datetime.now()
secrets = app_config.get_secrets()
twitter_api = Twitter(
auth=OAuth(
secrets['TWITTER_API_OAUTH_TOKEN'],
secrets['TWITTER_API_OAUTH_SECRET'],
secrets['TWITTER_API_CONSUMER_KEY'],
secrets['TWITTER_API_CONSUMER_SECRET']
)
)
out = []
tweets = twitter_api.statuses.user_timeline(screen_name=username, count=TWITTER_BATCH_SIZE)
i = 0
while True:
if i > (len(tweets)-1):
break
tweet = tweets[i]
created_time = datetime.strptime(tweet['created_at'], '%a %b %d %H:%M:%S +0000 %Y')
time_difference = (current_time - created_time).days
if time_difference > int(days):
break
out.extend(_process_tweet(tweet, username))
i += 1
if i > (TWITTER_BATCH_SIZE-1):
tweets = twitter_api.statuses.user_timeline(screen_name=username, count=TWITTER_BATCH_SIZE, max_id=tweet['id'])
i = 0
out = _dedupe_links(out)
return out
def _process_tweet(tweet, username):
out = []
for url in tweet['entities']['urls']:
if url['display_url'].startswith('pic.twitter.com'):
continue
row = _grab_url(url['expanded_url'])
if row:
row['tweet_text'] = tweet['text']
if tweet.get('retweeted_status'):
row['tweet_url'] = 'http://twitter.com/%s/status/%s' % (tweet['retweeted_status']['user']['screen_name'], tweet['id'])
row['tweeted_by'] = tweet['retweeted_status']['user']['screen_name']
out.append(row)
else:
row['tweet_url'] = 'http://twitter.com/%s/status/%s' % (username, tweet['id'])
out.append(row)
return out
def _grab_url(url):
"""
Returns data of the form:
{
'title': <TITLE>,
'description': <DESCRIPTION>,
'type': <page/image/download>,
'image': <IMAGE_URL>,
'tweet_url': <TWEET_URL>.
'tweet_text': <TWEET_TEXT>,
'tweeted_by': <USERNAME>
}
"""
data = None
try:
resp = requests.get(url, timeout=5)
except requests.exceptions.Timeout:
print '%s timed out.' % url
return None
real_url = resp.url
if resp.status_code == 200 and resp.headers.get('content-type').startswith('text/html'):
data = {}
data['url'] = real_url
soup = BeautifulSoup(resp.content)
og_tags = ('image', 'title', 'description')
for og_tag in og_tags:
match = soup.find(attrs={'property': 'og:%s' % og_tag})
if match and match.attrs.get('content'):
data[og_tag] = match.attrs.get('content')
else:
print "There was an error accessing %s (%s)" % (real_url, resp.status_code)
return data
def _dedupe_links(links):
"""
Get rid of duplicate URLs
"""
out = []
urls_seen = []
for link in links:
if link['url'] not in urls_seen:
urls_seen.append(link['url'])
out.append(link)
else:
print "%s is a duplicate, skipping" % link['url']
return out
@task
def update_featured_social():
"""
Update featured tweets
"""
COPY = copytext.Copy(app_config.COPY_PATH)
secrets = app_config.get_secrets()
# Twitter
print 'Fetching tweets...'
twitter_api = Twitter(
auth=OAuth(
secrets['TWITTER_API_OAUTH_TOKEN'],
secrets['TWITTER_API_OAUTH_SECRET'],
secrets['TWITTER_API_CONSUMER_KEY'],
secrets['TWITTER_API_CONSUMER_SECRET']
)
)
tweets = []
for i in range(1, 4):
tweet_url = COPY['share']['featured_tweet%i' % i]
if isinstance(tweet_url, copytext.Error) or unicode(tweet_url).strip() == '':
continue
tweet_id = unicode(tweet_url).split('/')[-1]
tweet = twitter_api.statuses.show(id=tweet_id)
creation_date = datetime.strptime(tweet['created_at'],'%a %b %d %H:%M:%S +0000 %Y')
creation_date = '%s %i' % (creation_date.strftime('%b'), creation_date.day)
tweet_url = 'http://twitter.com/%s/status/%s' % (tweet['user']['screen_name'], tweet['id'])
photo = None
html = tweet['text']
subs = {}
for media in tweet['entities'].get('media', []):
original = tweet['text'][media['indices'][0]:media['indices'][1]]
replacement = '<a href="%s" target="_blank" onclick="_gaq.push([\'_trackEvent\', \'%s\', \'featured-tweet-action\', \'link\', 0, \'%s\']);">%s</a>' % (media['url'], app_config.PROJECT_SLUG, tweet_url, media['display_url'])
subs[original] = replacement
if media['type'] == 'photo' and not photo:
photo = {
'url': media['media_url']
}
for url in tweet['entities'].get('urls', []):
original = tweet['text'][url['indices'][0]:url['indices'][1]]
replacement = '<a href="%s" target="_blank" onclick="_gaq.push([\'_trackEvent\', \'%s\', \'featured-tweet-action\', \'link\', 0, \'%s\']);">%s</a>' % (url['url'], app_config.PROJECT_SLUG, tweet_url, url['display_url'])
subs[original] = replacement
for hashtag in tweet['entities'].get('hashtags', []):
original = tweet['text'][hashtag['indices'][0]:hashtag['indices'][1]]
replacement = '<a href="https://twitter.com/hashtag/%s" target="_blank" onclick="_gaq.push([\'_trackEvent\', \'%s\', \'featured-tweet-action\', \'hashtag\', 0, \'%s\']);">%s</a>' % (hashtag['text'], app_config.PROJECT_SLUG, tweet_url, '#%s' % hashtag['text'])
subs[original] = replacement
for original, replacement in subs.items():
html = html.replace(original, replacement)
# https://dev.twitter.com/docs/api/1.1/get/statuses/show/%3Aid
tweets.append({
'id': tweet['id'],
'url': tweet_url,
'html': html,
'favorite_count': tweet['favorite_count'],
'retweet_count': tweet['retweet_count'],
'user': {
'id': tweet['user']['id'],
'name': tweet['user']['name'],
'screen_name': tweet['user']['screen_name'],
'profile_image_url': tweet['user']['profile_image_url'],
'url': tweet['user']['url'],
},
'creation_date': creation_date,
'photo': photo
})
# Facebook
print 'Fetching Facebook posts...'
fb_api = GraphAPI(secrets['FACEBOOK_API_APP_TOKEN'])
facebook_posts = []
for i in range(1, 4):
fb_url = COPY['share']['featured_facebook%i' % i]
if isinstance(fb_url, copytext.Error) or unicode(fb_url).strip() == '':
continue
fb_id = unicode(fb_url).split('/')[-1]
post = fb_api.get_object(fb_id)
user = fb_api.get_object(post['from']['id'])
user_picture = fb_api.get_object('%s/picture' % post['from']['id'])
likes = fb_api.get_object('%s/likes' % fb_id, summary='true')
comments = fb_api.get_object('%s/comments' % fb_id, summary='true')
#shares = fb_api.get_object('%s/sharedposts' % fb_id)
creation_date = datetime.strptime(post['created_time'],'%Y-%m-%dT%H:%M:%S+0000')
creation_date = '%s %i' % (creation_date.strftime('%b'), creation_date.day)
# https://developers.facebook.com/docs/graph-api/reference/v2.0/post
facebook_posts.append({
'id': post['id'],
'message': post['message'],
'link': {
'url': post['link'],
'name': post['name'],
'caption': (post['caption'] if 'caption' in post else None),
'description': post['description'],
'picture': post['picture']
},
'from': {
'name': user['name'],
'link': user['link'],
'picture': user_picture['url']
},
'likes': likes['summary']['total_count'],
'comments': comments['summary']['total_count'],
#'shares': shares['summary']['total_count'],
'creation_date': creation_date
})
# Render to JSON
output = {
'tweets': tweets,
'facebook_posts': facebook_posts
}
with open('data/featured.json', 'w') as f:
json.dump(output, f)
| 31.029605
| 271
| 0.56578
| 0
| 0
| 0
| 0
| 6,823
| 0.723312
| 0
| 0
| 3,106
| 0.32927
|
62809e7157f3314444431a1ce7fbb43f9a5fe55b
| 4,277
|
py
|
Python
|
glow/nn/_trainer.py
|
arquolo/ort
|
6f2d32d739ed6a0dab81bb91018c005e47ef7e77
|
[
"MIT"
] | null | null | null |
glow/nn/_trainer.py
|
arquolo/ort
|
6f2d32d739ed6a0dab81bb91018c005e47ef7e77
|
[
"MIT"
] | null | null | null |
glow/nn/_trainer.py
|
arquolo/ort
|
6f2d32d739ed6a0dab81bb91018c005e47ef7e77
|
[
"MIT"
] | null | null | null |
from __future__ import annotations
__all__ = ['Trainer']
from collections.abc import Callable, Collection, Iterator
from dataclasses import dataclass
import torch
import torch.nn as nn
import torch.optim
from tqdm.auto import tqdm
from .. import ichunked
from .. import metrics as m
from ._loader import _Loader
from .amp import Grads, get_grads
from .util import eval_
class Stage:
def __call__(self, loader: _Loader) -> Iterator[tuple[torch.Tensor, ...]]:
raise NotImplementedError
@dataclass(frozen=True)
class EvalStage(Stage):
net: nn.Module
device: torch.device
fp16: bool
def _infer(self, data: torch.Tensor,
target: torch.Tensor) -> tuple[torch.Tensor, ...]:
with torch.autocast(self.device.type, enabled=self.fp16):
out = self.net(data.to(self.device, non_blocking=True))
return out, target
def __call__(self, loader: _Loader) -> Iterator[tuple[torch.Tensor, ...]]:
with eval_(self.net), torch.inference_mode():
for data, target in loader:
yield self._infer(
data.to(self.device, non_blocking=True),
target.to(self.device, non_blocking=True),
)
@dataclass(frozen=True)
class TrainStage(Stage):
net: nn.Module
device: torch.device
fp16: bool
criterion: Callable[..., torch.Tensor]
grads: Grads
grad_steps: int
def _step(self, data: torch.Tensor,
target: torch.Tensor) -> tuple[torch.Tensor, ...]:
with torch.autocast(self.device.type, enabled=self.fp16):
out = self.net(data.to(self.device, non_blocking=True))
loss = self.criterion(out, target)
self.grads.backward(loss)
return out.detach(), target
def __call__(self, loader: _Loader) -> Iterator[tuple[torch.Tensor, ...]]:
for batches in ichunked(loader, self.grad_steps):
with self.grads:
for data, target in batches:
yield self._step(
data.to(self.device, non_blocking=True),
target.to(self.device, non_blocking=True),
)
# Clip norm here if needed
class Trainer:
def __init__(self,
net: nn.Module,
opt: torch.optim.Optimizer,
criterion: Callable[..., torch.Tensor],
metrics: Collection[m.Metric],
device: torch.device,
sched: torch.optim.lr_scheduler._LRScheduler | None = None,
fp16: bool = False,
grad_steps: int = 1) -> None:
self.metrics = metrics
grads = get_grads(opt, sched, fp16=fp16, max_retries=0)
self.stages = (
TrainStage(net, device, fp16, criterion, grads, grad_steps),
EvalStage(net, device, fp16),
)
def _run(self, stage: Stage, loader: _Loader, pbar: tqdm) -> m.Scores:
meter = m.compose(*self.metrics)
scores = m.Scores()
for out in stage(loader):
scores = meter.send(out)
pbar.set_postfix(scores.scalars)
pbar.update()
return scores
def train(self, loader: _Loader, pbar: tqdm) -> m.Scores:
return self._run(self.stages[0], loader, pbar)
def eval(self, loader: _Loader, pbar: tqdm) -> m.Scores:
return self._run(self.stages[1], loader, pbar)
def run(self,
train_loader: _Loader,
eval_loader: _Loader,
epochs: int = 1):
for i in tqdm(range(1, 1 + epochs), smoothing=0):
with tqdm(train_loader, desc='train', leave=False) as bar:
tscalars = self.train(bar, bar).scalars
with tqdm(eval_loader, desc='val', leave=False) as bar:
vscalars = self.eval(bar, bar).scalars
assert tscalars.keys() == vscalars.keys()
tags = sorted(tscalars.keys() | vscalars.keys())
# TODO: those lines should be moved outsize into loggers
line = ','.join(
f'{tag}: ' + '/'.join(f'{s[tag]:.3f}'
for s in (tscalars, vscalars))
for tag in tags)
print(f'[{i:03d}] {line}')
| 32.9
| 78
| 0.575871
| 3,843
| 0.898527
| 786
| 0.183774
| 1,719
| 0.401917
| 0
| 0
| 153
| 0.035773
|
6280a1beb33550565dc8c6430459ab48faab2173
| 14,450
|
py
|
Python
|
trapeza-import.py
|
davidmreed/trapeza-import
|
e745ff43f6901df47aba1ae820ca522492fa988b
|
[
"MIT"
] | null | null | null |
trapeza-import.py
|
davidmreed/trapeza-import
|
e745ff43f6901df47aba1ae820ca522492fa988b
|
[
"MIT"
] | 2
|
2015-04-10T17:12:39.000Z
|
2015-04-10T17:13:53.000Z
|
trapeza-import.py
|
davidmreed/trapeza-import
|
e745ff43f6901df47aba1ae820ca522492fa988b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# trapeza-import.py
#
# Copyright 2013-2014 David Reed <david@ktema.org>
# This file is available under the terms of the MIT License.
#
from itertools import groupby
from operator import itemgetter, attrgetter
import hashlib
import io
import os
import sys
import pickle
import tempfile
import trapeza
import trapeza.formats
import trapeza.match
from flask import Flask, request, session, make_response, render_template, flash, redirect, url_for
# Accept three Source objects. Run match_incoming_records() on them.
# Render a template showing the user matches and allowing them to select a
# matching record.
# On selection, run diff on the selected records (comparing the columns that are
# mapped by the profile) and offer the user the chance to select one of the two versions
# of each field or to enter their own data.
# On output, generate a copy of the input sheet with a unique ID column added specifying
# the matched record in the database. For each diffed column, if the user did not choose
# to retain existing data, include the user-input value or the value from incoming.
# Maintain all other columns in the original input sheet unaltered for import.
app = Flask(__name__)
app.secret_key = "THIS_IS_A_TESTING_SECRET_KEY"
encodings = {
"utf8": "UTF-8",
"utf-16": "UTF-16",
"latin_1": "Latin-1",
"mac_roman": "Mac OS Roman",
"cp1252": "Windows Codepage 1252"
}
# Filters and functions for use within the match template
def flatten_record(record, headers):
return u"; ".join([u"{0}: {1}".format(key, record.values[key]) for key in headers])
def get_identifier(element="", incoming_line=None, record_id="", key=""):
if not isinstance(incoming_line, str):
line = str(incoming_line)
else:
line = incoming_line or ""
return u"{0}{1}".format(element, hashlib.sha512(".".join([line, record_id or "", key or ""])).hexdigest())
def generate_header_mapping(profile):
header_map = {}
for mapping in profile.mappings:
if mapping.key not in header_map:
header_map[mapping.key] = mapping.master_key
return header_map
def group_results(results, nresults):
# Group result lines into tuples (line no, results list) by the original input line.
# Built a list whose elements are tuples (line no, result list), where the result
# list is sorted by match score and clamped to the best nresults
return sorted([(k, sorted(list(g), key=attrgetter("score"), reverse=True)[:nresults])
for k, g in groupby(results, lambda res: res.incoming.input_line())],
key=itemgetter(0))
@app.route("/")
def start():
return render_template("index.html",
encodings=sorted(encodings.items(), key=itemgetter(1)),
input_formats=trapeza.formats.available_input_formats(),
output_formats=trapeza.formats.available_output_formats())
@app.route("/run", methods=["POST"])
def step_one():
try:
# Load master sheet or processed master file and profile
master_file = request.files["master"]
if "input_format" in request.form \
and request.form.get("input_format") in trapeza.formats.available_input_formats():
master_format = request.form.get("input_format")
else:
master_format = trapeza.get_format(master_file.filename, "csv")
if "input_encoding" in request.form and request.form.get("input_encoding") in encodings:
input_encoding = request.form.get("input_encoding")
else:
input_encoding = "utf8"
if master_format == "trapeza":
# Master file has been preprocessed
processed_master = pickle.load(master_file)
master = processed_master.source
profile = processed_master.profile
else:
processed_master = None
master = trapeza.load_source(request.files["master"], master_format, encoding=input_encoding)
profile = trapeza.match.Profile(
source=trapeza.load_source(request.files["profile"],
trapeza.get_format(request.files["profile"].filename, "csv"),
encoding=input_encoding))
# Load incoming file
if "input_format" in request.form \
and request.form.get("input_format") in trapeza.formats.available_input_formats():
incoming_format = request.form.get("input_format")
else:
incoming_format = trapeza.get_format(request.files["incoming"].filename, "csv")
incoming = trapeza.load_source(request.files["incoming"], incoming_format, encoding=input_encoding)
except IOError:
flash("An error occurred during loading of input files. Please provide files in a format that Trapeza "
"understands and ensure you choose the correct text encoding.")
return redirect(url_for("start"))
# Determine primary key and set in Master. We do not set the primary key on incoming or the output.
# If the user feels like matching multiple input lines to the same master records, that's fine;
# we don't want to throw an exception.
try:
primary_key = request.form["primary_key"]
master.set_primary_key(primary_key)
except KeyError:
flash("One or more master records are missing the specified primary key.")
return redirect(url_for("start"))
# Bring in the parameters.
try:
cutoff = abs(int(request.form["cutoff"])) or 0
autocutoff = abs(int(request.form["autocutoff"])) or 0
nresults = abs(int(request.form["nresults"])) or 5
display_diff = "display_diff" in request.form
include_unmatched_records = "include_unmatched_records" in request.form
output_only_modified_entries = "output_only_modified_entries" in request.form
include_re_new_address_flag = "include_re_new_address_flag" in request.form
if request.form.get("line_endings") in ["cr", "crlf", "lf"]:
line_endings = request.form.get("line_endings")
else:
line_endings = "lf"
    except (KeyError, TypeError):
flash("An invalid option was specified.")
return redirect(url_for("start"))
# Perform the comparison.
try:
raw_results = group_results(profile.compare_sources(processed_master or master, incoming, cutoff), nresults)
# Pull out automatchable results
automatches = []
results = []
if autocutoff != 0:
for result in raw_results:
# Each result is a (line number, results list) tuple.
if result[1][0].score >= autocutoff and \
(len(result[1]) == 1 or (len(result[1]) > 1 and result[1][1].score < autocutoff)):
# We have exactly one match with a score at or above the cutoff.
# Automatically match this record.
automatches.append((result[0], result[1][0:1]))
else:
results.append(result)
else:
results = raw_results
except Exception as e:
flash("An error occurred during matching ({})".format(e))
return redirect(url_for("start"))
try:
# Save the details of the operation
if request.form.get("output_format") and \
request.form.get("output_format") in trapeza.formats.available_output_formats():
output_format = request.form.get("output_format")
else:
output_format = "csv"
if request.form.get("output_encoding") and request.form.get("output_encoding") in encodings:
output_encoding = request.form.get("output_encoding")
else:
output_encoding = "utf8"
operation = {"master": master,
"incoming": incoming,
"profile": profile,
"primary_key": primary_key,
"results": results,
"automatches": automatches,
"cutoff": cutoff,
"output_format": output_format,
"output_encoding": output_encoding,
"line_endings": line_endings,
"display_diff": display_diff,
"include_unmatched_records": include_unmatched_records,
"output_only_modified_entries": output_only_modified_entries,
"include_re_new_address_flag": include_re_new_address_flag}
outfile = tempfile.NamedTemporaryFile(delete=False)
session["file"] = outfile.name
pickle.dump(operation, outfile, pickle.HIGHEST_PROTOCOL)
outfile.close()
except Exception as e:
flash("An error occurred while saving output ({})".format(e))
return redirect(url_for("start"))
return render_template("match.html",
results=results,
primary_key=primary_key,
header_map=generate_header_mapping(profile),
incoming_headers=incoming.headers(),
master_headers=master.headers(),
flatten_record=flatten_record,
get_identifier=get_identifier,
display_diff=display_diff,
include_re_new_address_flag=include_re_new_address_flag)
@app.route("/dl", methods=["POST"])
def step_two():
try:
with open(session["file"], "rb") as sess_file:
operation = pickle.load(sess_file)
os.unlink(session["file"])
except IOError:
flash("An error occurred while loading processed results.")
return redirect(url_for("start"))
try:
primary_key = operation["primary_key"]
header_map = generate_header_mapping(operation["profile"])
output = trapeza.Source(operation["incoming"].headers())
output.add_column(primary_key)
if operation["include_re_new_address_flag"]:
if "New Address?" not in output.headers():
output.add_column("New Address?")
matched_records = []
for (original_line, matches) in operation["results"]:
matched_records.append(original_line)
master_id = request.form.get(get_identifier('select', original_line))
if master_id or operation["include_unmatched_records"]:
out_record = trapeza.Record(matches[0].incoming.values)
out_record.values[primary_key] = master_id or u""
modified = False
for key in out_record.values:
if key != primary_key:
value = request.form.get(get_identifier("select", original_line, master_id, key))
if value == "MASTER":
if not operation["output_only_modified_entries"]:
out_record.values[key] = operation["master"].get_record_with_id(master_id).values[
header_map[key]]
else:
out_record.values[key] = ""
elif value == "USER":
user_val = request.form.get(get_identifier("userentrybox", original_line, master_id, key))
                            if user_val is not None and len(user_val) > 0:
out_record.values[key] = user_val
modified = True
elif value == "INCOMING" or value == "" or value is None:
# If the user made no selection, assume that we're to retain the incoming data.
modified = True
pass
else:
raise Exception("Invalid form data.")
if operation["include_re_new_address_flag"]:
new_address = get_identifier('newaddressbox', original_line) in request.form
out_record.values["New Address?"] = "TRUE" if modified and new_address else "FALSE"
output.add_record(out_record)
# Process records which were automatically matched, if any.
for (original_line, matches) in operation["automatches"]:
out_record = trapeza.Record(matches[0].incoming.values)
out_record.values[primary_key] = matches[0].master.record_id()
output.add_record(out_record)
# If we are outputting unmatched records, we also must collect and output any record which didn't have a match
# over the cutoff (which won't appear in the matched list)
# Add line identifiers for automatched records to the matched set so we don't get duplicate output.
matched_records.extend(map(itemgetter(0), operation["automatches"]))
for record in operation["incoming"].records():
if record.input_line() not in matched_records:
out_record = trapeza.Record(record.values)
out_record.values[primary_key] = u""
if operation["include_re_new_address_flag"]:
out_record.values["New Address?"] = "FALSE"
                output.add_record(out_record)
except Exception as e:
flash("An error occurred while processing matches ({})".format(e))
return redirect(url_for("start"))
try:
out_buffer = io.BytesIO()
endings = {"crlf": "\r\n", "lf": "\n", "cr": "\r"}
trapeza.write_source(output, out_buffer, operation["output_format"], encoding=operation["output_encoding"],
line_endings=endings.get(operation["line_endings"]))
data = out_buffer.getvalue()
out_buffer.close()
response = make_response(data)
response.headers["Content-Disposition"] = "attachment; filename=output.{}".format(operation["output_format"])
response.headers["Content-Type"] = "application/octet-stream"
return response
except Exception as e:
flash("An error occurred while writing output ({})".format(e))
return redirect(url_for("start"))
if __name__ == "__main__":
app.debug = True
app.run()
| 41.884058
| 118
| 0.612872
| 0
| 0
| 0
| 0
| 11,708
| 0.810242
| 0
| 0
| 4,318
| 0.298824
|
628148650e81ef63a14a0e7b6f3e244ba44786ee
| 1,200
|
py
|
Python
|
tests/test_yaml.py
|
janhybs/ci-hpi
|
293740c7af62ecada5744ff663266de2e3d37445
|
[
"MIT"
] | 1
|
2020-01-09T13:00:18.000Z
|
2020-01-09T13:00:18.000Z
|
tests/test_yaml.py
|
janhybs/ci-hpi
|
293740c7af62ecada5744ff663266de2e3d37445
|
[
"MIT"
] | null | null | null |
tests/test_yaml.py
|
janhybs/ci-hpi
|
293740c7af62ecada5744ff663266de2e3d37445
|
[
"MIT"
] | 2
|
2018-08-12T01:13:28.000Z
|
2018-08-13T14:37:28.000Z
|
#!/bin/python3
# author: Jan Hybs
import tests
tests.fix_paths()
import yaml
from unittest import TestCase
from cihpc.cfg.config import global_configuration
from cihpc.common.utils import extend_yaml
repeat_yaml = '''
foo: !repeat a 5
'''
range_yaml = '''
foo: !range 1 5
bar: !range 1 2 6
'''
sh_yaml = '''
foo: !readfile yaml/foo.txt
'''
class TestExtendYaml(TestCase):
def test_extend(self):
extend_yaml.extend()
self.assertIn('!range', yaml.Loader.yaml_constructors)
self.assertIn('!repeat', yaml.Loader.yaml_constructors)
self.assertIn('!readfile', yaml.Loader.yaml_constructors)
self.assertIn('!readyaml', yaml.Loader.yaml_constructors)
def test_repeat(self):
extend_yaml.extend()
result = yaml.load(repeat_yaml)
self.assertEqual(result.get('foo'), 'a'*5)
result = yaml.load(range_yaml)
self.assertTupleEqual(tuple(result.get('foo')), tuple(range(1, 5)))
self.assertTupleEqual(tuple(result.get('bar')), tuple(range(1, 2, 6)))
global_configuration.project_cfg_dir = tests.__dir__
result = yaml.load(sh_yaml)
self.assertEqual(result.get('foo'), 'top-secret')
| 22.641509
| 78
| 0.6725
| 848
| 0.706667
| 0
| 0
| 0
| 0
| 0
| 0
| 206
| 0.171667
|
62825aac43334a0c4d771dfbfbed86c8e81907ef
| 991
|
py
|
Python
|
Greedy.py
|
victor3r/search-algorithms
|
0256349b20b314c3337264c4c2b1ef4b1992a914
|
[
"MIT"
] | null | null | null |
Greedy.py
|
victor3r/search-algorithms
|
0256349b20b314c3337264c4c2b1ef4b1992a914
|
[
"MIT"
] | null | null | null |
Greedy.py
|
victor3r/search-algorithms
|
0256349b20b314c3337264c4c2b1ef4b1992a914
|
[
"MIT"
] | null | null | null |
from OrderedVector import OrderedVector
class Greedy:
def __init__(self, goal):
self.goal = goal
self.found = False
self.travelled_distance = 0
self.previous = None
self.visited_cities = []
def search(self, current):
current.visited = True
self.visited_cities.append(current.name)
for a in current.adjacent:
if a.city == self.previous:
self.travelled_distance += a.distance
self.previous = current
if current == self.goal:
self.found = True
else:
self.border = OrderedVector(len(current.adjacent))
for a in current.adjacent:
if a.city.visited == False:
a.city.visited = True
self.border.insert(a.city)
if self.border.getFirst() != None:
Greedy.search(self, self.border.getFirst())
return (self.visited_cities, self.travelled_distance)
| 31.967742
| 62
| 0.573158
| 948
| 0.956609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
6283560a4c1df948a92e15782756a76d970412a4
| 3,262
|
py
|
Python
|
mlf_core/create/templates/package/package_prediction/{{ cookiecutter.project_slug_no_hyphen }}/{{cookiecutter.project_slug_no_hyphen}}/cli_xgboost.py
|
mlf-core/mlf-core
|
016f6186b5b62622c3a2b3ca884331fe0165b97c
|
[
"Apache-2.0"
] | 31
|
2020-10-04T14:54:54.000Z
|
2021-11-22T09:33:17.000Z
|
mlf_core/create/templates/package/package_prediction/{{ cookiecutter.project_slug_no_hyphen }}/{{cookiecutter.project_slug_no_hyphen}}/cli_xgboost.py
|
mlf-core/mlf_core
|
cea155595df95d1d22473605d29813f5d698d635
|
[
"Apache-2.0"
] | 200
|
2020-08-05T13:51:14.000Z
|
2022-03-28T00:25:54.000Z
|
mlf_core/create/templates/package/package_prediction/{{ cookiecutter.project_slug_no_hyphen }}/{{cookiecutter.project_slug_no_hyphen}}/cli_xgboost.py
|
mlf-core/mlf_core
|
cea155595df95d1d22473605d29813f5d698d635
|
[
"Apache-2.0"
] | 3
|
2020-11-29T17:03:52.000Z
|
2021-06-03T13:12:03.000Z
|
import os
import sys
from dataclasses import dataclass
import click
import numpy as np
import xgboost as xgb
from rich import print, traceback
WD = os.path.dirname(__file__)
@click.command()
@click.option('-i', '--input', required=True, type=str, help='Path to data file to predict.')
@click.option('-m', '--model', type=str, help='Path to an already trained XGBoost model. If not passed a default model will be loaded.')
@click.option('-c/-nc', '--cuda/--no-cuda', type=bool, default=False, help='Whether to enable cuda or not')
@click.option('-o', '--output', type=str, help='Path to write the output to')
def main(input: str, model: str, cuda: bool, output: str):
"""Command-line interface for {{ cookiecutter.project_name }}"""
print(r"""[bold blue]
{{ cookiecutter.project_name }}
""")
print('[bold blue]Run [green]{{ cookiecutter.project_name }} --help [blue]for an overview of all commands\n')
if not model:
model = get_xgboost_model(f'{WD}/models/xgboost_test_model.xgb')
else:
model = get_xgboost_model(model)
if cuda:
model.set_param({'predictor': 'gpu_predictor'})
print('[bold blue] Parsing data')
data_to_predict = parse_data_to_predict(input)
print('[bold blue] Performing predictions')
predictions = np.round(model.predict(data_to_predict.DM))
print(predictions)
if output:
print(f'[bold blue]Writing predictions to {output}')
write_results(predictions, output)
@dataclass
class Dataset:
X: np.ndarray
y: list
DM: xgb.DMatrix
gene_names: list
sample_names: list
def parse_data_to_predict(path_to_data_to_predict: str) -> Dataset:
"""
Parses the data to predict and returns a full Dataset include the DMatrix
:param path_to_data_to_predict: Path to the data on which predictions should be performed on
"""
X = []
y = []
gene_names = []
sample_names = []
with open(path_to_data_to_predict, "r") as file:
all_runs_info = next(file).split("\n")[0].split("\t")[2:]
for run_info in all_runs_info:
split_info = run_info.split("_")
y.append(int(split_info[0]))
sample_names.append(split_info[1])
for line in file:
split = line.split("\n")[0].split("\t")
X.append([float(x) for x in split[2:]])
gene_names.append(split[:2])
X = [list(i) for i in zip(*X)]
X_np = np.array(X)
DM = xgb.DMatrix(X_np, label=y)
return Dataset(X_np, y, DM, gene_names, sample_names)
def write_results(predictions: np.ndarray, path_to_write_to) -> None:
"""
Writes the predictions into a human readable file.
:param predictions: Predictions as a numpy array
:param path_to_write_to: Path to write the predictions to
"""
np.savetxt(path_to_write_to, predictions, delimiter=',')
def get_xgboost_model(path_to_xgboost_model: str):
"""
Fetches the model of choice and creates a booster from it.
    :param path_to_xgboost_model: Path to the xgboost model
"""
model = xgb.Booster()
model.load_model(os.path.abspath(path_to_xgboost_model))
return model
if __name__ == "__main__":
traceback.install()
sys.exit(main()) # pragma: no cover
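# Usage sketch (editor's addition): invoking the CLI above in-process with click's test
# runner; the input and output paths are illustrative placeholders, not from the source.
#     from click.testing import CliRunner
#     runner = CliRunner()
#     result = runner.invoke(main, ['--input', 'data/expression.tsv', '--output', 'predictions.csv'])
#     print(result.output)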
| 31.980392
| 136
| 0.66401
| 108
| 0.033109
| 0
| 0
| 1,427
| 0.437462
| 0
| 0
| 1,204
| 0.369099
|
62835966d4b0e4b0387185306e4dd38f6d24f970
| 1,597
|
py
|
Python
|
onnxruntime/python/tools/quantization/operators/direct_q8.py
|
kimjungwow/onnxruntime-riscv
|
3c21abef03190648fe68a6633ac026725e6dfc58
|
[
"MIT"
] | 18
|
2020-05-19T12:48:07.000Z
|
2021-04-28T06:41:57.000Z
|
onnxruntime/python/tools/quantization/operators/direct_q8.py
|
kimjungwow/onnxruntime-riscv
|
3c21abef03190648fe68a6633ac026725e6dfc58
|
[
"MIT"
] | 61
|
2021-05-31T05:15:41.000Z
|
2022-03-29T22:34:33.000Z
|
onnxruntime/python/tools/quantization/operators/direct_q8.py
|
ekmixon/onnxruntime
|
1ab8a95eb6675afb6d0ad9d93600ef0022e2ddb5
|
[
"MIT"
] | 9
|
2021-05-14T20:17:26.000Z
|
2022-03-20T11:44:29.000Z
|
from .base_operator import QuantOperatorBase
from .qdq_base_operator import QDQOperatorBase
from ..quant_utils import QuantizedValue
# For operators that support 8bits operations directly, and output could
# reuse input[0]'s type, zeropoint, scale; For example,Transpose, Reshape, etc.
class Direct8BitOp(QuantOperatorBase):
def __init__(self, onnx_quantizer, onnx_node):
super().__init__(onnx_quantizer, onnx_node)
def quantize(self):
node = self.node
# Quantize when input[0] is quantized already. Otherwise keep it.
if node.input[0] not in self.quantizer.quantized_value_map:
self.quantizer.new_nodes += [node]
return
# Create an entry for output quantized value
quantized_input_value = self.quantizer.quantized_value_map[node.input[0]]
quantized_output_value = QuantizedValue(node.output[0], node.output[0] + "_quantized",
quantized_input_value.scale_name, quantized_input_value.zp_name,
quantized_input_value.value_type)
self.quantizer.quantized_value_map[node.output[0]] = quantized_output_value
node.input[0] = quantized_input_value.q_name
node.output[0] = quantized_output_value.q_name
self.quantizer.new_nodes += [node]
class QDQDirect8BitOp(QDQOperatorBase):
def __init__(self, onnx_quantizer, onnx_node):
self.quantizer = onnx_quantizer
self.node = onnx_node
def quantize(self):
self.quantizer.quantize_tensor(self.node.input[0])
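# A hedged sketch of how these classes are typically wired up: a mapping from
# ONNX op type to quantizer class. The registry name below is illustrative and
# is not the actual onnxruntime registry object.
EXAMPLE_DIRECT_8BIT_REGISTRY = {
    "Transpose": Direct8BitOp,
    "Reshape": Direct8BitOp,
    "Squeeze": Direct8BitOp,
}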
| 42.026316
| 112
| 0.689418
| 1,306
| 0.817783
| 0
| 0
| 0
| 0
| 0
| 0
| 272
| 0.170319
|
628478917870596f07ee4037c6b0a49ad4980015
| 3,659
|
py
|
Python
|
b01lers-ctf-2022/extreme_64_part_2/src/levels.py
|
novafacing/challenges
|
cfdfb34973880cfda03b4797d3b369344cfa4b9a
|
[
"MIT"
] | null | null | null |
b01lers-ctf-2022/extreme_64_part_2/src/levels.py
|
novafacing/challenges
|
cfdfb34973880cfda03b4797d3b369344cfa4b9a
|
[
"MIT"
] | null | null | null |
b01lers-ctf-2022/extreme_64_part_2/src/levels.py
|
novafacing/challenges
|
cfdfb34973880cfda03b4797d3b369344cfa4b9a
|
[
"MIT"
] | null | null | null |
from platform import architecture
from qiling import Qiling
from typing import Union
from asm_practice.coding.challenge import ArchSpec, Challenge, TestCase
challenges = []
amd64 = ArchSpec(
pwntools_arch="amd64",
pwntools_os="linux",
qiling_rootfs="qiling/examples/rootfs/x86_linux/",
qiling_ostype="linux",
qiling_archtype="x8664",
)
"""
lea rax, [rdi+rsi] ;
"""
challenges.append(
Challenge(
archspec=amd64,
instructions="Add rdi to rsi and store the result in rax using only one instruction.",
testcases=[
TestCase(
codeconditions=[
(lambda c: len(c.splitlines()) == 1, "too many instructions!"),
(lambda c: c.count(";") <= 1, "too many instructions!"),
],
preconditions=[
lambda ql: setattr(ql.reg, "rdi", 0x1000),
lambda ql: setattr(ql.reg, "rsi", 0x2000),
],
postconditions=[(lambda ql: ql.reg.rax == 0x3000, "rax != rdi + rsi!")],
)
],
secret="code{some_instructions_have_many_uses}",
)
)
class IOBuffer:
def __init__(self) -> None:
self.buffer = b""
def write(self, ql, fd, buf, count, *args, **kw) -> int:
self.buffer += ql.mem.read(buf, count)
ql.os.set_syscall_return(count)
"""
mov rax, 0x616d72656b636168 ;
push rax ;
mov rsi, rsp ;
mov rdi, 0x1 ;
mov rdx, 0x8 ;
mov rax, 0x1 ;
syscall ;
mov rax, 0x6e ;
push rax ;
mov rsi, rsp ;
mov rdi, 0x1 ;
mov rdx, 0x1 ;
mov rax, 0x1 ;
syscall ;
"""
IOBuffer_0 = IOBuffer()
challenges.append(
Challenge(
archspec=amd64,
instructions="Print the string 'hackerman' to stdout.",
testcases=[
TestCase(
preconditions=[lambda ql: ql.set_syscall("write", IOBuffer_0.write)],
postconditions=[
(
lambda ql: IOBuffer_0.buffer == b"hackerman" or print(f'stdout was {IOBuffer_0.buffer}, not \'hackerman\'!'),
f"",
)
],
)
],
secret="code{asm_c4n_d0_anything_java_can_do}",
)
)
"""
mov rax, 0x0 ;
comp:
cmp rax, 0x20 ;
jg end;
mov bl, [rsi+rax] ;
xor bl, 0x82 ;
mov byte ptr [rsi+rax], bl ;
inc rax ;
jmp comp ;
end:
mov rax, 0x1 ;
mov rdi, 0x1 ;
mov rdx, 0x20 ;
syscall ;
"""
ADDR_0 = 0x10000000//0x1000*0x1000
IOBuffer_1 = IOBuffer()
challenges.append(
Challenge(
archspec=amd64,
instructions="""
Register rsi contains a pointer to the flag, but the
flag has been encoded by XOR-ing each byte with 0x82.
Decode it and print it out to complete the challenge!
Hint: the flag is 32 characters long...
""",
testcases=[
TestCase(
preconditions=[
lambda ql: ql.mem.map(ADDR_0, 0x1000),
lambda ql: ql.mem.write(
ADDR_0,
b"\xe0\xe1\xf6\xe4\xf9\xe1\xb2\xec\xe5\xf0\xe3\xf6\xf1\xdd\xfb\xb2\xf7\xdd\xe3\xf0\xe7\xdd\xe3\xdd\xf2\xf0\xb2\xa3\xa3\xa3\xa3\xff",
# b"bctf{c0ngrats_y0u_are_a_pr0!!!!}"
),
lambda ql: setattr(ql.reg, "rsi", ADDR_0),
lambda ql: ql.set_syscall("write", IOBuffer_1.write)
],
postconditions=[(lambda ql: IOBuffer_1.buffer == b'bctf{c0ngrats_y0u_are_a_pr0!!!!}', 'didn\'t print out the encrypted flag!')],
)
],
secret="code{did_you_get_the_flag?}",
)
)
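# A small standalone check (not part of the challenge itself) that the byte
# string used in the precondition above really is the expected flag XOR-ed
# with 0x82:
_encoded_flag = (
    b"\xe0\xe1\xf6\xe4\xf9\xe1\xb2\xec\xe5\xf0\xe3\xf6\xf1\xdd\xfb\xb2"
    b"\xf7\xdd\xe3\xf0\xe7\xdd\xe3\xdd\xf2\xf0\xb2\xa3\xa3\xa3\xa3\xff"
)
assert bytes(b ^ 0x82 for b in _encoded_flag) == b"bctf{c0ngrats_y0u_are_a_pr0!!!!}"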
| 27.103704
| 156
| 0.544138
| 222
| 0.060672
| 0
| 0
| 0
| 0
| 0
| 0
| 1,403
| 0.383438
|
62858c1b3ffcd01a46345c2c6590f7749e7c007a
| 2,331
|
py
|
Python
|
lte/gateway/python/magma/enodebd/tr069/tests/models_tests.py
|
Aitend/magma
|
74fe7b437335728219dfbe54733955bdd652eb60
|
[
"BSD-3-Clause"
] | 849
|
2020-07-17T09:15:03.000Z
|
2022-03-31T17:11:52.000Z
|
lte/gateway/python/magma/enodebd/tr069/tests/models_tests.py
|
Aitend/magma
|
74fe7b437335728219dfbe54733955bdd652eb60
|
[
"BSD-3-Clause"
] | 8,386
|
2020-06-23T23:35:30.000Z
|
2022-03-31T23:31:28.000Z
|
lte/gateway/python/magma/enodebd/tr069/tests/models_tests.py
|
Aitend/magma
|
74fe7b437335728219dfbe54733955bdd652eb60
|
[
"BSD-3-Clause"
] | 406
|
2020-06-28T17:02:35.000Z
|
2022-03-31T22:03:55.000Z
|
"""
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from magma.enodebd.tr069.models import DeviceIdStruct
from spyne import ComplexModelBase
class DeviceIdStructTests(unittest.TestCase):
def test_as_dict_memory_leak(self):
"""
Test to ensure as_dict() doesn't leak model instances
"""
thing = DeviceIdStruct(
Manufacturer='abc',
OUI='def',
ProductClass='ghi',
SerialNumber='jkl',
)
res = thing.as_dict()
self.assertEqual(
{
'Manufacturer': 'abc',
'OUI': 'def',
'ProductClass': 'ghi',
'SerialNumber': 'jkl',
},
res,
)
# inspect the spyne.util.memoize object that wraps the staticmethod
self.assertEqual(1, len(ComplexModelBase.get_flat_type_info.memo))
# should produce a different result and not grow the size of memo
thing.OUI = 'aaaa'
res = thing.as_dict()
self.assertEqual(
{
'Manufacturer': 'abc',
'OUI': 'aaaa',
'ProductClass': 'ghi',
'SerialNumber': 'jkl',
},
res,
)
self.assertEqual(1, len(ComplexModelBase.get_flat_type_info.memo))
# use a different object this time. Again should not grow memo
thing = DeviceIdStruct(
Manufacturer='abc',
OUI='def',
ProductClass='ghi',
SerialNumber='jkl',
)
res = thing.as_dict()
self.assertEqual(
{
'Manufacturer': 'abc',
'OUI': 'def',
'ProductClass': 'ghi',
'SerialNumber': 'jkl',
},
res,
)
self.assertEqual(1, len(ComplexModelBase.get_flat_type_info.memo))
| 31.08
| 75
| 0.56671
| 1,746
| 0.749035
| 0
| 0
| 0
| 0
| 0
| 0
| 994
| 0.426426
|
6285f9b80fd1befd47cc7c79a141c672da8b15b7
| 732
|
py
|
Python
|
saleor/graphql/order/bulk_mutations/draft_orders.py
|
acabezasg/urpi-master
|
7c9cd0fbe6d89dad70652482712ca38b21ba6f84
|
[
"BSD-3-Clause"
] | 1
|
2019-04-15T09:37:26.000Z
|
2019-04-15T09:37:26.000Z
|
saleor/graphql/order/bulk_mutations/draft_orders.py
|
acabezasg/urpi-master
|
7c9cd0fbe6d89dad70652482712ca38b21ba6f84
|
[
"BSD-3-Clause"
] | 5
|
2021-03-09T16:22:37.000Z
|
2022-02-10T19:10:03.000Z
|
saleor/graphql/order/bulk_mutations/draft_orders.py
|
acabezasg/urpi-master
|
7c9cd0fbe6d89dad70652482712ca38b21ba6f84
|
[
"BSD-3-Clause"
] | 1
|
2020-12-26T10:25:37.000Z
|
2020-12-26T10:25:37.000Z
|
import graphene
from ....order import OrderStatus, models
from ...core.mutations import ModelBulkDeleteMutation
class DraftOrderBulkDelete(ModelBulkDeleteMutation):
class Arguments:
ids = graphene.List(
graphene.ID,
required=True,
description='List of draft order IDs to delete.')
class Meta:
description = 'Deletes draft orders.'
model = models.Order
@classmethod
def clean_instance(cls, info, instance, errors):
if instance.status != OrderStatus.DRAFT:
cls.add_error(errors, 'id', 'Cannot delete non-draft orders.')
@classmethod
def user_is_allowed(cls, user, input):
return user.has_perm('order.manage_orders')
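# A hedged sketch of how this mutation might be called from a GraphQL client.
# The schema field name and the returned "count" field are assumptions based
# on the class name and Saleor's bulk-mutation conventions, not verified here.
EXAMPLE_MUTATION = '''
mutation {
  draftOrderBulkDelete(ids: ["T3JkZXI6MQ==", "T3JkZXI6Mg=="]) {
    count
  }
}
'''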
| 28.153846
| 74
| 0.665301
| 616
| 0.84153
| 0
| 0
| 296
| 0.404372
| 0
| 0
| 117
| 0.159836
|
62891dc073d6683815ffdf4fdf46da450a83ebad
| 1,978
|
py
|
Python
|
thread_extensions/callback_thread.py
|
Sunchasing/python-common
|
bc9f11fe4585ef9abca7006c0bf64b11062742fd
|
[
"Apache-2.0"
] | 5
|
2021-08-15T23:04:25.000Z
|
2021-09-06T18:32:53.000Z
|
thread_extensions/callback_thread.py
|
Sunchasing/python-common
|
bc9f11fe4585ef9abca7006c0bf64b11062742fd
|
[
"Apache-2.0"
] | null | null | null |
thread_extensions/callback_thread.py
|
Sunchasing/python-common
|
bc9f11fe4585ef9abca7006c0bf64b11062742fd
|
[
"Apache-2.0"
] | 1
|
2022-01-28T13:12:23.000Z
|
2022-01-28T13:12:23.000Z
|
from threading import Thread
from typing import Any, Iterable, Mapping
from types_extensions import Function, void, safe_type
class CallbackThread(Thread):
"""
An extension to python's threading API allowing for a callback to be executed upon completion of the given
function. The callback is executed with the initial function's return value as the first parameter and any other
given args and kwargs following.
Usage:
>>> def callback_func(x: int, y: int) -> None:
>>>
>>> print(f'Called with {x=} and {y=}.')
>>>
>>> def func(x: int) -> int:
>>> return x + 1
>>> thread_ = CallbackThread(target=func, kwargs={'x': 10}, callback=callback_func, callback_extra_args=(7,))
>>> thread_.start()
>>> thread_.join()
----
Called with x=11 and y=7.
"""
def __init__(self, group: void = None, target: Function = None, name: str = None,
args: Iterable[Any] = (), kwargs: Mapping[str, Any] = None, *, daemon: bool = None,
callback: Function = None, callback_extra_args: Iterable[Any] = (),
callback_extra_kwargs: Mapping[str, Any] = None) -> void:
Thread.__init__(self, group=group, target=target, name=name,
args=args, kwargs=kwargs, daemon=daemon)
self._target: Function = target
self._args: Iterable[Any] = args
self._kwargs: safe_type(Mapping[str, Any]) = kwargs or {}
self._callback: Function = callback
self._callback_extra_args: Iterable[Any] = callback_extra_args
self._callback_extra_kwargs: safe_type(Mapping[str, Any]) = callback_extra_kwargs or {}
def run(self) -> void:
if self._target:
return_value = self._target(*self._args, **self._kwargs)
if self._callback:
self._callback(return_value, *self._callback_extra_args, **self._callback_extra_kwargs)
del self._target, self._args, self._kwargs
| 39.56
| 116
| 0.63549
| 1,848
| 0.934277
| 0
| 0
| 0
| 0
| 0
| 0
| 666
| 0.336704
|
62898b405054c48d9122f893fe9282577a49a61e
| 55
|
py
|
Python
|
enthought/traits/ui/wx/button_editor.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/traits/ui/wx/button_editor.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/traits/ui/wx/button_editor.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from traitsui.wx.button_editor import *
| 18.333333
| 39
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 14
| 0.254545
|
6289a72aad33159f7d054947864a1fb1eeedfac8
| 2,756
|
py
|
Python
|
ROSITA/ARMCycleCounterImpl.py
|
0xADE1A1DE/Rosita
|
13a669c1877b2eb5a36be6b41a6f840d83ffd46a
|
[
"Apache-2.0"
] | 10
|
2020-11-22T08:17:08.000Z
|
2021-12-17T04:06:01.000Z
|
ROSITA/ARMCycleCounterImpl.py
|
0xADE1A1DE/Rosita
|
13a669c1877b2eb5a36be6b41a6f840d83ffd46a
|
[
"Apache-2.0"
] | null | null | null |
ROSITA/ARMCycleCounterImpl.py
|
0xADE1A1DE/Rosita
|
13a669c1877b2eb5a36be6b41a6f840d83ffd46a
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 University of Adelaide
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def check_inst(inst, mask, match):
return inst & mask == match
def cycles_push(inst):
if check_inst(inst, 0b1111111000000000, 0b1011010000000000):
return 1 + bin(inst & 0x00ff).count('1')
return -1
def cycles_pop(inst):
# Format 14: push/pop registers
if check_inst(inst, 0b1111111000000000, 0b1011110000000000):
return 1 + bin(inst & 0x00ff).count('1')
return -1
def cycles_pop_pc(inst):
# Format 14: push/pop registers
if check_inst(inst, 0b1111111100000000, 0b1011110100000000):
return 4 + bin(inst & 0x00ff).count('1')
return -1
def cycles_add(inst):
# Format 2: add/subtract
if check_inst(inst, 0b1111101000000000, 0b0001100000000000):
return 1
return -1
def cycles_add_pc(inst):
return -1
def cycles_rot(inst):
# Format 4
if check_inst(inst, 0b1111111111000000, 0b0100000111000000):
return 1
return -1
def cycles_ldr(inst):
# Format 7
# Format 9
if check_inst(inst, 0b1111101000000000, 0b0101100000000000) or \
check_inst(inst, 0b1110100000000000, 0b0110100000000000):
return 2
return -1
def cycles_str(inst):
# Format 7
# Format 9
if check_inst(inst, 0b1111101000000000, 0b0101000000000000) or \
check_inst(inst, 0b1110100000000000, 0b0110000000000000):
return 2
return -1
def cycles_mov(inst):
# Format 1: move shifted register
# Format 3: move/compare/add/subtract immediate
# Format 5: Hi register operations/branch exchange
if check_inst(inst, 0b1111111111000000, 0b0000000000000000) or \
check_inst(inst, 0b1111100000000000, 0b0010000000000000) or \
check_inst(inst, 0b1111111100000000, 0b0100011000000000):
return 1
return -1
def cycles_mov_pc(inst):
# Format 5: dest = pc
if check_inst(inst, 0b1111111101000111, 0b0100011001000111):
return 3
return -1
__cycle_counts = [
[ cycles_mov, cycles_mov_pc ],
[ cycles_add, cycles_add_pc ],
[ cycles_ldr ],
[ cycles_str ],
[ cycles_rot ],
[ cycles_pop, cycles_pop_pc ],
[ cycles_push ]
]
def get_cycle_counts():
return __cycle_counts
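# Worked example: Thumb "push {r0-r3}" encodes as 0xB40F and "pop {r0, pc}"
# as 0xBD01 (register lists chosen for illustration). Matching the masks above
# gives 1 + <pushed registers> and 4 + <popped low registers> cycles.
if __name__ == "__main__":
    assert cycles_push(0xB40F) == 5    # 1 + 4 pushed registers
    assert cycles_pop_pc(0xBD01) == 5  # 4 + 1 popped low register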
| 29.010526
| 74
| 0.697388
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 870
| 0.315675
|
6289d050b39c8fee926a510fc4214e7ee940d801
| 4,723
|
py
|
Python
|
dust/admin.py
|
MerlinEmris/eBazar
|
f159314183a8a95afd97d36b0d3d8cf22015a512
|
[
"MIT"
] | null | null | null |
dust/admin.py
|
MerlinEmris/eBazar
|
f159314183a8a95afd97d36b0d3d8cf22015a512
|
[
"MIT"
] | null | null | null |
dust/admin.py
|
MerlinEmris/eBazar
|
f159314183a8a95afd97d36b0d3d8cf22015a512
|
[
"MIT"
] | null | null | null |
from django.utils.html import format_html
def full_address(self):
return format_html('%s - <b>%s,%s</b>' % (self.address, self.city, self.state))
# text to display in place of null / empty values
admin.site.empty_value_display = '???'
admin.site.register(Item)
# shows the __str__ output next to the name field
class ItemAdmin(admin.ModelAdmin):
list_display = ['name', '__str__']
# display your own custom text for a column
class StoreAdmin(admin.ModelAdmin):
list_display = ['name', 'address', 'upper_case_city_state']
def upper_case_city_state(self, obj):
return ("%s %s" % (obj.city, obj.state)).upper()
upper_case_city_state.short_description = 'City/State'
# return the email domain name
class Store(models.Model):
name = models.CharField(max_length=30)
email = models.EmailField()
def email_domain(self):
return self.email.split("@")[-1]
email_domain.short_description = 'Email domain'
class StoreAdmin(admin.ModelAdmin):
list_display = ['name','email_domain']
# how to sort a manually created field that is backed by a db column
# models.py
from django.db import models
from django.utils.html import format_html
class Store(models.Model):
name = models.CharField(max_length=30)
address = models.CharField(max_length=30,unique=True)
city = models.CharField(max_length=30)
state = models.CharField(max_length=2)
def full_address(self):
return format_html('%s - <b>%s,%s</b>' % (self.address,self.city,self.state))
full_address.admin_order_field = '-city'
# admin.py
from django.contrib import admin
from coffeehouse.stores.models import Store
class StoreAdmin(admin.ModelAdmin):
list_display = ['name','full_address']
# makes the listed columns clickable links
list_display_links = ['name', 'user', 'location', 'price']
# with this off, coming back from the detail view shows the list without the applied filter
preserve_filters = False
# to filter by creation date
date_hierarchy = 'created'
# removes the actions menu at the top
actions_on_top = False
# show only these fields
fields = ['address','city','state','email']
# changing type of field
formfield_overrides = {
models.CharField: {'widget': forms.Textarea}
}
# pre-fills the address field with a slugified combination of the city and state fields
prepopulated_fields = {'address': ['city','state']}
# creates a 'save as new' button that clones the record
save_as = True
save_as_continue = False #after cloning go to main page
# removes the 'view on site' link
view_on_site = False
# if you want to enter ForeignKey and ManyToManyField values manually by id
raw_id_fields = ["menu"]
# show ForeignKey and ManyToManyField choices as radio buttons
radio_fields = {"location": admin.HORIZONTAL}
# change admin form for user type
class MyModelAdmin(admin.ModelAdmin):
def get_form(self, request, obj=None, **kwargs):
if request.user.is_superuser:
kwargs['form'] = MySuperuserForm
return super(MyModelAdmin, self).get_form(request, obj, **kwargs)
# limit ForeignKey choices according to the current user
class MyModelAdmin(admin.ModelAdmin):
def formfield_for_foreignkey(self, db_field, request, **kwargs):
if db_field.name == "car":
kwargs["queryset"] = Car.objects.filter(owner=request.user)
return super(MyModelAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)
# limit ManyToManyField choices according to the current user
class MyModelAdmin(admin.ModelAdmin):
def formfield_for_manytomany(self, db_field, request, **kwargs):
if db_field.name == "cars":
kwargs["queryset"] = Car.objects.filter(owner=request.user)
return super(MyModelAdmin, self).formfield_for_manytomany(db_field, request, **kwargs)
# called after an admin delete; a minimal override sketch follows below
def response_delete(request, obj_display, obj_id):
    """
    Determines the HttpResponse for the delete_view() stage.
    response_delete is called after the object has been deleted.
    You can override it to change the default behavior after the object has been deleted.
    obj_display is a string with the name of the deleted object.
    obj_id is the serialized identifier used to retrieve the object to be deleted.
    """
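# A minimal sketch of overriding response_delete on a ModelAdmin; the model
# name and the print side effect are illustrative only:
class StoreAdmin(admin.ModelAdmin):
    def response_delete(self, request, obj_display, obj_id):
        print('deleted %s (pk=%s)' % (obj_display, obj_id))
        return super().response_delete(request, obj_display, obj_id)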
# colored admin field
from django.db import models
from django.contrib import admin
from django.utils.html import format_html
class Person(models.Model):
first_name = models.CharField(max_length=50)
color_code = models.CharField(max_length=6)
def colored_first_name(self):
return format_html(
'<span style="color: #{};">{}</span>',
self.color_code,
self.first_name,
)
colored_first_name.admin_order_field = 'first_name'
class PersonAdmin(admin.ModelAdmin):
list_display = ('first_name', 'colored_first_name')
# avoid extra changelist queries by joining these related tables
list_select_related = ('organization', 'user')
| 27.782353
| 94
| 0.724328
| 2,405
| 0.50921
| 0
| 0
| 0
| 0
| 0
| 0
| 1,290
| 0.273131
|
628a26e801e2d3e6057da42ddf0d511ab519f04e
| 465
|
py
|
Python
|
main.py
|
Lasx/gb688_downloader
|
7e9711e7784c15bcd15a6129ab1fef99c8d44f23
|
[
"Apache-2.0"
] | 119
|
2020-02-27T04:27:15.000Z
|
2022-03-01T07:02:34.000Z
|
main.py
|
Lasx/gb688_downloader
|
7e9711e7784c15bcd15a6129ab1fef99c8d44f23
|
[
"Apache-2.0"
] | 12
|
2020-03-26T04:50:13.000Z
|
2021-11-24T04:00:08.000Z
|
main.py
|
Lasx/gb688_downloader
|
7e9711e7784c15bcd15a6129ab1fef99c8d44f23
|
[
"Apache-2.0"
] | 24
|
2020-03-11T22:50:24.000Z
|
2022-03-25T08:13:56.000Z
|
from standard import HDB, NatureStd
if __name__ == "__main__":
hb = HDB('hbba')
db = HDB('dbba')
data = db.search('政务云工程评价指标体系及方法')
print(data)
# first_record = data["records"][0]
# name = f'{first_record["code"]}({first_record["chName"]}'
# db.download(pk=first_record['pk'], name=name)
# std = NatureStd()
# std.search("")
# std.download("http://www.nrsis.org.cn/portal/stdDetail/211166", "乡(镇)土地利用总体规划制图规范.pdf")  # industry standard
| 27.352941
| 99
| 0.617204
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 373
| 0.705104
|
628a35fc680aa3e71939daea214021b0a93a8fef
| 6,478
|
py
|
Python
|
tests/test_centering.py
|
chto/redmapper
|
1dc66b601889ef9913f0f9b2e05980b982834485
|
[
"Apache-2.0"
] | null | null | null |
tests/test_centering.py
|
chto/redmapper
|
1dc66b601889ef9913f0f9b2e05980b982834485
|
[
"Apache-2.0"
] | null | null | null |
tests/test_centering.py
|
chto/redmapper
|
1dc66b601889ef9913f0f9b2e05980b982834485
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import division, absolute_import, print_function
from past.builtins import xrange
import unittest
import numpy.testing as testing
import numpy as np
import fitsio
import os
from numpy import random
from redmapper import Cluster
from redmapper import Configuration
from redmapper import CenteringWcenZred, CenteringBCG, CenteringRandom, CenteringRandomSatellite
from redmapper import GalaxyCatalog
from redmapper import RedSequenceColorPar
from redmapper import Background
from redmapper import ZredBackground
from redmapper import ZlambdaCorrectionPar
class CenteringTestCase(unittest.TestCase):
"""
Test application of the centering models (CenteringWcenZred, CenteringBCG,
CenteringRandom, CenteringRandomSatellite).
"""
def test_wcenzred(self):
"""
Test running of CenteringWcenZred.
"""
file_path = 'data_for_tests'
cluster = self._setup_cluster()
tempcat = fitsio.read(os.path.join(file_path, 'test_wcen_zred_data.fit'))
corr_filename = 'test_dr8_zlambdacorr.fit'
zlambda_corr = ZlambdaCorrectionPar(os.path.join(file_path, 'test_dr8_zlambdacorr.fit'), zlambda_pivot=30.0)
zlambda_corr = ZlambdaCorrectionPar(file_path + '/' + corr_filename, zlambda_pivot=30.0)
# And the meat of it...
cent = CenteringWcenZred(cluster, zlambda_corr=zlambda_corr)
cent.find_center()
testing.assert_almost_equal(cent.p_cen, tempcat[0]['PCEN'][tempcat[0]['GOOD']], 5)
testing.assert_almost_equal(cent.q_cen, tempcat[0]['QCEN'][tempcat[0]['GOOD']], 4)
testing.assert_almost_equal(cent.p_sat, tempcat[0]['PSAT'], 4)
testing.assert_almost_equal(cent.p_fg, tempcat[0]['PFG'], 4)
testing.assert_array_equal(cent.index, tempcat[0]['USE'][tempcat[0]['GOOD']])
def test_bcg(self):
"""
Test running of CenteringBcg.
"""
cluster = self._setup_cluster()
cent = CenteringBCG(cluster)
cent.find_center()
self.assertEqual(cent.maxind, 72)
self.assertEqual(cent.ngood, 1)
testing.assert_almost_equal(cent.ra, 150.55890608)
testing.assert_almost_equal(cent.dec, 20.53794937)
testing.assert_almost_equal(cent.p_cen[0], 1.0)
testing.assert_almost_equal(cent.q_cen[0], 1.0)
testing.assert_almost_equal(cent.p_sat[0], 0.0)
def test_random(self):
"""
Test running of CenteringRandom.
"""
random.seed(seed=12345)
cluster = self._setup_cluster()
cent = CenteringRandom(cluster)
cent.find_center()
self.assertEqual(cent.maxind, -1)
self.assertEqual(cent.ngood, 1)
testing.assert_almost_equal(cent.ra[0], 150.57049502423266)
testing.assert_almost_equal(cent.dec[0], 20.604521924053167)
testing.assert_almost_equal(cent.p_cen[0], 1.0)
testing.assert_almost_equal(cent.q_cen[0], 1.0)
testing.assert_almost_equal(cent.p_sat[0], 0.0)
def test_randsat(self):
"""
Test running of CenteringRandomSatellite.
"""
random.seed(seed=12345)
cluster = self._setup_cluster()
cent = CenteringRandomSatellite(cluster)
cent.find_center()
# Confirmed that the distribution is correct, this just checks for regression
self.assertEqual(cent.maxind, 721)
self.assertEqual(cent.ngood, 1)
testing.assert_almost_equal(cent.ra[0], 150.67510227)
testing.assert_almost_equal(cent.dec[0], 20.48011092)
testing.assert_almost_equal(cent.p_cen[0], 1.0)
testing.assert_almost_equal(cent.q_cen[0], 1.0)
testing.assert_almost_equal(cent.p_sat[0], 0.0)
def _setup_cluster(self):
"""
Set up the cluster to run through the centering code.
"""
file_path = 'data_for_tests'
cluster = Cluster()
cluster.config = Configuration(os.path.join(file_path, 'testconfig.yaml'))
tempcat = fitsio.read(os.path.join(file_path, 'test_wcen_zred_data.fit'))
temp_neighbors = np.zeros(tempcat[0]['RAS'].size,
dtype = [('RA', 'f8'),
('DEC', 'f8'),
('DIST', 'f4'),
('R', 'f4'),
('P', 'f4'),
('PFREE', 'f4'),
('PMEM', 'f4'),
('MAG', 'f4', 5),
('MAG_ERR', 'f4', 5),
('REFMAG', 'f4'),
('REFMAG_ERR', 'f4'),
('CHISQ', 'f4'),
('ZRED', 'f4'),
('ZRED_E', 'f4'),
('ZRED_CHISQ', 'f4')])
temp_neighbors['RA'] = tempcat[0]['RAS']
temp_neighbors['DEC'] = tempcat[0]['DECS']
temp_neighbors['R'] = tempcat[0]['R']
temp_neighbors['P'] = tempcat[0]['PVALS']
temp_neighbors['PFREE'] = tempcat[0]['WVALS']
temp_neighbors['PMEM'] = tempcat[0]['WTVALS']
temp_neighbors['REFMAG'] = tempcat[0]['REFMAG_TOTAL']
temp_neighbors['ZRED'] = tempcat[0]['GZREDS']
temp_neighbors['ZRED_E'] = tempcat[0]['GZREDE']
temp_neighbors['ZRED_CHISQ'] = tempcat[0]['GCHISQ']
temp_neighbors['DIST'] = tempcat[0]['R'] / (np.radians(1.) * cluster.config.cosmo.Da(0, tempcat[0]['ZCLUSTER']))
neighbors = GalaxyCatalog(temp_neighbors)
cluster.set_neighbors(neighbors)
zred_filename = 'test_dr8_pars.fit'
cluster.zredstr = RedSequenceColorPar(os.path.join(file_path, 'test_dr8_pars.fit'), fine=True, zrange=[0.25, 0.35])
cluster.bkg = Background(os.path.join(file_path, 'test_bkg.fit'))
cluster.zredbkg = ZredBackground(os.path.join(file_path, 'test_bkg.fit'))
cluster.redshift = tempcat[0]['ZCLUSTER']
cluster.ra = tempcat[0]['RAC']
cluster.dec = tempcat[0]['DECC']
cluster.r_lambda = 1.0 * (tempcat[0]['LAMBDA'] / 100.0)**0.2
cluster.Lambda = tempcat[0]['LAMBDA']
cluster.scaleval = tempcat[0]['SCALEVAL']
return cluster
if __name__=='__main__':
unittest.main()
| 37.662791
| 123
| 0.589379
| 5,858
| 0.904291
| 0
| 0
| 0
| 0
| 0
| 0
| 1,192
| 0.184007
|
628a9b46dc2e473cabe5460e321c03369c91a690
| 6,581
|
py
|
Python
|
converted_docs/run_pandoc.py
|
AndrewLoeppky/eoas_tlef
|
9119023ff21b8db0faf95b4e5f62a7c06eeb6b50
|
[
"BSD-3-Clause"
] | 3
|
2020-04-30T19:50:11.000Z
|
2020-10-17T02:07:00.000Z
|
converted_docs/run_pandoc.py
|
AndrewLoeppky/eoas_tlef
|
9119023ff21b8db0faf95b4e5f62a7c06eeb6b50
|
[
"BSD-3-Clause"
] | 35
|
2020-04-21T04:25:31.000Z
|
2021-11-06T22:49:44.000Z
|
converted_docs/run_pandoc.py
|
AndrewLoeppky/eoas_tlef
|
9119023ff21b8db0faf95b4e5f62a7c06eeb6b50
|
[
"BSD-3-Clause"
] | 11
|
2020-04-21T04:33:48.000Z
|
2020-10-23T21:12:12.000Z
|
"""
usage:
turn all docx files into markdown files
python run_pandoc.py transform-docs
turn all tex files into markdown files
python run_pandoc.py transform-docs --doctype=tex
move all csv, md, pptx, docx, png, jpeg, jpg etc.
into Book/subdir folders, where subdir is the suffix filename
and write a file catalog Book/file_catalog.json
python run_pandoc.py move-files
remove all media files
python run_pandoc.py clean-media
remove all markdown files
python run_pandoc.py clean-markdown
"""
import contextlib
import os, sys
from pathlib import Path
import subprocess
import pprint
import shutil
import click
from collections import defaultdict
import copy
import json
pp=pprint.PrettyPrinter(indent=4)
@contextlib.contextmanager
def cd(path):
print(f'initially inside {os.getcwd()}')
CWD = os.getcwd()
os.chdir(path)
print(f'inside {os.getcwd()}')
try:
yield
except Exception as e:
print(f'Exception caught: {e}')
finally:
print(f'finally inside {os.getcwd()}')
os.chdir(CWD)
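# Minimal usage sketch of the cd() helper above: work inside the block happens
# in the target directory and the original cwd is restored afterwards, even if
# an exception is raised ("." keeps the example side-effect free).
def _cd_example():
    with cd("."):
        return os.getcwd()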
@click.group()
def main():
"""\b
usage: \b
\b
to turn all docx files into markdown files\b
python run_pandoc.py transform-docs
\b
to turn all tex files into markdown files\b
python run_pandoc.py transform-docs --doctype=tex
\b
move all csv, md, pptx, docx, png, jpeg, jpg etc.
into Book/subdir folders, where subdir is the suffix filename
and write a file catalog Book/file_catalog.json
python run_pandoc.py move-files
\b
to remove all media files\b
python run_pandoc.py clean-media
\b
to remove all markdown files\b
python run_pandoc.py clean-markdown
"""
pass
@main.command()
def clean_media():
all_media_dirs = Path().glob("**/*media*")
all_list = []
for a_dir in all_media_dirs:
if a_dir.is_dir():
all_list.append(str(a_dir))
bad_dirs = pp.pformat(all_list)
print(f"preparing to remove:\n{bad_dirs}")
for item in all_list:
try:
shutil.rmtree(item)
except FileNotFoundError:
pass
@main.command()
def move_files():
"""
move all files with a particular suffix to Book, creating a new subfolder
for each suffix. Any duplicate filenames are renamed by adding an integer counter to
the name in the form "dup-<count>-<original name>"
A json file Book/file_catalog.json is written with the filelist
"""
all_files = list(Path().glob("**/*"))
#
# don't double count files in the Book directory
#
all_files = [item for item in all_files
if str(item).find("Book") == -1]
all_files = [item for item in all_files
if str(item).find(".git") == -1]
all_files = [item for item in all_files if item.suffix not in ['.docx','pptx']]
all_files = [item for item in all_files if item.is_file()]
all_files = [item for item in all_files if item.name[-1:] not in ['#','~']]
all_suffixes = [item.suffix for item in all_files]
#
# remove duplicates
#
unique_suffixes = set(all_suffixes)
#
# remove the leading period
#
unique_suffixes = [item[1:] for item in unique_suffixes]
#
# make a subfolder to hold each suffix
#
Book = Path() / 'Book'
Book.mkdir(parents=True,exist_ok=True)
for subdir in unique_suffixes:
new_dir = Book / subdir
new_dir.mkdir(parents=True,exist_ok = True)
#
# put all files in a dictionary indexed by filename
#
keep_dict=defaultdict(list)
for a_file in all_files:
keep_dict[a_file.name].append(a_file)
keep_dict.pop( "'.'",None)
keep_dict.pop('.DS_Store',None)
#
# build a new dictionary adding
# unique names for any duplicates
#
working_dict = copy.deepcopy(keep_dict)
bad_keys=[]
for key,value in keep_dict.items():
if len(value) > 1:
for count,item in enumerate(value):
print(f"processing duplicate name: {item}")
new_name=f"dup-{count}-{item.name}"
working_dict[new_name]=[item]
bad_keys.append(key)
count+=1
#
# remove the keys that had duplicate names
#
for key in bad_keys:
working_dict.pop(key,None)
out_dict = defaultdict(dict)
for key, value in working_dict.items():
filepath = value[0]
#
# drop the leading . in suffix
#
suffix=filepath.suffix[1:]
out_dict[suffix][key]=str(filepath)
#
# convert back to ordinary dict for json
#
out_dict = dict(out_dict)
pp.pprint(out_dict)
json_file = Book / 'file_catalog.json'
with open(json_file,'w') as outfile:
json.dump(out_dict,outfile,indent=4)
for key,file_dict in out_dict.items():
write_dir = Book / key
for unique_name, file_path in file_dict.items():
new_path = write_dir / unique_name
shutil.copy(file_path, new_path)
@main.command()
def clean_markdown():
all_markdown_files = Path().glob("**/*.md")
all_list = []
for a_file in all_markdown_files:
if a_file.is_file():
all_list.append(str(a_file))
bad_files = pp.pformat(all_list)
print(f"preparing to remove:\n{bad_files}")
for item in all_list:
try:
os.remove(item)
except FileNotFoundError:
pass
@main.command()
@click.option('--doctype', default="docx", show_default=True)
def transform_docs(doctype):
"""\b
doctype can be either docx or tex\b
default value is docx\b
"""
docx_files = Path().glob(f"**/*.{doctype}")
for a_file in docx_files:
the_dir = a_file.parent
#
# change into the directory to execute pandoc, returning
# to the run directory once the command completes or if
# there is an exception
#
with cd(the_dir):
in_name = Path(a_file.name)
out_name = f"--output={in_name.with_suffix('.md')}"
media_dir = f"--extract-media={in_name.stem}_figs"
arglist=["pandoc", "--to", "gfm", media_dir,
out_name,a_file.name]
print(f"running the command \n{' '.join(arglist)}\n")
result = subprocess.run(arglist, capture_output=True)
if result.stdout:
print('output: ',result.stdout)
if result.stderr:
print('error: ',result.stderr)
if __name__ == "__main__":
main()
| 29.511211
| 89
| 0.619511
| 0
| 0
| 306
| 0.046497
| 5,781
| 0.878438
| 0
| 0
| 2,577
| 0.391582
|
628ac7ff61e9eb7e83c84034316330a71c01848e
| 979
|
py
|
Python
|
swim/PIL2/WebPImagePlugin.py
|
alexsigaras/SWIM
|
1a35df8acb26bdcb307a1b8f60e9feba68ed1715
|
[
"MIT"
] | 3
|
2015-06-05T00:32:44.000Z
|
2017-01-06T15:44:32.000Z
|
swim/PIL2/WebPImagePlugin.py
|
alexsigaras/SWIM
|
1a35df8acb26bdcb307a1b8f60e9feba68ed1715
|
[
"MIT"
] | null | null | null |
swim/PIL2/WebPImagePlugin.py
|
alexsigaras/SWIM
|
1a35df8acb26bdcb307a1b8f60e9feba68ed1715
|
[
"MIT"
] | null | null | null |
from PIL import Image
from PIL import ImageFile
from io import BytesIO
import _webp
def _accept(prefix):
return prefix[:4] == b"RIFF" and prefix[8:16] == b"WEBPVP8 "
class WebPImageFile(ImageFile.ImageFile):
format = "WEBP"
format_description = "WebP image"
def _open(self):
self.mode = "RGB"
data, width, height = _webp.WebPDecodeRGB(self.fp.read())
self.size = width, height
self.fp = BytesIO(data)
self.tile = [("raw", (0, 0) + self.size, 0, 'RGB')]
def _save(im, fp, filename):
if im.mode != "RGB":
raise IOError("cannot write mode %s as WEBP" % im.mode)
quality = im.encoderinfo.get("quality", 80)
data = _webp.WebPEncodeRGB(im.tostring(), im.size[0], im.size[1], im.size[0] * 3, float(quality))
fp.write(data)
Image.register_open("WEBP", WebPImageFile, _accept)
Image.register_save("WEBP", _save)
Image.register_extension("WEBP", ".webp")
Image.register_mime("WEBP", "image/webp")
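# Minimal round-trip sketch using the plugin registered above; it only runs
# when this module is executed directly and assumes the _webp extension is
# importable (the file name is illustrative).
if __name__ == "__main__":
    im = Image.new("RGB", (16, 16), (255, 0, 0))
    im.save("roundtrip.webp", quality=75)                 # encoded by _save()
    assert Image.open("roundtrip.webp").size == (16, 16)  # decoded by WebPImageFile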
| 28.794118
| 101
| 0.648621
| 340
| 0.347293
| 0
| 0
| 0
| 0
| 0
| 0
| 138
| 0.14096
|
628bbd970ba25dae836c86e32114954de704abe3
| 4,149
|
py
|
Python
|
AI Class/Agents/vacuum_v2.py
|
e-olang/Drafts
|
a26c958b50826c8762ba12046d1ef6aedefe7e5b
|
[
"MIT"
] | null | null | null |
AI Class/Agents/vacuum_v2.py
|
e-olang/Drafts
|
a26c958b50826c8762ba12046d1ef6aedefe7e5b
|
[
"MIT"
] | null | null | null |
AI Class/Agents/vacuum_v2.py
|
e-olang/Drafts
|
a26c958b50826c8762ba12046d1ef6aedefe7e5b
|
[
"MIT"
] | null | null | null |
from turtle import Turtle, Screen
from random import choice
from time import sleep
from queue import SimpleQueue
w: int
w, h = (853, 480)
wn = Screen()
wn.screensize(w, h)
wn.bgcolor("#d3d3d3")
Room_state = {"Clean": "#FFFFFF",
"Dirty": "#b5651d"}
cleaned = 0
def filler(t, color, delay=0, vacclean = False):
global cleaned
t.fillcolor(color)
t.penup()
if color == Room_state['Clean']:
sleep(delay) #To avoid instant cleaning
if vacclean:
cleaned += 1
t.begin_fill()
t.circle(130)
t.end_fill()
def setup():
A = Turtle() # Draws Circle in A
B = Turtle() # Draws Circle in B
X = Turtle() # Text Below A
Y = Turtle() # Text Below B
A.ht()
B.ht()
X.ht()
Y.ht()
A.speed(100)
B.speed(100)
X.speed(100)
Y.speed(100)
A.penup()
B.penup()
X.penup()
Y.penup()
A.setpos(-w / 4, -120)
B.setpos(w / 4, -120)
X.setpos(-w / 4, -200)
Y.setpos(w / 4, -200)
A.pendown()
B.pendown()
filler(A, Room_state['Clean'], False)
filler(B, Room_state['Clean'], False)
# Creates rooms and boundary
t1 = Turtle()
t1.ht()
t1.speed(20)
t1.penup()
t1.setposition(w / 2, h / 2)
t1.pendown()
t1.pensize(10)
t1.right(90)
t1.forward(h)
t1.right(90)
t1.forward(w)
t1.right(90)
t1.forward(h)
t1.right(90)
t1.forward(w)
t1.backward(w / 2)
t1.right(90)
t1.pensize(5)
t1.forward(h - 90)
t1.penup()
t1.setpos(-w / 4, h / 2 - 70)
t1.write("Room A", align="center", font=("Arial", 20, "normal"))
t1.setpos(w / 4, h / 2 - 70)
t1.write("Room B", align="center", font=("Arial", 20, "normal"))
return A, B, X, Y
A, B, X, Y = setup()
# Vaccum Cleaner
C = Turtle()
C.speed(8)
C.penup()
C.shape("circle")
C.setpos(A.xcor(), A.ycor() + 130)
count = 1
iter = Turtle()
cleanwriter = Turtle()
iter.ht()
cleanwriter.ht()
iter.penup()
cleanwriter.penup()
iter.setpos(0, -h / 2 + 50)
cleanwriter.setpos(0, -h / 2 + 20)
room_state = list(Room_state.keys())
state = SimpleQueue()
state.put_nowait(((choice(room_state)), choice(room_state)))
while True:
iter.clear()
cleanwriter.clear()
iter.write("Iteration : " + str(count), align="center", font=("Arial", 16, "normal"))
cleanwriter.write("Times Cleaned : " + str(cleaned), align="center", font=("Arial", 16, "normal"))
condition = state.get_nowait()
stateA = condition[0]
stateB = condition[1]
X.clear()
Y.clear()
nextA = choice(room_state)
nextB = choice(room_state)
state.put_nowait((nextA, nextB))
filler(A, Room_state[stateA])
filler(B, Room_state[stateB])
X.write("Now : " + stateA + "\nNext : " + nextA, align="center", font=("Arial", 16, "normal"))
Y.write("Now : " + stateB + "\nNext : " + nextB, align="center", font=("Arial", 16, "normal"))
print("\nA : " + stateA, "\tB : " + stateB)
if stateA == 'Dirty' and stateB == 'Dirty':
if C.xcor() < 0:
print("Both Dirty, Cleaned A going to B")
# noinspection PyTypeChecker
filler(A, Room_state['Clean'], 0.5, True)
stateA = 'Clean'
C.setpos(B.xcor(), B.ycor() + 130)
# noinspection PyTypeChecker
filler(B, Room_state['Clean'], 0.5, True)
stateB = 'Clean'
elif C.xcor() > 0:
print("Both Dirty, Cleaned B going to A")
# noinspection PyTypeChecker
filler(B, Room_state['Clean'], 0.5, True)
stateB = 'Clean'
C.setpos(A.xcor(), A.ycor() + 130)
# noinspection PyTypeChecker
filler(A, Room_state['Clean'], 0.5, True)
stateA = 'Clean'
if stateA == 'Dirty':
print("Cleaned A")
C.goto(A.xcor(), A.ycor() + 130)
# noinspection PyTypeChecker
filler(A, Room_state['Clean'], 0.3, True)
elif stateB == 'Dirty':
print("Cleaned B")
C.goto(B.xcor(), B.ycor() + 130)
# noinspection PyTypeChecker
filler(B, Room_state['Clean'], 0.3, True)
count += 1
sleep(0.5)
| 23.440678
| 102
| 0.557002
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 802
| 0.1933
|
628f60f6980f2fba69cda100a9a49fdeb649e134
| 1,226
|
py
|
Python
|
notebook/dict_keys_values_items.py
|
vhn0912/python-snippets
|
80b2e1d6b2b8f12ae30d6dbe86d25bb2b3a02038
|
[
"MIT"
] | 174
|
2018-05-30T21:14:50.000Z
|
2022-03-25T07:59:37.000Z
|
notebook/dict_keys_values_items.py
|
vhn0912/python-snippets
|
80b2e1d6b2b8f12ae30d6dbe86d25bb2b3a02038
|
[
"MIT"
] | 5
|
2019-08-10T03:22:02.000Z
|
2021-07-12T20:31:17.000Z
|
notebook/dict_keys_values_items.py
|
vhn0912/python-snippets
|
80b2e1d6b2b8f12ae30d6dbe86d25bb2b3a02038
|
[
"MIT"
] | 53
|
2018-04-27T05:26:35.000Z
|
2022-03-25T07:59:37.000Z
|
d = {'key1': 1, 'key2': 2, 'key3': 3}
for k in d:
print(k)
# key1
# key2
# key3
for k in d.keys():
print(k)
# key1
# key2
# key3
keys = d.keys()
print(keys)
print(type(keys))
# dict_keys(['key1', 'key2', 'key3'])
# <class 'dict_keys'>
k_list = list(d.keys())
print(k_list)
print(type(k_list))
# ['key1', 'key2', 'key3']
# <class 'list'>
for v in d.values():
print(v)
# 1
# 2
# 3
values = d.values()
print(values)
print(type(values))
# dict_values([1, 2, 3])
# <class 'dict_values'>
v_list = list(d.values())
print(v_list)
print(type(v_list))
# [1, 2, 3]
# <class 'list'>
for k, v in d.items():
print(k, v)
# key1 1
# key2 2
# key3 3
for t in d.items():
print(t)
print(type(t))
print(t[0])
print(t[1])
print('---')
# ('key1', 1)
# <class 'tuple'>
# key1
# 1
# ---
# ('key2', 2)
# <class 'tuple'>
# key2
# 2
# ---
# ('key3', 3)
# <class 'tuple'>
# key3
# 3
# ---
items = d.items()
print(items)
print(type(items))
# dict_items([('key1', 1), ('key2', 2), ('key3', 3)])
# <class 'dict_items'>
i_list = list(d.items())
print(i_list)
print(type(i_list))
# [('key1', 1), ('key2', 2), ('key3', 3)]
# <class 'list'>
print(i_list[0])
print(type(i_list[0]))
# ('key1', 1)
# <class 'tuple'>
| 13.775281
| 53
| 0.546493
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 560
| 0.45677
|
629117006004b2eb7fb514daab59fc4ecf18244d
| 560
|
py
|
Python
|
flows/tests/settings.py
|
sergioisidoro/django-flows
|
326baa3e216a15bd7a8d13b2a09ba9752e250dbb
|
[
"BSD-2-Clause"
] | 104
|
2015-01-05T14:29:16.000Z
|
2021-11-08T11:20:24.000Z
|
flows/tests/settings.py
|
sergioisidoro/django-flows
|
326baa3e216a15bd7a8d13b2a09ba9752e250dbb
|
[
"BSD-2-Clause"
] | 4
|
2015-09-23T11:14:50.000Z
|
2020-03-21T06:08:34.000Z
|
flows/tests/settings.py
|
sergioisidoro/django-flows
|
326baa3e216a15bd7a8d13b2a09ba9752e250dbb
|
[
"BSD-2-Clause"
] | 16
|
2015-01-05T10:13:44.000Z
|
2022-02-14T05:21:23.000Z
|
import django
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
INSTALLED_APPS = ['flows', 'flows.statestore.tests', 'django_nose']
SECRET_KEY = 'flow_tests'
if django.VERSION < (1, 6):
TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = ''
if django.VERSION < (1, 7):
try:
__import__('south')
except ImportError:
pass
else:
INSTALLED_APPS.append('south')
| 19.310345
| 67
| 0.639286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 208
| 0.371429
|
6291397ad5ddff8c105b169b9ffba9daa398c30c
| 4,154
|
py
|
Python
|
apps/site/api/serializers/video_serializer.py
|
LocalGround/localground
|
aa5a956afe7a84a7763a3b23d62a9fd925831cd7
|
[
"Apache-2.0"
] | 9
|
2015-05-29T22:22:20.000Z
|
2022-02-01T20:39:00.000Z
|
apps/site/api/serializers/video_serializer.py
|
LocalGround/localground
|
aa5a956afe7a84a7763a3b23d62a9fd925831cd7
|
[
"Apache-2.0"
] | 143
|
2015-01-22T15:03:40.000Z
|
2020-06-27T01:55:29.000Z
|
apps/site/api/serializers/video_serializer.py
|
LocalGround/localground
|
aa5a956afe7a84a7763a3b23d62a9fd925831cd7
|
[
"Apache-2.0"
] | 5
|
2015-03-16T20:51:49.000Z
|
2017-02-07T20:48:49.000Z
|
from rest_framework import serializers
from localground.apps.site.api.serializers.base_serializer import \
BaseSerializer, NamedSerializerMixin, ProjectSerializerMixin, \
GeometrySerializerMixin
from localground.apps.site import models, widgets
from localground.apps.site.api import fields
import re
class VideoSerializer(
GeometrySerializerMixin, NamedSerializerMixin, ProjectSerializerMixin,
BaseSerializer):
VIDEO_PROVIDERS = (
('vimeo', 'Vimeo'),
('youtube', 'YouTube')
)
ordering = serializers.SerializerMethodField()
video_provider = serializers.ChoiceField(
source='provider', choices=VIDEO_PROVIDERS, read_only=True)
def get_video_provider_and_id(self, video_link):
if video_link is None:
return {}
video_id = ''
if 'youtube' in video_link:
params = re.split(r'[&|\?]', video_link)
for s in params:
if 'v=' in s:
video_id = s.split('v=')[1]
break
try:
if len(video_id) == 11:
return {
'provider': 'youtube',
'video_id': video_id
}
else:
raise serializers.ValidationError(
'Error parsing Youtube URL')
except Exception:
raise serializers.ValidationError('Error parsing Youtube URL')
elif 'vimeo' in video_link:
video_id = video_link.split('?')[0]
video_id = video_id.split('/')[-1]
try:
if len(video_id) >= 7 and int(video_id):
return {
'provider': 'vimeo',
'video_id': video_id
}
else:
raise serializers.ValidationError(
'Error parsing Vimeo URL')
except Exception:
raise serializers.ValidationError('Error parsing Vimeo URL')
else:
raise serializers.ValidationError(
'This is neither YouTube nor Vimeo')
def get_ordering(self, obj):
try:
return obj.ordering
except Exception:
return None
def create(self, validated_data):
# Overriding the create method to handle file processing
self.validated_data.update(self.get_presave_create_dictionary())
attribution = validated_data.get('attribution') \
or validated_data.get('owner')
self.validated_data.update({'attribution': attribution})
self.validated_data.update(self.get_video_provider_and_id(
validated_data.get('video_link')
))
self.instance = self.Meta.model.objects.create(**self.validated_data)
return self.instance
class Meta:
model = models.Video
read_only_fields = ('video_id', 'video_provider')
fields = BaseSerializer.field_list + \
NamedSerializerMixin.field_list + \
GeometrySerializerMixin.field_list + \
ProjectSerializerMixin.field_list + (
'video_link', 'video_id', 'video_provider', 'attribution',
'ordering'
)
class VideoUpdateSerializer(VideoSerializer):
video_link = serializers.CharField(required=False)
def update(self, instance, validated_data):
# Recalculate the video provider and id if provided
validated_data.update(self.get_presave_update_dictionary())
validated_data.update(self.get_video_provider_and_id(
validated_data.get('video_link')
))
return super(VideoUpdateSerializer, self).update(
instance, validated_data)
class Meta:
model = models.Video
read_only_fields = ('video_id', 'video_provider')
fields = BaseSerializer.field_list + \
NamedSerializerMixin.field_list + \
ProjectSerializerMixin.field_list + (
'video_link', 'video_id', 'video_provider', 'geometry',
'attribution'
)
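# A standalone sketch of the YouTube-id extraction performed in
# get_video_provider_and_id above; the URL is illustrative, not project data.
def _example_youtube_id(video_link):
    for part in re.split(r'[&|\?]', video_link):
        if 'v=' in part:
            return part.split('v=')[1]
    return ''
assert _example_youtube_id(
    'https://www.youtube.com/watch?v=dQw4w9WgXcQ&t=42') == 'dQw4w9WgXcQ'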
| 37.089286
| 78
| 0.586904
| 3,840
| 0.92441
| 0
| 0
| 0
| 0
| 0
| 0
| 616
| 0.148291
|
6291f51816dba816b4da164bad84c6226c1b1dd7
| 957
|
py
|
Python
|
learning/async/test_async.py
|
Nephrin/Tut
|
9454be28fd37c155d0b4e97876196f8d33ccf8e5
|
[
"Apache-2.0"
] | 2
|
2019-06-23T07:17:30.000Z
|
2019-07-06T15:15:42.000Z
|
learning/async/test_async.py
|
Nephrin/Tut
|
9454be28fd37c155d0b4e97876196f8d33ccf8e5
|
[
"Apache-2.0"
] | null | null | null |
learning/async/test_async.py
|
Nephrin/Tut
|
9454be28fd37c155d0b4e97876196f8d33ccf8e5
|
[
"Apache-2.0"
] | 1
|
2019-06-23T07:17:43.000Z
|
2019-06-23T07:17:43.000Z
|
# Testing out some stuff with async and await
async def hello(name):
print ("hello" + name)
return "hello" + name
# We can use the await statement in coroutines to call
# coroutines as normal functions; i.e. running
# `return await func(*args)` in a coroutine
# is like running `return func2(*args)` in a normal def function
async def await_hello(func, *args):
return await func(*args)
def run(coro, *args):
try:
# We need to create the coroutine object before we start using it.
#
g = coro(*args)
# In order to run the coroutine from a normal function, we need to
# send it a value, in this case None.
g.send(None)
# However, the coroutine will run until it raises a StopIteration
# which we need to catch, and then catch the value of that
# exception, so as to find out what it has produced
except StopIteration as e:
return e.value
print(run(await_hello, hello, "wut"))
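# The same coroutine can also be driven by the event loop directly; a minimal
# equivalent using the standard library (Python 3.7+):
import asyncio
print(asyncio.run(await_hello(hello, "wut")))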
| 30.870968
| 70
| 0.695925
| 0
| 0
| 0
| 0
| 0
| 0
| 133
| 0.138976
| 653
| 0.682341
|
654e2a43d7785c171e931fd74c28957f5d42803a
| 465
|
py
|
Python
|
hmm_data.py
|
vvhitedog/pystan-hmm
|
48bcb3d94eff09b4002ffa4b1fa70740b4ee35e3
|
[
"BSD-2-Clause"
] | null | null | null |
hmm_data.py
|
vvhitedog/pystan-hmm
|
48bcb3d94eff09b4002ffa4b1fa70740b4ee35e3
|
[
"BSD-2-Clause"
] | null | null | null |
hmm_data.py
|
vvhitedog/pystan-hmm
|
48bcb3d94eff09b4002ffa4b1fa70740b4ee35e3
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/python
import numpy as np
from matplotlib import pyplot as plt
theta = np.asarray([[.8,.2],[.1,.9]])
psi = np.asarray([3.,9.])
N = 100
K = 2
# sample z
z = np.empty(N,dtype='int')
z[0] = 1
for i in range(1,N):
z[i] = np.random.choice(np.arange(K),size=1,replace=True,p=theta[z[i-1]])
y = np.random.randn(N) + psi[z]
plt.figure()
plt.subplot(2,1,1)
plt.plot(z)
plt.subplot(2,1,2)
plt.plot(y)
plt.show()
np.savez_compressed('hmm_data',y=y,z=z)
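# Optional sanity check (illustration only, not part of the saved data): for
# large N the empirical transition frequencies of z should be close to the
# rows of theta. A row may come out as nan if that state was never visited.
counts = np.zeros((K, K))
for a, b in zip(z[:-1], z[1:]):
    counts[a, b] += 1
print(counts / counts.sum(axis=1, keepdims=True))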
| 16.034483
| 77
| 0.630108
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 42
| 0.090323
|
654e6ec194bd32832a9d3c23d9b1b14efdd455a2
| 19,721
|
py
|
Python
|
s2e_env/commands/image_build.py
|
michaelbrownuc/s2e-env
|
4bd6a45bf1ec9456ed5acf5047b6aac3fcd19683
|
[
"BSD-3-Clause"
] | null | null | null |
s2e_env/commands/image_build.py
|
michaelbrownuc/s2e-env
|
4bd6a45bf1ec9456ed5acf5047b6aac3fcd19683
|
[
"BSD-3-Clause"
] | null | null | null |
s2e_env/commands/image_build.py
|
michaelbrownuc/s2e-env
|
4bd6a45bf1ec9456ed5acf5047b6aac3fcd19683
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Copyright (c) 2017 Cyberhaven
Copyright (c) 2017 Dependable Systems Laboratory, EPFL
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import glob
import grp
import logging
import os
import pwd
import re
import socket
import time
from threading import Thread
import psutil
from psutil import NoSuchProcess
from pyftpdlib.authorizers import DummyAuthorizer
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer
import sh
from sh import ErrorReturnCode
from s2e_env import CONSTANTS
from s2e_env.command import EnvCommand, CommandError
from s2e_env.utils import repos
from s2e_env.utils.images import ImageDownloader, get_image_templates, get_app_templates, get_all_images, \
translate_image_name
logger = logging.getLogger('image_build')
def _get_user_groups(user_name):
"""
Get a list of groups for the user ``user_name``.
"""
groups = [g.gr_name for g in grp.getgrall() if user_name in g.gr_mem]
gid = pwd.getpwnam(user_name).pw_gid
groups.append(grp.getgrgid(gid).gr_name)
return groups
def _get_user_name():
"""
Get the current user.
"""
return pwd.getpwuid(os.getuid())[0]
def _user_belongs_to(group_name):
"""
Check that the current user belongs to the ``group_name`` group.
"""
user_name = _get_user_name()
groups = _get_user_groups(user_name)
return group_name in groups
def _raise_group_error(group_name):
raise CommandError(f'You must belong to the {group_name} group in order to build '
'images. Please run the following command, then logout '
'and login:\n\n'
f'\tsudo usermod -a -G {group_name} $(whoami)')
def _check_groups_docker():
"""
Check that the current user belongs to the required groups to both run S2E and build S2E images.
"""
if not _user_belongs_to('docker'):
_raise_group_error('docker')
def _check_groups_kvm():
"""Being member of KVM is required only when using KVM to build images"""
if not _user_belongs_to('libvirtd') and not _user_belongs_to('kvm'):
_raise_group_error('kvm')
def _check_virtualbox():
"""
Check if VirtualBox is running. VirtualBox conflicts with S2E's requirement for KVM, so VirtualBox must
*not* be running together with S2E.
"""
# Adapted from https://github.com/giampaolo/psutil/issues/132#issuecomment-44017679
# to avoid race conditions
for proc in psutil.process_iter():
try:
if proc.name() == 'VBoxHeadless':
raise CommandError('S2E uses KVM to build images. VirtualBox '
'is currently running, which is not '
'compatible with KVM. Please close all '
'VirtualBox VMs and try again.')
except NoSuchProcess:
pass
def _check_vmware():
"""
Check if VMWare is running. VMware conflicts with S2E's requirement for KVM, so VMWare must
*not* be running together with S2E.
"""
for proc in psutil.process_iter():
try:
if proc.name() == 'vmware-vmx':
raise CommandError('S2E uses KVM to build images. VMware '
'is currently running, which is not '
'compatible with KVM. Please close all '
'VMware VMs and try again.')
except NoSuchProcess:
pass
def _check_kvm():
"""
Check that the KVM interface exists. This is required by libs2e to communicate with QEMU.
"""
if not os.path.exists(os.path.join(os.sep, 'dev', 'kvm')):
raise CommandError('KVM interface not found - check that /dev/kvm '
'exists. Alternatively, you can disable KVM (-n '
'option) or download pre-built images (-d option)')
def _check_vmlinux():
"""
Check that /boot/vmlinux* files are readable. This is important for guestfish.
"""
try:
for f in glob.glob(os.path.join(os.sep, 'boot', 'vmlinu*')):
with open(f, 'rb'):
pass
except IOError:
raise CommandError('Make sure that the kernels in /boot are readable. '
'This is required for guestfish. Please run the '
'following command:\n\n'
'sudo chmod ugo+r /boot/vmlinu*') from None
# pylint: disable=no-member
def _check_cow(image_dir):
"""
Check that the file system that stores guest images supports copy-on-write.
"""
try:
src = f'{image_dir}/.cowcheck'
dst = f'{image_dir}/.cowcheck1'
sh.touch(src)
sh.cp('--reflink=always', src, dst)
return True
except Exception:
warn_msg = f"""
Copy-on-write check failed.
The file system where images are stored ({image_dir}) does not support copy-on-write.
It is recommended to use an XFS or BTRFS file system with copy-on-write enabled as a storage
location for S2E images, as this can save up to 60% of disk space. The building process checkpoints
intermediate build steps with cp --reflink=auto to make use of copy-on-write if it is available.
How to upgrade:
1. Create an XFS or BTRFS partition large enough to store the images that you need (~300 GB for all images).
Make sure you use reflink=1 to enable copy-on-write when running mkfs.xfs.
2. Create a directory for guest images on that partition (e.g., /mnt/disk1/images)
3. Delete the "images" folder in your S2E environment
4. Create in your S2E environment a symbolic link called "images" to the directory you created in step 2
"""
logger.warning(re.sub(r'^ {8}', '', warn_msg, flags=re.MULTILINE))
return False
finally:
sh.rm('-f', src)
sh.rm('-f', dst)
def _raise_invalid_image(image_name):
raise CommandError(f'Invalid image name: {image_name}. Run ``s2e image_build`` '
'to list available images')
def _get_base_image_and_app(image_name):
x = image_name.split('/')
if len(x) == 1:
return x[0], None
if len(x) == 2:
return x
raise CommandError(f'Invalid image name {image_name}')
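# Quick illustration of the naming convention handled above (image names are
# hypothetical):
#   base, app = _get_base_image_and_app('debian-9.2.1-x86_64')          # -> 'debian-9.2.1-x86_64', None
#   base, app = _get_base_image_and_app('windows-7sp1pro-i386/chrome')  # -> 'windows-7sp1pro-i386', 'chrome'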
def _has_app_image(image_names):
for name in image_names:
if '/' in name:
return True
return False
def _check_product_keys(image_descriptors, image_names):
missing_keys = []
for image_name in image_names:
image = image_descriptors[image_name]
if 'product_key' in image:
if not image['product_key']:
missing_keys.append(image_name)
ios = image_descriptors[image_name].get('os', {})
if 'product_key' in ios:
if not ios['product_key']:
missing_keys.append(image_name)
if missing_keys:
logger.error('The following images require a product key:')
for image in missing_keys:
logger.error(' * %s', image)
raise CommandError('Please update images.json and/or apps.json.')
def _check_iso(templates, app_templates, iso_dir, image_names):
for image_name in image_names:
base_image, app_name = _get_base_image_and_app(image_name)
descriptors = [templates[base_image]]
if app_name:
descriptors.append(app_templates[app_name])
for desc in descriptors:
iso = desc.get('iso', {})
if iso.get('url', ''):
continue
name = iso.get('name', '')
if not name:
continue
if not iso_dir:
raise CommandError(
'Please use the --iso-dir option to specify the path '
f'to a folder that contains {name}'
)
path = os.path.join(iso_dir, name)
if not os.path.exists(path):
raise CommandError(f'The image {image_name} requires {path}, which could not be found')
def _is_port_available(port):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind(("127.0.0.1", port))
return True
except socket.error:
return False
finally:
s.close()
def _start_ftp_server(image_path, port):
authorizer = DummyAuthorizer()
authorizer.add_anonymous(image_path, perm='elradfmwMT')
handler = FTPHandler
handler.authorizer = authorizer
handler.masquerade_address = '10.0.2.2'
# QEMU slirp won't let the guest reconnect if timeout happens, so we disable it
handler.timeout = None
server = FTPServer(("127.0.0.1", port), handler)
thread = Thread(target=_run_ftp_server, args=[server])
thread.daemon = True
thread.start()
time.sleep(1)
return server
def _run_ftp_server(server):
try:
server.serve_forever()
finally:
logger.info('FTP server terminated')
server.close_all()
def _get_archive_rules(image_path, rule_names):
if _has_app_image(rule_names):
raise CommandError('Building archives of app images is not supported yet')
archive_rules = []
for r in rule_names:
archive_rules.append(os.path.join(image_path, f'{r}.tar.xz'))
logger.info('The following archives will be built:')
for a in archive_rules:
logger.info(' * %s', a)
return archive_rules
def _download_images(image_path, image_names, templates):
if _has_app_image(image_names):
raise CommandError('Downloading of app images is not supported yet')
image_downloader = ImageDownloader(templates)
image_downloader.download_images(image_names, image_path)
logger.info('Successfully downloaded images: %s', ', '.join(image_names))
class Command(EnvCommand):
"""
Builds an image.
"""
help = 'Build an image.'
def __init__(self):
super().__init__()
self._headless = True
self._use_kvm = True
self._num_cores = 1
self._has_cow = False
def add_arguments(self, parser):
super().add_arguments(parser)
parser.add_argument('name',
help='The name of the image to build. If empty,'
' shows available images', nargs='*')
parser.add_argument('-g', '--gui', action='store_true',
help='Display QEMU GUI during image build')
parser.add_argument('-c', '--cores', required=False, default=2,
type=int,
help='The number of cores used when building the '
'VM image. Defaults to 2')
parser.add_argument('-x', '--clean', action='store_true',
help='Deletes all images and rebuild them from '
'scratch')
parser.add_argument('-a', '--archive', action='store_true',
help='Creates an archive for the specified image')
parser.add_argument('-p', '--ftp-port', required=False, default=15468, type=int,
help='Port for the internal FTP server to receive files from guest VMs during build')
parser.add_argument('-d', '--download', action='store_true',
help='Download image from the repository instead '
'of building it')
parser.add_argument('-i', '--iso-dir',
help='Path to folder that stores ISO files of Windows images')
parser.add_argument('-n', '--no-kvm', action='store_true',
help='Disable KVM during image build')
def handle(self, *args, **options):
        # If the user asked for a GUI, don't run the build headless
if options['gui']:
self._headless = False
# If KVM has been explicitly disabled, don't use it during the build
if options['no_kvm']:
self._use_kvm = False
self._num_cores = options['cores']
# The path could have been deleted by a previous clean
if not os.path.exists(self.image_path()):
os.makedirs(self.image_path())
img_build_dir = self.source_path(CONSTANTS['repos']['images']['build'])
if options['clean']:
self._invoke_make(img_build_dir, ['clean'])
return
image_names = options['name']
templates = get_image_templates(img_build_dir)
app_templates = get_app_templates(img_build_dir)
images, image_groups, image_descriptors = get_all_images(templates, app_templates)
if not image_names:
self._print_image_list(images, image_groups, image_descriptors)
print('\nRun ``s2e image_build <name>`` to build an image. '
'Note that you must run ``s2e build`` **before** building '
'an image')
return
image_names = translate_image_name(images, image_groups, image_names)
logger.info('The following images will be built:')
for image in image_names:
logger.info(' * %s', image)
if options['download']:
_download_images(self.image_path(), image_names, templates)
return
rule_names = image_names
if options['archive']:
rule_names = _get_archive_rules(self.image_path(), image_names)
iso_dir = os.path.abspath(options['iso_dir']) if options['iso_dir'] else None
# Check for optional product keys and iso directories.
# These may or may not be required, depending on the set of images.
_check_product_keys(image_descriptors, image_names)
_check_iso(templates, app_templates, iso_dir, image_names)
if self._use_kvm:
_check_kvm()
_check_groups_kvm()
_check_groups_docker()
_check_vmlinux()
self._has_cow = _check_cow(self.image_path())
if self._use_kvm:
_check_virtualbox()
_check_vmware()
if not _is_port_available(options['ftp_port']):
raise CommandError(f'localhost:{options["ftp_port"]} is not available. Check that the port is free or '
'specify a port with --ftp-port')
# Clone kernel if needed.
# This is necessary if the s2e env has been initialized with -b flag.
self._clone_kernel()
server = _start_ftp_server(self.image_path(), options['ftp_port'])
self._invoke_make(img_build_dir, rule_names, options['ftp_port'], iso_dir)
logger.success('Built image(s) \'%s\'', ' '.join(image_names))
server.close_all()
def _invoke_make(self, img_build_dir, rule_names, ftp_port=0, iso_dir=''):
env = os.environ.copy()
env['S2E_INSTALL_ROOT'] = self.install_path()
env['S2E_LINUX_KERNELS_ROOT'] = \
self.source_path(CONSTANTS['repos']['images']['linux'])
env['OUTDIR'] = self.image_path()
env['QEMU_FTP_PORT'] = str(ftp_port)
env['ISODIR'] = iso_dir if iso_dir else ''
env['DEBUG_INTERMEDIATE_RULES'] = '1' if self._has_cow else '0'
logger.debug('Invoking makefile with:')
logger.debug('export S2E_INSTALL_ROOT=%s', env['S2E_INSTALL_ROOT'])
logger.debug('export S2E_LINUX_KERNELS_ROOT=%s', env['S2E_LINUX_KERNELS_ROOT'])
logger.debug('export OUTDIR=%s', env['OUTDIR'])
logger.debug('export ISODIR=%s', env.get('ISODIR', ''))
logger.debug('export DEBUG_INTERMEDIATE_RULES=%s', env.get('DEBUG_INTERMEDIATE_RULES', ''))
if self._headless:
logger.warning('Image creation will run in headless mode. '
'Use --gui to see graphic output for debugging')
else:
env['GRAPHICS'] = ''
if not self._use_kvm:
env['QEMU_KVM'] = ''
logger.warning('Image build without KVM. This will be slow')
try:
make = sh.Command('make').bake(file=os.path.join(img_build_dir,
'Makefile'),
directory=self.image_path(),
_env=env, _fg=True)
make_image = make.bake(j=self._num_cores, r=True, warn_undefined_variables=True)
make_image(sorted(rule_names))
except ErrorReturnCode as e:
raise CommandError(e) from e
def _clone_kernel(self):
kernels_root = self.source_path(CONSTANTS['repos']['images']['linux'])
if os.path.exists(kernels_root):
logger.info('Kernel repository already exists in %s', kernels_root)
return
logger.info('Cloning kernels repository to %s', kernels_root)
kernels_repo = CONSTANTS['repos']['images']['linux']
repos.git_clone_to_source(self.env_path(), kernels_repo)
def _print_image_list(self, images, image_groups, image_descriptors):
img_build_dir = self.source_path(CONSTANTS['repos']['images']['build'])
templates = get_image_templates(img_build_dir)
if not templates:
images_json_path = os.path.join(img_build_dir, 'images.json')
raise CommandError('No images available to build. Make sure that '
f'{images_json_path} exists and is valid')
def get_max_len(lst):
ret = 0
for item in lst:
if len(item) > ret:
ret = len(item)
return ret
print('Available image groups:')
max_group_len = get_max_len(image_groups)
for group in image_groups:
print(f' * {group:{max_group_len}} - Build {group} images')
print('\nAvailable images:')
max_image_len = get_max_len(images)
for image in sorted(images):
print(f' * {image:{max_image_len}} - {image_descriptors[image]["name"]}')
def _print_apps_list(self):
img_build_dir = self.source_path(CONSTANTS['repos']['images']['build'])
app_templates = get_app_templates(img_build_dir)
if not app_templates:
apps_json_path = os.path.join(img_build_dir, 'apps.json')
raise CommandError('No apps available to build. Make sure that '
f'{apps_json_path} exists and is valid')
print('Available applications:')
for app_template, desc in sorted(app_templates.items()):
for base_image in desc['base_images']:
print(f' * {base_image}/{app_template} - {desc["name"]}')
| 36.3186
| 120
| 0.617565
| 8,841
| 0.448304
| 0
| 0
| 0
| 0
| 0
| 0
| 7,675
| 0.389179
|
654f10479c322ddf45d78b5c826a8ece925a9fa9
| 11,758
|
py
|
Python
|
chart_analysis.py
|
DeltaEpsilon7787/SimfileLibrary
|
af9f0949c21ad69465c8444161e069148bec574d
|
[
"MIT"
] | null | null | null |
chart_analysis.py
|
DeltaEpsilon7787/SimfileLibrary
|
af9f0949c21ad69465c8444161e069148bec574d
|
[
"MIT"
] | null | null | null |
chart_analysis.py
|
DeltaEpsilon7787/SimfileLibrary
|
af9f0949c21ad69465c8444161e069148bec574d
|
[
"MIT"
] | null | null | null |
import collections
from functools import lru_cache
from itertools import groupby, permutations
from operator import attrgetter
from typing import Counter, FrozenSet, Generic, List, Tuple, TypeVar, Union, cast
from attr import attrs, evolve
from .basic_types import Beat, CheaperFraction, NoteObject, Time, make_ordered_set
from .complex_types import MeasureBPMPair, MeasureMeasurePair
from .rows import DECORATIVE_SET, GlobalDeltaRow, GlobalRow, GlobalTimedRow, HasPosition, HasRow, HasTime, \
LONG_NOTE_SET, PureRow, RowFlags
# PureNotefield - PureRow --> HasRow
# UntimedNotefield - GlobalRow --> HasRow, HasPosition
# TimedNotefield - GlobalTimedRow --> HasRow, HasPosition, HasTime
# DeltaNotefield - GlobalDeltaRow --> HasRow, HasPosition, HasTime, GlobalDeltaRow
# MetaNotefield - MetaRow
# SequentialNotefield - RowSequence
# T = TypeVar('T', bound=Union[HasRow, HasPosition, HasTime])
T = TypeVar('T', HasRow, HasPosition, HasTime)
@attrs(auto_attribs=True)
class MetaRow(Generic[T]):
"""Final evolutionary stage of rows, with attached metadata."""
_row: T
_kind: RowFlags
@property
def row(self) -> T:
return self._row
@property
def kind(self) -> 'RowFlags':
return self._kind
@classmethod
def from_row(cls, row):
return cls(row, RowFlags.classify_row(row))
class AbstractNotefield(Generic[T], List[T]):
@property
def alphabet_size(self) -> int:
return len(self.unique_elements)
@property
def hashed_flat(self) -> 'AbstractNotefield[int]':
return AbstractNotefield(
hash(obj)
for obj in self
)
@property
def unique_elements(self):
return frozenset(self)
@property
def occurrence_counter(self) -> Counter[T]:
return collections.Counter(self)
class PureNotefield(Generic[T], AbstractNotefield[Union[T, PureRow]], List[PureRow]):
@property
def hold_roll_bodies_distinct(self) -> 'PureNotefield[HasRow]':
"""This inserts HOLD_BODY and ROLL_BODY between hold/roll starts and ends respectively.
Ends are preserved.
        It's guaranteed that f(c) == f(f(c)) == f(f(f(c))) ..."""
new_note_field = []
active_holds = set()
active_rolls = set()
is_pure = self.__class__ == PureNotefield
for row in self:
active_holds -= row.row.find_object_lanes(NoteObject.HOLD_ROLL_END)
active_rolls -= row.row.find_object_lanes(NoteObject.HOLD_ROLL_END)
new_pure_row = [
(lane in active_holds and NoteObject.HOLD_BODY) or
(lane in active_rolls and NoteObject.ROLL_BODY) or
obj
for lane, obj in enumerate(row.row)
]
if is_pure:
new_note_field.append(PureRow(new_pure_row))
else:
new_note_field.append(
evolve(cast(T, row), row=PureRow(new_pure_row))
)
active_holds |= row.row.find_object_lanes(NoteObject.HOLD_START)
active_rolls |= row.row.find_object_lanes(NoteObject.ROLL_START)
return self.__class__(new_note_field)
@property
def ignore_empty_rows(self) -> 'PureNotefield[T]':
return self.__class__(
row
for row in self
if not row.row.is_empty
)
@property
def no_decorative_elements(self) -> 'PureNotefield[T]':
return self.__class__(
row.row.replace_objects(DECORATIVE_SET, NoteObject.EMPTY_LANE)
for row in self
)
@property
def ignore_pure_hold_roll_body_rows(self) -> 'PureNotefield[T]':
return self.__class__(
row
for row in self
if not row.is_pure_hold_roll_body
)
@property
def normalized(self) -> 'PureNotefield[T]':
return self.hold_roll_bodies_distinct.no_decorative_elements.ignore_empty_rows.ignore_pure_hold_roll_body_rows
@property
def permutative_notefield(self) -> 'AbstractNotefield[FrozenSet[T]]':
return AbstractNotefield(
row.permutative_set
for row in self
)
class UntimedNotefield(Generic[T], PureNotefield[GlobalRow], List[GlobalRow]):
def calculate_timings(self,
bpm_segments: List[MeasureBPMPair],
stop_segments: List[MeasureMeasurePair],
offset: Time) -> 'TimedNotefield':
bpm_segments = collections.deque(sorted(bpm_segments, key=attrgetter('measure')))
stop_segments = collections.deque(sorted(stop_segments, key=attrgetter('measure')))
note_field_deque = collections.deque(sorted(self, key=attrgetter('pos')))
elapsed_time = 0
last_measure = 0
last_bpm = bpm_segments.popleft()
next_stop = stop_segments.popleft() if stop_segments else None
new_note_field = []
while note_field_deque:
last_object = note_field_deque.popleft()
delta_measure = last_object.pos - last_measure
delta_time = 0
while True:
next_bpm = bpm_segments[0] if bpm_segments else None
if next_bpm and next_bpm.measure < last_object.pos:
delta_timing = next_bpm.measure - last_measure
delta_time += last_bpm.bpm.measures_per_second * delta_timing
delta_measure -= delta_timing
last_bpm = bpm_segments.popleft()
last_measure = last_bpm.measure
else:
break
delta_time += last_bpm.bpm.measures_per_second * delta_measure
while True:
if next_stop and next_stop.measure < last_measure + delta_measure:
delta_time += CheaperFraction(next_stop.value, last_bpm.bpm.measures_per_second)
next_stop = stop_segments.popleft() if stop_segments else None
else:
break
elapsed_time += delta_time
last_measure += delta_measure
new_note_field.append(
last_object.evolve(Time(elapsed_time - offset))
)
return TimedNotefield(new_note_field)
@property
def position_invariant(self) -> 'UntimedNotefield[T]':
return self.__class__(
obj.position_invariant
for obj in self
)
@property
def no_decorative_elements(self) -> 'UntimedNotefield[T]':
return self.__class__(
evolve(row, row=row.row.replace_objects({NoteObject.MINE, NoteObject.FAKE},
NoteObject.EMPTY_LANE))
for row in self
)
def row_sequence_by_beats(self, beat_window=1) -> 'SequentialNotefield[RowSequence[T, ...]]':
def group(row):
return int(row.pos / Beat(beat_window).as_measure)
result = [
RowSequence(
obj.localize(Beat(beat_window).as_measure)
for obj in group
)
for _, group in groupby(self, group)
]
return SequentialNotefield(result)
class TimedNotefield(Generic[T], UntimedNotefield[GlobalTimedRow], List[GlobalTimedRow]):
@property
def time_invariant(self):
return self.__class__(
obj.time_invariant
for obj in self
)
@property
def discrete_time(self) -> 'TimedNotefield':
return self.__class__(
evolve(row, time=row.time.limited_precision)
for row in self
)
@property
def miniholds_minirolls_as_taps(self):
hold_regrab_window = Time(250, 1000)
roll_tap_window = Time(500, 1000)
hold_coords = []
roll_coords = []
for index, row in enumerate(self):
hold_starts = self[index].find_object_lanes(NoteObject.HOLD_START)
roll_starts = self[index].find_object_lanes(NoteObject.ROLL_START)
if hold_starts or roll_starts:
for sub_index, sub_row in enumerate(self[index:], start=index):
ends = self[sub_index].find_object_lanes(NoteObject.HOLD_ROLL_END)
ended_holds = hold_starts & ends
ended_rolls = roll_starts & ends
hold_coords.extend((range(index, sub_index + 1), lane) for lane in ended_holds)
roll_coords.extend((range(index, sub_index + 1), lane) for lane in ended_rolls)
hold_starts -= ended_holds
roll_starts -= ended_rolls
if not (hold_starts | roll_starts):
break
hold_coords = [
pair
for pair in hold_coords
if self[pair[0].stop - 1].time - self[pair[0].start].time > hold_regrab_window
]
roll_coords = [
pair
            for pair in roll_coords
if self[pair[0].stop - 1].time - self[pair[0].start].time > roll_tap_window
]
combined_coords = hold_coords + roll_coords
def new_object(obj, self_index, lane):
if obj not in LONG_NOTE_SET:
return obj
is_safe = any(self_index in long_note_range and lane == long_note_lane
for long_note_range, long_note_lane in combined_coords)
if is_safe:
return obj
return obj == NoteObject.HOLD_START and NoteObject.TAP_OBJECT or NoteObject.EMPTY_LANE
return self.__class__(
evolve(row, row=PureRow(new_object(obj, index, lane)
for lane, obj in enumerate(row.row)))
for index, row in enumerate(self)
)
@property
def delta_field(self) -> 'DeltaNotefield':
delta_rows = [a.evolve(b) for a, b in zip(self[:-1:], self[1::])]
delta_rows.append(self[-1].evolve(self[-1]))
return DeltaNotefield(delta_rows)
class DeltaNotefield(Generic[T], TimedNotefield[GlobalDeltaRow], List[GlobalDeltaRow]):
@property
def delta_invariant(self):
return self.__class__(
obj.delta_invariant
for obj in self
)
@property
def pure_delta(self):
return self.position_invariant.time_invariant
@lru_cache(10)
def generate_permutative_maps(lanes=4):
return [
{
index: permutation[index]
for index in range(lanes)
}
for permutation in permutations(range(lanes))
]
class RowSequence(Tuple[T, ...], tuple, Generic[T]):
__new__ = tuple.__new__
@property
def permutative_group(self):
lanes = len(self[0].row)
maps = generate_permutative_maps(lanes)
return frozenset(make_ordered_set(
tuple(
obj.switch_lanes(lane_map)
for obj in self
)
for lane_map in maps
))
@property
def is_empty_sequence(self):
return all(
row.is_empty
for row in self
)
class SequentialNotefield(Generic[T], AbstractNotefield[RowSequence[T]], List[RowSequence[T]]):
def broadcast(self, function):
return self.__class__(
tuple(
function(obj)
for obj in seq
)
for seq in self
)
@property
def permutative_field(self):
return self.__class__(
seq.permutative_group
for seq in self
)
class MetaNotefield(Generic[T], AbstractNotefield[MetaRow], List[MetaRow]):
pass
# from simfile_parser import AugmentedChart
class BatchOperations(object):
pass
| 32.213699
| 118
| 0.608352
| 10,484
| 0.891648
| 0
| 0
| 7,266
| 0.617962
| 0
| 0
| 966
| 0.082157
|
654fb2a6de595344cf7603c4bae18a08ffdc2465
| 2,271
|
py
|
Python
|
websocket_chat_server.py
|
ringolol/ru-gpts
|
991ef3fb4dbe46f7512ec958704c6fa9998049ab
|
[
"Apache-2.0"
] | null | null | null |
websocket_chat_server.py
|
ringolol/ru-gpts
|
991ef3fb4dbe46f7512ec958704c6fa9998049ab
|
[
"Apache-2.0"
] | null | null | null |
websocket_chat_server.py
|
ringolol/ru-gpts
|
991ef3fb4dbe46f7512ec958704c6fa9998049ab
|
[
"Apache-2.0"
] | null | null | null |
# WS server chat
import asyncio
import websockets
import torch
from transformers import AutoTokenizer, AutoModel, AutoModelWithLMHead
model_name = "sberbank-ai/rugpt2large" # "sberbank-ai/rugpt3large_based_on_gpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelWithLMHead.from_pretrained(model_name)
model.to("cuda")
# string_init = '''- Привет. - сказал он.\n- Привет. - сказал я.\n'''
string_init = ''
string = string_init
async def chat(websocket, path):
global string
while True:
msg = await websocket.recv()
if msg == 'stop':
await websocket.send('stopped!')
asyncio.get_event_loop().stop()
print(string)
return
elif msg == 'clear':
await websocket.send('cleared!')
string = string_init
return
elif msg == 'history':
await websocket.send(f'history: \n{string}')
return
print(f'They: {msg}')
string += f'- {msg} - сказал он.\n-'
encoded_prompt = tokenizer.encode(string, add_special_tokens=False, return_tensors="pt")
encoded_prompt = encoded_prompt.to("cuda")
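        # Sample up to 50 new tokens with top-k / nucleus sampling, conditioned on the whole dialogue history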
output_sequences = model.generate(
input_ids=encoded_prompt,
max_length=len(encoded_prompt[0]) + 50,
temperature=1.,
top_k=10,
top_p=0.95,
repetition_penalty=1.,
do_sample=True,
num_return_sequences=1,
pad_token_id=50256,
)
if len(output_sequences.shape) > 2:
output_sequences.squeeze_()
generated_sequence = output_sequences[0].tolist()
text = tokenizer.decode(generated_sequence, clean_up_tokenization_spaces=True)
answer = text[len(string):].split('\n')[0]
stripped_answer = ''.join([sec for inx, sec in enumerate(answer.split(' - ')) if not inx % 2]).strip()
string += f'{answer}\n'
print(f'Me: {stripped_answer}')
await websocket.send(stripped_answer)
start_server = websockets.serve(chat, "192.168.1.3", 8765, ping_timeout=None, close_timeout=None)
asyncio.get_event_loop().run_until_complete(start_server)
print('waiting for message...')
asyncio.get_event_loop().run_forever()
| 30.689189
| 110
| 0.632321
| 0
| 0
| 0
| 0
| 0
| 0
| 1,592
| 0.690373
| 393
| 0.170425
|
6551a8bf653eda6b0c4ad77b9367ce0b7593d0e5
| 1,595
|
py
|
Python
|
mocket/utils.py
|
hectorcanto/python-mocket
|
76b6e26f51366a8f3494f3a767fb681af4e39ad8
|
[
"BSD-3-Clause"
] | null | null | null |
mocket/utils.py
|
hectorcanto/python-mocket
|
76b6e26f51366a8f3494f3a767fb681af4e39ad8
|
[
"BSD-3-Clause"
] | null | null | null |
mocket/utils.py
|
hectorcanto/python-mocket
|
76b6e26f51366a8f3494f3a767fb681af4e39ad8
|
[
"BSD-3-Clause"
] | null | null | null |
import binascii
import io
import os
import ssl
from .compat import decode_from_bytes, encode_to_bytes
SSL_PROTOCOL = ssl.PROTOCOL_SSLv23
class MocketSocketCore(io.BytesIO):
def write(self, content):
super(MocketSocketCore, self).write(content)
from mocket import Mocket
if Mocket.r_fd and Mocket.w_fd:
os.write(Mocket.w_fd, content)
def wrap_ssl_socket(
cls,
sock,
context,
keyfile=None,
certfile=None,
server_side=False,
cert_reqs=ssl.CERT_NONE,
ssl_version=SSL_PROTOCOL,
ca_certs=None,
do_handshake_on_connect=True,
suppress_ragged_eofs=True,
ciphers=None,
):
return cls(
sock=sock,
keyfile=keyfile,
certfile=certfile,
server_side=server_side,
cert_reqs=cert_reqs,
ssl_version=ssl_version,
ca_certs=ca_certs,
do_handshake_on_connect=do_handshake_on_connect,
suppress_ragged_eofs=suppress_ragged_eofs,
ciphers=ciphers,
_context=context,
)
def hexdump(binary_string):
r"""
>>> hexdump(b"bar foobar foo") == decode_from_bytes(encode_to_bytes("62 61 72 20 66 6F 6F 62 61 72 20 66 6F 6F"))
True
"""
bs = decode_from_bytes(binascii.hexlify(binary_string).upper())
return " ".join(a + b for a, b in zip(bs[::2], bs[1::2]))
def hexload(string):
r"""
>>> hexload("62 61 72 20 66 6F 6F 62 61 72 20 66 6F 6F") == encode_to_bytes("bar foobar foo")
True
"""
string_no_spaces = "".join(string.split())
return encode_to_bytes(binascii.unhexlify(string_no_spaces))
| 24.166667
| 117
| 0.664577
| 237
| 0.148589
| 0
| 0
| 0
| 0
| 0
| 0
| 263
| 0.16489
|
6551e1e19c5f8e4bf2d8b7c5b7b2115585ea7098
| 121
|
py
|
Python
|
exercicios_fixacao/lista04/repet_lim_ex6.py
|
PauloVictorSS/unicamp-mc102
|
077ca3ea6d3df40ebe205c2e874d20a934ea5541
|
[
"MIT"
] | null | null | null |
exercicios_fixacao/lista04/repet_lim_ex6.py
|
PauloVictorSS/unicamp-mc102
|
077ca3ea6d3df40ebe205c2e874d20a934ea5541
|
[
"MIT"
] | null | null | null |
exercicios_fixacao/lista04/repet_lim_ex6.py
|
PauloVictorSS/unicamp-mc102
|
077ca3ea6d3df40ebe205c2e874d20a934ea5541
|
[
"MIT"
] | null | null | null |
number = int(input())
raiz = number/2
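# Newton's (Babylonian) iteration: raiz converges to the square root of number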
for i in range(1,20):
raiz = ((raiz ** 2) + number) / (2 * raiz)
print(raiz)
| 15.125
| 46
| 0.561983
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
65532dd4c64895eea8a26b3bd064fbb56add0988
| 1,726
|
py
|
Python
|
load.py
|
leonjovanovic/drl-ppo-bipedal-walker
|
5005bb396b409079fbb2065a2ecd5c956ba86630
|
[
"MIT"
] | 1
|
2022-01-21T23:19:20.000Z
|
2022-01-21T23:19:20.000Z
|
load.py
|
leonjovanovic/drl-ppo-bipedal-walker
|
5005bb396b409079fbb2065a2ecd5c956ba86630
|
[
"MIT"
] | null | null | null |
load.py
|
leonjovanovic/drl-ppo-bipedal-walker
|
5005bb396b409079fbb2065a2ecd5c956ba86630
|
[
"MIT"
] | null | null | null |
import gym
import numpy as np
import torch
import Config
import NN
import json
PATH_DATA = 'models/data23.15.9.40.json'
PATH_MODEL = 'models/model23.15.9.40.p'
with open(PATH_DATA, 'r') as f:
json_load = json.loads(f.read())
obs_rms_mean = np.asarray(json_load["obs_rms_mean"])
obs_rms_var = np.asarray(json_load["obs_rms_var"])
epsilon = json_load["eps"]
env = gym.make(Config.ENV_NAME)
if Config.ENV_SCALE_CROP:
env = gym.wrappers.RecordEpisodeStatistics(env)
env = gym.wrappers.ClipAction(env)
env = gym.wrappers.RecordVideo(env, "bestRecordings", name_prefix="rl-video" + PATH_MODEL[12:22], )
state = env.reset()
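# Normalize the observation with the running mean/variance saved during training, then clip to [-10, 10]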
state = (state - obs_rms_mean) / np.sqrt(obs_rms_var + epsilon)
state = np.clip(state, -10, 10)
device = 'cuda' if torch.cuda.is_available() else 'cpu'
policy_nn = NN.PolicyNN(input_shape=state.shape[0], output_shape=env.action_space.shape[0]).to(device)
policy_nn.load_state_dict(torch.load(PATH_MODEL))
print("Episodes done [", end="")
for n_episode in range(Config.NUMBER_OF_EPISODES):
print('.', end="")
env.start_video_recorder()
while True:
#env.render()
actions, _, _ = policy_nn(torch.tensor(state, dtype=torch.float, device=device))
new_state, reward, done, _ = env.step(actions.cpu().detach().numpy())
state = new_state
state = (state - obs_rms_mean) / np.sqrt(obs_rms_var + epsilon)
state = np.clip(state, -10, 10)
if done:
state = env.reset()
print(env.return_queue[n_episode])
break
env.close_video_recorder()
print("]")
print(env.return_queue)
print(" Mean 100 test reward: " + str(np.round(np.mean(env.return_queue), 2)))
| 35.958333
| 103
| 0.669757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 193
| 0.111819
|
6558e8275ddb990e4df9e50898094d28930069c7
| 1,111
|
py
|
Python
|
generate_sl4_1.8.py
|
qiantianpei/anonymizer
|
5a1cd276b6c9766b54db187dd4750a04f7559665
|
[
"Apache-2.0"
] | null | null | null |
generate_sl4_1.8.py
|
qiantianpei/anonymizer
|
5a1cd276b6c9766b54db187dd4750a04f7559665
|
[
"Apache-2.0"
] | null | null | null |
generate_sl4_1.8.py
|
qiantianpei/anonymizer
|
5a1cd276b6c9766b54db187dd4750a04f7559665
|
[
"Apache-2.0"
] | 2
|
2020-10-28T18:35:44.000Z
|
2020-11-10T07:37:49.000Z
|
import json
from PIL import Image
with open('/home/tianpei.qian/workspace/data_local/sl4_front_1.0/sl4_side_val_1.7.json') as f:
val_1_7 = json.load(f)
with open('sl4_side_val_1.7/results.json') as f:
new_1_8 = json.load(f)
ROOT = '/home/tianpei.qian/workspace/data_local/sl4_front_1.0/'
for old, new in zip(val_1_7, new_1_8):
assert old['file'] == new['file']
im = Image.open(ROOT + old['file'])
im_width, im_height = im.size
for box in new['detections']:
new_box = {}
x_min, x_max, y_min, y_max = box['x_min'], box['x_max'], box['y_min'], box['y_max']
width, height = x_max - x_min, y_max - y_min
new_box['coord'] = [(x_min + x_max) / 2 / im_width, (y_min + y_max) / 2 / im_height, width / im_width, height / im_height]
new_box['meta'] = {'isfrontcar': False}
new_box['class'] = box['kind']
new_box['occluded'] = 'none'
new_box['score'] = box['score']
old['boxes'].append(new_box)
with open('/home/tianpei.qian/workspace/data_local/sl4_front_1.0/sl4_side_val_1.8.json', 'w') as f:
json.dump(val_1_7, f)
| 39.678571
| 130
| 0.633663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 377
| 0.339334
|
65599b2db0af8388cda22867211e56c2902c85cb
| 3,815
|
py
|
Python
|
experiments/vitchyr/icml2017/watermaze_memory/generate_bellman_ablation_figure_data.py
|
Asap7772/rail-rl-franka-eval
|
4bf99072376828193d05b53cf83c7e8f4efbd3ba
|
[
"MIT"
] | null | null | null |
experiments/vitchyr/icml2017/watermaze_memory/generate_bellman_ablation_figure_data.py
|
Asap7772/rail-rl-franka-eval
|
4bf99072376828193d05b53cf83c7e8f4efbd3ba
|
[
"MIT"
] | null | null | null |
experiments/vitchyr/icml2017/watermaze_memory/generate_bellman_ablation_figure_data.py
|
Asap7772/rail-rl-franka-eval
|
4bf99072376828193d05b53cf83c7e8f4efbd3ba
|
[
"MIT"
] | null | null | null |
"""
Generate data for ablation analysis for ICML 2017 workshop paper.
"""
import random
from torch.nn import functional as F
from railrl.envs.pygame.water_maze import (
WaterMazeMemory,
)
from railrl.exploration_strategies.ou_strategy import OUStrategy
from railrl.launchers.launcher_util import (
run_experiment,
)
from railrl.launchers.memory_bptt_launchers import bptt_ddpg_launcher
from railrl.pythonplusplus import identity
from railrl.memory_states.qfunctions import MemoryQFunction
from railrl.torch.rnn import GRUCell
if __name__ == '__main__':
n_seeds = 1
mode = "here"
exp_prefix = "dev-generate-bellman-ablation-figure-data"
run_mode = 'none'
n_seeds = 5
mode = "ec2"
exp_prefix = "generate-bellman_ablation-figure-data"
use_gpu = True
if mode != "here":
use_gpu = False
H = 25
subtraj_length = None
num_steps_per_iteration = 1000
num_steps_per_eval = 1000
num_iterations = 100
batch_size = 100
memory_dim = 100
version = "Our Method"
# noinspection PyTypeChecker
variant = dict(
memory_dim=memory_dim,
env_class=WaterMazeMemory,
env_params=dict(
horizon=H,
give_time=True,
),
memory_aug_params=dict(
max_magnitude=1,
),
algo_params=dict(
subtraj_length=subtraj_length,
batch_size=batch_size,
num_epochs=num_iterations,
num_steps_per_epoch=num_steps_per_iteration,
num_steps_per_eval=num_steps_per_eval,
discount=0.9,
use_action_policy_params_for_entire_policy=False,
action_policy_optimize_bellman=False,
write_policy_optimizes='bellman',
action_policy_learning_rate=0.001,
write_policy_learning_rate=0.0005,
qf_learning_rate=0.002,
max_path_length=H,
refresh_entire_buffer_period=None,
save_new_memories_back_to_replay_buffer=True,
write_policy_weight_decay=0,
action_policy_weight_decay=0,
do_not_load_initial_memories=False,
save_memory_gradients=False,
),
qf_class=MemoryQFunction,
qf_params=dict(
output_activation=identity,
fc1_size=400,
fc2_size=300,
ignore_memory=False,
),
policy_params=dict(
fc1_size=400,
fc2_size=300,
cell_class=GRUCell,
output_activation=F.tanh,
only_one_fc_for_action=False,
),
es_params=dict(
env_es_class=OUStrategy,
env_es_params=dict(
max_sigma=1,
min_sigma=None,
),
memory_es_class=OUStrategy,
memory_es_params=dict(
max_sigma=1,
min_sigma=None,
),
),
version=version,
)
for subtraj_length in [1, 5, 10, 15, 20, 25]:
variant['algo_params']['subtraj_length'] = subtraj_length
for exp_id, (
write_policy_optimizes,
version,
) in enumerate([
("bellman", "Bellman Error"),
("qf", "Q-Function"),
("both", "Both"),
]):
variant['algo_params']['write_policy_optimizes'] = (
write_policy_optimizes
)
variant['version'] = version
for _ in range(n_seeds):
seed = random.randint(0, 10000)
run_experiment(
bptt_ddpg_launcher,
exp_prefix=exp_prefix,
seed=seed,
mode=mode,
variant=variant,
exp_id=exp_id,
)
| 30.03937
| 69
| 0.5827
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 364
| 0.095413
|
6559e635e127e3a20be2565ece18e60b7e2271a6
| 1,053
|
py
|
Python
|
setup.py
|
briancappello/py-meta-utils
|
342a1ba804bd240044b96ae7ceb636c27fa5c95c
|
[
"MIT"
] | 1
|
2020-11-25T14:37:09.000Z
|
2020-11-25T14:37:09.000Z
|
setup.py
|
briancappello/py-meta-utils
|
342a1ba804bd240044b96ae7ceb636c27fa5c95c
|
[
"MIT"
] | null | null | null |
setup.py
|
briancappello/py-meta-utils
|
342a1ba804bd240044b96ae7ceb636c27fa5c95c
|
[
"MIT"
] | null | null | null |
from setuptools import setup
with open('README.md', encoding='utf-8') as f:
long_description = f.read()
setup(
name='Py-Meta-Utils',
version='0.7.8',
description='Metaclass utilities for Python',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/briancappello/py-meta-utils',
author='Brian Cappello',
license='MIT',
py_modules=['py_meta_utils'],
install_requires=[],
extras_require={
'docs': [
'm2r',
'sphinx<3',
'sphinx-material',
],
},
python_requires='>=3.5',
include_package_data=True,
zip_safe=False,
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
| 25.682927
| 61
| 0.603989
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 493
| 0.468186
|
6559f6467029322c3a83d9fc2e0fd805cb80dd9c
| 363
|
py
|
Python
|
wflow/doc/pcraster-for-doc-only/pcraster/pcrstat.py
|
quanpands/wflow
|
b454a55e4a63556eaac3fbabd97f8a0b80901e5a
|
[
"MIT"
] | null | null | null |
wflow/doc/pcraster-for-doc-only/pcraster/pcrstat.py
|
quanpands/wflow
|
b454a55e4a63556eaac3fbabd97f8a0b80901e5a
|
[
"MIT"
] | 2
|
2018-07-05T14:36:18.000Z
|
2020-03-19T21:16:37.000Z
|
build/bin/pcraster/framework/pcrstat.py
|
openearth/hydro-model-generator-wflow
|
6f689859d0a9a307324db3dbe3f03c51884a42a8
|
[
"MIT"
] | null | null | null |
import math
# Returns a percentile of an array. It is assumed that the array is already
# sorted.
#
# The array must not be empty.
#
# Percentile must have a value between [0, 1.0].
def percentile(array, percentile):
assert len(array)
assert percentile >= 0.0 and percentile <= 1.0
  # Clamp at zero so that a percentile of 0.0 returns the first (smallest) element
  index = max(int(math.ceil(percentile * len(array))) - 1, 0)
  return array[index]
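# Example (hypothetical data): percentile([1, 2, 3, 4], 0.5) returns 2,
# since ceil(0.5 * 4) - 1 == 1 selects the second element of the sorted array.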
| 25.928571
| 75
| 0.707989
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 164
| 0.451791
|
655a5cff4fee32ccc4f815a5371fc224869e3257
| 9,512
|
py
|
Python
|
asetk/format/yambo.py
|
ltalirz/asetk
|
bdb31934a5eb49d601e492fc98078d27f5dd2ebd
|
[
"MIT"
] | 18
|
2017-02-07T21:35:21.000Z
|
2021-09-02T13:44:36.000Z
|
asetk/format/yambo.py
|
ltalirz/asetk
|
bdb31934a5eb49d601e492fc98078d27f5dd2ebd
|
[
"MIT"
] | 4
|
2016-10-20T21:23:23.000Z
|
2020-05-07T07:35:31.000Z
|
asetk/format/yambo.py
|
ltalirz/asetk
|
bdb31934a5eb49d601e492fc98078d27f5dd2ebd
|
[
"MIT"
] | 11
|
2016-10-20T21:17:20.000Z
|
2021-04-13T15:23:47.000Z
|
"""Classes for use with Yambo
Representation of a spectrum.
Main functionality is to read from Yambo output, o.qp files
and also netcdf databases.
"""
import re
import copy as cp
import numpy as np
import asetk.atomistic.fundamental as fu
import asetk.atomistic.constants as atc
from . import cube
class Dispersion:
"""A Dispersion holds the k-points belonging to one spin"""
def __init__(self, energylevels=None, kvectors=None, weights=None):
"""Set up spectrum from a list of EnergyLevels."""
self.__energylevels = energylevels
self.__kvectors = kvectors
self.__weights = weights
@property
def energylevels(self):
"""Returns energylevelsi of all k-points."""
return self.__energylevels
@property
def kvectors(self):
return self.__kvectors
@property
def weights(self):
return self.__weights
@property
def energies(self):
"""Returns list of energy levels of all k-points."""
list = [el.energies for el in self.__energylevels]
return np.concatenate(list)
@property
def occupations(self):
"""Returns list of level occupations of all k-points."""
os = []
for el in self.__energylevels:
os = os + list(el.occupations)
return os
def copy(self, dispersion):
"""Performs deep copy of dispersion."""
self.__energylevels = [ el.copy() for el in dispersion.__energylevels ]
        self.__kvectors = cp.copy(dispersion.__kvectors)
        self.__weights = cp.copy(dispersion.__weights)
def shift(self, de):
for levels in self.__energylevels:
levels.shift(de)
def __str__(self):
text = "Dispersion containing {} k-points\n".format(len(self.__energylevels))
for i in range(len(self.__energylevels)):
e = self.__energylevels[i]
k = self.__kvectors[i]
text += 'k = ({:6.3f}, {:6.3f}, {:6.3f})'.format(k[0], k[1], k[2])
if self.__weights:
w = self.__weights[i]
text += ', w = {}'.format(w)
text += ' : {}\n'.format(e.__str__())
return text
def __getitem__(self, index):
return self.__energylevels[index]
@property
def nk(self):
return len(self.energylevels)
class Spectrum(object):
"""A Spectrum holds the data belonging to all spins"""
def __init__(self, energylevels=None):
"""Set up spectrum from a list of EnergyLevels."""
self.dispersions = [ Dispersion(energylevels) ]
@classmethod
def from_output(cls, fname, mode='QP'):
"""Creates Spectrum from Yambo output file"""
tmp = Spectrum()
tmp.read_from_output(fname, mode)
return tmp
@classmethod
def from_qp(cls, fname=None, mode='QP'):
"""Creates Spectrum from Yambo o.qp file"""
tmp = Spectrum()
tmp.read_from_qp(fname, mode)
return tmp
@classmethod
def from_netcdf_db(cls, fname=None, mode='QP'):
"""Creates Spectrum from Yambo netcdf database"""
tmp = Spectrum()
tmp.read_from_netcdf_db(fname, mode=mode)
return tmp
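    # Typical usage (file names are hypothetical):
    #   spec = Spectrum.from_output('yambo_report', mode='DFT')
    #   spec = Spectrum.from_qp('o.qp')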
@property
def energies(self):
"""Returns list of energies e[ispin][ibnd]."""
list = [disp.energies for disp in self.dispersions]
return list
@property
def energylevels(self):
"""Returns list of Energylevels l[ispin][ibnd]."""
list = []
for d in self.dispersions:
sum = fu.Energylevels()
for el in d.energylevels:
sum += el
list.append(sum)
return list
@property
def occupations(self):
"""Returns list of level occupations of all spins."""
os = []
for disp in self.dispersions:
os = os + disp.occupations
return os
@property
def nspin(self):
return len(self.dispersions)
def copy(self, spectrum):
"""Performs deep copy of spectrum."""
self.dispersions = [ el.copy() for el in spectrum.dispersions ]
self.spins = cp.copy(spectrum.spins)
def shift(self, de):
for disp in self.dispersions:
disp.shift(de)
def __str__(self):
text = "Spectrum containing {} spins\n".format(len(self.dispersions))
for i in range(len(self.dispersions)):
d = self.dispersions[i]
s = self.spins[i]
text += 'spin {} : {}\n'.format(s+1, d.__str__())
return text
def __getitem__(self, index):
        return self.dispersions[index]
def read_from_output(self, fname, mode=None):
s = open(fname, 'r').read()
floatregex = '-?\d+\.\d+'
lineregex='[^\r\n]*\r?\n'
#blanklineregex='(?:^\s*$)'
if mode == 'DFT' or mode == None:
kptregex = 'X\* K.*?: ({f})\s*({f})\s*({f}).*?weight\s*({f}){l}(.*?)[\*\[]'\
.format(f=floatregex,l=lineregex)
fermiregex='Fermi Level.*?:(\s*[\-\d\.]+)'
elif mode == 'QP':
kptregex = 'Q?P \[eV\].*?:\s*({f})\s+({f})\s+({f})(.*?)[Q\[]'\
.format(f=floatregex)
self.spins=[]
self.dispersions=[]
# No spin for the moment, but shouldn't be too difficult to extend
for spin in [0]:
disp = Dispersion()
matches=re.findall(kptregex, s, re.DOTALL)
if mode == 'DFT' or mode == None:
fermi = float(re.search(fermiregex, s).group(1))
energylevels = []
kvectors = []
weights = []
for match in matches:
kx, ky, kz, weight, ldata = match
kvectors.append( np.array([kx, ky, kz], dtype=float) )
weights.append( float(weight) )
energies = re.findall('({f})'.format(f=floatregex), ldata, re.DOTALL)
energies = np.array(energies, dtype=float)
levels = fu.EnergyLevels(energies=energies,occupations=None, fermi=fermi)
energylevels.append(levels)
disp = Dispersion(energylevels=energylevels, kvectors=kvectors, weights=weights)
elif mode == 'QP':
energylevels = []
kvectors = []
for match in matches:
kx, ky, kz, ldata = match
kvectors.append( np.array([kx, ky, kz], dtype=float) )
energies = re.findall('E=\s*({f})'.format(f=floatregex), ldata, re.DOTALL)
energies = np.array(energies, dtype=float)
levels = fu.EnergyLevels(energies=energies)
energylevels.append(levels)
disp = Dispersion(energylevels=energylevels, kvectors=kvectors)
self.dispersions.append(disp)
self.spins.append(spin)
def read_from_qp(self, fname="o.qp", ihomo=None):
"""Read from o.qp output (has more digits than in report.
Anyhow, the proper way would be to read the database"""
s = open(fname, 'r').read()
data = np.genfromtxt(fname, dtype=float)
energies = data[:,2] + data[:,3]
# setting HOMO to zero
if ihomo:
energies -= energies[ihomo]
self.spins=[]
self.dispersions=[]
# No spin for the moment, but shouldn't be too difficult to extend
for spin in [0]:
levels = fu.EnergyLevels(energies=energies,occupations=None)
disp = Dispersion(energylevels=[levels], kvectors = [ (0,0,0) ] )
self.dispersions.append(disp)
self.spins.append(spin)
def read_from_netcdf_db(self, fname="ndb.QP", mode="QP"):
"""Read from netCDF database
requires netCDF4 python module"""
from netCDF4 import Dataset
f = Dataset(fname, 'r')
SPIN_VARS = f.variables['SPIN_VARS'][:]
QP_kpts = f.variables['QP_kpts'][:]
QP_table = f.variables['QP_table'][:]
QP_E_Eo_Z = f.variables['QP_E_Eo_Z'][:]
f.close()
nspin = len(SPIN_VARS)
nk = QP_kpts.shape[1]
kpts = [ QP_kpts[:,ik] for ik in range(nk) ]
ibnds, dum, iks, ispins = QP_table
nbnd = len(ibnds) / (nspin * nk)
if mode == "QP":
iener = 0
elif mode == "DFT":
iener = 1
        else:
            raise ValueError("Did not recognize mode '{}'.".format(mode))
self.spins=[]
self.dispersions=[]
for ispin in range(nspin):
is_spin = np.where(ispins == SPIN_VARS[ispin])[0]
energylevels = []
kvectors = []
for ik in range(nk):
k = kpts[ik]
is_k = np.where(iks == ik+1)[0]
# still need to figure out the first index
# is it real vs. complex?
e = QP_E_Eo_Z[0, np.intersect1d(is_spin,is_k), iener] * atc.Ha / atc.eV
levels = fu.EnergyLevels(energies=e,occupations=None)
kvectors.append(k)
energylevels.append(levels)
disp = Dispersion(energylevels=energylevels, kvectors = kvectors)
self.dispersions.append(disp)
self.spins.append(ispin)
## setting HOMO to zero
#if ihomo:
# energies -= energies[ihomo]
| 30.983713
| 96
| 0.54815
| 9,205
| 0.967725
| 0
| 0
| 2,067
| 0.217304
| 0
| 0
| 1,867
| 0.196278
|
655a775ff795ff440709723d33d45d1b64c46416
| 733
|
py
|
Python
|
Chapter02/unittests/testExercise2_07.py
|
nijinjose/The-Supervised-Learning-Workshop
|
33a2fec1e202dc1394116ed7a194bd8cabb61d49
|
[
"MIT"
] | 19
|
2020-03-24T20:35:22.000Z
|
2022-01-03T19:19:48.000Z
|
Chapter02/unittests/testExercise2_07.py
|
thisabhijit/The-Supervised-Learning-Workshop
|
33a2fec1e202dc1394116ed7a194bd8cabb61d49
|
[
"MIT"
] | null | null | null |
Chapter02/unittests/testExercise2_07.py
|
thisabhijit/The-Supervised-Learning-Workshop
|
33a2fec1e202dc1394116ed7a194bd8cabb61d49
|
[
"MIT"
] | 50
|
2020-01-03T10:22:30.000Z
|
2022-01-15T07:54:26.000Z
|
import unittest
import os
import json
import pandas as pd
import numpy as np
class TestingExercise2_07(unittest.TestCase):
def setUp(self) -> None:
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(ROOT_DIR, '..', 'dtypes.json'), 'r') as jsonfile:
self.dtyp = json.load(jsonfile)
self.data = pd.read_csv(os.path.join(ROOT_DIR, '..', 'Datasets', 'earthquake_data.csv'),
dtype = self.dtyp)
def test_object_vars(self):
self.object_variables = self.data.select_dtypes(include = [np.object]).nunique().sort_values()
self.assertEqual(max(self.object_variables), (3821))
if __name__ == '__main__':
unittest.main()
| 34.904762
| 102
| 0.649386
| 607
| 0.828104
| 0
| 0
| 0
| 0
| 0
| 0
| 65
| 0.088677
|
655b3273748ed0aac050a9902d9955141b494618
| 6,684
|
py
|
Python
|
models/cfnet.py
|
Cousin-Zan/Semantic-Segmentation-for-Steel-Strip-Surface-Defect-Detection
|
ae135743cc190cdf5a04b5d3ca04dde44f6dd58a
|
[
"Apache-2.0"
] | null | null | null |
models/cfnet.py
|
Cousin-Zan/Semantic-Segmentation-for-Steel-Strip-Surface-Defect-Detection
|
ae135743cc190cdf5a04b5d3ca04dde44f6dd58a
|
[
"Apache-2.0"
] | null | null | null |
models/cfnet.py
|
Cousin-Zan/Semantic-Segmentation-for-Steel-Strip-Surface-Defect-Detection
|
ae135743cc190cdf5a04b5d3ca04dde44f6dd58a
|
[
"Apache-2.0"
] | 1
|
2022-01-20T06:06:21.000Z
|
2022-01-20T06:06:21.000Z
|
"""
The implementation of PAN (Pyramid Attention Networks) based on Tensorflow.
@Author: Yang Lu
@Author: Zan Peng
@Github: https://github.com/luyanger1799
@Github: https://github.com/Cousin-Zan
@Project: https://github.com/luyanger1799/amazing-semantic-segmentation
"""
from utils import layers as custom_layers
from models import Network
import tensorflow as tf
from utils.layers import Concatenate
layers = tf.keras.layers
models = tf.keras.models
backend = tf.keras.backend
class CFNET(Network):
def __init__(self, num_classes, version='CFNET', base_model='OSA', **kwargs):
"""
The initialization of CFNET.
:param num_classes: the number of predicted classes.
:param version: 'CFNET'
:param base_model: the backbone model
:param kwargs: other parameters
"""
base_model = 'ResNet50' if base_model is None else base_model
assert version == 'CFNET'
if base_model == 'OSA':
self.up_size = [(2, 2), (2, 2), (2, 2), (2, 2)]
else:
raise ValueError('The base model \'{model}\' is not '
'supported in CFNET.'.format(model=base_model))
super(CFNET, self).__init__(num_classes, version, base_model, **kwargs)
def __call__(self, inputs=None, input_size=None, **kwargs):
assert inputs is not None or input_size is not None
if inputs is None:
assert isinstance(input_size, tuple)
inputs = layers.Input(shape=input_size + (3,))
return self._cfnet(inputs)
def _conv_bn_relu(self, x, filters, kernel_size, strides=1):
x = layers.Conv2D(filters, kernel_size, strides, padding='same', kernel_initializer='he_normal')(x)
x = layers.BatchNormalization()(x)
x = layers.ReLU()(x)
return x
def _fpa(self, x, out_filters):
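        # Feature Pyramid Attention: pooled convolution branches at several scales are fused
        # back up into a multi-scale attention map that re-weights the 1x1-projected input,
        # with a global-pooling branch added back as context.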
_, h, w, _ = backend.int_shape(x)
glb = custom_layers.GlobalMaxPooling2D(keep_dims=True)(x)
glb = layers.Conv2D(out_filters, 1, strides=1, kernel_initializer='he_normal')(glb)
# down
down1 = layers.MaxPooling2D(pool_size=(2, 2))(x)
down1 = self._conv_bn_relu(down1, out_filters, 3, 1)
down1 = self._conv_bn_relu(down1, out_filters, 3, 1)
down1 = self._conv_bn_relu(down1, out_filters, 3, 1)
down2 = layers.MaxPooling2D(pool_size=(2, 2))(down1)
down2 = self._conv_bn_relu(down2, out_filters, 3, 1)
down2 = self._conv_bn_relu(down2, out_filters, 3, 1)
down3 = layers.MaxPooling2D(pool_size=(2, 2))(down2)
down3 = self._conv_bn_relu(down3, out_filters, 3, 1)
down1 = self._conv_bn_relu(down1, out_filters, 3, 1)
down1 = self._conv_bn_relu(down1, out_filters, 3, 1)
down1 = self._conv_bn_relu(down1, out_filters, 3, 1)
down2 = self._conv_bn_relu(down2, out_filters, 3, 1)
down2 = self._conv_bn_relu(down2, out_filters, 3, 1)
down3 = self._conv_bn_relu(down3, out_filters, 3, 1)
# up
up2 = layers.UpSampling2D(size=(2, 2))(down3)
up2 = layers.Add()([up2, down2])
up1 = layers.UpSampling2D(size=(2, 2))(up2)
up1 = layers.Add()([up1, down1])
up = layers.UpSampling2D(size=(2, 2))(up1)
x = layers.Conv2D(out_filters, 1, strides=1, kernel_initializer='he_normal')(x)
x = layers.BatchNormalization()(x)
# multiply
x = layers.Multiply()([x, up])
# add
x = layers.Add()([x, glb])
return x
def _gau(self, x, y, out_filters, up_size=(2, 2)):
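        # Global Attention Upsample: global context from the low-resolution branch (y) gates
        # the high-resolution features (x) before the upsampled branch is added back in.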
glb = custom_layers.GlobalAveragePooling2D(keep_dims=True)(y)
glb = layers.Conv2D(out_filters, 1, strides=1, activation='sigmoid', kernel_initializer='he_normal')(glb)
x = self._conv_bn_relu(x, out_filters, 3, 1)
x = layers.Multiply()([x, glb])
# y = layers.UpSampling2D(size=up_size, interpolation='bilinear')(y)
y = layers.Conv2DTranspose(out_filters, 3, strides=2, padding='same', use_bias=False)(y)
y = layers.Add()([x, y])
return y
def _fmpm(self, inputs):
num_classes = self.num_classes
_, h, w, _ = backend.int_shape(inputs)
bn_axis = 3 if backend.image_data_format() == 'channels_last' else 1
conv01 = self._conv_bn_relu(inputs, 64, 3, 1)
conv11 = self._conv_bn_relu(conv01, 64, 3, 1)
mp1 = layers.MaxPool2D(pool_size=2, strides=2, padding='same')(conv11)
conv21 = self._conv_bn_relu(mp1, 64, 3, 1)
mp2 = layers.MaxPool2D(pool_size=2, strides=2, padding='same')(conv21)
conv31 = self._conv_bn_relu(mp2, 64, 3, 1)
mp3 = layers.MaxPool2D(pool_size=2, strides=2, padding='same')(conv31)
conv41 = self._conv_bn_relu(mp3, 64, 3, 1)
mp4 = layers.MaxPool2D(pool_size=2, strides=2, padding='same')(conv41)
conv51 = self._conv_bn_relu(mp4, 64, 3, 1)
up1 = layers.UpSampling2D(size=2, interpolation='bilinear')(conv51)
c1 = Concatenate(out_size=(h, w), axis=bn_axis)([conv41, up1])
conv61 = self._conv_bn_relu(c1, 64, 3, 1)
up2 = layers.UpSampling2D(size=2, interpolation='bilinear')(conv61)
c2 = Concatenate(out_size=(h, w), axis=bn_axis)([conv31, up2])
conv71 = self._conv_bn_relu(c2, 64, 3, 1)
up3 = layers.UpSampling2D(size=2, interpolation='bilinear')(conv71)
c3 = Concatenate(out_size=(h, w), axis=bn_axis)([conv21, up3])
conv81 = self._conv_bn_relu(c3, 64, 3, 1)
up4 = layers.UpSampling2D(size=2, interpolation='bilinear')(conv81)
c4 = Concatenate(out_size=(h, w), axis=bn_axis)([conv11, up4])
conv91 = self._conv_bn_relu(c4, 64, 3, 1)
conv92 = self._conv_bn_relu(conv91, num_classes, 3, 1)
return conv92
def _cfnet(self, inputs):
num_classes = self.num_classes
up_size = self.up_size
_, h, w, _ = backend.int_shape(inputs)
bn_axis = 3 if backend.image_data_format() == 'channels_last' else 1
c1, c2, c3, c4, c5 = self.encoder(inputs, output_stages=['c1', 'c2', 'c3', 'c4', 'c5'])
y = self._fpa(c5, 544)
y = self._gau(c4, y, 352, up_size[0])
y = self._gau(c3, y, 208, up_size[1])
y = self._gau(c2, y, 112, up_size[2])
y = self._gau(c1, y, 48, up_size[3])
y = self._conv_bn_relu(y, num_classes, 1, 1)
# y = layers.UpSampling2D(size=up_size[3], interpolation='bilinear',name='coarse')(y)
cmpm_output = Concatenate(out_size=(h, w), axis=bn_axis)([y]*8)
outputs = self._fmpm(cmpm_output)
print(y)
print(outputs)
return models.Model(inputs, [y, outputs], name=self.version)
| 36.52459
| 113
| 0.623429
| 6,199
| 0.927439
| 0
| 0
| 0
| 0
| 0
| 0
| 951
| 0.14228
|
655c497fe83653b39f7896791196ee547dcad9d2
| 2,340
|
py
|
Python
|
api/test/test_cli/test_bra_record_helper/test_persist.py
|
RemiDesgrange/nivo
|
e13dcd7c00d1fbc41c23d51c9004901d7704b498
|
[
"MIT"
] | 2
|
2019-05-07T20:23:59.000Z
|
2020-04-26T11:18:38.000Z
|
api/test/test_cli/test_bra_record_helper/test_persist.py
|
RemiDesgrange/nivo
|
e13dcd7c00d1fbc41c23d51c9004901d7704b498
|
[
"MIT"
] | 89
|
2019-08-06T12:47:50.000Z
|
2022-03-28T04:03:25.000Z
|
api/test/test_cli/test_bra_record_helper/test_persist.py
|
RemiDesgrange/nivo
|
e13dcd7c00d1fbc41c23d51c9004901d7704b498
|
[
"MIT"
] | 1
|
2020-06-23T10:07:38.000Z
|
2020-06-23T10:07:38.000Z
|
from uuid import UUID
from sqlalchemy import select, bindparam
from nivo_api.cli.bra_record_helper.persist import persist_zone, persist_massif
from nivo_api.core.db.connection import connection_scope
from nivo_api.core.db.models.sql.bra import ZoneTable, DepartmentTable, MassifTable
from test.pytest_fixtures import database
class TestPersistZone:
def test_insert_zone(self, database):
with connection_scope(database.engine) as con:
r = persist_zone(con, "this_is_a_test")
assert isinstance(r, UUID)
def test_multi_insert(self, database):
with connection_scope(database.engine) as con:
uuid_list = list()
for _ in range(5):
uuid_list.append(persist_zone(con, "this_is_a_test"))
for x in uuid_list:
assert isinstance(x, UUID)
assert all(x == uuid_list[0] for x in uuid_list)
class TestPersistMassif:
def test_massif(self, database):
with connection_scope(database.engine) as con:
r = persist_massif(
con,
"CHABLAIS",
{"name": "Haute-savoie", "number": "74"},
"Alpes du Nord",
)
assert isinstance(r, UUID)
def test_multi_massif(self, database):
with connection_scope(database.engine) as con:
r1 = persist_massif(
con,
"CHABLAIS",
{"name": "Haute-savoie", "number": "74"},
"Alpes du Nord",
)
r2 = persist_massif(
con,
"MONT-BLANC",
{"name": "Haute-savoie", "number": "74"},
"Alpes du Nord",
)
assert isinstance(r1, UUID)
assert isinstance(r2, UUID)
req = (
select([ZoneTable.c.z_id, DepartmentTable.c.d_id])
.select_from(ZoneTable.join(DepartmentTable).join(MassifTable))
.where(MassifTable.c.m_id == bindparam("massif"))
)
id1 = con.execute(req, massif=r1).first()
id2 = con.execute(req, massif=r2).first()
assert id1.z_id == id2.z_id
assert id1.d_id == id2.d_id
class TestPersistBra:
def test_persist_bra(self):
raise NotImplementedError()
| 33.428571
| 83
| 0.571368
| 2,001
| 0.855128
| 0
| 0
| 0
| 0
| 0
| 0
| 213
| 0.091026
|
655d1df65aed622243c78b66ce8d27bd0eb7b948
| 780
|
py
|
Python
|
FractionalKnapsack.py
|
shatheesh171/greedy-algos
|
2826f57be7b7c6223e266cf2283cc79e040e094a
|
[
"MIT"
] | null | null | null |
FractionalKnapsack.py
|
shatheesh171/greedy-algos
|
2826f57be7b7c6223e266cf2283cc79e040e094a
|
[
"MIT"
] | null | null | null |
FractionalKnapsack.py
|
shatheesh171/greedy-algos
|
2826f57be7b7c6223e266cf2283cc79e040e094a
|
[
"MIT"
] | null | null | null |
class Item:
def __init__(self,weight,value) -> None:
self.weight=weight
self.value=value
self.ratio=value/weight
def knapsackMethod(items,capacity):
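    # Greedy strategy: take items in decreasing value/weight ratio; when the next
    # item no longer fits entirely, take just the fraction that fills the capacity.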
items.sort(key=lambda x: x.ratio,reverse=True)
usedCapacity=0
totalValue=0
for i in items:
if usedCapacity+i.weight<=capacity:
usedCapacity+=i.weight
totalValue+=i.value
else:
unusedWeight=capacity-usedCapacity
value=i.ratio*unusedWeight
usedCapacity+=unusedWeight
totalValue+=value
if usedCapacity==capacity:
break
print("Total value obtained: "+str(totalValue))
item1=Item(20,100)
item2=Item(30,120)
item3=Item(10,60)
cList=[item1,item2,item3]
knapsackMethod(cList,50)
| 25.16129
| 51
| 0.637179
| 140
| 0.179487
| 0
| 0
| 0
| 0
| 0
| 0
| 24
| 0.030769
|
655e6832b1d17e6f32fa6e6e3a8b08c294e9b6de
| 4,146
|
py
|
Python
|
jspp_imageutils/image/chunking.py
|
jspaezp/jspp_imageutils
|
6376e274a1b0675622a7979c181b9effc125aa09
|
[
"Apache-2.0"
] | null | null | null |
jspp_imageutils/image/chunking.py
|
jspaezp/jspp_imageutils
|
6376e274a1b0675622a7979c181b9effc125aa09
|
[
"Apache-2.0"
] | null | null | null |
jspp_imageutils/image/chunking.py
|
jspaezp/jspp_imageutils
|
6376e274a1b0675622a7979c181b9effc125aa09
|
[
"Apache-2.0"
] | null | null | null |
import itertools
import numpy as np
from jspp_imageutils.image.types import GenImgArray, GenImgBatch
from typing import Tuple, Iterable, Iterator
# TODO: fix everywhere the x and y axis nomenclature
"""
chunk_image_on_position -> returns images
chunk_image_generator -> returns images
chunk_data_image_generator -> returns batches of data
"""
def chunk_image_on_position(arr_img: GenImgArray,
x_pos: Iterable[int], y_pos: Iterable[int],
dimensions: Tuple[int, int] = (50, 50),
warn_leftovers=True) -> \
Iterator[Tuple[int, int, GenImgArray]]:
# TODO decide if this should handle centering the points ...
x_ends = [x + dimensions[0] for x in x_pos]
y_ends = [y + dimensions[1] for y in y_pos]
i = 0
# TODO find a better way to indent this ...
for y_start, y_end, x_start, x_end in \
zip(y_pos, y_ends, x_pos, x_ends):
temp_arr_img = arr_img[x_start:x_end, y_start:y_end, ]
if temp_arr_img.shape[0:2] == dimensions:
yield x_start, y_start, temp_arr_img
i += 1
else:
if warn_leftovers:
print("skipping chunk due to weird size",
str(temp_arr_img.shape))
print("Image generator yielded ", str(i), " images")
def chunk_image_generator(img,
chunk_size: Tuple[int, int] = (500, 500),
displacement: Tuple[int, int] = (250, 250),
warn_leftovers=True) -> \
Iterator[Tuple[int, int, GenImgArray]]:
"""
Gets an image read with tensorflow.keras.preprocessing.image.load_img
and returns a generator that iterates over rectangular areas of it.
chunks are of dims (chunk_size, colors)
"""
# TODO unify the input for this guy ...
arr_img = np.asarray(img)
dims = arr_img.shape
x_starts = [
displacement[0] * x for x in range(dims[0] // displacement[0])
]
    x_starts = [x for x in x_starts if
                x >= 0 and (x + chunk_size[0]) < dims[0]]
y_starts = [
displacement[1] * y for y in range(dims[1] // displacement[1])
]
    y_starts = [y for y in y_starts if
                y >= 0 and (y + chunk_size[1]) < dims[1]]
coord_pairs = itertools.product(x_starts, y_starts)
coord_pairs = np.array(list(coord_pairs))
my_gen = chunk_image_on_position(
arr_img, coord_pairs[:, 0], coord_pairs[:, 1],
dimensions=chunk_size, warn_leftovers=warn_leftovers)
for chunk in my_gen:
yield(chunk)
def chunk_data_image_generator(img: GenImgArray,
chunk_size: Tuple[int, int] = (500, 500),
displacement: Tuple[int, int] = (250, 250),
batch: int = 16) -> GenImgBatch:
"""
chunk_data_image_generator [summary]
Gets an image read with tensorflow.keras.preprocessing.image.load_img
and returns a generator that iterates over BATCHES of rectangular
areas of it
dimensions are (batch, chunk_size, colors)
:param img: [description]
:type img: GenImgArray
:param chunk_size: [description], defaults to (500, 500)
:type chunk_size: Tuple[int, int], optional
:param displacement: [description], defaults to (250, 250)
:type displacement: Tuple[int, int], optional
:param batch: [description], defaults to 16
:type batch: int, optional
:return: [description]
:rtype: GenImgBatch
"""
# np.concatenate((a1, a2))
img_generator = chunk_image_generator(
img=img, chunk_size=chunk_size,
displacement=displacement)
counter = 0
img_buffer = []
for _, _, temp_arr_img in img_generator:
tmp_arr_dims = temp_arr_img.shape
temp_arr_img = temp_arr_img.reshape(1, *tmp_arr_dims)
img_buffer.append(temp_arr_img)
counter += 1
if counter == batch:
yield(np.concatenate(img_buffer))
counter = 0
img_buffer = []
    if img_buffer:
        yield(np.concatenate(img_buffer))
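# Minimal usage sketch (array shapes are illustrative, not part of the original module):
#   img = np.zeros((1000, 1000, 3), dtype=np.uint8)
#   for batch in chunk_data_image_generator(img, chunk_size=(500, 500),
#                                           displacement=(250, 250), batch=4):
#       print(batch.shape)  # full batches have shape (4, 500, 500, 3)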
| 33.168
| 74
| 0.605885
| 0
| 0
| 3,791
| 0.914375
| 0
| 0
| 0
| 0
| 1,305
| 0.314761
|
655f05c48f34f6d4f2b19130434604b15e5c99a3
| 1,598
|
py
|
Python
|
python/craftassist/ttad/generation_dialogues/build_scene_flat_script.py
|
satyamedh/craftassist
|
d97cbc14bc25149d3ef41737231ab9f3cb7e392a
|
[
"MIT"
] | 626
|
2019-07-18T18:40:44.000Z
|
2022-03-29T17:34:43.000Z
|
python/craftassist/ttad/generation_dialogues/build_scene_flat_script.py
|
satyamedh/craftassist
|
d97cbc14bc25149d3ef41737231ab9f3cb7e392a
|
[
"MIT"
] | 42
|
2019-07-27T11:04:15.000Z
|
2021-02-23T03:15:14.000Z
|
python/craftassist/ttad/generation_dialogues/build_scene_flat_script.py
|
satyamedh/craftassist
|
d97cbc14bc25149d3ef41737231ab9f3cb7e392a
|
[
"MIT"
] | 89
|
2019-07-19T15:07:39.000Z
|
2022-02-15T18:44:24.000Z
|
if __name__ == "__main__":
import argparse
import pickle
import os
from tqdm import tqdm
from build_scene import *
from block_data import COLOR_BID_MAP
BLOCK_DATA = pickle.load(
open("/private/home/aszlam/minecraft_specs/block_images/block_data", "rb")
)
allowed_blocktypes = []
count = 0
for c, l in COLOR_BID_MAP.items():
for idm in l:
allowed_blocktypes.append(BLOCK_DATA["bid_to_name"][idm])
count += 1
parser = argparse.ArgumentParser()
parser.add_argument("--target", default="/checkpoint/aszlam/minecraft/inverse_model/flat_ads/")
parser.add_argument("--N", type=int, default=10000000)
# parser.add_argument("--num_per_chunk", type=int, default=10000000)
args = parser.parse_args()
template_attributes = {"count": range(1, 5)}
template_attributes["step"] = range(1, 10)
template_attributes["non_shape_names"] = ["triangle", "circle", "disk", "rectangle"]
template_attributes["mob_names"] = ["pig", "sheep", "cow", "chicken"]
template_attributes["allowed_blocktypes"] = allowed_blocktypes
template_attributes["distribution"] = {
"MOVE": 1.0,
"BUILD": 1.0,
"DESTROY": 1.0,
"DIG": 0.8,
"COPY": 0.8,
"FILL": 0.8,
"SPAWN": 0.1,
"DANCE": 0.8,
}
scenes = []
for i in tqdm(range(args.N)):
S = build_scene(template_attributes, sl=16, flat=True)
scenes.append(S)
f = open(os.path.join(args.target, "flat_scenes_dump.pk"), "wb")
pickle.dump(scenes, f)
f.close()
| 32.612245
| 99
| 0.621402
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 443
| 0.277222
|
655fb86e683d32b0ac543bc333e3c26a2dfbef4d
| 1,256
|
py
|
Python
|
modules/transfer/scripts/info.py
|
sishuiliunian/falcon-plus
|
eb6e2a5c29b26812601535cec602b33ee42b0632
|
[
"Apache-2.0"
] | 7,208
|
2017-01-15T08:32:54.000Z
|
2022-03-31T14:09:04.000Z
|
modules/transfer/scripts/info.py
|
sishuiliunian/falcon-plus
|
eb6e2a5c29b26812601535cec602b33ee42b0632
|
[
"Apache-2.0"
] | 745
|
2017-01-17T06:55:21.000Z
|
2022-03-28T03:33:45.000Z
|
modules/transfer/scripts/info.py
|
sishuiliunian/falcon-plus
|
eb6e2a5c29b26812601535cec602b33ee42b0632
|
[
"Apache-2.0"
] | 1,699
|
2017-01-11T09:16:44.000Z
|
2022-03-29T10:40:31.000Z
|
import requests
# Copyright 2017 Xiaomi, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
d = [
{
"endpoint": "hh-op-mon-tran01.bj",
"counter": "load.15min",
},
{
"endpoint": "hh-op-mon-tran01.bj",
"counter": "net.if.in.bytes/iface=eth0",
},
{
"endpoint": "10.202.31.14:7934",
"counter": "p2-com.xiaomi.miui.mibi.service.MibiService-method-createTradeV1",
},
]
url = "http://query.falcon.miliao.srv:9966/graph/info"
r = requests.post(url, data=json.dumps(d))
print(r.text)
#curl "localhost:9966/graph/info/one?endpoint=`hostname`&counter=load.1min" |python -m json.tool
| 32.205128
| 96
| 0.630573
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 931
| 0.741242
|
655fe9ed2aae210f7f4352487b731196a667f5ea
| 18,904
|
py
|
Python
|
pyhpecw7/features/interface.py
|
zhangshilinh3c/pyhpecw7
|
93bc0d323be89b3672413bbddd25c66f75640fb5
|
[
"Apache-2.0"
] | null | null | null |
pyhpecw7/features/interface.py
|
zhangshilinh3c/pyhpecw7
|
93bc0d323be89b3672413bbddd25c66f75640fb5
|
[
"Apache-2.0"
] | null | null | null |
pyhpecw7/features/interface.py
|
zhangshilinh3c/pyhpecw7
|
93bc0d323be89b3672413bbddd25c66f75640fb5
|
[
"Apache-2.0"
] | 1
|
2022-01-27T12:17:20.000Z
|
2022-01-27T12:17:20.000Z
|
"""Manage interfaces on HPCOM7 devices.
"""
from pyhpecw7.utils.xml.lib import reverse_value_map
from pyhpecw7.features.errors import InterfaceCreateError, InterfaceTypeError,\
InterfaceAbsentError, InterfaceParamsError, InterfaceVlanMustExist
from pyhpecw7.features.vlan import Vlan
from pyhpecw7.utils.xml.lib import *
class Interface(object):
"""This class is used to get
and build interface configurations on ``HPCOM7`` devices.
Args:
device (HPCOM7): connected instance of a
``phyp.comware.HPCOM7`` object.
interface_name (str): The name of the interface.
Attributes:
device (HPCOM7): connected instance of a
``phyp.comware.HPCOM7`` object.
interface_name (str): The name of the interface.
iface_index (str): The device's internal number representation
of an interface.
iface_type (str): The type of interface,
for example: 'LoopBack', 'FortyGigE'.
is_ethernet (bool): Whether the interface is ethernet.
is_routed (bool): Whether the interface is in layer 3 mode.
If this is ``False``, the interface is either in bridged
mode or does not exist.
iface_exists (bool): Whether the interface exists. Physical
interfaces should always exist. Logical interfaces may
or may not exist.
"""
def __init__(self, device, interface_name):
# used to map key values from our dictionary model
# to expected XML tags and vice versa
self._key_map = {
'admin': 'AdminStatus',
'speed': 'ConfigSpeed',
'duplex': 'ConfigDuplex',
'description': 'Description',
'type': 'PortLayer'
}
# used to map value values from our dictionary model
# to expected XML tags and vice versa
self._value_map = {
'AdminStatus': {'1': 'up',
'2': 'down'},
'ConfigSpeed': {'1': 'auto', '2': '10',
'4': '100', '32': '1000',
'1024': '10000', '4096': '20000',
'8192': '40000', '16384': '100000'},
'ConfigDuplex': {'1': 'full',
'2': 'half',
'3': 'auto'},
'PortLayer': {'1': 'bridged',
'2': 'routed'}
}
self._iface_types = set(['FortyGigE', 'Tunnel', 'LoopBack',
'Vlan-interface', 'Bridge-Aggregation',
'Route-Aggregation', 'GigabitEthernet',
'Ten-GigabitEthernet'])
# xml tags
self._iface_row_name = 'Interface'
self._iface_index_name = 'IfIndex'
        # used in conjunction with the key map and value map above
        self._r_key_map = dict(reversed(item) for item in self._key_map.items())
self._r_value_map = reverse_value_map(self._r_key_map, self._value_map)
# connect to the device and get more information
self.interface_name, self.iface_type = self._iface_type(interface_name)
self.device = device
# The interface index is needed for most interface NETCONF requests
self.iface_index = self._get_iface_index()
self.is_ethernet, self.is_routed = self._is_ethernet_is_routed()
self.iface_exists = True if self.iface_index else False
def _iface_type(self, if_name):
"""Return the normalized interface name and type
from a denormalized interface name.
"""
if if_name.lower().startswith('gi'):
if_type = 'GigabitEthernet'
elif if_name.lower().startswith('ten'):
if_type = 'Ten-GigabitEthernet'
elif if_name.lower().startswith('fo'):
if_type = 'FortyGigE'
elif if_name.lower().startswith('vl'):
if_type = 'Vlan-interface'
elif if_name.lower().startswith('lo'):
if_type = 'LoopBack'
elif if_name.lower().startswith('br'):
if_type = 'Bridge-Aggregation'
elif if_name.lower().startswith('ro'):
if_type = 'Route-Aggregation'
elif if_name.lower().startswith('tu'):
if_type = 'Tunnel'
else:
if_type = None
number_list = if_name.split(' ')
if len(number_list) == 2:
number = number_list[-1].strip()
else:
number = self._get_number(if_name)
if if_type:
proper_interface = if_type + number
else:
proper_interface = if_name
return proper_interface, if_type
def _get_number(self, if_name):
digits = ''
for char in if_name:
if char.isdigit() or char == '/':
digits += char
return digits
def _get_iface_index(self):
"""Return the interface index given the self.interface_name
attribute by asking the device. If the interface doesn't exist,
return the empty string.
"""
E = data_element_maker()
top = E.top(
E.Ifmgr(
E.Interfaces(
E.Interface(
E.Name(self.interface_name)
)
)
)
)
nc_get_reply = self.device.get(('subtree', top))
reply_data = find_in_data(
self._iface_index_name, nc_get_reply.data_ele)
if reply_data is None:
return ''
return reply_data.text
def _is_ethernet_is_routed(self):
"""Return whether the interface is ethernet and whether
it is routed. If the interface doesn't exist,
return False.
"""
E = data_element_maker()
top = E.top(
E.Ifmgr(
E.Interfaces(
E.Interface(
E.IfIndex(self.iface_index)
)
)
)
)
nc_get_reply = self.device.get(('subtree', top))
reply_data = find_in_data('ifType', nc_get_reply.data_ele)
routed_reply_data = find_in_data('PortLayer', nc_get_reply.data_ele)
is_ethernet = False
is_routed = False
try:
if reply_data.text == '6':
is_ethernet = True
except AttributeError:
pass
try:
if routed_reply_data.text == '2':
is_routed = True
except AttributeError:
pass
return is_ethernet, is_routed
def update(self):
"""Update ``self.iface_index`` and ``self.iface_exists``.
Usually called after a logical interface is created.
Raises:
InterfaceCreateError: if the interface hasn't yet
been successfully created.
Note:
            It is the responsibility of the caller to call ``update()``
            after staging (``create_logical()``) *and* executing
            (``execute()`` on this class's ``device`` object) the
            commands to create an interface.
"""
if_index = self._get_iface_index()
if not if_index:
raise InterfaceCreateError(self.interface_name)
self.iface_index = if_index
self.iface_exists = True
def get_default_config(self):
"""Return the default configuration of an interface.
Returns:
A dictionary of default interface configuration parameters,
depending on the type of interface.
For example, for ethernet interfaces::
{
'description': 'FortyGigE1/0/1 Interface',
'admin': 'up',
'speed': 'auto',
'duplex': 'auto',
'type': 'bridged'
}
"""
if not self.iface_type:
return None
defaults = {}
defaults['description'] = self.interface_name + ' Interface'
defaults['admin'] = 'up'
if self.is_ethernet:
defaults['speed'] = 'auto'
defaults['duplex'] = 'auto'
defaults['type'] = 'bridged'
elif self.iface_type == 'Bridge-Aggregation':
defaults['type'] = 'bridged'
else:
defaults['type'] = 'routed'
return defaults
def param_check(self, **params):
"""Checks given parameters against the interface for various errors.
Args:
**params: see Keyword Args
Keyword Args:
admin (str): The up/down state of the interface.
'up' or 'down'.
speed (str): The speed of the interface, in Mbps.
duplex (str): The duplex of the interface.
'full', 'half', or 'auto'.
description (str): The textual description of the interface.
type (str): Whether the interface is in layer 2 or layer 3 mode.
'bridged' or 'routed'.
Raises:
InterfaceTypeError: if the given interface isn't a valid type.
            InterfaceAbsentError: if the given interface is an ethernet type
                and doesn't exist.
InterfaceParamsError: if 'speed' or 'duplex' are supplied for a
non ethernet interface.
InterfaceVlanMustExist: if the interface is of type
                'Vlan-interface' and the associated vlan doesn't exist.
"""
if not self.iface_type:
raise InterfaceTypeError(
self.interface_name, list(self._iface_types))
if not self.iface_exists:
if self.iface_type in {'FortyGigE', 'GigabitEthernet',
'Ten-GigabitEthernet'}:
raise InterfaceAbsentError(self.interface_name)
if not self.is_ethernet:
param_names = []
if params.get('speed'):
param_names.append('speed')
if params.get('duplex'):
param_names.append('duplex')
if param_names:
raise InterfaceParamsError(self.interface_name, param_names)
if self.iface_type == 'Vlan-interface':
number = self.interface_name.split('Vlan-interface')[1]
vlan = Vlan(self.device, number)
if not vlan.get_config():
raise InterfaceVlanMustExist(self.interface_name, number)
def get_config(self):
"""Return the currently configured
parameters for the interface.
Returns:
A dictionary of currently configured
parameters for the interface, including:
:admin (str): The up/down state of the interface.
'up' or 'down'.
:speed (str): The speed of the interface, in Mbps.
:duplex (str): The duplex of the interface.
'full', 'half', or 'auto'.
:description (str): The textual description of the interface.
:type (str): Whether the interface is in layer 2 or
layer 3 mode. 'bridged' or 'routed'.
"""
E = data_element_maker()
top = E.top(
E.Ifmgr(
E.Interfaces(
E.Interface(
E.IfIndex(self.iface_index)
)
)
)
)
nc_get_reply = self.device.get(('subtree', top))
reply_data = find_in_data(self._iface_row_name, nc_get_reply.data_ele)
if reply_data is None:
return {}
return data_elem_to_dict(reply_data, self._key_map, value_map=self._value_map)
def create_logical(self, stage=False):
"""Stage or execute the configuration to create
a logical interface.
Supported types include 'LoopBack',
'Vlan-interface', 'Bridge-Aggregation',
and 'Route-Aggregation'
Note:
When stage=True, it's the caller's responsibility to call
``execute()`` on this class's ``device``
object after this method is called.
Note:
After execution, the caller must call ``update()`` on this class.
Returns:
True if successful.
Raises:
InterfaceCreateError: if the logical interface
cannot be created.
"""
return self._logical_iface(stage=stage)
def remove_logical(self, stage=False):
"""Stage or execute the configuration to remove
a logical interface.
Supported types include 'LoopBack',
'Vlan-interface', 'Bridge-Aggregation',
and 'Route-Aggregation'
Args:
stage (bool): whether to stage the commands or execute
immediately
Note:
It's the caller's responsibility to call
``execute()`` on this class's ``device``
object after this method is called.
Returns:
True if stage=True and staging is successful
etree.Element XML response if immediate execution
Raises:
InterfaceCreateError: if the logical interface
cannot be removed.
"""
return self._logical_iface(remove=True, stage=stage)
def _logical_iface(self, remove=False, stage=False):
"""Stage or execute the configuration to create
or remove a logical interface.
Args:
remove (bool): If ``True``, the logical
interface is removed. If ``False``,
the logical interface is created.
stage (bool): whether to stage the commands or execute
immediately
Returns:
True if stage=True and staging is successful
etree.Element XML response if immediate execution
"""
logic_type_map = {'LoopBack': '16',
'Vlan-interface': '41',
'Bridge-Aggregation': '56',
'Route-Aggregation': '67'}
if self.iface_type not in logic_type_map:
raise InterfaceCreateError(self.interface_name)
iface_number = self.interface_name.split(self.iface_type)[1]
E = action_element_maker()
top = E.top(
E.Ifmgr(
E.LogicInterfaces(
E.Interface(
E.IfTypeExt(logic_type_map[self.iface_type]),
E.Number(iface_number)
)
)
)
)
if remove:
find_in_action('Interface', top).append(E.Remove())
if stage:
return self.device.stage_config(top, 'action')
else:
return self.device.action(top)
def build(self, stage=False, **params):
"""Stage or execute the configuration to
modify an interface.
Args:
stage (bool): whether to stage the commands or execute
immediately
**params: see Keyword Args.
Keyword Args:
admin (str): The up/down state of the interface.
'up' or 'down'.
speed (str): The speed of the interface, in Mbps.
duplex (str): The duplex of the interface.
'full', 'half', or 'auto'.
description (str): The textual description of the interface.
type (str): Whether the interface is in layer 2 or layer 3 mode.
'bridged' or 'routed'.
Raises:
InterfaceCreateError: if a logical interface cannot be created.
Returns:
True if stage=True and staging is successful
etree.Element XML response if immediate execution
"""
return self._build_config(state='present', stage=stage, **params)
def default(self, stage=False):
"""Stage or execute the configuration to default an interface.
stage (bool): whether to stage the commands or execute
immediately
Returns:
True if stage=True and staging is successful
etree.Element XML response if immediate execution
"""
return self._build_config(state='default', stage=stage)
def _build_config(self, state, stage=False, **params):
"""Stage or execute the configuration to
configure, default, or remove an interface.
Args:
state (str): 'present' configures,
'absent' defaults,
'default' defaults.
stage (bool): whether to stage the commands or execute
immediately
**params: Used when state=present, see Keyword Args.
Keyword Args:
admin (str): The up/down state of the interface.
'up' or 'down'.
speed (str): The speed of the interface, in Mbps.
duplex (str): The duplex of the interface.
'full', 'half', or 'auto'.
description (str): The textual description of the interface.
type (str): Whether the interface is in layer 2 or layer 3 mode.
'bridged' or 'routed'.
Returns:
True if stage=True and staging is successful
etree.Element XML response if immediate execution
False if illegal operation, e.g. removing a physical interface
"""
if state == 'default':
if self.iface_exists:
E = action_element_maker()
top = E.top(
E.Ifmgr(
E.Interfaces(
E.Interface(
E.IfIndex(self.iface_index),
E.Default()
)
)
)
)
if stage:
return self.device.stage_config(top, 'action')
else:
return self.device.action(top)
if state == 'present':
params[self._iface_index_name] = self.iface_index
EN = nc_element_maker()
EC = config_element_maker()
config = EN.config(
EC.top(
EC.Ifmgr(
EC.Interfaces(
EC.Interface(
*config_params(params, self._key_map, value_map=self._r_value_map)
)
)
)
)
)
if stage:
return self.device.stage_config(config, 'edit_config')
else:
return self.device.edit_config(config)
if state == 'absent':
if self.is_ethernet:
return self._build_config('default', stage=stage)
return False
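# --- Hedged usage sketch (illustrative; not part of the original module) ---
# ``device`` is assumed to be an already-connected pyhpecw7 HPCOM7 object;
# ``device.execute()`` is the staged-command execution referred to in the
# docstrings above, and the interface name/description are example values.
def _example_workflow(device):
    """Sketch: create a VLAN interface if needed, then set basic params."""
    iface = Interface(device, 'Vlan-interface10')
    if not iface.iface_exists:
        iface.create_logical(stage=True)
        device.execute()   # push the staged create command
        iface.update()     # refresh iface_index / iface_exists
    iface.build(stage=True, admin='up', description='uplink SVI')
    device.execute()
    return iface.get_config()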
| 34.75
| 98
| 0.541473
| 18,574
| 0.982543
| 0
| 0
| 0
| 0
| 0
| 0
| 9,881
| 0.522694
|
6561c740148f4e9dc7f6313a8907266fbf944537
| 233
|
py
|
Python
|
backend/src/urls.py
|
Ornstein89/LeadersOfDigital2021_FoxhoundTeam
|
a376525b07b900bd69dbc274daef66ddad7f1800
|
[
"MIT"
] | null | null | null |
backend/src/urls.py
|
Ornstein89/LeadersOfDigital2021_FoxhoundTeam
|
a376525b07b900bd69dbc274daef66ddad7f1800
|
[
"MIT"
] | null | null | null |
backend/src/urls.py
|
Ornstein89/LeadersOfDigital2021_FoxhoundTeam
|
a376525b07b900bd69dbc274daef66ddad7f1800
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from django.urls import path, include
from src.base import urls as base_api
urlpatterns = [
path('admin/', admin.site.urls),
path('rest_api/', include(
base_api.urlpatterns
)),
]
| 21.181818
| 37
| 0.690987
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 19
| 0.081545
|
65630efee031ba20e8ef330189053978c1325cc3
| 1,610
|
py
|
Python
|
mobilib/voronoi.py
|
simberaj/mobilib
|
ae350d095a34f53704bd4aaaf7f45e573bda779a
|
[
"MIT"
] | null | null | null |
mobilib/voronoi.py
|
simberaj/mobilib
|
ae350d095a34f53704bd4aaaf7f45e573bda779a
|
[
"MIT"
] | null | null | null |
mobilib/voronoi.py
|
simberaj/mobilib
|
ae350d095a34f53704bd4aaaf7f45e573bda779a
|
[
"MIT"
] | null | null | null |
"""Compute ordinary Voronoi diagrams in Shapely geometries."""
import operator
import numpy
import scipy.spatial
import shapely.geometry
import shapely.geometry.base
import shapely.prepared
def pointset_bounds(coords):
return (
min(coords, key=operator.itemgetter(0))[0],
min(coords, key=operator.itemgetter(1))[1],
max(coords, key=operator.itemgetter(0))[0],
max(coords, key=operator.itemgetter(1))[1],
)
def bounds_to_limiting_generators(minx, miny, maxx, maxy):
addx = maxx - minx
addy = maxy - miny
return [
(minx - addx, miny - addy),
(maxx + addx, miny - addy),
(minx - addx, maxy + addy),
(maxx + addx, maxy + addy),
]
def cells(points, extent=None):
if extent is None:
bbox = pointset_bounds(points)
extent_prep = None
else:
if not isinstance(extent, shapely.geometry.base.BaseGeometry):
extent = shapely.geometry.box(*extent)
bbox = extent.bounds
extent_prep = shapely.prepared.prep(extent)
boundgens = bounds_to_limiting_generators(*bbox)
diagram = scipy.spatial.Voronoi(numpy.concatenate((points, boundgens)))
for reg_i in diagram.point_region[:-len(boundgens)]:
coords = diagram.vertices[diagram.regions[reg_i]]
poly = shapely.geometry.Polygon(coords)
if extent_prep is None or extent_prep.contains(poly):
yield poly
else:
yield extent.intersection(poly)
def cells_shapely(points, extent=None):
return cells(numpy.array([pt.coords[0] for pt in points]), extent=extent)
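# --- Usage sketch (illustrative; the point coordinates and extent below are
# made-up example values, not part of the original module) ---
if __name__ == '__main__':
    example_points = numpy.array([[0.0, 0.0], [2.0, 1.0], [1.0, 3.0], [3.0, 3.0]])
    example_extent = shapely.geometry.box(-1.0, -1.0, 4.0, 4.0)
    # cells() yields one shapely Polygon per input point, clipped to the extent.
    for cell in cells(example_points, extent=example_extent):
        print(round(cell.area, 3))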
| 30.377358
| 77
| 0.657143
| 0
| 0
| 768
| 0.477019
| 0
| 0
| 0
| 0
| 62
| 0.038509
|
65632cf97d92397090002645e3a65de42c6a7e8a
| 12,147
|
py
|
Python
|
ml4tc/scripts/plot_composite_saliency_map.py
|
thunderhoser/ml4tc
|
dd97972675c462634cf43fa9ad486049429095e9
|
[
"MIT"
] | 2
|
2021-08-24T04:24:22.000Z
|
2021-09-29T07:52:21.000Z
|
ml4tc/scripts/plot_composite_saliency_map.py
|
thunderhoser/ml4tc
|
dd97972675c462634cf43fa9ad486049429095e9
|
[
"MIT"
] | null | null | null |
ml4tc/scripts/plot_composite_saliency_map.py
|
thunderhoser/ml4tc
|
dd97972675c462634cf43fa9ad486049429095e9
|
[
"MIT"
] | null | null | null |
"""Plots composite saliency map."""
import argparse
import numpy
import matplotlib
matplotlib.use('agg')
from matplotlib import pyplot
from gewittergefahr.gg_utils import general_utils as gg_general_utils
from gewittergefahr.gg_utils import file_system_utils
from gewittergefahr.plotting import imagemagick_utils
from ml4tc.utils import normalization
from ml4tc.machine_learning import saliency
from ml4tc.machine_learning import neural_net
from ml4tc.plotting import plotting_utils
from ml4tc.plotting import satellite_plotting
from ml4tc.plotting import predictor_plotting
MAX_COLOUR_PERCENTILE = 99.
SHIPS_BUILTIN_LAG_TIMES_HOURS = numpy.array([numpy.nan, 0, 1.5, 3])
COLOUR_BAR_FONT_SIZE = 12
SCALAR_SATELLITE_FONT_SIZE = 20
LAGGED_SHIPS_FONT_SIZE = 20
FORECAST_SHIPS_FONT_SIZE = 10
FIGURE_RESOLUTION_DPI = 300
PANEL_SIZE_PX = int(2.5e6)
SALIENCY_FILE_ARG_NAME = 'input_saliency_file_name'
NORMALIZATION_FILE_ARG_NAME = 'input_normalization_file_name'
PLOT_INPUT_GRAD_ARG_NAME = 'plot_input_times_grad'
SPATIAL_COLOUR_MAP_ARG_NAME = 'spatial_colour_map_name'
NONSPATIAL_COLOUR_MAP_ARG_NAME = 'nonspatial_colour_map_name'
SMOOTHING_RADIUS_ARG_NAME = 'smoothing_radius_px'
OUTPUT_DIR_ARG_NAME = 'output_dir_name'
SALIENCY_FILE_HELP_STRING = (
'Path to saliency file. Will be read by `saliency.read_composite_file`.'
)
NORMALIZATION_FILE_HELP_STRING = (
'Path to file with normalization params (will be used to denormalize '
'brightness-temperature maps before plotting). Will be read by '
'`normalization.read_file`.'
)
PLOT_INPUT_GRAD_HELP_STRING = (
'Boolean flag. If 1 (0), will plot input * gradient (saliency).'
)
SPATIAL_COLOUR_MAP_HELP_STRING = (
'Name of colour scheme for spatial saliency maps. Must be accepted by '
'`matplotlib.pyplot.get_cmap`.'
)
NONSPATIAL_COLOUR_MAP_HELP_STRING = (
'Name of colour scheme for non-spatial saliency maps. Must be accepted by '
'`matplotlib.pyplot.get_cmap`.'
)
SMOOTHING_RADIUS_HELP_STRING = (
'Smoothing radius (number of pixels) for saliency maps. If you do not want'
' to smooth, make this 0 or negative.'
)
OUTPUT_DIR_HELP_STRING = 'Name of output directory. Images will be saved here.'
INPUT_ARG_PARSER = argparse.ArgumentParser()
INPUT_ARG_PARSER.add_argument(
'--' + SALIENCY_FILE_ARG_NAME, type=str, required=True,
help=SALIENCY_FILE_HELP_STRING
)
INPUT_ARG_PARSER.add_argument(
'--' + NORMALIZATION_FILE_ARG_NAME, type=str, required=True,
help=NORMALIZATION_FILE_HELP_STRING
)
INPUT_ARG_PARSER.add_argument(
'--' + PLOT_INPUT_GRAD_ARG_NAME, type=int, required=True,
help=PLOT_INPUT_GRAD_HELP_STRING
)
INPUT_ARG_PARSER.add_argument(
'--' + SPATIAL_COLOUR_MAP_ARG_NAME, type=str, required=False,
default='BuGn', help=SPATIAL_COLOUR_MAP_HELP_STRING
)
INPUT_ARG_PARSER.add_argument(
'--' + NONSPATIAL_COLOUR_MAP_ARG_NAME, type=str, required=False,
default='binary', help=NONSPATIAL_COLOUR_MAP_HELP_STRING
)
INPUT_ARG_PARSER.add_argument(
'--' + SMOOTHING_RADIUS_ARG_NAME, type=float, required=False, default=-1,
help=SMOOTHING_RADIUS_HELP_STRING
)
INPUT_ARG_PARSER.add_argument(
'--' + OUTPUT_DIR_ARG_NAME, type=str, required=True,
help=OUTPUT_DIR_HELP_STRING
)
def _plot_brightness_temp_saliency(
saliency_dict, model_metadata_dict, normalization_table_xarray,
colour_map_object, plot_input_times_grad, output_dir_name):
"""Plots saliency for brightness temp for each lag time at one init time.
:param saliency_dict: See doc for `_plot_scalar_satellite_saliency`.
:param model_metadata_dict: Same.
:param normalization_table_xarray: xarray table returned by
`normalization.read_file`.
:param colour_map_object: See doc for `_plot_scalar_satellite_saliency`.
:param plot_input_times_grad: Same.
:param output_dir_name: Same.
"""
predictor_matrices = [
None if p is None else numpy.expand_dims(p, axis=0)
for p in saliency_dict[saliency.THREE_PREDICTORS_KEY]
]
if plot_input_times_grad:
this_key = saliency.THREE_INPUT_GRAD_KEY
else:
this_key = saliency.THREE_SALIENCY_KEY
saliency_matrices = [
None if p is None else numpy.expand_dims(p, axis=0)
for p in saliency_dict[this_key]
]
num_lag_times = predictor_matrices[0].shape[3]
grid_latitudes_deg_n = numpy.linspace(
-10, 10, num=predictor_matrices[0].shape[1], dtype=float
)
grid_latitude_matrix_deg_n = numpy.expand_dims(grid_latitudes_deg_n, axis=1)
grid_latitude_matrix_deg_n = numpy.repeat(
grid_latitude_matrix_deg_n, axis=1, repeats=num_lag_times
)
grid_longitudes_deg_e = numpy.linspace(
300, 320, num=predictor_matrices[0].shape[2], dtype=float
)
grid_longitude_matrix_deg_e = numpy.expand_dims(
grid_longitudes_deg_e, axis=1
)
grid_longitude_matrix_deg_e = numpy.repeat(
grid_longitude_matrix_deg_e, axis=1, repeats=num_lag_times
)
figure_objects, axes_objects, pathless_output_file_names = (
predictor_plotting.plot_brightness_temp_one_example(
predictor_matrices_one_example=predictor_matrices,
model_metadata_dict=model_metadata_dict,
cyclone_id_string='2005AL12', init_time_unix_sec=0,
grid_latitude_matrix_deg_n=grid_latitude_matrix_deg_n,
grid_longitude_matrix_deg_e=grid_longitude_matrix_deg_e,
normalization_table_xarray=normalization_table_xarray,
border_latitudes_deg_n=numpy.array([20.]),
border_longitudes_deg_e=numpy.array([330.])
)
)
validation_option_dict = (
model_metadata_dict[neural_net.VALIDATION_OPTIONS_KEY]
)
num_model_lag_times = len(
validation_option_dict[neural_net.SATELLITE_LAG_TIMES_KEY]
)
all_saliency_values = numpy.concatenate([
numpy.ravel(s) for s in saliency_matrices if s is not None
])
min_abs_contour_value = numpy.percentile(
numpy.absolute(all_saliency_values), 100. - MAX_COLOUR_PERCENTILE
)
max_abs_contour_value = numpy.percentile(
numpy.absolute(all_saliency_values), MAX_COLOUR_PERCENTILE
)
panel_file_names = [''] * num_model_lag_times
for k in range(num_model_lag_times):
min_abs_contour_value, max_abs_contour_value = (
satellite_plotting.plot_saliency(
saliency_matrix=saliency_matrices[0][0, ..., k, 0],
axes_object=axes_objects[k],
latitude_array_deg_n=grid_latitude_matrix_deg_n[:, k],
longitude_array_deg_e=grid_longitude_matrix_deg_e[:, k],
min_abs_contour_value=min_abs_contour_value,
max_abs_contour_value=max_abs_contour_value,
half_num_contours=10,
colour_map_object=colour_map_object
)
)
panel_file_names[k] = '{0:s}/{1:s}'.format(
output_dir_name, pathless_output_file_names[k]
)
print('Saving figure to file: "{0:s}"...'.format(
panel_file_names[k]
))
figure_objects[k].savefig(
panel_file_names[k], dpi=FIGURE_RESOLUTION_DPI,
pad_inches=0, bbox_inches='tight'
)
pyplot.close(figure_objects[k])
imagemagick_utils.resize_image(
input_file_name=panel_file_names[k],
output_file_name=panel_file_names[k],
output_size_pixels=PANEL_SIZE_PX
)
concat_figure_file_name = '{0:s}/brightness_temp_concat.jpg'.format(
output_dir_name
)
plotting_utils.concat_panels(
panel_file_names=panel_file_names,
concat_figure_file_name=concat_figure_file_name
)
this_cmap_object, this_cnorm_object = (
satellite_plotting.get_colour_scheme()
)
plotting_utils.add_colour_bar(
figure_file_name=concat_figure_file_name,
colour_map_object=this_cmap_object,
colour_norm_object=this_cnorm_object,
orientation_string='vertical', font_size=COLOUR_BAR_FONT_SIZE,
cbar_label_string='Brightness temp (K)',
tick_label_format_string='{0:d}'
)
colour_norm_object = pyplot.Normalize(
vmin=min_abs_contour_value, vmax=max_abs_contour_value
)
label_string = 'Absolute {0:s}'.format(
'input times gradient' if plot_input_times_grad else 'saliency'
)
plotting_utils.add_colour_bar(
figure_file_name=concat_figure_file_name,
colour_map_object=colour_map_object,
colour_norm_object=colour_norm_object,
orientation_string='vertical', font_size=COLOUR_BAR_FONT_SIZE,
cbar_label_string=label_string, tick_label_format_string='{0:.2g}'
)
def _run(saliency_file_name, normalization_file_name, plot_input_times_grad,
spatial_colour_map_name, nonspatial_colour_map_name,
smoothing_radius_px, output_dir_name):
"""Plots composite saliency map.
This is effectively the main method.
:param saliency_file_name: See documentation at top of file.
:param normalization_file_name: Same.
:param plot_input_times_grad: Same.
:param spatial_colour_map_name: Same.
:param nonspatial_colour_map_name: Same.
:param smoothing_radius_px: Same.
:param output_dir_name: Same.
"""
spatial_colour_map_object = pyplot.get_cmap(spatial_colour_map_name)
nonspatial_colour_map_object = pyplot.get_cmap(nonspatial_colour_map_name)
file_system_utils.mkdir_recursive_if_necessary(
directory_name=output_dir_name
)
# Read files.
print('Reading data from: "{0:s}"...'.format(saliency_file_name))
saliency_dict = saliency.read_composite_file(saliency_file_name)
if plot_input_times_grad:
this_key = saliency.THREE_INPUT_GRAD_KEY
else:
this_key = saliency.THREE_SALIENCY_KEY
if smoothing_radius_px > 0 and saliency_dict[this_key][0] is not None:
print((
'Smoothing maps with Gaussian filter (e-folding radius of {0:.1f} '
'pixels)...'
).format(smoothing_radius_px))
num_lag_times = saliency_dict[this_key][0].shape[-2]
for k in range(num_lag_times):
saliency_dict[this_key][0][..., k, 0] = (
gg_general_utils.apply_gaussian_filter(
input_matrix=saliency_dict[this_key][0][..., k, 0],
e_folding_radius_grid_cells=smoothing_radius_px
)
)
model_file_name = saliency_dict[saliency.MODEL_FILE_KEY]
model_metafile_name = neural_net.find_metafile(
model_file_name=model_file_name, raise_error_if_missing=True
)
print('Reading metadata from: "{0:s}"...'.format(model_metafile_name))
model_metadata_dict = neural_net.read_metafile(model_metafile_name)
print('Reading data from: "{0:s}"...'.format(normalization_file_name))
normalization_table_xarray = normalization.read_file(
normalization_file_name
)
# Plot saliency map.
_plot_brightness_temp_saliency(
saliency_dict=saliency_dict, model_metadata_dict=model_metadata_dict,
normalization_table_xarray=normalization_table_xarray,
colour_map_object=spatial_colour_map_object,
plot_input_times_grad=plot_input_times_grad,
output_dir_name=output_dir_name
)
if __name__ == '__main__':
INPUT_ARG_OBJECT = INPUT_ARG_PARSER.parse_args()
_run(
saliency_file_name=getattr(INPUT_ARG_OBJECT, SALIENCY_FILE_ARG_NAME),
normalization_file_name=getattr(
INPUT_ARG_OBJECT, NORMALIZATION_FILE_ARG_NAME
),
plot_input_times_grad=bool(getattr(
INPUT_ARG_OBJECT, PLOT_INPUT_GRAD_ARG_NAME
)),
spatial_colour_map_name=getattr(
INPUT_ARG_OBJECT, SPATIAL_COLOUR_MAP_ARG_NAME
),
nonspatial_colour_map_name=getattr(
INPUT_ARG_OBJECT, NONSPATIAL_COLOUR_MAP_ARG_NAME
),
smoothing_radius_px=getattr(
INPUT_ARG_OBJECT, SMOOTHING_RADIUS_ARG_NAME
),
output_dir_name=getattr(INPUT_ARG_OBJECT, OUTPUT_DIR_ARG_NAME)
)
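# --- Example invocation (illustrative; file paths are placeholders) ---
# python plot_composite_saliency_map.py \
#     --input_saliency_file_name=/path/to/composite_saliency_file \
#     --input_normalization_file_name=/path/to/normalization_params_file \
#     --plot_input_times_grad=0 \
#     --spatial_colour_map_name="BuGn" \
#     --smoothing_radius_px=2 \
#     --output_dir_name=/path/to/output_figures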
| 36.697885
| 80
| 0.724788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,190
| 0.180291
|
6563e55d840f23f4832429e81437245ecfd8105f
| 16,089
|
py
|
Python
|
Autocoders/Python/src/fprime_ac/models/CompFactory.py
|
LeStarch/lgtm-fprime
|
904b0311fe647745b29075d44259d1dc1f4284ae
|
[
"Apache-2.0"
] | 1
|
2021-01-30T15:53:01.000Z
|
2021-01-30T15:53:01.000Z
|
Autocoders/Python/src/fprime_ac/models/CompFactory.py
|
LeStarch/lgtm-fprime
|
904b0311fe647745b29075d44259d1dc1f4284ae
|
[
"Apache-2.0"
] | 1
|
2020-12-03T14:14:51.000Z
|
2020-12-03T14:14:51.000Z
|
Autocoders/Python/src/fprime_ac/models/CompFactory.py
|
LeStarch/lgtm-fprime
|
904b0311fe647745b29075d44259d1dc1f4284ae
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# ===============================================================================
# NAME: CompFactory.py
#
# DESCRIPTION: This is a factory class for instancing the Component
# and building up the Port and Arg configuration required.
#
# AUTHOR: reder
# EMAIL: reder@jpl.nasa.gov
# DATE CREATED : Feb. 11. 2013
#
# Copyright 2013, California Institute of Technology.
# ALL RIGHTS RESERVED. U.S. Government Sponsorship acknowledged.
# ===============================================================================
#
# Python standard modules
#
import logging
import sys
from fprime_ac.models import (
Arg,
Channel,
Command,
Component,
Event,
InternalInterface,
Parameter,
Port,
Serialize,
)
# Global logger init. below.
PRINT = logging.getLogger("output")
DEBUG = logging.getLogger("debug")
class CompFactory:
"""
This is a concrete factory method object used to create the interface
to the code generation. A single call to create is made that returns
    the component object containing all the ports, includes, args, etc.
"""
__parsed = None
__instance = None
__configured_visitors = None
def __init__(self):
"""
Private Constructor (singleton pattern)
"""
self.__parsed = None
self.__instance = None
self.__configured_visitors = dict()
def getInstance():
"""
Return instance of singleton.
"""
if CompFactory.__instance is None:
CompFactory.__instance = CompFactory()
return CompFactory.__instance
# define static method
getInstance = staticmethod(getInstance)
def create(
self, the_parsed_component_xml, parsed_port_xml_list, parsed_serializable_list
):
"""
Create a component model here.
"""
x = the_parsed_component_xml
comp_obj = x.get_component()
comp_port_obj_list = x.get_ports()
comp_command_obj_list = x.get_commands()
comp_channel_obj_list = x.get_channels()
comp_parameter_obj_list = x.get_parameters()
comp_event_obj_list = x.get_events()
comp_internal_interface_obj_list = x.get_internal_interfaces()
comp_included_enums_list = x.get_enum_type_files()
#
comp_namespace = comp_obj.get_namespace()
comp_name = comp_obj.get_name()
comp_kind = comp_obj.get_kind()
comp_comment = comp_obj.get_comment()
comp_modeler = comp_obj.get_modeler()
if comp_namespace is None:
comp_full_name = comp_name
else:
comp_full_name = comp_namespace + "::" + comp_name
# get original filename here...
comp_xml_filename = x.get_xml_filename()
#
comp_xml_port_files = x.get_port_type_files()
comp_c_header_files = x.get_header_files()
has_guarded_ports = False
num_async_ports = 0
num_sync_ports = 0 # includes guarded ports
#
# print ("Component: %s"%comp_name)
incl_list = []
#
# Create list of ports with all ports of the component.
#
port_obj_list = []
for port_obj in comp_port_obj_list:
n = port_obj.get_name()
t = port_obj.get_type()
d = port_obj.get_direction()
s = port_obj.get_sync()
r = port_obj.get_role()
if s == "sync" or s == "guarded":
num_sync_ports += 1
if s == "async":
num_async_ports += 1
p = port_obj.get_priority()
if s == "guarded":
has_guarded_ports = True
c = port_obj.get_comment()
m = port_obj.get_max_number()
f = port_obj.get_full()
port_obj_list.append(Port.Port(n, t, d, s, p, f, c, max_number=m, role=r))
command_obj_list = []
for command_obj in comp_command_obj_list:
m = command_obj.get_mnemonic()
o = command_obj.get_opcodes()
s = command_obj.get_sync()
p = command_obj.get_priority()
f = command_obj.get_full()
if s == "guarded":
has_guarded_ports = True
if s == "sync" or s == "guarded":
num_sync_ports += 1
if s == "async":
num_async_ports += 1
c = command_obj.get_comment()
arg_obj_list = []
for a in command_obj.get_args():
name = a.get_name()
atype = a.get_type()
comment = a.get_comment()
size = a.get_size()
arg_obj_list.append(Arg.Arg(name, atype, None, size, comment))
command_obj_list.append(
Command.Command(
m,
o,
arg_obj_list,
s,
p,
c,
comp_xml_filename,
comp_full_name,
component_base_name=comp_name,
base_opcode=command_obj.get_base_opcode(),
full=f,
)
)
channel_obj_list = []
for channel_obj in comp_channel_obj_list:
i = channel_obj.get_ids()
n = channel_obj.get_name()
t = channel_obj.get_type()
s = channel_obj.get_size()
c = channel_obj.get_comment()
a = channel_obj.get_abbrev()
f = channel_obj.get_format_string()
u = channel_obj.get_update()
l = channel_obj.get_limits()
channel_obj_list.append(
Channel.Channel(
ids=i,
name=n,
ctype=t,
size=s,
abbrev=a,
format_string=f,
update=u,
limits=l,
comment=c,
xml_filename=comp_xml_filename,
component_name=comp_full_name,
component_base_name=comp_name,
)
)
event_obj_list = []
for event_obj in comp_event_obj_list:
i = event_obj.get_ids()
n = event_obj.get_name()
s = event_obj.get_severity()
f = event_obj.get_format_string()
t = event_obj.get_throttle()
c = event_obj.get_comment()
arg_obj_list = []
for a in event_obj.get_args():
name = a.get_name()
atype = a.get_type()
size = a.get_size()
comment = a.get_comment()
arg_obj_list.append(Arg.Arg(name, atype, None, size, comment))
event_obj_list.append(
Event.Event(
i,
n,
s,
f,
t,
arg_obj_list,
c,
comp_xml_filename,
comp_full_name,
component_base_name=comp_name,
)
)
internal_interface_obj_list = []
for internal_interface_obj in comp_internal_interface_obj_list:
# borrow this for check
num_async_ports += 1
n = internal_interface_obj.get_name()
p = internal_interface_obj.get_priority()
f = internal_interface_obj.get_full()
c = internal_interface_obj.get_comment()
arg_obj_list = []
for a in internal_interface_obj.get_args():
name = a.get_name()
atype = a.get_type()
size = a.get_size()
comment = a.get_comment()
arg_obj_list.append(Arg.Arg(name, atype, None, size, comment))
internal_interface_obj_list.append(
InternalInterface.InternalInterface(
n, p, f, arg_obj_list, c, comp_xml_filename, comp_full_name
)
)
parameter_obj_list = []
for parameter_obj in comp_parameter_obj_list:
i = parameter_obj.get_ids()
n = parameter_obj.get_name()
t = parameter_obj.get_type()
set_ops = parameter_obj.get_set_opcodes()
save_ops = parameter_obj.get_save_opcodes()
d = parameter_obj.get_default()
s = parameter_obj.get_size()
c = parameter_obj.get_comment()
parameter_obj_list.append(
Parameter.Parameter(
i,
n,
t,
set_ops,
save_ops,
d,
s,
c,
comp_xml_filename,
comp_full_name,
base_setop=parameter_obj.get_base_setop(),
base_saveop=parameter_obj.get_base_saveop(),
)
)
serializable_obj_list = []
for serializable_obj in parsed_serializable_list:
f = serializable_obj.get_xml_filename()
n = serializable_obj.get_name()
ns = serializable_obj.get_namespace()
c = serializable_obj.get_comment()
x = serializable_obj.get_includes()
# shouldn't be c includes
m = serializable_obj.get_members()
t = serializable_obj.get_typeid()
serializable_obj_list.append(
Serialize.Serialize(f, n, ns, c, x, None, m, t)
)
#
# Check here to make sure all the port types in the component XML
# exist in the port XMLs
#
interface_xml_list = [
parsed_port_obj.get_interface().get_name()
for parsed_port_obj in parsed_port_xml_list
]
for port_obj in port_obj_list:
t = port_obj.get_type()
            ## Skip if it is a special port (i.e. it has a role).
            ## Namespaces for special ports are set above.
if (
(t not in interface_xml_list)
and (t.lower() != "serial")
and (port_obj.get_role() is None)
):
PRINT.info(
"ERROR: Missing port type definition in component XML (name: %s, type: %s)"
% (port_obj.get_name(), t)
)
sys.exit(-1)
#
# Check here to make sure all the port types in the component XML
# exist in the port XMLs
#
# interface_xml_list = [parsed_command_obj.get_interface().get_name() for parsed_command_obj in parsed_command_xml_list]
# print interface_xml_list
# for command_obj in command_obj_list:
# t = command_obj.get_type()
# if (t not in interface_xml_list):
# PRINT.info("ERROR: Missing command type definition in component XML (name: %s, type: %s)" % (command_obj.get_type(),t))
# sys.exit(-1)
# #
# Add port type specifics to port object.
# Specifics are things like: args, includes, etc.
for port_obj in port_obj_list:
for parsed_port_obj in parsed_port_xml_list:
# print "Meta: Name: %s, Type: %s" % (port_obj.get_name(), port_obj.get_type())
# print "Meta: Port Type: %s, Port Interface: %s" % (port_obj.get_type(),parsed_port_obj.get_interface().get_name())
if port_obj.get_type() == parsed_port_obj.get_interface().get_name():
arg_obj_list = []
incl_list = parsed_port_obj.get_include_header_files()
namespace = parsed_port_obj.get_interface().get_namespace()
if_comment = parsed_port_obj.get_interface().get_comment()
return_type = parsed_port_obj.get_interface().get_return_type()
return_modifier = (
parsed_port_obj.get_interface().get_return_modifier()
)
for a in parsed_port_obj.get_args():
name = a.get_name()
atype = a.get_type()
comment = a.get_comment()
modifier = a.get_modifier()
size = a.get_size()
arg_obj_list.append(
Arg.Arg(name, atype, modifier, size, comment)
)
port_obj.set(
namespace, arg_obj_list, incl_list, None, None, if_comment
)
port_obj.set_return(return_type, return_modifier)
# check some rules
# 1) No return values for async ports
if (port_obj.get_sync() == "async") and (return_type is not None):
PRINT.info(
'ERROR: %s: Port "%s" cannot be asynchronous and have a return value'
% (
the_parsed_component_xml.get_xml_filename(),
port_obj.get_name(),
)
)
sys.exit(-1)
# 2) Serial ports can't have roles
if (port_obj.get_type() == "Serial") and (
port_obj.get_role() is not None
):
PRINT.info(
'ERROR: %s: Port "%s" cannot have a role and be a serialized port'
% (
the_parsed_component_xml.get_xml_filename(),
port_obj.get_name(),
)
)
sys.exit(-1)
# check some component/port rules
# 1) Active or queued need at least one async port/command
if (comp_kind == "active") or (comp_kind == "queued"):
if num_async_ports == 0 and len(parameter_obj_list) == 0:
PRINT.info(
'ERROR: %s: Active/Queued component "%s" needs at least one async port, command, or interface'
% (the_parsed_component_xml.get_xml_filename(), comp_name)
)
sys.exit(-1)
# 2) Queued component needs at least one sync port/command
if comp_kind == "queued":
if num_sync_ports == 0:
PRINT.info(
'ERROR: %s: Queued component "%s" needs at least one sync/guarded port or command'
% (the_parsed_component_xml.get_xml_filename(), comp_name)
)
sys.exit(-1)
parsed_array_list = []
for array_file in the_parsed_component_xml.get_array_type_files():
parsed_array_list.append(array_file.replace("Ai.xml", "Ac.hpp"))
#
# Instance the component here...
#
the_component = Component.Component(
comp_namespace,
comp_name,
comp_kind,
comp_comment,
comp_modeler,
port_obj_list,
command_obj_list,
channel_obj_list,
parameter_obj_list,
event_obj_list,
internal_interface_obj_list,
serializable_obj_list,
comp_xml_filename,
comp_included_enums_list,
)
the_component.set_xml_port_files(comp_xml_port_files + parsed_array_list)
the_component.set_c_header_files(comp_c_header_files)
if has_guarded_ports:
the_component.set_has_guarded_ports()
# for p in the_component.get_ports():
# print p.get_name(), p.get_namespace()
# for a in p.get_args():
# print a.get_name(), a.get_type(), a.get_modifier()
return the_component
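# --- Hedged usage sketch (illustrative; not part of the original module) ---
# The parsed XML objects are assumed to come from the fprime_ac XML parsers.
#
#   factory = CompFactory.getInstance()
#   component = factory.create(the_parsed_component_xml,
#                              parsed_port_xml_list,
#                              parsed_serializable_list)
#   for port in component.get_ports():
#       print(port.get_name(), port.get_namespace())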
| 37.503497
| 144
| 0.514948
| 15,218
| 0.945864
| 0
| 0
| 0
| 0
| 0
| 0
| 3,199
| 0.198831
|
65644397de88c237a6908c085913149171a8b5ff
| 7,896
|
py
|
Python
|
Algo_Ds_Notes-master/Algo_Ds_Notes-master/A_Star_Search_Algorithm/A_Star_Search_Algorithm.py
|
rajatenzyme/Coding-Journey-
|
65a0570153b7e3393d78352e78fb2111223049f3
|
[
"MIT"
] | null | null | null |
Algo_Ds_Notes-master/Algo_Ds_Notes-master/A_Star_Search_Algorithm/A_Star_Search_Algorithm.py
|
rajatenzyme/Coding-Journey-
|
65a0570153b7e3393d78352e78fb2111223049f3
|
[
"MIT"
] | null | null | null |
Algo_Ds_Notes-master/Algo_Ds_Notes-master/A_Star_Search_Algorithm/A_Star_Search_Algorithm.py
|
rajatenzyme/Coding-Journey-
|
65a0570153b7e3393d78352e78fb2111223049f3
|
[
"MIT"
] | null | null | null |
import numpy as np


class Node:
"""
A node class used in A* Pathfinding.
parent: it is parent of current node
position: it is current position of node in the maze.
g: cost from start to current Node
h: heuristic based estimated cost for current Node to end Node
f: total cost of present node i.e. : f = g + h
"""
    def __init__(self , parent=None , position=None):
        self.parent = parent
        self.position = position
self.g = 0
self.f = 0
self.h = 0
def __eq__(self , other):
return self.position == other.position
class FindPath():
def __init__(self , maze , cost , start , end):
self.maze = maze
self.cost = cost
self.start = start
self.end = end
        self.move = [ [-1, 0] ,  # up
                      [ 0,-1] ,  # left
                      [ 1, 0] ,  # down
                      [ 0, 1] ,  # right
                      [-1,-1] ,  # up-left
                      [-1, 1] ,  # up-right
                      [ 1,-1] ,  # down-left
                      [ 1, 1] ]  # down-right
def return_path(self,curr_node,cost_matrix):
path = []
no_rows , no_columns = np.shape(cost_matrix)
# here we create the initialized result maze with -1 in every position
res = [[-1 for i in range(no_columns)] for j in range(no_rows)]
#we will iterate over all parents of node and store in path
curr = curr_node
while curr is not None:
path.append(curr.position)
curr = curr.parent
path = path[::-1]
initial_value = 0
# we will insert the path in matrix
for i in range(len(path)):
res[path[i][0]][path[i][1]] = initial_value
initial_value += 1
return res
def search(self):
"""
Returns a list of tuples as a path from the given start to the given end in the given maze
"""
# we will create start node and end node
# we will initialize g, h and f value zero
start_node = Node(None, tuple(self.start))
start_node.g = 0
start_node.h = 0
start_node.f = 0
end_node = Node(None, tuple(self.end))
end_node.g = 0
end_node.h = 0
end_node.f = 0
# we need to initialize both queue and visited list
# we will find the lowest cost node to expand next
queue = []
# we will store all visited node
visited_list = []
# Add the start node
queue.append(start_node)
        # calculate the maximum number of steps we can take in the matrix
counter = 0
max_steps = (len(self.maze) // 2) ** 10
# Get number of rows and columns
no_rows, no_columns = np.shape(self.maze)
# Loop until you find the end
while len(queue) > 0:
# Every time any node is visited increase the counter
counter += 1
# Get the current node
current_node = queue[0]
current_index = 0
for index, item in enumerate(queue):
if item.f < current_node.f:
current_node = item
current_index = index
            # if we reach this point, return the current path: either there is
            # no solution or the computation cost has become too high
if counter > max_steps:
print ("Destination cannot be reached")
return self.return_path(current_node , self.maze)
# Pop current node out off
queue.pop(current_index)
# mark it visited
visited_list.append(current_node)
# check if goal is reached or not
if current_node == end_node:
return self.return_path(current_node , self.maze)
# Generate coordinate from all adjacent coordinates
coordinates = []
for move in self.move:
# Get node position
current_node_position = (current_node.position[0] + move[0] , current_node.position[1] + move[1])
# check if all the moves are in maze limit
if (current_node_position[0] > (no_rows - 1) or current_node_position[0] < 0 or current_node_position[1] > (no_columns -1) or current_node_position[1] < 0):
continue
# Make sure walkable terrain
if self.maze[current_node_position[0]][current_node_position[1]] != 0:
continue
# Create new node
new_node = Node(current_node , current_node_position)
# Append
coordinates.append(new_node)
# Loop through children
for child in coordinates:
# Child is on the visited list (search entire visited list)
if len([visited_child for visited_child in visited_list if visited_child == child]) > 0:
continue
# calculate f, g, and h values
child.g = current_node.g + self.cost
                # heuristic cost: squared Euclidean distance to the goal
child.h = (((child.position[0] - end_node.position[0]) ** 2) + ((child.position[1] - end_node.position[1]) ** 2))
child.f = child.g + child.h
# Child if already in queue and g cost is already lower
if len([i for i in queue if child == i and child.g > i.g]) > 0:
continue
queue.append(child)
class Preprocess:
def __init__(self , maze , n , m):
self.maze = maze
self.n = n
self.m = m
def check(self , value):
data=''
for i in range(len(value)):
if(value[i] == '[' or value[i] == ']'):
continue
else:
data+=value[i]
return data
def process_text(self):
c=0
matrix = self.maze
matrix = matrix.split(',')
data = []
for i in range(self.n):
l = []
for j in range(self.m):
l.append(int(self.check(matrix[c])))
c += 1
data.append(l)
return data
if __name__ == '__main__':
no_rows = int(input("Enter number of rows: "))
no_cols = int(input("Enter number of columns: "))
matrix = Preprocess(str(input("Enter Matrix: ")) , no_rows , no_cols).process_text()
start_x = int(input("Enter x coordinate of starting node: "))
start_y = int(input("Enter y coordinate of starting node: "))
end_x = int(input("Enter x coordinate of ending node: "))
end_y = int(input("Enter y coordinate of ending node: "))
cost = int(input("Enter cost: "))
start = [start_x , start_y]
end = [end_x , end_y]
path = FindPath(matrix , cost , start , end).search()
    if path is not None:
print("Path found: ")
for i in range(len(path)):
for j in range(len(path[i])):
if(path[i][j] == -1):
print(0 , end=" ")
else:
print(path[i][j] , end=" ")
print()
else:
print("No Path found")
#input:
# Enter number of rows: 5
# Enter number of columns: 6
# Enter Matrix: [[0, 1, 0, 0, 0, 0],
# [0, 1, 0, 0, 0, 0],
# [0, 1, 0, 1, 0, 0],
# [0, 1, 0, 0, 1, 0],
# [0, 0, 0, 0, 1, 0]]
# Enter x coordinate of starting node: 0
# Enter y coordinate of starting node: 0
# Enter x coordinate of ending node: 4
# Enter y coordinate of ending node: 5
# Enter cost: 1
#Path found:
# 0 0 0 0 0 0
# 1 0 0 0 0 0
# 2 0 0 0 7 0
# 3 0 0 6 0 8
# 0 4 5 0 0 9
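# --- Programmatic usage sketch (bypasses the interactive prompts above) ---
# example_maze = [[0, 1, 0, 0],
#                 [0, 1, 0, 0],
#                 [0, 0, 0, 0]]
# result = FindPath(example_maze, cost=1, start=[0, 0], end=[2, 3]).search()
# # ``result`` is a matrix with -1 for unvisited cells and increasing step
# # numbers along the recovered path.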
| 37.6
| 172
| 0.517477
| 6,343
| 0.803318
| 0
| 0
| 0
| 0
| 0
| 0
| 2,567
| 0.325101
|
6564bbdf498ee27811e27e457eaf27523a8129cd
| 1,765
|
py
|
Python
|
setup.py
|
ikonst/dql
|
e9d3aa22873076dae5ebd02e35318aa996b1e56a
|
[
"MIT"
] | null | null | null |
setup.py
|
ikonst/dql
|
e9d3aa22873076dae5ebd02e35318aa996b1e56a
|
[
"MIT"
] | null | null | null |
setup.py
|
ikonst/dql
|
e9d3aa22873076dae5ebd02e35318aa996b1e56a
|
[
"MIT"
] | null | null | null |
""" Setup file """
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(HERE, "README.rst")).read()
CHANGES = open(os.path.join(HERE, "CHANGES.rst")).read()
REQUIREMENTS = [
"dynamo3>=0.4.7",
"future>=0.15.0",
"pyparsing==2.1.4",
"python-dateutil<2.7.0",
]
EXTRAS = {
"test": ["nose", "mock"],
"lint": ["black", "pylint==2.3.1"],
"doc": ["numpydoc", "sphinx", "sphinx_rtd_theme"],
}
if __name__ == "__main__":
setup(
name="dql",
version="0.5.26",
description="DynamoDB Query Language",
long_description=README + "\n\n" + CHANGES,
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
author="Steven Arcangeli",
author_email="stevearc@stevearc.com",
url="http://github.com/stevearc/dql",
keywords="aws dynamo dynamodb sql",
license="MIT",
platforms="any",
include_package_data=True,
packages=find_packages(exclude=("tests",)),
entry_points={"console_scripts": ["dql = dql:main"]},
install_requires=REQUIREMENTS,
tests_require=REQUIREMENTS + EXTRAS["test"],
extras_require=EXTRAS,
)
| 31.517857
| 61
| 0.571671
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 814
| 0.46119
|
65682d4917d1f98a115d05b121741cbfe858fff9
| 3,368
|
py
|
Python
|
runner.py
|
leodenault/mhw_optimizer
|
3478ea07361291c6fa0bbd9fc0b9906914321772
|
[
"MIT"
] | null | null | null |
runner.py
|
leodenault/mhw_optimizer
|
3478ea07361291c6fa0bbd9fc0b9906914321772
|
[
"MIT"
] | null | null | null |
runner.py
|
leodenault/mhw_optimizer
|
3478ea07361291c6fa0bbd9fc0b9906914321772
|
[
"MIT"
] | 1
|
2020-03-31T01:51:56.000Z
|
2020-03-31T01:51:56.000Z
|
import multiprocessing
import csv_exporter
from combination.brute_force_combination_algorithm import \
BruteForceCombinationAlgorithm
from combination.combiner import Combiner
from combination.constrained_combination_algorithm import \
ConstrainedCombinationAlgorithm
from config.config_importer import ConfigImporter
from equipment.equipment_piece import BodyPart
from mhw_db_loaders.armour_loader import ArmourLoader
from mhw_db_loaders.armour_set_loader import ArmourSetLoader
from mhw_db_loaders.armour_set_skill_loader import ArmourSetSkillLoader
from mhw_db_loaders.charm_loader import CharmLoader
from mhw_db_loaders.data_loader import load_json
from mhw_db_loaders.skill_loader import SkillLoader
from scorer import Scorer
def count_armour_pieces(body_part, armour_pieces) -> int:
return len(
[
piece
for piece in armour_pieces
if piece.body_part == body_part
]
)
def run(
config_location: str,
skills_location: str,
armour_sets_location: str,
armour_location: str,
charms_location: str,
export_location: str
):
loaded_skills = SkillLoader(load_json(skills_location)).load()
skills = loaded_skills.by_id()
skills_by_name = loaded_skills.by_name()
skill_ranks = loaded_skills.skill_ranks_by_id()
config = ConfigImporter(config_location, skills_by_name).load()
print("Successfully loaded config.")
armour_sets = ArmourSetLoader(load_json(armour_sets_location)).load()
armour_set_skills = ArmourSetSkillLoader(
armour_sets,
skills,
skill_ranks
).load()
armour_pieces = ArmourLoader(
config,
armour_sets,
skill_ranks,
load_json(armour_location)
).load()
charms = CharmLoader(config, skill_ranks, load_json(charms_location)).load()
print(
"Loaded {} equipment pieces {{\n"
" head: {}\n"
" chest: {}\n"
" gloves: {}\n"
" waist: {}\n"
" legs: {}\n"
" charms: {}\n"
"}}".format(
len(armour_pieces),
count_armour_pieces(BodyPart.HEAD, armour_pieces),
count_armour_pieces(BodyPart.CHEST, armour_pieces),
count_armour_pieces(BodyPart.GLOVES, armour_pieces),
count_armour_pieces(BodyPart.WAIST, armour_pieces),
count_armour_pieces(BodyPart.LEGS, armour_pieces),
len(charms)
)
)
equipment = armour_pieces + charms
equipment_by_body_part = {body_part: [] for body_part in BodyPart}
for piece in equipment:
equipment_by_body_part[piece.body_part].append(piece)
# Reserve one CPU for the progress bar, if possible.
num_worker_cpus = max(multiprocessing.cpu_count() - 1, 1)
combinations = Combiner(
equipment_by_body_part,
[
BruteForceCombinationAlgorithm(
num_worker_cpus,
config.result_limit
),
ConstrainedCombinationAlgorithm(
config.skill_config,
skills_by_name,
armour_set_skills,
config.result_limit
)
],
Scorer(config, skills_by_name, skill_ranks),
num_worker_cpus
).generate_combinations(config)
csv_exporter.export_combinations(combinations, skill_ranks, export_location)
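# --- Hedged usage sketch (the file paths below are placeholders) ---
# run(
#     config_location='config.json',
#     skills_location='skills.json',
#     armour_sets_location='armour_sets.json',
#     armour_location='armour.json',
#     charms_location='charms.json',
#     export_location='combinations.csv',
# )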
| 33.68
| 80
| 0.68171
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 208
| 0.061758
|
65689df9a53af776eb5d8ed7fadb2de295eee509
| 1,959
|
py
|
Python
|
setup.py
|
AurelienLourot/lsankidb
|
74479dd26e3a1ba7e40aca7740668998ff3ab944
|
[
"Unlicense"
] | 26
|
2018-04-21T15:49:19.000Z
|
2022-03-01T00:42:37.000Z
|
setup.py
|
AurelienLourot/lsankidb
|
74479dd26e3a1ba7e40aca7740668998ff3ab944
|
[
"Unlicense"
] | 2
|
2019-02-02T04:20:04.000Z
|
2020-05-08T09:03:28.000Z
|
setup.py
|
AurelienLourot/lsankidb
|
74479dd26e3a1ba7e40aca7740668998ff3ab944
|
[
"Unlicense"
] | 1
|
2020-08-19T20:59:18.000Z
|
2020-08-19T20:59:18.000Z
|
from setuptools import setup
import src
setup(name='lsankidb',
version=src.__version__,
install_requires=['AnkiTools'],
description='"ls" for your local Anki database.',
#FIXME this duplicates README.md
long_description="""
.. image:: https://cdn.jsdelivr.net/gh/AurelienLourot/lsankidb@c9735756451d135f94601b816469128e0cdadba2/thirdparty/logo.png
:height: 64px
:width: 64px
:align: right
lsankidb
========
``ls`` for your local `Anki <https://apps.ankiweb.net/>`__ database.
Dump all your Anki terms in order to save them, search them, ``grep`` them or ``diff`` them.
::
$ lsankidb
Listing /home/me/.local/share/Anki2/User 1/collection.anki2 ...
Default
French
['Hello', 'Bonjour']
['How are you?', 'Comment ça va ?']
German
['Hello', 'Hallo']
['How are you?', "Wie geht's?"]
`See on GitHub. <https://github.com/AurelienLourot/lsankidb>`__
""",
keywords=['anki',
'terminal',
'cli',
'dump',
'ls',],
author='Aurelien Lourot',
author_email='aurelien.lourot@gmail.com',
url='https://github.com/AurelienLourot/lsankidb',
download_url='https://github.com/AurelienLourot/lsankidb/tarball/'
+ src.__version__,
license='public domain',
classifiers=['Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: Public Domain',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Education',
'Topic :: Utilities'],
packages=['src'],
entry_points="""
[console_scripts]
lsankidb = src.lsankidb:main
""")
| 30.609375
| 123
| 0.566616
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,330
| 0.678571
|
656a26c896dba737b3e9298b257c0fdd56db182e
| 274
|
py
|
Python
|
output/models/ibm_data/valid/s3_12/s3_12v06_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 1
|
2021-08-14T17:59:21.000Z
|
2021-08-14T17:59:21.000Z
|
output/models/ibm_data/valid/s3_12/s3_12v06_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 4
|
2020-02-12T21:30:44.000Z
|
2020-04-15T20:06:46.000Z
|
output/models/ibm_data/valid/s3_12/s3_12v06_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | null | null | null |
from output.models.ibm_data.valid.s3_12.s3_12v06_xsd.s3_12v06 import (
ChildTypeBase,
ChildTypeDerived,
CtAlt1,
CtAlt2,
CtBase,
Root,
)
__all__ = [
"ChildTypeBase",
"ChildTypeDerived",
"CtAlt1",
"CtAlt2",
"CtBase",
"Root",
]
| 15.222222
| 70
| 0.616788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 63
| 0.229927
|
656a2c3eae1d6b7509dea9e1654cbf2d7ca142de
| 936
|
py
|
Python
|
main/component/database.py
|
nguyentranhoan/uit-mobile
|
8546312b01373d94cf00c64f7eacb769e0f4ccce
|
[
"BSD-3-Clause"
] | null | null | null |
main/component/database.py
|
nguyentranhoan/uit-mobile
|
8546312b01373d94cf00c64f7eacb769e0f4ccce
|
[
"BSD-3-Clause"
] | null | null | null |
main/component/database.py
|
nguyentranhoan/uit-mobile
|
8546312b01373d94cf00c64f7eacb769e0f4ccce
|
[
"BSD-3-Clause"
] | null | null | null |
import logging
from injector import inject, singleton
from starlette.config import Config
from common.database import BaseDatabase
LOGGER = logging.getLogger(__name__)
@singleton
@inject
class MasterDatabase(BaseDatabase):
def __init__(self, config: Config) -> None:
super().__init__(config)
self.__database_url: str = config('MASTER_DATABASE_URL', str)
LOGGER.debug('Master Session Maker Initialized')
self.test_connection()
@property
def get_db_url(self) -> str:
return self.__database_url
@singleton
@inject
class ReplicaDatabase(BaseDatabase):
def __init__(self, config: Config) -> None:
super().__init__(config)
self.__database_url: str = config('REPLICA_DATABASE_URL', str)
LOGGER.debug('Replica Session Maker Initialized')
self.test_connection()
@property
def get_db_url(self) -> str:
return self.__database_url
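# --- Hedged usage sketch (illustrative; not part of the original module) ---
# Assumes a .env file that defines MASTER_DATABASE_URL and REPLICA_DATABASE_URL.
#
#   from injector import Injector
#
#   def _bind_config(binder):
#       binder.bind(Config, to=Config('.env'))
#
#   container = Injector([_bind_config])
#   master = container.get(MasterDatabase)    # singleton; connection tested in __init__
#   replica = container.get(ReplicaDatabase)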
| 22.829268
| 70
| 0.702991
| 721
| 0.770299
| 0
| 0
| 759
| 0.810897
| 0
| 0
| 112
| 0.119658
|
656a96b402f3415f23db4722e5168fd52c75cff5
| 832
|
py
|
Python
|
preml/showtime/showie.py
|
5amron/pre-ml
|
3dff146d89468f4db0b7a9d92f3b0a26854efaf8
|
[
"MIT"
] | 3
|
2017-09-03T17:55:54.000Z
|
2018-11-24T13:11:19.000Z
|
preml/showtime/showie.py
|
5amron/pre-ml
|
3dff146d89468f4db0b7a9d92f3b0a26854efaf8
|
[
"MIT"
] | 2
|
2021-12-08T14:51:24.000Z
|
2021-12-09T15:42:09.000Z
|
preml/showtime/showie.py
|
5amron/pre-ml
|
3dff146d89468f4db0b7a9d92f3b0a26854efaf8
|
[
"MIT"
] | 2
|
2019-07-16T01:28:48.000Z
|
2020-04-12T21:23:08.000Z
|
from . import baco_show
# solution === (new_dataset, best_ant_road, acc_before_run, best_fit_so_far, total_feature_num, best_selected_features_num, best_fitnesses_each_iter, average_fitnesses_each_iter ,num_of_features_selected_by_best_ant_each_iter, time_temp, sample_num)
def draw_baco(solution):
if(len(solution) != 11):
print("+++ can't draw the solution due to problem with it! +++")
return
(new_dataset, best_ant_road, acc_before_run, best_fit_so_far, total_feature_num, best_selected_features_num, best_fitnesses_each_iter, average_fitnesses_each_iter ,num_of_features_selected_by_best_ant_each_iter, time_temp, sample_num) = solution
baco_show.show_res_for_this_run(best_fitnesses_each_iter, average_fitnesses_each_iter, num_of_features_selected_by_best_ant_each_iter, total_feature_num)
| 48.941176
| 249
| 0.824519
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 306
| 0.367788
|
656d941b23f4bcfcea63953ee33a50e5ae179565
| 2,035
|
py
|
Python
|
PyFlow/UI/EncodeResources.py
|
QuentinTournier40/AnimationFreeCAD
|
8eaff8356ec68b948a721b83a6888b652278db8a
|
[
"Apache-2.0"
] | null | null | null |
PyFlow/UI/EncodeResources.py
|
QuentinTournier40/AnimationFreeCAD
|
8eaff8356ec68b948a721b83a6888b652278db8a
|
[
"Apache-2.0"
] | null | null | null |
PyFlow/UI/EncodeResources.py
|
QuentinTournier40/AnimationFreeCAD
|
8eaff8356ec68b948a721b83a6888b652278db8a
|
[
"Apache-2.0"
] | 1
|
2022-02-03T08:03:30.000Z
|
2022-02-03T08:03:30.000Z
|
# Copyright 2015-2019 Ilgar Lunin, Pedro Cabrera
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import subprocess
from PySide2 import __binding__
binding = __import__(__binding__)
path = os.path.dirname(binding.__file__)
if __binding__ == "PySide2":
app = 'pyside2-rcc.exe'
elif __binding__ == "PySide":
app = 'pyside-rcc.exe'
elif __binding__ == "PyQt4":
app = 'pyrcc4.exe'
elif __binding__ == "PyQt5":
app = 'pyrcc5.exe'
def main():
print('Encoding : Resources')
filepath = os.path.abspath("./resources")
resourceFile = 'Resources.qrc'
with open(resourceFile, 'w') as outf:
outf.write('<RCC>\n <qresource>\n')
for root, dirs, files in os.walk("resources"):
for file in files:
if '.qrc' not in file:
dirname = os.path.relpath(os.path.join(root, file))
print(dirname)
write = ' <file alias="%s">%s</file>\n' % (
file, dirname)
outf.write(write)
outf.write(" </qresource>\n</RCC>")
outf.close()
args = [os.path.join(path, app), "-compress", "2", "-threshold", "3", '-o',
os.path.join(os.path.dirname(filepath), r'resources.py'), resourceFile]
p = subprocess.Popen(
args, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
print(out)
print(err)
# import resources
print('Compiled : Resources')
if __name__ == "__main__":
main()
| 31.796875
| 83
| 0.634889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 914
| 0.44914
|
656dd6e36b81c8ae6c5f6face7c1c54668fb5ff1
| 307
|
py
|
Python
|
stix_shifter_modules/secretserver/stix_transmission/delete_connector.py
|
grimmjow8/stix-shifter
|
7d252fc241a606f0141ed50d64368d8a5e7e5c5a
|
[
"Apache-2.0"
] | 129
|
2019-10-09T17:13:03.000Z
|
2022-03-03T08:25:46.000Z
|
stix_shifter_modules/secretserver/stix_transmission/delete_connector.py
|
grimmjow8/stix-shifter
|
7d252fc241a606f0141ed50d64368d8a5e7e5c5a
|
[
"Apache-2.0"
] | 415
|
2019-10-03T14:29:20.000Z
|
2022-03-31T18:23:41.000Z
|
stix_shifter_modules/secretserver/stix_transmission/delete_connector.py
|
grimmjow8/stix-shifter
|
7d252fc241a606f0141ed50d64368d8a5e7e5c5a
|
[
"Apache-2.0"
] | 178
|
2019-10-08T22:18:48.000Z
|
2022-03-21T11:04:05.000Z
|
from stix_shifter_utils.modules.base.stix_transmission.base_delete_connector import BaseDeleteConnector
class DeleteConnector(BaseDeleteConnector):
def __init__(self, api_client):
self.api_client = api_client
def delete_query_connection(self, search_id):
return {"success": True}
| 30.7
| 103
| 0.781759
| 200
| 0.651466
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 0.029316
|
65710b598ac66d11ae7f738f8a51d26c406a0a31
| 6,525
|
py
|
Python
|
cron/sync_cs_schedule.py
|
vovagalchenko/onsite-inflight
|
7acd4bc6a12b89ab09b465a81ae495bef35bab0a
|
[
"MIT"
] | null | null | null |
cron/sync_cs_schedule.py
|
vovagalchenko/onsite-inflight
|
7acd4bc6a12b89ab09b465a81ae495bef35bab0a
|
[
"MIT"
] | 1
|
2016-05-24T00:00:10.000Z
|
2016-05-24T00:00:10.000Z
|
cron/sync_cs_schedule.py
|
vovagalchenko/onsite-inflight
|
7acd4bc6a12b89ab09b465a81ae495bef35bab0a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import sys
import pprint
from model.cs_rep import CS_Rep
from pytz import timezone, utc
from datetime import datetime, timedelta
from lib.calendar import Google_Calendar, google_ts_to_datetime, DEFAULT_DATE, LOS_ANGELES_TZ
from lib.conf import CFG
from model.db_session import DB_Session_Factory
from json import dumps, loads
from oauth2client.client import AccessTokenRefreshError
from apiclient.errors import HttpError
from pytz import timezone
import pdb
import re
target_weekday = 3 # Thursday
target_timerange = [12, 14]
target_calendar_id = "box.com_gk9hfef9s7fulrq0t3mftrvevk@group.calendar.google.com"
def get_ts_from_event(event, ts_key):
return google_ts_to_datetime(event.get(ts_key, {}).get('dateTime', DEFAULT_DATE))
def main(argv):
calendar = Google_Calendar.get_calendar()
db_session = DB_Session_Factory.get_db_session()
now = datetime.now()
today_weekday = now.weekday()
next_target_weekday = now + timedelta(days = (target_weekday - today_weekday + 6)%7 + 1)
la_timezone = timezone(LOS_ANGELES_TZ)
start_period_naive = datetime(next_target_weekday.year, next_target_weekday.month, next_target_weekday.day, target_timerange[0])
start_period = la_timezone.localize(start_period_naive)
end_period_naive = datetime(next_target_weekday.year, next_target_weekday.month, next_target_weekday.day, target_timerange[1])
end_period = la_timezone.localize(end_period_naive)
print str(start_period) + " - " + str(end_period)
try:
cs_rep_list = db_session.query(CS_Rep).order_by(CS_Rep.email)
source_events = {}
for cs_rep in cs_rep_list:
current_period_start = start_period_naive
current_period_end = start_period_naive + timedelta(hours = 1)
print "Checking calendar for " + cs_rep.name
source_events_request = calendar.service.events().list(calendarId = cs_rep.email, timeZone = LOS_ANGELES_TZ, timeMin = start_period.isoformat(), timeMax = end_period.isoformat(), orderBy = 'startTime', singleEvents = True, maxAttendees = 1000)
while (source_events_request != None):
response = source_events_request.execute(calendar.http)
for event in response.get('items', []):
summary = event.get('summary', '')
start_time = get_ts_from_event(event, 'start')
end_time = get_ts_from_event(event, 'end')
if start_time < start_period_naive or end_time > end_period_naive or start_time < current_period_start or end_time - start_time > timedelta(hours=1):
continue
while current_period_end < end_time:
current_period_start = current_period_start + timedelta(hours = 1)
current_period_end = current_period_end + timedelta(hours = 1)
match = re.search("\*$", summary)
if match:
source_events[event['id']] = event
current_period_start = current_period_start + timedelta(hours = 1)
current_period_end = current_period_end + timedelta(hours = 1)
else:
print "no match: " + summary
source_events_request = calendar.service.events().list_next(source_events_request, response)
to_delete = []
to_update = {}
target_events_request = calendar.service.events().list(calendarId = target_calendar_id, timeZone = LOS_ANGELES_TZ, timeMin = start_period.isoformat(), timeMax = end_period.isoformat(), orderBy = 'startTime', singleEvents = True)
while (target_events_request != None):
response = target_events_request.execute(calendar.http)
for event in response.get('items', []):
source_event = source_events.get(event['id'], None)
if source_event is None:
to_delete.append(event)
else:
to_update[event['id']] = {'before' : event, 'after' : source_events[event['id']].copy()}
del source_events[event['id']]
target_events_request = calendar.service.events().list_next(target_events_request, response)
for event in to_delete:
print "Removing: " + event.get('summary', "")
calendar.service.events().delete(calendarId = target_calendar_id, eventId = event['id']).execute(calendar.http)
for event_id in to_update:
original_event = to_update[event_id]['before']
original_start = get_ts_from_event(original_event, 'start')
original_end = get_ts_from_event(original_event, 'end')
after_event = to_update[event_id]['after']
after_start = get_ts_from_event(after_event, 'start')
after_end = get_ts_from_event(after_event, 'end')
if original_start != after_start or original_end != after_end:
original_event['start'] = after_event['start']
original_event['end'] = after_event['end']
print "Updating: " + original_event.get('summary', "")
calendar.service.events().update(calendarId = target_calendar_id, eventId = event_id, body = original_event).execute(calendar.http)
for event_id in source_events:
source_event = source_events[event_id]
print "Adding: " + source_event.get('summary', "")
source_event['organizer'] = {'self' : True}
source_event['location'] = '4440-3-4 The Marina'
while True:
try:
calendar.service.events().import_(calendarId = target_calendar_id, body = source_event).execute(calendar.http)
break
except HttpError as e:
error_data = loads(e.content)
print error_data['error']['code']
if error_data.get('error', {'code' : None}).get('code', None) == 400:
source_event['sequence'] += 1
else:
sys.stderr.write("HTTP Error: " + e.content)
exit(1)
except AccessTokenRefreshError:
print ("The credentials have been revoked or expired, please re-run"
"the application to re-authorize")
if __name__ == '__main__':
main(sys.argv)
| 50.976563
| 255
| 0.630192
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 585
| 0.089655
|
6572eb6865b171103c8335e5748af8c03fb39e11
| 5,500
|
py
|
Python
|
hardware/opentrons_hardware/drivers/can_bus/can_messenger.py
|
anuwrag/opentrons
|
28c8d76a19e367c6bd38f5290faaa32abf378715
|
[
"Apache-2.0"
] | null | null | null |
hardware/opentrons_hardware/drivers/can_bus/can_messenger.py
|
anuwrag/opentrons
|
28c8d76a19e367c6bd38f5290faaa32abf378715
|
[
"Apache-2.0"
] | null | null | null |
hardware/opentrons_hardware/drivers/can_bus/can_messenger.py
|
anuwrag/opentrons
|
28c8d76a19e367c6bd38f5290faaa32abf378715
|
[
"Apache-2.0"
] | null | null | null |
"""Can messenger class."""
from __future__ import annotations
import asyncio
from inspect import Traceback
from typing import List, Optional, Callable, Tuple
import logging
from opentrons_hardware.drivers.can_bus.abstract_driver import AbstractCanDriver
from opentrons_hardware.firmware_bindings.arbitration_id import (
ArbitrationId,
ArbitrationIdParts,
)
from opentrons_hardware.firmware_bindings.message import CanMessage
from opentrons_hardware.firmware_bindings.constants import NodeId, MessageId
from opentrons_hardware.firmware_bindings.messages.messages import (
MessageDefinition,
get_definition,
)
from opentrons_hardware.firmware_bindings.utils import BinarySerializableException
log = logging.getLogger(__name__)
MessageListenerCallback = Callable[[MessageDefinition, ArbitrationId], None]
"""Incoming message listener."""
class CanMessenger:
"""High level can messaging class wrapping a CanDriver.
The background task can be controlled with start/stop methods.
To receive message notifications add a listener using add_listener
"""
def __init__(self, driver: AbstractCanDriver) -> None:
"""Constructor.
Args:
driver: The can bus driver to use.
"""
self._drive = driver
self._listeners: List[MessageListenerCallback] = []
self._task: Optional[asyncio.Task[None]] = None
async def send(self, node_id: NodeId, message: MessageDefinition) -> None:
"""Send a message."""
# TODO (amit, 2021-11-05): Use function code when it is better defined.
arbitration_id = ArbitrationId(
parts=ArbitrationIdParts(
message_id=message.message_id,
node_id=node_id,
function_code=0,
originating_node_id=NodeId.host,
)
)
data = message.payload.serialize()
log.info(
f"Sending -->\n\tarbitration_id: {arbitration_id},\n\t"
f"payload: {message.payload}"
)
await self._drive.send(
message=CanMessage(arbitration_id=arbitration_id, data=data)
)
def start(self) -> None:
"""Start the reader task."""
if self._task:
log.warning("task already running.")
return
self._task = asyncio.get_event_loop().create_task(self._read_task_shield())
async def stop(self) -> None:
"""Stop the reader task."""
if self._task:
self._task.cancel()
try:
await self._task
except asyncio.CancelledError:
log.info("Task cancelled.")
else:
log.warning("task not running.")
def add_listener(self, listener: MessageListenerCallback) -> None:
"""Add a message listener."""
self._listeners.append(listener)
def remove_listener(self, listener: MessageListenerCallback) -> None:
"""Remove a message listener."""
self._listeners.remove(listener)
async def _read_task_shield(self) -> None:
try:
await self._read_task()
except asyncio.CancelledError:
pass
except Exception:
log.exception("Exception in read")
raise
async def _read_task(self) -> None:
"""Read task."""
async for message in self._drive:
message_definition = get_definition(
MessageId(message.arbitration_id.parts.message_id)
)
if message_definition:
try:
build = message_definition.payload_type.build(message.data)
log.info(
f"Received <--\n\tarbitration_id: {message.arbitration_id},\n\t"
f"payload: {build}"
)
for listener in self._listeners:
listener(message_definition(payload=build), message.arbitration_id) # type: ignore[arg-type] # noqa: E501
except BinarySerializableException:
log.exception(f"Failed to build from {message}")
else:
log.error(f"Message {message} is not recognized.")
class WaitableCallback:
"""MessageListenerCallback that can be awaited or iterated."""
def __init__(self, messenger: CanMessenger) -> None:
"""Constructor.
Args:
messenger: Messenger to listen on.
"""
self._messenger = messenger
self._queue: asyncio.Queue[
Tuple[MessageDefinition, ArbitrationId]
] = asyncio.Queue()
def __call__(
self, message: MessageDefinition, arbitration_id: ArbitrationId
) -> None:
"""Callback."""
self._queue.put_nowait((message, arbitration_id))
def __enter__(self) -> WaitableCallback:
"""Enter context manager."""
self._messenger.add_listener(self)
return self
def __exit__(
self, exc_type: type, exc_val: BaseException, exc_tb: Traceback
) -> None:
"""Exit context manager."""
self._messenger.remove_listener(self)
def __aiter__(self) -> WaitableCallback:
"""Enter iterator."""
return self
async def __anext__(self) -> Tuple[MessageDefinition, ArbitrationId]:
"""Async next."""
return await self.read()
async def read(self) -> Tuple[MessageDefinition, ArbitrationId]:
"""Read next message."""
return await self._queue.get()
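# Hedged usage sketch (an assumption, not part of the original module): shows
# how a caller might consume a single incoming message via WaitableCallback.
# The concrete AbstractCanDriver and the surrounding event loop are assumed to
# exist elsewhere; only CanMessenger and WaitableCallback come from the code
# above.
async def example_read_one(messenger: CanMessenger) -> None:
    # The context manager registers the callback on entry and removes it again
    # on exit, so the listener never outlives this coroutine.
    with WaitableCallback(messenger) as callback:
        message, arbitration_id = await callback.read()
        log.info(f"example received {message} from {arbitration_id}")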
| 33.333333
| 131
| 0.623818
| 4,638
| 0.843273
| 0
| 0
| 0
| 0
| 2,506
| 0.455636
| 1,212
| 0.220364
|
65745bf4fb319f381c713d4d3577e3e4308c9cad
| 3,390
|
py
|
Python
|
document.py
|
miguelps/web-document-scanner
|
e1d61a6cf37a2f79805098bfa22173f4c7aab8d8
|
[
"MIT"
] | 23
|
2017-09-28T12:56:38.000Z
|
2022-01-06T04:14:55.000Z
|
document.py
|
miguelps/web-document-scanner
|
e1d61a6cf37a2f79805098bfa22173f4c7aab8d8
|
[
"MIT"
] | null | null | null |
document.py
|
miguelps/web-document-scanner
|
e1d61a6cf37a2f79805098bfa22173f4c7aab8d8
|
[
"MIT"
] | 17
|
2018-01-12T07:10:37.000Z
|
2020-11-14T10:00:59.000Z
|
import cv2
import rect
import numpy as np
class Scanner(object):
def __init__(self):
pass
def __del__(self):
pass
def auto_canny(self, image, sigma=0.33):
v = np.median(image)
lower = int(max(0, (1.0 - sigma) * v))
upper = int(min(255, (1.0 + sigma) * v))
edged = cv2.Canny(image, lower, upper)
# return the edged image
return edged
# http://www.pyimagesearch.com/2014/08/25/4-point-opencv-getperspective-transform-example/
def four_point_transform(self, image, rect):
# obtain a consistent order of the points and unpack them
# individually
(tl, tr, br, bl) = rect
# compute the width of the new image, which will be the
# maximum distance between bottom-right and bottom-left
# x-coordiates or the top-right and top-left x-coordinates
widthA = np.sqrt(((br[0] - bl[0]) ** 2) + ((br[1] - bl[1]) ** 2))
widthB = np.sqrt(((tr[0] - tl[0]) ** 2) + ((tr[1] - tl[1]) ** 2))
maxWidth = max(int(widthA), int(widthB))
# compute the height of the new image, which will be the
# maximum distance between the top-right and bottom-right
# y-coordinates or the top-left and bottom-left y-coordinates
heightA = np.sqrt(((tr[0] - br[0]) ** 2) + ((tr[1] - br[1]) ** 2))
heightB = np.sqrt(((tl[0] - bl[0]) ** 2) + ((tl[1] - bl[1]) ** 2))
maxHeight = max(int(heightA), int(heightB))
# now that we have the dimensions of the new image, construct
# the set of destination points to obtain a "birds eye view",
# (i.e. top-down view) of the image, again specifying points
# in the top-left, top-right, bottom-right, and bottom-left
# order
dst = np.array([
[0, 0],
[maxWidth - 1, 0],
[maxWidth - 1, maxHeight - 1],
[0, maxHeight - 1]], dtype = "float32")
# compute the perspective transform matrix and then apply it
M = cv2.getPerspectiveTransform(rect, dst)
warped = cv2.warpPerspective(image, M, (maxWidth, maxHeight))
# return the warped image
return warped
# https://github.com/vipul-sharma20/document-scanner
def detect_edge(self, image, enabled_transform = False):
dst = None
orig = image.copy()
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
blurred = cv2.GaussianBlur(gray, (5, 5), 0)
edged = cv2.Canny(blurred, 0, 20)
_, contours, _ = cv2.findContours(edged, cv2.RETR_LIST, cv2.CHAIN_APPROX_NONE)
contours = sorted(contours, key=cv2.contourArea, reverse=True)
for cnt in contours:
epsilon = 0.051 * cv2.arcLength(cnt, True)
approx = cv2.approxPolyDP(cnt, epsilon, True)
if len(approx) == 4:
target = approx
cv2.drawContours(image, [target], -1, (0, 255, 0), 2)
if enabled_transform:
approx = rect.rectify(target)
# pts2 = np.float32([[0,0],[800,0],[800,800],[0,800]])
# M = cv2.getPerspectiveTransform(approx,pts2)
# dst = cv2.warpPerspective(orig,M,(800,800))
dst = self.four_point_transform(orig, approx)
break
return image, dst
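# Hedged usage sketch (an assumption, not part of the original file): runs the
# edge detector on an image from disk. The "input.jpg"/"scanned.jpg" paths are
# placeholders; Scanner.detect_edge and its enabled_transform flag come from
# the code above.
if __name__ == "__main__":
    scanner = Scanner()
    image = cv2.imread("input.jpg")
    annotated, warped = scanner.detect_edge(image, enabled_transform=True)
    cv2.imwrite("annotated.jpg", annotated)
    if warped is not None:
        # warped is the top-down ("bird's eye") view of the detected document.
        cv2.imwrite("scanned.jpg", warped)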
| 37.666667
| 94
| 0.562832
| 3,336
| 0.984071
| 0
| 0
| 0
| 0
| 0
| 0
| 1,066
| 0.314454
|
657481068e2f17e43b44ef100d1a13a239c78046
| 797
|
py
|
Python
|
54.py
|
geethakamath18/Leetcode
|
8e55e0a47ee35ed100b30dda6682c7ce1033d4b2
|
[
"MIT"
] | null | null | null |
54.py
|
geethakamath18/Leetcode
|
8e55e0a47ee35ed100b30dda6682c7ce1033d4b2
|
[
"MIT"
] | null | null | null |
54.py
|
geethakamath18/Leetcode
|
8e55e0a47ee35ed100b30dda6682c7ce1033d4b2
|
[
"MIT"
] | null | null | null |
#LeetCode problem 54: Spiral Matrix
from typing import List
class Solution:
def spiralOrder(self, matrix: List[List[int]]) -> List[int]:
if(len(matrix)==0 or len(matrix[0])==0):
return []
res=[]
rb=0
re=len(matrix)
cb=0
ce=len(matrix[0])
while(re>rb and ce>cb):
for j in range(cb,ce):
res.append(matrix[rb][j])
for k in range(rb+1,re-1):
res.append(matrix[k][ce-1])
if(re!=rb+1):
for l in range(ce-1,cb-1,-1):
res.append(matrix[re-1][l])
if(cb!=ce-1):
for m in range(re-2,rb,-1):
res.append(matrix[m][cb])
rb+=1
ce-=1
cb+=1
re-=1
return(res)
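# Hedged worked example (an assumption, not part of the original file): tracing
# the loop above on a 3x3 matrix yields the clockwise spiral order.
if __name__ == "__main__":
    grid = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
    print(Solution().spiralOrder(grid))  # [1, 2, 3, 6, 9, 8, 7, 4, 5]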
| 30.653846
| 64
| 0.425345
| 761
| 0.954831
| 0
| 0
| 0
| 0
| 0
| 0
| 35
| 0.043915
|
657754f3c56c1f682c95288c291fac1e80bdecd4
| 717
|
py
|
Python
|
pytype/pyi/parse_pyi.py
|
CyberFlameGO/pytype
|
c8cbeea997634455b5abcb27c76c58aa0dfc25ae
|
[
"Apache-2.0"
] | null | null | null |
pytype/pyi/parse_pyi.py
|
CyberFlameGO/pytype
|
c8cbeea997634455b5abcb27c76c58aa0dfc25ae
|
[
"Apache-2.0"
] | null | null | null |
pytype/pyi/parse_pyi.py
|
CyberFlameGO/pytype
|
c8cbeea997634455b5abcb27c76c58aa0dfc25ae
|
[
"Apache-2.0"
] | null | null | null |
# python3
"""Testing code to run the typed_ast based pyi parser."""
import sys
from pytype import module_utils
from pytype.pyi import parser
from pytype.pyi.types import ParseError # pylint: disable=g-importing-member
from pytype.pytd import pytd_utils
if __name__ == '__main__':
filename = sys.argv[1]
with open(filename, 'r') as f:
src = f.read()
module_name = module_utils.path_to_module_name(filename)
version = (3, 6)
try:
out, _ = parser.parse_pyi_debug(
src, filename, module_name, version, None)
except ParseError as e:
print(e)
sys.exit(1)
print('------pytd--------------')
print(out)
print('------round trip--------------')
print(pytd_utils.Print(out))
| 21.727273
| 77
| 0.659693
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 173
| 0.241283
|
6577dab5ac11bbb023397905f08425826f206066
| 176
|
py
|
Python
|
backend/university/admin.py
|
andriyandrushko0/univowl
|
da613316021f7b41b133b5b6e360cc6b9db60504
|
[
"MIT"
] | null | null | null |
backend/university/admin.py
|
andriyandrushko0/univowl
|
da613316021f7b41b133b5b6e360cc6b9db60504
|
[
"MIT"
] | null | null | null |
backend/university/admin.py
|
andriyandrushko0/univowl
|
da613316021f7b41b133b5b6e360cc6b9db60504
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import *
admin.site.register(University)
admin.site.register(Faculty)
admin.site.register(Subject)
admin.site.register(Teacher)
| 19.555556
| 32
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
65780384622ed08f0a6ec4a97dc9199a0a7d8975
| 1,333
|
py
|
Python
|
src/train.py
|
shuvoxcd01/neural_tic_tac_toe
|
a988230ff3dd0d882ebc0fb19630c9ff22fef629
|
[
"Apache-2.0"
] | null | null | null |
src/train.py
|
shuvoxcd01/neural_tic_tac_toe
|
a988230ff3dd0d882ebc0fb19630c9ff22fef629
|
[
"Apache-2.0"
] | null | null | null |
src/train.py
|
shuvoxcd01/neural_tic_tac_toe
|
a988230ff3dd0d882ebc0fb19630c9ff22fef629
|
[
"Apache-2.0"
] | null | null | null |
from pettingzoo.classic import tictactoe_v3
import os
os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
import tensorflow as tf
if tf.test.gpu_device_name():
print('GPU found')
else:
print("No GPU found")
from src.q_learning.agent.agent import Agent
from src.q_learning.network.dqn import DQN
from src.q_learning.policies.epsilon_greedy_policy import EpsilonGreedyPolicy
from src.q_learning.policies.greedy_policy import GreedyPolicy
from src.q_learning.q_learning import QLearning
from src.q_learning.transition_table.transition_table import TransitionTable
env = tictactoe_v3.env()
env.reset()
input_shape = (3, 3, 2)
num_actions = 9
agents = {}
for agent_name in env.agents:
q_network = DQN.get_q_network(input_shape=input_shape, num_actions=num_actions)
target_q_network = DQN.clone(q_network)
transition_table = TransitionTable()
behavior_policy = EpsilonGreedyPolicy(q_network=q_network)
target_policy = GreedyPolicy(q_network=target_q_network)
agent = Agent(q_network=q_network, target_q_network=target_q_network, transition_table=transition_table,
behavior_policy=behavior_policy, target_policy=target_policy, agent_name=agent_name)
agents[agent_name] = agent
env.reset()
q_learning = QLearning(env=env, num_actions=num_actions, agents=agents)
q_learning.train(1000000)
| 32.512195
| 108
| 0.795199
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 51
| 0.03826
|
657895d9e23f4c721777cf7c3eb7603b0f1ecfd7
| 199
|
py
|
Python
|
app/gunicorn_settings.py
|
jsonbinit/jsonbinit-api
|
babbf16f1fafe913d9fdb2646cb7a9542ec19355
|
[
"MIT"
] | 3
|
2020-06-24T14:55:51.000Z
|
2020-06-25T23:12:13.000Z
|
app/gunicorn_settings.py
|
jsonbinit/jsonbinit-api
|
babbf16f1fafe913d9fdb2646cb7a9542ec19355
|
[
"MIT"
] | 3
|
2020-04-23T10:39:25.000Z
|
2020-06-24T15:22:25.000Z
|
app/gunicorn_settings.py
|
jsonbinit/jsonbinit-api
|
babbf16f1fafe913d9fdb2646cb7a9542ec19355
|
[
"MIT"
] | null | null | null |
import multiprocessing
import os
bind = "{0}:{1}".format(os.environ.get('HOST', '0.0.0.0'), os.environ.get('PORT', '8080'))
workers = os.environ.get('WORKERS', multiprocessing.cpu_count() * 2 + 1)
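# Hedged example (an assumption, not part of the original file): with HOST,
# PORT and WORKERS unset, bind resolves to "0.0.0.0:8080" and workers to
# 2 * cpu_count() + 1 (e.g. 9 on a 4-core machine).
if __name__ == "__main__":
    print(bind, workers)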
| 28.428571
| 90
| 0.668342
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 45
| 0.226131
|
65795b0076fedc356eabb71c92883de2bfec241d
| 261
|
py
|
Python
|
tests/test_crud/conftest.py
|
amisadmin/fastapi_amis_admin
|
07967a31c46cb6e8e0b4ca703d6d815c2091624a
|
[
"Apache-2.0"
] | 166
|
2022-02-05T15:52:51.000Z
|
2022-03-31T10:57:35.000Z
|
tests/test_crud/conftest.py
|
amisadmin/fastapi_amis_admin
|
07967a31c46cb6e8e0b4ca703d6d815c2091624a
|
[
"Apache-2.0"
] | 9
|
2022-02-17T07:32:58.000Z
|
2022-03-31T13:46:24.000Z
|
tests/test_crud/conftest.py
|
amisadmin/fastapi_amis_admin
|
07967a31c46cb6e8e0b4ca703d6d815c2091624a
|
[
"Apache-2.0"
] | 15
|
2022-02-10T07:24:17.000Z
|
2022-03-24T04:08:10.000Z
|
import pytest
from tests.test_crud.main import app
@pytest.fixture(scope='session', autouse=True)
def startup():
import asyncio
# asyncio.run(app.router.startup())
loop = asyncio.get_event_loop()
loop.run_until_complete(app.router.startup())
| 21.75
| 49
| 0.731801
| 0
| 0
| 0
| 0
| 206
| 0.789272
| 0
| 0
| 44
| 0.168582
|
657a75ed6a167a9a6dc1817f3c5d4306180485f8
| 794
|
py
|
Python
|
Ehemalige/models.py
|
wmles/olymp
|
97b1a256982c2a75c39ba3a855b63a147d4409c5
|
[
"MIT"
] | null | null | null |
Ehemalige/models.py
|
wmles/olymp
|
97b1a256982c2a75c39ba3a855b63a147d4409c5
|
[
"MIT"
] | null | null | null |
Ehemalige/models.py
|
wmles/olymp
|
97b1a256982c2a75c39ba3a855b63a147d4409c5
|
[
"MIT"
] | null | null | null |
from django.db import models
""" Konzept:
Man soll sich als Ehemaliger in die DB eintragen können (Nutzeraccount
dafür nötig?)
Es gibt ein Textfeld für den Lebenslauf und die Auswahl des aktuellen Ortes
und der Tätigkeit.
ehem, das kommt mir ziemlich eng vor, so wie die alte Version der olymp-db
"""
from Grundgeruest.models import Grundklasse
from seite.settings import AUTH_USER_MODEL as user_model
class Ehemaliger(Grundklasse):
""" Der Lebenslauf einer Person, optional Verknüpfung zum Nutzer """
lebenslauf = models.TextField()
ort = models.CharField(max_length=255)
taetigkeit = models.CharField(max_length=255, verbose_name='Tätigkeit')
nutzer = models.ForeignKey(user_model, null=True, blank=True)
class Meta:
verbose_name_plural = 'Ehemalige'
| 33.083333
| 76
| 0.760705
| 385
| 0.480649
| 0
| 0
| 0
| 0
| 0
| 0
| 373
| 0.465668
|
657b2220279cfe93a6ac57a28426feba8dfe7ccf
| 15,921
|
py
|
Python
|
market_maker/market_maker.py
|
Quant-Network/sample-market-maker
|
4c47b60be66b1aead901400ba5fe96abf5e73c1b
|
[
"Apache-2.0"
] | null | null | null |
market_maker/market_maker.py
|
Quant-Network/sample-market-maker
|
4c47b60be66b1aead901400ba5fe96abf5e73c1b
|
[
"Apache-2.0"
] | null | null | null |
market_maker/market_maker.py
|
Quant-Network/sample-market-maker
|
4c47b60be66b1aead901400ba5fe96abf5e73c1b
|
[
"Apache-2.0"
] | 1
|
2021-04-27T12:02:41.000Z
|
2021-04-27T12:02:41.000Z
|
from __future__ import absolute_import
from time import sleep
import sys
from datetime import datetime
from os.path import getmtime
import random
import requests
import atexit
import signal
import logging
from market_maker.bitmex import BitMEX
from market_maker.settings import settings
from market_maker.utils import log, constants, errors, math
# Used for reloading the bot - saves modified times of key files
import os
watched_files_mtimes = [(f, getmtime(f)) for f in settings.WATCHED_FILES]
class ExchangeInterface:
def __init__(self):
self.logger = logging.getLogger('root')
if len(sys.argv) > 1:
self.symbol = sys.argv[1]
else:
self.symbol = settings.SYMBOL
self.bitmex = BitMEX(base_url=settings.BASE_URL, symbol=self.symbol,
apiKey=settings.BITMEX_API_KEY, apiSecret=settings.BITMEX_API_SECRET,
orderIDPrefix=settings.ORDERID_PREFIX, postOnly=settings.POST_ONLY,
timeout=settings.TIMEOUT)
def cancel_order(self, order):
tickLog = self.get_instrument()['tickLog']
self.logger.info("Canceling: %s %d @ %.*f" % (order['side'], order['orderQty'], tickLog, order['price']))
while True:
try:
self.bitmex.cancel(order['orderID'])
sleep(settings.API_REST_INTERVAL)
except ValueError as e:
self.logger.info(e)
sleep(settings.API_ERROR_INTERVAL)
else:
break
def cancel_all_orders(self):
self.logger.info("Resetting current position. Canceling all existing orders.")
tickLog = self.get_instrument()['tickLog']
# In certain cases, a WS update might not make it through before we call this.
# For that reason, we grab via HTTP to ensure we grab them all.
orders = self.bitmex.http_open_orders()
for order in orders:
self.logger.info("Canceling: %s %d @ %.*f" % (order['side'], order['orderQty'], tickLog, order['price']))
if len(orders):
self.bitmex.cancel([order['orderID'] for order in orders])
sleep(settings.API_REST_INTERVAL)
def get_portfolio(self):
contracts = settings.CONTRACTS
portfolio = {}
for symbol in contracts:
position = self.bitmex.position(symbol=symbol)
instrument = self.bitmex.instrument(symbol=symbol)
if instrument['isQuanto']:
future_type = "Quanto"
elif instrument['isInverse']:
future_type = "Inverse"
elif not instrument['isQuanto'] and not instrument['isInverse']:
future_type = "Linear"
else:
raise NotImplementedError("Unknown future type; not quanto or inverse: %s" % instrument['symbol'])
if instrument['underlyingToSettleMultiplier'] is None:
multiplier = float(instrument['multiplier']) / float(instrument['quoteToSettleMultiplier'])
else:
multiplier = float(instrument['multiplier']) / float(instrument['underlyingToSettleMultiplier'])
portfolio[symbol] = {
"currentQty": float(position['currentQty']),
"futureType": future_type,
"multiplier": multiplier,
"markPrice": float(instrument['markPrice']),
"spot": float(instrument['indicativeSettlePrice'])
}
return portfolio
def calc_delta(self):
"""Calculate currency delta for portfolio"""
portfolio = self.get_portfolio()
spot_delta = 0
mark_delta = 0
for symbol in portfolio:
item = portfolio[symbol]
if item['futureType'] == "Quanto":
spot_delta += item['currentQty'] * item['multiplier'] * item['spot']
mark_delta += item['currentQty'] * item['multiplier'] * item['markPrice']
elif item['futureType'] == "Inverse":
spot_delta += (item['multiplier'] / item['spot']) * item['currentQty']
mark_delta += (item['multiplier'] / item['markPrice']) * item['currentQty']
elif item['futureType'] == "Linear":
spot_delta += item['multiplier'] * item['currentQty']
mark_delta += item['multiplier'] * item['currentQty']
basis_delta = mark_delta - spot_delta
delta = {
"spot": spot_delta,
"mark_price": mark_delta,
"basis": basis_delta
}
return delta
def get_delta(self, symbol=None):
if symbol is None:
symbol = self.symbol
return self.get_position(symbol)['currentQty']
def get_instrument(self, symbol=None):
if symbol is None:
symbol = self.symbol
return self.bitmex.instrument(symbol)
def get_margin(self):
return self.bitmex.funds()
def get_orders(self):
return self.bitmex.open_orders()
def get_highest_buy(self):
buys = [o for o in self.get_orders() if o['side'] == 'Buy']
if not len(buys):
return {'price': -2**32}
highest_buy = max(buys or [], key=lambda o: o['price'])
return highest_buy if highest_buy else {'price': -2**32}
def get_lowest_sell(self):
sells = [o for o in self.get_orders() if o['side'] == 'Sell']
if not len(sells):
return {'price': 2**32}
lowest_sell = min(sells or [], key=lambda o: o['price'])
return lowest_sell if lowest_sell else {'price': 2**32} # ought to be enough for anyone
def get_position(self, symbol=None):
if symbol is None:
symbol = self.symbol
return self.bitmex.position(symbol)
def get_ticker(self, symbol=None):
if symbol is None:
symbol = self.symbol
return self.bitmex.ticker_data(symbol)
def is_open(self):
"""Check that websockets are still open."""
return not self.bitmex.ws.exited
def is_stable(self):
"""Check that websockets are still stable for use."""
return self.bitmex.ws.is_client_stable()
def check_market_open(self):
instrument = self.get_instrument()
if instrument["state"] != "Open" and instrument["state"] != "Closed":
raise errors.MarketClosedError("The instrument %s is not open. State: %s" %
(self.symbol, instrument["state"]))
def check_if_orderbook_empty(self):
"""This function checks whether the order book is empty"""
instrument = self.get_instrument()
if instrument['midPrice'] is None:
raise errors.MarketEmptyError("Orderbook is empty, cannot quote")
def amend_bulk_orders(self, orders):
return self.bitmex.amend_bulk_orders(orders)
def create_bulk_orders(self, orders):
return self.bitmex.create_bulk_orders(orders)
def cancel_bulk_orders(self, orders):
return self.bitmex.cancel([order['orderID'] for order in orders])
class OrderManager:
def __init__(self):
self.logger = logging.getLogger('root')
self.exchange = ExchangeInterface()
# Once exchange is created, register exit handler that will always cancel orders
# on any error.
atexit.register(self.exit)
signal.signal(signal.SIGTERM, self.exit)
self.logger.info("Using symbol %s." % self.exchange.symbol)
self.logger.info("Order Manager initializing, connecting to BitMEX. Live run: executing real trades.")
self.start_time = datetime.now()
self.instrument = self.exchange.get_instrument()
self.starting_qty = self.exchange.get_delta()
self.running_qty = self.starting_qty
self.reset()
def reset(self):
self.exchange.cancel_all_orders()
self.sanity_check()
self.print_status()
# Create orders and converge.
self.place_orders()
def print_status(self):
"""Print the current MM status."""
raise NotImplementedError("This method has not been implemented.")
def get_ticker(self):
ticker = self.exchange.get_ticker()
# Set up our buy & sell positions
self.start_position_buy = ticker["buy"]
self.start_position_sell = ticker["sell"]
return ticker
def get_price_offset(self, index):
"""Given an index (1, -1) return the price for that side of the book.
-1 is a buy, 1 is a sell."""
# Offset mode: We define a naive quoting and execution method which is basically try to chase the best bids.
start_position = self.start_position_buy if index < 0 else self.start_position_sell
return math.toNearest(start_position, self.instrument['tickSize'])
###
# Orders
###
def place_orders(self):
"""Create order items for use in convergence."""
raise NotImplementedError("This method has not been implemented.")
def prepare_order(self, index):
"""Create an order object."""
raise NotImplementedError("This method has not been implemented.")
def converge_orders(self, buy_orders, sell_orders):
"""Converge the orders we currently have in the book with what we want to be in the book.
This involves amending any open orders and creating new ones if any have filled completely.
We start from the closest orders outward."""
tickLog = self.exchange.get_instrument()['tickLog']
to_amend = []
to_create = []
to_cancel = []
buys_matched = 0
sells_matched = 0
existing_orders = self.exchange.get_orders()
# Check all existing orders and match them up with what we want to place.
# If there's an open one, we might be able to amend it to fit what we want.
for order in existing_orders:
try:
if order['side'] == 'Buy':
desired_order = buy_orders[buys_matched]
buys_matched += 1
else:
desired_order = sell_orders[sells_matched]
sells_matched += 1
# Found an existing order. Do we need to amend it?
if desired_order['orderQty'] != order['orderQty'] or desired_order['price'] != order['price']:
to_amend.append({'orderID': order['orderID'], 'orderQty': desired_order['orderQty'],
'price': desired_order['price'], 'side': order['side']})
except IndexError:
# Will throw if there isn't a desired order to match. In that case, cancel it.
to_cancel.append(order)
while buys_matched < len(buy_orders):
to_create.append(buy_orders[buys_matched])
buys_matched += 1
while sells_matched < len(sell_orders):
to_create.append(sell_orders[sells_matched])
sells_matched += 1
if len(to_amend) > 0:
for amended_order in reversed(to_amend):
reference_order = [o for o in existing_orders if o['orderID'] == amended_order['orderID']][0]
self.logger.info("Amending %4s: %d @ %.*f to %d @ %.*f (%+.*f)" % (
amended_order['side'],
reference_order['leavesQty'], tickLog, reference_order['price'],
(amended_order['orderQty'] - reference_order['cumQty']), tickLog, amended_order['price'],
tickLog, (amended_order['price'] - reference_order['price'])
))
# This can fail if an order has closed in the time we were processing.
# The API will send us `invalid ordStatus`, which means that the order's status (Filled/Canceled)
# made it not amendable.
# If that happens, we need to catch it and re-tick.
try:
self.exchange.amend_bulk_orders(to_amend)
except requests.exceptions.HTTPError as e:
errorObj = e.response.json()
if errorObj['error']['message'] == 'Invalid ordStatus':
self.logger.warn("Amending failed. Waiting for order data to converge and retrying.")
sleep(0.5)
return self.place_orders()
else:
self.logger.error("Unknown error on amend: %s." % errorObj)
if len(to_create) > 0:
self.logger.info("Creating %d orders:" % (len(to_create)))
for order in reversed(to_create):
self.logger.info("%4s %d @ %.*f" % (order['side'], order['orderQty'], tickLog, order['price']))
self.exchange.create_bulk_orders(to_create)
# Could happen if we exceed a delta limit
if len(to_cancel) > 0:
self.logger.info("Canceling %d orders:" % (len(to_cancel)))
for order in reversed(to_cancel):
self.logger.info("%4s %d @ %.*f" % (order['side'], order['leavesQty'], tickLog, order['price']))
self.exchange.cancel_bulk_orders(to_cancel)
###
# Sanity
##
def sanity_check(self):
"""Perform checks before placing orders."""
# Check if OB is empty - if so, can't quote.
self.exchange.check_if_orderbook_empty()
# Ensure market is still open.
self.exchange.check_market_open()
# Get ticker, which sets price offsets and prints some debugging info.
ticker = self.get_ticker()
# Exchange websockets stability check:
if not self.check_exchange_state_stability():
self.logger.warning("Sanity exchange websockets stability check failed, exchange data is no longer reliable. Restarting...")
self.restart()
# Sanity check:
if self.get_price_offset(-1) >= ticker["sell"] or self.get_price_offset(1) <= ticker["buy"]:
self.logger.error("Buy: %s, Sell: %s" % (self.start_position_buy, self.start_position_sell))
self.logger.error("First buy position: %s\nBitMEX Best Ask: %s\nFirst sell position: %s\nBitMEX Best Bid: %s" %
(self.get_price_offset(-1), ticker["sell"], self.get_price_offset(1), ticker["buy"]))
self.logger.error("Sanity check failed, exchange data is inconsistent")
self.restart()
###
# Helpers
###
def determine_contracts_amt(self, satoshi_amt, price):
        '''This method calculates the number of contracts for a given satoshi amount and contract price. Note: the minimum returned value is 1'''
return max(1, int((satoshi_amt/constants.XBt_TO_XBT) * price))
###
# Running
###
def check_file_change(self):
"""Restart if any files we're watching have changed."""
for f, mtime in watched_files_mtimes:
if getmtime(f) > mtime:
self.restart()
def check_connection(self):
"""Ensure the WS connections are still open."""
return self.exchange.is_open()
def check_exchange_state_stability(self):
"""Ensure the WS is still stable for use."""
return self.exchange.is_stable()
def exit(self):
raise NotImplementedError("This method has not been implemented.")
def run_loop(self):
raise NotImplementedError("This method has not been implemented.")
def restart(self):
raise NotImplementedError("This method has not been implemented.")
#
# Helpers
#
def XBt_to_XBT(XBt):
return float(XBt) / constants.XBt_TO_XBT
def cost(instrument, quantity, price):
mult = instrument["multiplier"]
P = mult * price if mult >= 0 else mult / price
return abs(quantity * P)
def margin(instrument, quantity, price):
return cost(instrument, quantity, price) * instrument["initMargin"]
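# Hedged sketch (an assumption, not part of the original module): a minimal
# OrderManager subclass showing which hooks a concrete strategy must fill in.
# The fixed 100-contract quantity is a placeholder; get_price_offset, the
# index convention of prepare_order (-1 is a buy, 1 is a sell) and
# converge_orders come from the class above.
class ExampleOrderManager(OrderManager):
    def print_status(self):
        self.logger.info("Running qty: %d contracts" % self.running_qty)
    def place_orders(self):
        # One quote on each side of the book, converged against open orders.
        buy_orders = [self.prepare_order(-1)]
        sell_orders = [self.prepare_order(1)]
        return self.converge_orders(buy_orders, sell_orders)
    def prepare_order(self, index):
        return {'price': self.get_price_offset(index),
                'orderQty': 100,
                'side': "Buy" if index < 0 else "Sell"}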
| 39.408416
| 139
| 0.606746
| 15,062
| 0.946046
| 0
| 0
| 0
| 0
| 0
| 0
| 4,481
| 0.281452
|
657cf1129b53d3a0b63a22aabb558fd5bc616640
| 179
|
py
|
Python
|
pythonfiles/scoring.py
|
amrut-prabhu/loan-default-prediction
|
2e0a91529a71c69e93d7b30decefc59f2627406f
|
[
"MIT"
] | 2
|
2020-05-06T15:11:56.000Z
|
2020-05-24T13:51:55.000Z
|
pythonfiles/scoring.py
|
amrut-prabhu/loan-default-prediction
|
2e0a91529a71c69e93d7b30decefc59f2627406f
|
[
"MIT"
] | null | null | null |
pythonfiles/scoring.py
|
amrut-prabhu/loan-default-prediction
|
2e0a91529a71c69e93d7b30decefc59f2627406f
|
[
"MIT"
] | 2
|
2018-09-23T07:09:51.000Z
|
2021-12-16T17:58:14.000Z
|
import numpy as np
def import_accuracy(y_test, predictions):
errors = abs(predictions - y_test)
mape = 100 * (errors / y_test)
accuracy = 100 - np.mean(mape)
return accuracy
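# Hedged worked example (an assumption, not part of the original file): with
# predictions off by 10% and 5%, the mean absolute percentage error is 7.5%,
# so the reported accuracy is 92.5.
if __name__ == "__main__":
    y_test = np.array([100.0, 200.0])
    predictions = np.array([110.0, 190.0])
    print(import_accuracy(y_test, predictions))  # 92.5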
| 22.375
| 41
| 0.726257
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
657d1df2ec7237f7821e8629ba8c0b4d674b5456
| 2,406
|
py
|
Python
|
app/core/tests/test_admin.py
|
ido777/newish
|
298a3d5babf411ba1eb777101eb6e8f70b9e495f
|
[
"MIT"
] | null | null | null |
app/core/tests/test_admin.py
|
ido777/newish
|
298a3d5babf411ba1eb777101eb6e8f70b9e495f
|
[
"MIT"
] | null | null | null |
app/core/tests/test_admin.py
|
ido777/newish
|
298a3d5babf411ba1eb777101eb6e8f70b9e495f
|
[
"MIT"
] | null | null | null |
import pytest
from django.urls import reverse
@pytest.mark.skip(reason="WIP moving to pytest tests")
def test_with_authenticated_client(client, django_user_model):
email = 'admin@somewhere.com'
password = 'password123'
admin_user = django_user_model.objects.create_superuser(
email, password)
client.force_login(user=admin_user)
user = django_user_model.objects.create_user('user@somewhere.com', password='password123',
name='Test user full name')
url = reverse('admin:core_user_changelist')
res = client.get(url)
assert user.name in res
assert user.email in res
def test_user_page_change(client, django_user_model):
"""Test that the user edit page works"""
email = 'admin@somewhere.com'
password = 'password123'
admin_user = django_user_model.objects.create_superuser(
email, password)
client.force_login(user=admin_user)
user = django_user_model.objects.create_user('user@somewhere.com', password='password123',
name='Test user full name')
url = reverse('admin:core_user_change', args=[user.id])
res = client.get(url)
assert res.status_code == 200
def test_create_user_page(client, django_user_model):
"""Test that the create user page works"""
email = 'admin@somewhere.com'
password = 'password123'
admin_user = django_user_model.objects.create_superuser(
email, password)
client.force_login(user=admin_user)
url = reverse('admin:core_user_add')
res = client.get(url)
assert res.status_code == 200
'''
@pytest.mark.django_db
def test_user_create():
User.objects.create_user('user@somewhere.com', password='password123', name='Test user full name')
assert User.objects.count() == 1
@pytest.mark.parametrize(
'admin, user, client',
get_user_model().objects.create_superuser(
'admin@somewhere.com', password='password123'),
get_user_model().objects.create_user(
'user@somewhere.com', password='password123', name='Test user full name'),
Client()
)
@pytest.mark.db
def test_users_listed(admin, user, client):
"""Test that users are listed on the user page """
url = reverse('admin:core_user_changelist')
res = client.get(url)
assert user.name in res
assert user.email in res
'''
| 31.246753
| 102
| 0.676226
| 0
| 0
| 0
| 0
| 611
| 0.253948
| 0
| 0
| 1,158
| 0.481297
|
657f66cb6267b45323c6fdaa161920c2b665fce3
| 24,375
|
py
|
Python
|
XOconv/pycgtypes/mat4.py
|
jsburg/xdsme
|
3fc9ed185ab78e1a42306edf24e681981eacd221
|
[
"BSD-3-Clause"
] | 16
|
2016-05-20T11:19:40.000Z
|
2021-01-01T19:44:23.000Z
|
XOconv/pycgtypes/mat4.py
|
jsburg/xdsme
|
3fc9ed185ab78e1a42306edf24e681981eacd221
|
[
"BSD-3-Clause"
] | 11
|
2016-09-09T15:00:15.000Z
|
2021-05-07T15:02:10.000Z
|
XOconv/pycgtypes/mat4.py
|
jsburg/xdsme
|
3fc9ed185ab78e1a42306edf24e681981eacd221
|
[
"BSD-3-Clause"
] | 9
|
2016-12-15T16:00:06.000Z
|
2021-09-10T08:34:14.000Z
|
######################################################################
# mat4 - Matrix class (4x4 matrix)
#
# Copyright (C) 2002, Matthias Baas (baas@ira.uka.de)
#
# You may distribute under the terms of the BSD license, as
# specified in the file license.txt.
####################################################################
import types, math, copy
from vec3 import vec3 as _vec3
from vec4 import vec4 as _vec4
from mat3 import mat3 as _mat3
# [ 0 1 2 3 ]
# [ 4 5 6 7 ]
# [ 8 9 10 11 ]
# [ 12 13 14 15 ]
# mat4
class mat4:
"""Matrix class (4x4).
This class represents a 4x4 matrix that can be used to store
affine transformations.
"""
def __init__(self, *args):
"Constructor"
# No arguments
if len(args)==0:
self.mlist = 16*[0.0]
# 1 argument (list, scalar or mat4)
elif len(args)==1:
T = type(args[0])
if T==types.FloatType or T==types.IntType or T==types.LongType:
self.mlist = [args[0],0.0,0.0,0.0,
0.0,args[0],0.0,0.0,
0.0,0.0,args[0],0.0,
0.0,0.0,0.0,args[0]]
# mat4
elif isinstance(args[0], mat4):
self.mlist = copy.copy(args[0].mlist)
# String
elif T==types.StringType:
s=args[0].replace(","," ").replace(" "," ").strip().split(" ")
self.mlist=map(lambda x: float(x), s)
else:
self.mlist = list(args[0])
# 4 arguments (sequences)
elif len(args)==4:
a,b,c,d=args
self.mlist = [a[0], b[0], c[0], d[0],
a[1], b[1], c[1], d[1],
a[2], b[2], c[2], d[2],
a[3], b[3], c[3], d[3]]
# 16 arguments
elif len(args)==16:
self.mlist = list(args)
else:
raise TypeError,"mat4() arg can't be converted to mat4"
# Check if there are really 16 elements in the list
if len(self.mlist)!=16:
raise TypeError, "mat4(): Wrong number of matrix elements ("+`len(self.mlist)`+" instead of 16)"
def __repr__(self):
return 'mat4('+`self.mlist`[1:-1]+')'
def __str__(self):
fmt="%9.4f"
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
return ('['+fmt%m11+', '+fmt%m12+', '+fmt%m13+', '+fmt%m14+']\n'+
'['+fmt%m21+', '+fmt%m22+', '+fmt%m23+', '+fmt%m24+']\n'+
'['+fmt%m31+', '+fmt%m32+', '+fmt%m33+', '+fmt%m34+']\n'+
'['+fmt%m41+', '+fmt%m42+', '+fmt%m43+', '+fmt%m44+']')
def __eq__(self, other):
"""== operator"""
if isinstance(other, mat4):
return self.mlist==other.mlist
else:
return 0
def __ne__(self, other):
"""!= operator"""
if isinstance(other, mat4):
return self.mlist!=other.mlist
else:
return 1
def __add__(self, other):
"""Matrix addition.
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print M+M
[ 2.0000, 4.0000, 6.0000, 8.0000]
[ 10.0000, 12.0000, 14.0000, 16.0000]
[ 18.0000, 20.0000, 22.0000, 24.0000]
[ 26.0000, 28.0000, 30.0000, 32.0000]
"""
if isinstance(other, mat4):
return mat4(map(lambda x,y: x+y, self.mlist, other.mlist))
else:
raise TypeError, "unsupported operand type for +"
def __sub__(self, other):
"""Matrix subtraction.
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print M-M
[ 0.0000, 0.0000, 0.0000, 0.0000]
[ 0.0000, 0.0000, 0.0000, 0.0000]
[ 0.0000, 0.0000, 0.0000, 0.0000]
[ 0.0000, 0.0000, 0.0000, 0.0000]
"""
if isinstance(other, mat4):
return mat4(map(lambda x,y: x-y, self.mlist, other.mlist))
else:
raise TypeError, "unsupported operand type for -"
def __mul__(self, other):
"""Multiplication.
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print M*2.0
[ 2.0000, 4.0000, 6.0000, 8.0000]
[ 10.0000, 12.0000, 14.0000, 16.0000]
[ 18.0000, 20.0000, 22.0000, 24.0000]
[ 26.0000, 28.0000, 30.0000, 32.0000]
>>> print 2.0*M
[ 2.0000, 4.0000, 6.0000, 8.0000]
[ 10.0000, 12.0000, 14.0000, 16.0000]
[ 18.0000, 20.0000, 22.0000, 24.0000]
[ 26.0000, 28.0000, 30.0000, 32.0000]
>>> print M*M
[ 90.0000, 100.0000, 110.0000, 120.0000]
[ 202.0000, 228.0000, 254.0000, 280.0000]
[ 314.0000, 356.0000, 398.0000, 440.0000]
[ 426.0000, 484.0000, 542.0000, 600.0000]
>>> print M*_vec3(1,2,3)
(0.1765, 0.4510, 0.7255)
>>> print _vec3(1,2,3)*M
(0.7083, 0.8056, 0.9028)
"""
T = type(other)
# mat4*scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
return mat4(map(lambda x,other=other: x*other, self.mlist))
# mat4*vec3
if isinstance(other, _vec3):
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
w = float(m41*other.x + m42*other.y + m43*other.z + m44)
return _vec3(m11*other.x + m12*other.y + m13*other.z + m14,
m21*other.x + m22*other.y + m23*other.z + m24,
m31*other.x + m32*other.y + m33*other.z + m34)/w
# mat4*vec4
if isinstance(other, _vec4):
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
return _vec4(m11*other.x + m12*other.y + m13*other.z + m14*other.w,
m21*other.x + m22*other.y + m23*other.z + m24*other.w,
m31*other.x + m32*other.y + m33*other.z + m34*other.w,
m41*other.x + m42*other.y + m43*other.z + m44*other.w)
# mat4*mat4
if isinstance(other, mat4):
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
n11,n12,n13,n14,n21,n22,n23,n24,n31,n32,n33,n34,n41,n42,n43,n44 = other.mlist
return mat4( m11*n11+m12*n21+m13*n31+m14*n41,
m11*n12+m12*n22+m13*n32+m14*n42,
m11*n13+m12*n23+m13*n33+m14*n43,
m11*n14+m12*n24+m13*n34+m14*n44,
m21*n11+m22*n21+m23*n31+m24*n41,
m21*n12+m22*n22+m23*n32+m24*n42,
m21*n13+m22*n23+m23*n33+m24*n43,
m21*n14+m22*n24+m23*n34+m24*n44,
m31*n11+m32*n21+m33*n31+m34*n41,
m31*n12+m32*n22+m33*n32+m34*n42,
m31*n13+m32*n23+m33*n33+m34*n43,
m31*n14+m32*n24+m33*n34+m34*n44,
m41*n11+m42*n21+m43*n31+m44*n41,
m41*n12+m42*n22+m43*n32+m44*n42,
m41*n13+m42*n23+m43*n33+m44*n43,
m41*n14+m42*n24+m43*n34+m44*n44)
# unsupported
else:
raise TypeError, "unsupported operand type for *"
def __rmul__(self, other):
T = type(other)
# scalar*mat4
if T==types.FloatType or T==types.IntType or T==types.LongType:
return mat4(map(lambda x,other=other: other*x, self.mlist))
# vec4*mat4
if isinstance(other, _vec4):
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
return _vec4(other.x*m11 + other.y*m21 + other.z*m31 + other.w*m41,
other.x*m12 + other.y*m22 + other.z*m32 + other.w*m42,
other.x*m13 + other.y*m23 + other.z*m33 + other.w*m43,
other.x*m14 + other.y*m24 + other.z*m34 + other.w*m44)
# vec3*mat4
if isinstance(other, _vec3):
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
w = float(other.x*m14 + other.y*m24 + other.z*m34 + m44)
return _vec3(other.x*m11 + other.y*m21 + other.z*m31 + m41,
other.x*m12 + other.y*m22 + other.z*m32 + m42,
other.x*m13 + other.y*m23 + other.z*m33 + m43)/w
# mat4*mat4
if isinstance(other, mat4):
return self.__mul__(other)
# unsupported
else:
raise TypeError, "unsupported operand type for *"
def __div__(self, other):
"""Division
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print M/2.0
[ 0.5000, 1.0000, 1.5000, 2.0000]
[ 2.5000, 3.0000, 3.5000, 4.0000]
[ 4.5000, 5.0000, 5.5000, 6.0000]
[ 6.5000, 7.0000, 7.5000, 8.0000]
"""
T = type(other)
# mat4/scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
return mat4(map(lambda x,other=other: x/other, self.mlist))
# unsupported
else:
raise TypeError, "unsupported operand type for /"
def __mod__(self, other):
"""Modulo.
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print M%5.0
[ 1.0000, 2.0000, 3.0000, 4.0000]
[ 0.0000, 1.0000, 2.0000, 3.0000]
[ 4.0000, 0.0000, 1.0000, 2.0000]
[ 3.0000, 4.0000, 0.0000, 1.0000]
"""
T = type(other)
# mat4%scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
return mat4(map(lambda x,other=other: x%other, self.mlist))
# unsupported
else:
raise TypeError, "unsupported operand type for %"
def __neg__(self):
"""Negation.
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print -M
[ -1.0000, -2.0000, -3.0000, -4.0000]
[ -5.0000, -6.0000, -7.0000, -8.0000]
[ -9.0000, -10.0000, -11.0000, -12.0000]
[ -13.0000, -14.0000, -15.0000, -16.0000]
"""
return mat4(map(lambda x: -x, self.mlist))
def __pos__(self):
"""
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print +M
[ 1.0000, 2.0000, 3.0000, 4.0000]
[ 5.0000, 6.0000, 7.0000, 8.0000]
[ 9.0000, 10.0000, 11.0000, 12.0000]
[ 13.0000, 14.0000, 15.0000, 16.0000]
"""
return mat4(map(lambda x: +x, self.mlist))
def __len__(self):
return 4
def __getitem__(self, key):
if type(key)==types.IntType:
if key<0 or key>3:
raise IndexError,"index out of range"
m=self.mlist
if key==0: return [m[0],m[4],m[8],m[12]]
elif key==1: return [m[1],m[5],m[9],m[13]]
elif key==2: return [m[2],m[6],m[10],m[14]]
elif key==3: return [m[3],m[7],m[11],m[15]]
elif type(key)==types.TupleType:
i,j=key
if i<0 or i>3 or j<0 or j>3:
raise IndexError, "index out of range"
return self.mlist[i*4+j]
else:
raise TypeError,"index must be integer or 2-tuple"
def __setitem__(self, key, value):
if type(key)==types.IntType:
if key<0 or key>3:
raise IndexError,"index out of range"
m=self.mlist
if key==0: m[0],m[4],m[8],m[12]=value
elif key==1: m[1],m[5],m[9],m[13]=value
elif key==2: m[2],m[6],m[10],m[14]=value
elif key==3: m[3],m[7],m[11],m[15]=value
elif type(key)==types.TupleType:
i,j=key
if i<0 or i>3 or j<0 or j>3:
raise IndexError, "index out of range"
self.mlist[i*4+j] = value
else:
raise TypeError,"index must be integer or 2-tuple"
def getRow(self, idx):
"""Return row (as vec4)."""
m=self.mlist
if idx==0: return _vec4(m[0], m[1], m[2], m[3])
elif idx==1: return _vec4(m[4], m[5], m[6], m[7])
elif idx==2: return _vec4(m[8], m[9], m[10], m[11])
elif idx==3: return _vec4(m[12], m[13], m[14], m[15])
else:
raise IndexError,"index out of range"
def setRow(self, idx, value):
"""Set row."""
m=self.mlist
if idx==0: m[0],m[1],m[2],m[3] = value
elif idx==1: m[4],m[5],m[6],m[7] = value
elif idx==2: m[8],m[9],m[10],m[11] = value
elif idx==3: m[12],m[13],m[14],m[15] = value
else:
raise IndexError,"index out of range"
def getColumn(self, idx):
"""Return column (as vec4)."""
m=self.mlist
if idx==0: return _vec4(m[0], m[4], m[8], m[12])
elif idx==1: return _vec4(m[1], m[5], m[9], m[13])
elif idx==2: return _vec4(m[2], m[6], m[10], m[14])
elif idx==3: return _vec4(m[3], m[7], m[11], m[15])
else:
raise IndexError,"index out of range"
def setColumn(self, idx, value):
"""Set column."""
m=self.mlist
if idx==0: m[0],m[4],m[8],m[12] = value
elif idx==1: m[1],m[5],m[9],m[13] = value
elif idx==2: m[2],m[6],m[10],m[14] = value
elif idx==3: m[3],m[7],m[11],m[15] = value
else:
raise IndexError,"index out of range"
def toList(self, rowmajor=0):
"""Return a list containing the matrix elements.
By default the list is in column-major order (which can directly be
used in OpenGL or RenderMan). If you set the optional argument
rowmajor to 1, you'll get the list in row-major order.
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print M.toList()
[1, 5, 9, 13, 2, 6, 10, 14, 3, 7, 11, 15, 4, 8, 12, 16]
>>> print M.toList(rowmajor=1)
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
"""
if rowmajor:
return copy.copy(self.mlist)
else:
return self.transpose().mlist
def identity(self):
"""Return identity matrix.
>>> print mat4().identity()
[ 1.0000, 0.0000, 0.0000, 0.0000]
[ 0.0000, 1.0000, 0.0000, 0.0000]
[ 0.0000, 0.0000, 1.0000, 0.0000]
[ 0.0000, 0.0000, 0.0000, 1.0000]
"""
return mat4(1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0)
def transpose(self):
"""Transpose matrix.
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print M.transpose()
[ 1.0000, 5.0000, 9.0000, 13.0000]
[ 2.0000, 6.0000, 10.0000, 14.0000]
[ 3.0000, 7.0000, 11.0000, 15.0000]
[ 4.0000, 8.0000, 12.0000, 16.0000]
"""
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
return mat4(m11,m21,m31,m41,
m12,m22,m32,m42,
m13,m23,m33,m43,
m14,m24,m34,m44)
def determinant(self):
"""Return determinant.
>>> M=mat4(2.0,0,0,0, 0,2.0,0,0, 0,0,2.0,0, 0,0,0,2.0)
>>> print M.determinant()
16.0
"""
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
return m11*m22*m33*m44 \
-m11*m22*m34*m43 \
+m11*m23*m34*m42 \
-m11*m23*m32*m44 \
+m11*m24*m32*m43 \
-m11*m24*m33*m42 \
-m12*m23*m34*m41 \
+m12*m23*m31*m44 \
-m12*m24*m31*m43 \
+m12*m24*m33*m41 \
-m12*m21*m33*m44 \
+m12*m21*m34*m43 \
+m13*m24*m31*m42 \
-m13*m24*m32*m41 \
+m13*m21*m32*m44 \
-m13*m21*m34*m42 \
+m13*m22*m34*m41 \
-m13*m22*m31*m44 \
-m14*m21*m32*m43 \
+m14*m21*m33*m42 \
-m14*m22*m33*m41 \
+m14*m22*m31*m43 \
-m14*m23*m31*m42 \
+m14*m23*m32*m41
def _submat(self, i,j):
M=_mat3()
for k in range(3):
for l in range(3):
t=(k,l)
if k>=i:
t=(k+1,t[1])
if l>=j:
t=(t[0],l+1)
M[k,l] = self[t]
return M
def inverse(self):
"""Return inverse matrix.
>>> M=mat4(0,-2.0,0,0, 2.0,0,0,0, 0,0,2,0, 0,0,0,2)
>>> print M.inverse()
[ 0.0000, 0.5000, 0.0000, 0.0000]
[ -0.5000, 0.0000, 0.0000, 0.0000]
[ 0.0000, 0.0000, 0.5000, 0.0000]
[ 0.0000, 0.0000, 0.0000, 0.5000]
"""
Mi=mat4()
d=self.determinant()
for i in range(4):
for j in range(4):
sign=1-((i+j)%2)*2
m3=self._submat(i,j)
Mi[j,i]=sign*m3.determinant()/d
return Mi
def translation(self, t):
"""Return translation matrix."""
return mat4(1.0, 0.0, 0.0, t.x,
0.0, 1.0, 0.0, t.y,
0.0, 0.0, 1.0, t.z,
0.0, 0.0, 0.0, 1.0)
def scaling(self, s):
"""Return scaling matrix."""
return mat4(s.x, 0.0, 0.0, 0.0,
0.0, s.y, 0.0, 0.0,
0.0, 0.0, s.z, 0.0,
0.0, 0.0, 0.0, 1.0)
def rotation(self, angle, axis):
"""Return rotation matrix.
angle must be given in radians. axis should be of type vec3.
"""
sqr_a = axis.x*axis.x
sqr_b = axis.y*axis.y
sqr_c = axis.z*axis.z
len2 = sqr_a+sqr_b+sqr_c
k2 = math.cos(angle)
k1 = (1.0-k2)/len2
k3 = math.sin(angle)/math.sqrt(len2)
k1ab = k1*axis.x*axis.y
k1ac = k1*axis.x*axis.z
k1bc = k1*axis.y*axis.z
k3a = k3*axis.x
k3b = k3*axis.y
k3c = k3*axis.z
return mat4( k1*sqr_a+k2, k1ab-k3c, k1ac+k3b, 0.0,
k1ab+k3c, k1*sqr_b+k2, k1bc-k3a, 0.0,
k1ac-k3b, k1bc+k3a, k1*sqr_c+k2, 0.0,
0.0, 0.0, 0.0, 1.0)
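    # Usage sketch (illustrative only, assuming the mat4*vec product defined
    # elsewhere in this module): a 90-degree rotation about the z-axis maps
    # the x-axis onto the y-axis, e.g.
    #   R = mat4().rotation(math.pi/2, _vec3(0, 0, 1))
    #   R * _vec3(1, 0, 0)   # -> approximately (0, 1, 0)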
def translate(self, t):
"""Concatenate a translation."""
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
self.mlist[3] = m11*t.x + m12*t.y + m13*t.z + m14
self.mlist[7] = m21*t.x + m22*t.y + m23*t.z + m24
self.mlist[11] = m31*t.x + m32*t.y + m33*t.z + m34
self.mlist[15] = m41*t.x + m42*t.y + m43*t.z + m44
return self
def scale(self, s):
"""Concatenate a scaling."""
self.mlist[0] *= s.x
self.mlist[1] *= s.y
self.mlist[2] *= s.z
self.mlist[4] *= s.x
self.mlist[5] *= s.y
self.mlist[6] *= s.z
self.mlist[8] *= s.x
self.mlist[9] *= s.y
self.mlist[10] *= s.z
self.mlist[12] *= s.x
self.mlist[13] *= s.y
self.mlist[14] *= s.z
return self
def rotate(self, angle, axis):
"""Concatenate a rotation.
angle must be given in radians. axis should be of type vec3.
"""
R=self.rotation(angle, axis)
self.mlist = (self*R).mlist
return self
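    # Usage sketch (illustrative only): translate(), scale() and rotate()
    # modify the matrix in place and return self, so a model transform can be
    # built by chaining the calls, e.g.
    #   M = mat4().identity().translate(_vec3(1, 2, 3)) \
    #              .rotate(math.pi/4, _vec3(0, 0, 1)) \
    #              .scale(_vec3(2, 2, 2))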
def frustum(self, left, right, bottom, top, near, far):
"""equivalent to the OpenGL command glFrustum()"""
return mat4( (2.0*near)/(right-left), 0.0, float(right+left)/(right-left), 0.0,
0.0, (2.0*near)/(top-bottom), float(top+bottom)/(top-bottom), 0.0,
0.0, 0.0, -float(far+near)/(far-near), -(2.0*far*near)/(far-near),
0.0, 0.0, -1.0, 0.0)
def perspective(self, fovy, aspect, near, far):
"""von Mesa ubernommen (glu.c)"""
top = near * math.tan(fovy * math.pi / 360.0)
bottom = -top
left = bottom * aspect
right = top * aspect
return self.frustum(left, right, bottom, top, near, far)
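    # Usage sketch (illustrative only): perspective() wraps frustum() and
    # expects fovy in degrees (it is converted via fovy*pi/360 above), e.g.
    #   proj = mat4().perspective(60.0, 4.0/3.0, 0.1, 100.0)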
def lookAt(self, pos, target, up=_vec3(0,0,1)):
"""Look from pos to target.
The resulting transformation moves the origin to pos and
rotates so that the z-axis points to target. The y-axis is
as close as possible to the up vector.
"""
dir = (target - pos).normalize()
up = up.normalize()
up -= (up * dir) * dir
try:
up = up.normalize()
except:
# We're looking along the up direction, so choose
# an arbitrary direction that is perpendicular to dir
# as new up.
up = dir.ortho()
right = up.cross(dir).normalize()
self.mlist=[right.x, up.x, dir.x, pos.x,
right.y, up.y, dir.y, pos.y,
right.z, up.z, dir.z, pos.z,
0.0, 0.0, 0.0, 1.0]
return self
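    # Usage sketch (illustrative only): lookAt() yields a camera-to-world
    # transform (origin moved to pos, local z-axis aimed at target); a view
    # matrix, if needed, can be obtained by inverting the result, e.g.
    #   cam  = mat4().lookAt(_vec3(0, -10, 5), _vec3(0, 0, 0))
    #   view = cam.inverse()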
def ortho(self):
"""Return a matrix with orthogonal base vectors.
Makes the x-, y- and z-axis orthogonal.
The fourth column and row remain untouched.
"""
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
x = _vec3(m11, m21, m31)
y = _vec3(m12, m22, m32)
z = _vec3(m13, m23, m33)
xl = x.length()
xl*=xl
y = y - ((x*y)/xl)*x
z = z - ((x*z)/xl)*x
yl = y.length()
yl*=yl
z = z - ((y*z)/yl)*y
return mat4( x.x, y.x, z.x, m14,
x.y, y.y, z.y, m24,
x.z, y.z, z.z, m34,
m41, m42, m43, m44)
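    # Note (illustrative only): ortho() performs a Gram-Schmidt step on the
    # first three column vectors without normalizing them; decompose() below
    # relies on this to strip shear before extracting scale and rotation.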
def decompose(self):
"""Decomposes the matrix into a translation, rotation and scaling part.
Returns a tuple (translation, rotation, scaling). The
translation and scaling parts are given as vec3's, the rotation
is still given as a mat4.
"""
dummy = self.ortho()
dummy.setRow(3,_vec4(0.0, 0.0, 0.0, 1.0))
x = dummy.getColumn(0)
y = dummy.getColumn(1)
z = dummy.getColumn(2)
xl = x.length()
yl = y.length()
zl = z.length()
scale = _vec3(xl,yl,zl)
x/=xl
y/=yl
z/=zl
dummy.setColumn(0,x)
dummy.setColumn(1,y)
dummy.setColumn(2,z)
if dummy.determinant()<0.0:
dummy.setColumn(0,-x)
scale.x=-scale.x
return (_vec3(self.mlist[3], self.mlist[7], self.mlist[11]),
dummy,
scale)
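    # Usage sketch (illustrative only): a matrix composed as
    # translation * rotation * scaling can be taken apart again, e.g.
    #   M = mat4().translation(_vec3(1, 2, 3)) \
    #       * mat4().rotation(math.pi/3, _vec3(0, 0, 1)) \
    #       * mat4().scaling(_vec3(2, 2, 2))
    #   t, R, s = M.decompose()   # t ~ (1,2,3), s ~ (2,2,2), R is the pure rotation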
def getMat3(self):
"""Convert to mat3 by discarding 4th row and column.
"""
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
return _mat3(m11,m12,m13,
m21,m22,m23,
m31,m32,m33)
######################################################################
def _test():
import doctest, mat4
failed, total = doctest.testmod(mat4)
print "%d/%d failed" % (failed, total)
if __name__=="__main__":
_test()
| 36.057692 | 109 | 0.454974 | 23,549 | 0.966113 | 0 | 0 | 0 | 0 | 0 | 0 | 7,645 | 0.313641 |