| column | type |
| --- | --- |
| commit | string (40 chars) |
| old_file | string (4–118 chars) |
| new_file | string (4–118 chars) |
| old_contents | string (0–2.94k chars) |
| new_contents | string (1–4.43k chars) |
| subject | string (15–444 chars) |
| message | string (16–3.45k chars) |
| lang | string (1 class) |
| license | string (13 classes) |
| repos | string (5–43.2k chars) |
| prompt | string (17–4.58k chars) |
| response | string (1–4.43k chars) |
| prompt_tagged | string (58–4.62k chars) |
| response_tagged | string (1–4.43k chars) |
| text | string (132–7.29k chars) |
| text_tagged | string (173–7.33k chars) |

---

commit: 31fff75ff499604453cd3cd07ad75496f5e0d222
old_file: android_webview/tools/known_incompatible.py
new_file: android_webview/tools/known_incompatible.py
new_contents:

```python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""List of known-incompatibly-licensed directories for Android WebView.
This is not used by the webview_licenses tool itself; it is effectively a
"cache" of the output of webview_licenses.GetIncompatibleDirectories() for the
subset of repositories that WebView needs.
We store a copy here because GetIncompatibleDirectories() doesn't work properly
after things have been removed from the tree - it can no longer see the
README.chromium files for previously-removed directories, but they may have
newly added files in them. As long as this list is up to date, we can remove the
things listed first, and then just run the tool afterwards to validate that it
was sufficient. If the tool returns any extra directories then the snapshotting
process will stop and this list must be updated.
"""
KNOWN_INCOMPATIBLE = [
'base/third_party/xdg_mime',
'breakpad',
'chrome/installer/mac/third_party/xz',
'chrome/test/data',
'third_party/active_doc',
'third_party/apple_apsl',
'third_party/apple_sample_code',
'third_party/bsdiff',
'third_party/bspatch',
'third_party/sudden_motion_sensor',
'third_party/swiftshader',
'third_party/talloc',
'third_party/webdriver',
'third_party/wtl',
'tools/chrome_remote_control/third_party/websocket-client',
]
```

subject: Add list of files with licenses not wanted in webview.
message:
Android: Add list of files with licenses not wanted in webview.
The script which merges Chromium code into the Android tree needs to
know which directories to remove for licensing reasons; it's much easier
if this is just a static list in the tree which can subsequently be
validated with the webview_licenses tool. The list should change very
rarely.
R=mnaganov@chromium.org
BUG=
Review URL: https://codereview.chromium.org/11040056
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@160369 0039d316-1c4b-4281-b951-d872f2087c98
lang: Python
license: bsd-3-clause
repos:
Jonekee/chromium.src,mogoweb/chromium-crosswalk,anirudhSK/chromium,TheTypoMaster/chromium-crosswalk,mogoweb/chromium-crosswalk,jaruba/chromium.src,nacl-webkit/chrome_deps,zcbenz/cefode-chromium,hujiajie/pa-chromium,PeterWangIntel/chromium-crosswalk,nacl-webkit/chrome_deps,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,littlstar/chromium.src,mogoweb/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,nacl-webkit/chrome_deps,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,Just-D/chromium-1,mogoweb/chromium-crosswalk,zcbenz/cefode-chromium,littlstar/chromium.src,ondra-novak/chromium.src,dushu1203/chromium.src,Chilledheart/chromium,ltilve/chromium,timopulkkinen/BubbleFish,axinging/chromium-crosswalk,junmin-zhu/chromium-rivertrail,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,junmin-zhu/chromium-rivertrail,Jonekee/chromium.src,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,Chilledheart/chromium,ChromiumWebApps/chromium,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,pozdnyakov/chromium-crosswalk,hujiajie/pa-chromium,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,Jonekee/chromium.src,fujunwei/chromium-crosswalk,junmin-zhu/chromium-rivertrail,Chilledheart/chromium,chuan9/chromium-crosswalk,dushu1203/chromium.src,anirudhSK/chromium,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,timopulkkinen/BubbleFish,mohamed--abdel-maksoud/chromium.src,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,junmin-zhu/chromium-rivertrail,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,axinging/chromium-crosswalk,anirudhSK/chromium,hujiajie/pa-chromium,M4sse/chromium.src,timopulkkinen/BubbleFish,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,nacl-webkit/chrome_deps,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,ondra-novak/chromium.src,Chilledheart/chromium,timopulkkinen/BubbleFish,M4sse/chromium.src,hujiajie/pa-chromium,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,jaruba/chromium.src,axinging/chromium-crosswalk,anirudhSK/chromium,hgl888/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,fujunwei/chromium-crosswalk,ondra-novak/chromium.src,ChromiumWebApps/chromium,jaruba/chromium.src,timopulkkinen/BubbleFish,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,patrickm/chromium.src,fujunwei/chromium-crosswalk,zcbenz/cefode-chromium,zcbenz/cefode-chromium,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ondra-novak/chro
mium.src,Fireblend/chromium-crosswalk,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,littlstar/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,ltilve/chromium,chuan9/chromium-crosswalk,dednal/chromium.src,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,timopulkkinen/BubbleFish,krieger-od/nwjs_chromium.src,pozdnyakov/chromium-crosswalk,Chilledheart/chromium,dednal/chromium.src,dednal/chromium.src,zcbenz/cefode-chromium,pozdnyakov/chromium-crosswalk,Fireblend/chromium-crosswalk,markYoungH/chromium.src,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,M4sse/chromium.src,Just-D/chromium-1,dushu1203/chromium.src,Jonekee/chromium.src,timopulkkinen/BubbleFish,junmin-zhu/chromium-rivertrail,dednal/chromium.src,Just-D/chromium-1,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,nacl-webkit/chrome_deps,ChromiumWebApps/chromium,ChromiumWebApps/chromium,junmin-zhu/chromium-rivertrail,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,junmin-zhu/chromium-rivertrail,junmin-zhu/chromium-rivertrail,fujunwei/chromium-crosswalk,junmin-zhu/chromium-rivertrail,patrickm/chromium.src,axinging/chromium-crosswalk,patrickm/chromium.src,hujiajie/pa-chromium,nacl-webkit/chrome_deps,zcbenz/cefode-chromium,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,ltilve/chromium,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,Jonekee/chromium.src,ltilve/chromium,Chilledheart/chromium,Chilledheart/chromium,markYoungH/chromium.src,dushu1203/chromium.src,nacl-webkit/chrome_deps,Chilledheart/chromium,anirudhSK/chromium,ChromiumWebApps/chromium,ChromiumWebApps/chromium,dushu1203/chromium.src,nacl-webkit/chrome_deps,dednal/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,Fireblend/chromium-crosswalk,jaruba/chromium.src,jaruba/chromium.src,pozdnyakov/chromium-crosswalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,junmin-zhu/chromium-rivertrail,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,mogoweb/chromium-crosswalk,Pluto-tv/chromium-crosswalk,zcbenz/cefode-chromium,jaruba/chromium.src,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,pozdnyakov/chromium-crosswalk,hgl888/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,littlstar/chromium.src,axinging/chromium-crosswalk,jaruba/chromium.src,M4sse/chromium.src,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,chuan9/chromium-crosswalk,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,patrickm/chromium.src,zcbenz/cefode-chromium,M4sse/chromium.src,ltilve/chromium,ondra-novak/chromium.src,dednal/chromium.src,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,zcbenz/cefode-chromium,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,littlstar/chromium.src,ChromiumWebApps/chromium,patrickm/chromium.src,chuan9/chromium-crosswalk,Just-D/chromium-1,anirudhSK/chromium,nacl-webkit/chrome_deps,dednal/chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,M4sse/chromium.src,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,ChromiumWebApps/chromium,h
ujiajie/pa-chromium,ChromiumWebApps/chromium,jaruba/chromium.src,pozdnyakov/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ondra-novak/chromium.src,ondra-novak/chromium.src,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,littlstar/chromium.src,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ondra-novak/chromium.src,jaruba/chromium.src,hujiajie/pa-chromium,ltilve/chromium,mogoweb/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,hujiajie/pa-chromium,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,junmin-zhu/chromium-rivertrail,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,pozdnyakov/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,timopulkkinen/BubbleFish,timopulkkinen/BubbleFish,krieger-od/nwjs_chromium.src,timopulkkinen/BubbleFish,ltilve/chromium,nacl-webkit/chrome_deps,nacl-webkit/chrome_deps,anirudhSK/chromium,ltilve/chromium,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,markYoungH/chromium.src
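
A minimal sketch of how the snapshotting step described in this record's docstring might consume KNOWN_INCOMPATIBLE: delete the listed directories first, then re-run the licensing tool to confirm the static list was sufficient. The import path and the removal helper are assumptions for illustration; the record only establishes that the list exists and is validated with webview_licenses afterwards.

```python
import os
import shutil

# Assumption: the module above is importable as known_incompatible.
from known_incompatible import KNOWN_INCOMPATIBLE

def prune_incompatible(checkout_root):
    # Remove every known-incompatibly-licensed directory before snapshotting.
    for rel_path in KNOWN_INCOMPATIBLE:
        path = os.path.join(checkout_root, rel_path)
        if os.path.isdir(path):
            shutil.rmtree(path)

# The snapshot process would then re-run the real check (the docstring names
# webview_licenses.GetIncompatibleDirectories()) and abort if it reports
# anything beyond this static list.
```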

---

commit: e58f75c0c0196d2c3cb9df3e24978142ac542933
old_file: bot/action/extra/messages/stored_message.py
new_file: bot/action/extra/messages/stored_message.py
new_contents:

```python
class StoredMessageMapper:
def from_api(self, message):
data = message.data.copy()
self.__replace_with_id_if_present(data, "from")
self.__replace_with_id_if_present(data, "forward_from")
self.__replace_with_id_if_present(data, "reply_to_message", "message_id")
self.__delete_if_present(data, "chat")
self.__delete_if_present(data, "message_id")
self.__delete_if_present(data, "entities")
self.__replace_list_with_item_with_biggest(data, "photo", "height")
self.__delete_if_present(data.get("sticker"), "thumb")
self.__delete_if_present(data.get("document"), "thumb")
self.__delete_if_present(data.get("video_note"), "thumb")
self.__delete_if_present(data.get("video"), "thumb")
return data
def __replace_with_id_if_present(self, dict, key, id_key="id"):
if self.__is_present(dict, key):
dict[key] = dict[key][id_key]
@staticmethod
def __delete_if_present(dict, key):
if dict is not None:
dict.pop(key, None)
def __replace_list_with_item_with_biggest(self, dict, key, attr):
if self.__is_present(dict, key):
biggest = None
for item in dict[key]:
if biggest is None or item[attr] >= biggest[attr]:
biggest = item
if biggest is not None:
dict[key] = biggest
@staticmethod
def __is_present(dict, key):
return dict.get(key) is not None
```

subject: Create StoredMessageMapper from MessageStorageHandler code
message: Create StoredMessageMapper from MessageStorageHandler code
lang: Python
license: agpl-3.0
repos: alvarogzp/telegram-bot,alvarogzp/telegram-bot
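
A quick illustration of what the mapper above does to a Telegram-style payload; FakeMessage and the sample dict are invented here for demonstration, and only StoredMessageMapper comes from the record itself.

```python
class FakeMessage:
    # Minimal stand-in for the bot's message object: from_api only reads .data.
    def __init__(self, data):
        self.data = data

payload = {
    "message_id": 42,
    "chat": {"id": 1, "type": "group"},
    "from": {"id": 1001, "first_name": "Alice"},
    "text": "hello",
    "photo": [{"file_id": "a", "height": 90}, {"file_id": "b", "height": 320}],
}

mapper = StoredMessageMapper()
print(mapper.from_api(FakeMessage(payload)))
# -> {'from': 1001, 'text': 'hello', 'photo': {'file_id': 'b', 'height': 320}}
# "chat" and "message_id" are dropped, "from" collapses to its id, and
# "photo" keeps only the item with the biggest height.
```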

---

commit: 7f9c94f99dcaed8c97d6288d9fbbc483a963c2d7
old_file: stdnum/at/businessid.py
new_file: stdnum/at/businessid.py
new_contents:

```python
# businessid.py - functions for handling Austrian company register numbers
#
# Copyright (C) 2015 Holvi Payment Services Oy
# Copyright (C) 2012, 2013 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Austrian Company Register Numbers.
The Austrian company register number consists of digits followed by a single
letter, e.g. "122119m". Sometimes it is presented with preceding "FN", e.g.
"FN 122119m".
>>> validate('FN 122119m')
'122119m'
>>> validate('122119m')
'122119m'
>>> validate('m123123')
Traceback (most recent call last):
...
InvalidFormat: ...
>>> validate('abc')
Traceback (most recent call last):
...
InvalidFormat: ...
"""
from stdnum.exceptions import *
from stdnum.util import clean
def compact(number):
"""Convert the number to the minimal representation. This strips the
number of any valid separators and removes surrounding whitespace.
Preceding "FN" is also removed."""
number = clean(number, ' -./').strip()
if number.upper().startswith('FN'):
number = number[2:]
return number
def validate(number):
"""Checks to see if the number provided is a valid company register
number. This only checks the formatting."""
number = compact(number)
if not number[-1:].isalpha() or not number[:-1].isdigit():
raise InvalidFormat()
return number
def is_valid(number):
"""Checks to see if the number provided is a valid company register
number. This only checks the formatting."""
try:
return bool(validate(number))
except ValidationError:
return False
```

subject: Add company register number validation for Austria
message: Add company register number validation for Austria
lang: Python
license: lgpl-2.1
repos: arthurdejong/python-stdnum,t0mk/python-stdnum,dchoruzy/python-stdnum,arthurdejong/python-stdnum,holvi/python-stdnum,arthurdejong/python-stdnum,holvi/python-stdnum,holvi/python-stdnum
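
The doctests in the module already pin down the behaviour; for completeness, the same functions used imperatively, with values taken from the docstring:

```python
from stdnum.at import businessid

businessid.compact('FN 122119m')   # -> '122119m'
businessid.validate('122119m')     # -> '122119m'
businessid.is_valid('FN 122119m')  # -> True
businessid.is_valid('m123123')     # -> False; the single letter must come last
```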

---

commit: 2f308fbefad5f5cee8b6e160e9a89fda7f4e1ba9
old_file: tests/test_renderers.py
new_file: tests/test_renderers.py
new_contents:

```python
from flask import Flask
from flask_webapi import WebAPI, APIView, renderer, route
from flask_webapi.renderers import PickleRenderer
from unittest import TestCase
class TestRenderer(TestCase):
def setUp(self):
self.app = Flask(__name__)
self.api = WebAPI(self.app)
self.api.load_module('tests.test_renderers')
self.client = self.app.test_client()
def test_pickle_renderer(self):
response = self.client.post('/add')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.headers['content-type'], 'application/pickle')
class BasicView(APIView):
@route('/add', methods=['POST'])
@renderer(PickleRenderer)
def add(self):
return {}
```

subject: Add unit tests for pickle renderer
message: Add unit tests for pickle renderer
lang: Python
license: mit
repos: viniciuschiele/flask-webapi
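
Presumably PickleRenderer serializes the view's return value with pickle; if so, the test could also assert on the round-tripped body. This extra test method is a suggestion, not part of the commit, and assumes the renderer really does emit a pickle of the returned dict:

```python
import pickle

# Hypothetical addition to the TestRenderer class above.
def test_pickle_renderer_body(self):
    response = self.client.post('/add')
    self.assertEqual(response.status_code, 200)
    # Round-trip the body back to the empty dict returned by BasicView.add.
    self.assertEqual(pickle.loads(response.data), {})
```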

---

commit: 8174893b7226f53f89d7f52cfcb5dd073557da07
old_file: tools/letters_parser.py
new_file: tools/letters_parser.py
new_contents:

```python
#coding=utf-8
import json
FILE_PATH = '/file-path'
FILE_NAME = 'file-name'
def main():
fp = open(FILE_PATH + FILE_NAME + '.txt', 'r')
out = {}
line = fp.readline()
out['law'] = line
out['letters'] = []
count = 0
while True:
line = fp.readline()
if not line:
break
elif line == "\n":
count += 1
if count == 2:
line = fp.readline()
if '無' in line or '款' in line:
if '無' in line:
entry = ""
else:
entry = line
out['letters'].append({'entries':entry, 'contents':[]})
else:
out['letters'][-1]['contents'].append({'title': line, 'subcontent':[]})
count = 0
else:
# to do
# add order and class for the subcontent object
out['letters'][-1]['contents'][-1]['subcontent'].append(line)
    fp.close()  # close the input file before writing the JSON output
    with open(FILE_PATH + FILE_NAME + '.json', 'w') as outfile:
json.dump(out, outfile, ensure_ascii=False)
main()
```

subject: Add the prototype of the parser of letters
message: Add the prototype of the parser of letters
lang: Python
license: mit
repos: LWAlphaMonkey/ArchitectureLaw,LWAlphaMonkey/ArchitectureLaw
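
Tracing the parser's branches, the JSON it emits has roughly this shape; the placeholder values are illustrative, but the structure follows directly from the code:

```python
{
    "law": "<first line of the input file>",
    "letters": [
        {
            # entries is "" when the marker line contains 無, else the line itself
            "entries": "<line containing 款>",
            "contents": [
                {
                    "title": "<line that follows a pair of blank lines>",
                    "subcontent": ["<every other non-blank line>"]
                }
            ]
        }
    ]
}
```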

---

commit: cd5678640e17cca0517c14f7d172b2ca1f10a560
old_file: rarecommends.py
new_file: rarecommends.py
old_contents:

```python
#!/usr/bin/python
# Gets stuff from Resident Advisor's awesome 'RA Recommends' section
# Note: this is just regexing the HTML... so may stop working if page structure changes
# This is not written by, or affiliated with Resident Advisor at all
# 2013 oldhill // MIT license
import urllib2
import re
def getResidentData():
# grab all the data...
web_page = urllib2.urlopen('http://www.residentadvisor.net/reviews.aspx?format=recommend')
all_the_html = web_page.read()
demarcator = 'pb2' # surrounds records of recommended stuff
relevant_block = re.findall(demarcator+'(.*?)'+demarcator, all_the_html, re.DOTALL)[0]
# zeroing in...
relevant_string = re.findall('class=music>'+'(.*?)'+'</a', relevant_block, re.DOTALL)[0]
return relevant_string
def recommendedArtist():
relevant_set = getResidentData()
artist = relevant_set.split('-')[0].strip()
return artist
def recommendedWork():
relevant_set = getResidentData()
work = relevant_set.split('-')[1].strip()
return work
# Use this to test; it should print artist and album/track name on command line
def main():
test_artist = recommendedArtist()
test_work = recommendedWork()
print '\n>>>>>>>>>>>>>>>>>\n'
print test_artist
print test_work
print '\n>>>>>>>>>>>>>>>>>\n'
if __name__ == '__main__':
main()
```

new_contents:

```python
#!/usr/bin/python
# Gets stuff from Resident Advisor's awesome 'RA Recommends' section
# Note: this is just regexing the HTML... so may stop working if page structure changes
# This is not written by, or affiliated with Resident Advisor at all
# 2013 oldhill // MIT license
import urllib2
import re
def getResidentData():
web_page = urllib2.urlopen('http://www.residentadvisor.net/reviews.aspx?format=recommend')
all_the_html = web_page.read()
relevant_block = re.findall('reviewArchive'+'(.*?)'+'</article>', all_the_html, re.DOTALL)[0]
# zeroing in...
relevant_string = re.findall('<h1>'+'(.*?)'+'</h1>', relevant_block, re.DOTALL)[0]
return relevant_string
def recommendedArtist():
relevant_set = getResidentData()
artist = relevant_set.split('-')[0].strip()
return artist
def recommendedWork():
relevant_set = getResidentData()
work = relevant_set.split('-')[1].strip()
return work
# Use this to test; it should print artist and album/track name on command line
def main():
test_artist = recommendedArtist()
test_work = recommendedWork()
print '\n>>>>>>>>>>>>>>>>>\n'
print test_artist
print test_work
print '\n>>>>>>>>>>>>>>>>>\n'
if __name__ == '__main__':
main()
```

subject: Fix Resident Advisor regex to match new page design
message: Fix Resident Advisor regex to match new page design
lang: Python
license: mit
repos: oldhill/ra-recommends
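
As the header comment warns, the scraper breaks whenever the page layout changes, and the bare [0] indexing turns that into an opaque IndexError. A slightly more defensive variant of getResidentData, a sketch using the same regexes as the new version above with explicit failure handling, might look like this:

```python
import re
import urllib2

def getResidentData():
    web_page = urllib2.urlopen('http://www.residentadvisor.net/reviews.aspx?format=recommend')
    all_the_html = web_page.read()
    blocks = re.findall('reviewArchive' + '(.*?)' + '</article>', all_the_html, re.DOTALL)
    if not blocks:
        raise ValueError('page layout changed: no review block found')
    headings = re.findall('<h1>' + '(.*?)' + '</h1>', blocks[0], re.DOTALL)
    if not headings:
        raise ValueError('page layout changed: no <h1> heading found')
    return headings[0]
```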
#!/usr/bin/python
# Gets stuff from Resident Advisor's awesome 'RA Recommends' section
# Note: this is just regexing the HTML... so may stop working if page structure changes
# This is not written by, or affiliated with Resident Advisor at all
# 2013 oldhill // MIT license
import urllib2
import re
def getResidentData():
# grab all the data...
web_page = urllib2.urlopen('http://www.residentadvisor.net/reviews.aspx?format=recommend')
all_the_html = web_page.read()
demarcator = 'pb2' # surrounds records of recommended stuff
relevant_block = re.findall(demarcator+'(.*?)'+demarcator, all_the_html, re.DOTALL)[0]
# zeroing in...
relevant_string = re.findall('class=music>'+'(.*?)'+'</a', relevant_block, re.DOTALL)[0]
return relevant_string
def recommendedArtist():
relevant_set = getResidentData()
artist = relevant_set.split('-')[0].strip()
return artist
def recommendedWork():
relevant_set = getResidentData()
work = relevant_set.split('-')[1].strip()
return work
# Use this to test; it should print artist and album/track name on command line
def main():
test_artist = recommendedArtist()
test_work = recommendedWork()
print '\n>>>>>>>>>>>>>>>>>\n'
print test_artist
print test_work
print '\n>>>>>>>>>>>>>>>>>\n'
if __name__ == '__main__':
main()
Fix Resident Advisor regex to match new page design
|
#!/usr/bin/python
# Gets stuff from Resident Advisor's awesome 'RA Recommends' section
# Note: this is just regexing the HTML... so may stop working if page structure changes
# This is not written by, or affiliated with Resident Advisor at all
# 2013 oldhill // MIT license
import urllib2
import re
def getResidentData():
web_page = urllib2.urlopen('http://www.residentadvisor.net/reviews.aspx?format=recommend')
all_the_html = web_page.read()
relevant_block = re.findall('reviewArchive'+'(.*?)'+'</article>', all_the_html, re.DOTALL)[0]
# zeroing in...
relevant_string = re.findall('<h1>'+'(.*?)'+'</h1>', relevant_block, re.DOTALL)[0]
return relevant_string
def recommendedArtist():
relevant_set = getResidentData()
artist = relevant_set.split('-')[0].strip()
return artist
def recommendedWork():
relevant_set = getResidentData()
work = relevant_set.split('-')[1].strip()
return work
# Use this to test; it should print artist and album/track name on command line
def main():
test_artist = recommendedArtist()
test_work = recommendedWork()
print '\n>>>>>>>>>>>>>>>>>\n'
print test_artist
print test_work
print '\n>>>>>>>>>>>>>>>>>\n'
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/python
# Gets stuff from Resident Advisor's awesome 'RA Recommends' section
# Note: this is just regexing the HTML... so may stop working if page structure changes
# This is not written by, or affiliated with Resident Advisor at all
# 2013 oldhill // MIT license
import urllib2
import re
def getResidentData():
# grab all the data...
web_page = urllib2.urlopen('http://www.residentadvisor.net/reviews.aspx?format=recommend')
all_the_html = web_page.read()
demarcator = 'pb2' # surrounds records of recommended stuff
relevant_block = re.findall(demarcator+'(.*?)'+demarcator, all_the_html, re.DOTALL)[0]
# zeroing in...
relevant_string = re.findall('class=music>'+'(.*?)'+'</a', relevant_block, re.DOTALL)[0]
return relevant_string
def recommendedArtist():
relevant_set = getResidentData()
artist = relevant_set.split('-')[0].strip()
return artist
def recommendedWork():
relevant_set = getResidentData()
work = relevant_set.split('-')[1].strip()
return work
# Use this to test; it should print artist and album/track name on command line
def main():
test_artist = recommendedArtist()
test_work = recommendedWork()
print '\n>>>>>>>>>>>>>>>>>\n'
print test_artist
print test_work
print '\n>>>>>>>>>>>>>>>>>\n'
if __name__ == '__main__':
main()
<commit_msg>Fix Resident Advisor regex to match new page design<commit_after>
|
#!/usr/bin/python
# Gets stuff from Resident Advisor's awesome 'RA Recommends' section
# Note: this is just regexing the HTML... so may stop working if page structure changes
# This is not written by, or affiliated with Resident Advisor at all
# 2013 oldhill // MIT license
import urllib2
import re
def getResidentData():
web_page = urllib2.urlopen('http://www.residentadvisor.net/reviews.aspx?format=recommend')
all_the_html = web_page.read()
relevant_block = re.findall('reviewArchive'+'(.*?)'+'</article>', all_the_html, re.DOTALL)[0]
# zeroing in...
relevant_string = re.findall('<h1>'+'(.*?)'+'</h1>', relevant_block, re.DOTALL)[0]
return relevant_string
def recommendedArtist():
relevant_set = getResidentData()
artist = relevant_set.split('-')[0].strip()
return artist
def recommendedWork():
relevant_set = getResidentData()
work = relevant_set.split('-')[1].strip()
return work
# Use this to test; it should print artist and album/track name on command line
def main():
test_artist = recommendedArtist()
test_work = recommendedWork()
print '\n>>>>>>>>>>>>>>>>>\n'
print test_artist
print test_work
print '\n>>>>>>>>>>>>>>>>>\n'
if __name__ == '__main__':
main()
|
#!/usr/bin/python
# Gets stuff from Resident Advisor's awesome 'RA Recommends' section
# Note: this is just regexing the HTML... so may stop working if page structure changes
# This is not written by, or affiliated with Resident Advisor at all
# 2013 oldhill // MIT license
import urllib2
import re
def getResidentData():
# grab all the data...
web_page = urllib2.urlopen('http://www.residentadvisor.net/reviews.aspx?format=recommend')
all_the_html = web_page.read()
demarcator = 'pb2' # surrounds records of recommended stuff
relevant_block = re.findall(demarcator+'(.*?)'+demarcator, all_the_html, re.DOTALL)[0]
# zeroing in...
relevant_string = re.findall('class=music>'+'(.*?)'+'</a', relevant_block, re.DOTALL)[0]
return relevant_string
def recommendedArtist():
relevant_set = getResidentData()
artist = relevant_set.split('-')[0].strip()
return artist
def recommendedWork():
relevant_set = getResidentData()
work = relevant_set.split('-')[1].strip()
return work
# Use this to test; it should print artist and album/track name on command line
def main():
test_artist = recommendedArtist()
test_work = recommendedWork()
print '\n>>>>>>>>>>>>>>>>>\n'
print test_artist
print test_work
print '\n>>>>>>>>>>>>>>>>>\n'
if __name__ == '__main__':
main()
Fix Resident Advisor regex to match new page design#!/usr/bin/python
# Gets stuff from Resident Advisor's awesome 'RA Recommends' section
# Note: this is just regexing the HTML... so may stop working if page structure changes
# This is not written by, or affiliated with Resident Advisor at all
# 2013 oldhill // MIT license
import urllib2
import re
def getResidentData():
web_page = urllib2.urlopen('http://www.residentadvisor.net/reviews.aspx?format=recommend')
all_the_html = web_page.read()
relevant_block = re.findall('reviewArchive'+'(.*?)'+'</article>', all_the_html, re.DOTALL)[0]
# zeroing in...
relevant_string = re.findall('<h1>'+'(.*?)'+'</h1>', relevant_block, re.DOTALL)[0]
return relevant_string
def recommendedArtist():
relevant_set = getResidentData()
artist = relevant_set.split('-')[0].strip()
return artist
def recommendedWork():
relevant_set = getResidentData()
work = relevant_set.split('-')[1].strip()
return work
# Use this to test; it should print artist and album/track name on command line
def main():
test_artist = recommendedArtist()
test_work = recommendedWork()
print '\n>>>>>>>>>>>>>>>>>\n'
print test_artist
print test_work
print '\n>>>>>>>>>>>>>>>>>\n'
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/python
# Gets stuff from Resident Advisor's awesome 'RA Recommends' section
# Note: this is just regexing the HTML... so may stop working if page structure changes
# This is not written by, or affiliated with Resident Advisor at all
# 2013 oldhill // MIT license
import urllib2
import re
def getResidentData():
# grab all the data...
web_page = urllib2.urlopen('http://www.residentadvisor.net/reviews.aspx?format=recommend')
all_the_html = web_page.read()
demarcator = 'pb2' # surrounds records of recommended stuff
relevant_block = re.findall(demarcator+'(.*?)'+demarcator, all_the_html, re.DOTALL)[0]
# zeroing in...
relevant_string = re.findall('class=music>'+'(.*?)'+'</a', relevant_block, re.DOTALL)[0]
return relevant_string
def recommendedArtist():
relevant_set = getResidentData()
artist = relevant_set.split('-')[0].strip()
return artist
def recommendedWork():
relevant_set = getResidentData()
work = relevant_set.split('-')[1].strip()
return work
# Use this to test; it should print artist and album/track name on command line
def main():
test_artist = recommendedArtist()
test_work = recommendedWork()
print '\n>>>>>>>>>>>>>>>>>\n'
print test_artist
print test_work
print '\n>>>>>>>>>>>>>>>>>\n'
if __name__ == '__main__':
main()
<commit_msg>Fix Resident Advisor regex to match new page design<commit_after>#!/usr/bin/python
# Gets stuff from Resident Advisor's awesome 'RA Recommends' section
# Note: this is just regexing the HTML... so may stop working if page structure changes
# This is not written by, or affiliated with Resident Advisor at all
# 2013 oldhill // MIT license
import urllib2
import re
def getResidentData():
web_page = urllib2.urlopen('http://www.residentadvisor.net/reviews.aspx?format=recommend')
all_the_html = web_page.read()
relevant_block = re.findall('reviewArchive'+'(.*?)'+'</article>', all_the_html, re.DOTALL)[0]
# zeroing in...
relevant_string = re.findall('<h1>'+'(.*?)'+'</h1>', relevant_block, re.DOTALL)[0]
return relevant_string
def recommendedArtist():
relevant_set = getResidentData()
artist = relevant_set.split('-')[0].strip()
return artist
def recommendedWork():
relevant_set = getResidentData()
work = relevant_set.split('-')[1].strip()
return work
# Use this to test; it should print artist and album/track name on command line
def main():
test_artist = recommendedArtist()
test_work = recommendedWork()
print '\n>>>>>>>>>>>>>>>>>\n'
print test_artist
print test_work
print '\n>>>>>>>>>>>>>>>>>\n'
if __name__ == '__main__':
main()
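The extraction pairs non-greedy capture with re.DOTALL so the wildcard spans line breaks. A minimal sketch of the same two-step narrowing on an invented HTML fragment (the markup below is illustrative, not the real page):

import re

html = "<div id=reviewArchive><article><h1>Some Artist - Some Track</h1></article></div>"
block = re.findall('reviewArchive' + '(.*?)' + '</article>', html, re.DOTALL)[0]
print(re.findall('<h1>' + '(.*?)' + '</h1>', block, re.DOTALL)[0])  # Some Artist - Some Track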
|
d9bdaf77e2b9834efd3539ec6ce599804a84b5dc
|
update_webhooks.py
|
update_webhooks.py
|
import requests
URL = "https://stripe.com/docs/api/curl/sections?all_sections=1&version=2019-02-19&cacheControlVersion=4"
response = requests.get(URL)
data = response.json()
event_types = data["event_types"]["data"]["event_types"]
class_template = """class {class_name}Webhook(Webhook):
name = "{name}"
description = "{description}"
"""
header = """from .base import Webhook
"""
with open("pinax/stripe/webhooks/generated.py", "wb") as fp:
fp.write(header.encode("utf-8"))
for index, event_type in enumerate(event_types):
name = event_type["type"]
description = event_type["description"].replace('"', "'")
class_name = name.replace(".", " ").replace("_", " ").title().replace(" ", "")
code = class_template.format(
class_name=class_name,
name=name,
description=description
)
if index + 1 == len(event_types):
code = f"{code.strip()}\n"
fp.write(code.encode("utf-8"))
print(f"{name} added...")
fp.close()
|
Add script to generate webhooks
|
Add script to generate webhooks
|
Python
|
mit
|
pinax/django-stripe-payments
|
Add script to generate webhooks
|
import requests
URL = "https://stripe.com/docs/api/curl/sections?all_sections=1&version=2019-02-19&cacheControlVersion=4"
response = requests.get(URL)
data = response.json()
event_types = data["event_types"]["data"]["event_types"]
class_template = """class {class_name}Webhook(Webhook):
name = "{name}"
description = "{description}"
"""
header = """from .base import Webhook
"""
with open("pinax/stripe/webhooks/generated.py", "wb") as fp:
fp.write(header.encode("utf-8"))
for index, event_type in enumerate(event_types):
name = event_type["type"]
description = event_type["description"].replace('"', "'")
class_name = name.replace(".", " ").replace("_", " ").title().replace(" ", "")
code = class_template.format(
class_name=class_name,
name=name,
description=description
)
if index + 1 == len(event_types):
code = f"{code.strip()}\n"
fp.write(code.encode("utf-8"))
print(f"{name} added...")
fp.close()
|
<commit_before><commit_msg>Add script to generate webhooks<commit_after>
|
import requests
URL = "https://stripe.com/docs/api/curl/sections?all_sections=1&version=2019-02-19&cacheControlVersion=4"
response = requests.get(URL)
data = response.json()
event_types = data["event_types"]["data"]["event_types"]
class_template = """class {class_name}Webhook(Webhook):
name = "{name}"
description = "{description}"
"""
header = """from .base import Webhook
"""
with open("pinax/stripe/webhooks/generated.py", "wb") as fp:
fp.write(header.encode("utf-8"))
for index, event_type in enumerate(event_types):
name = event_type["type"]
description = event_type["description"].replace('"', "'")
class_name = name.replace(".", " ").replace("_", " ").title().replace(" ", "")
code = class_template.format(
class_name=class_name,
name=name,
description=description
)
if index + 1 == len(event_types):
code = f"{code.strip()}\n"
fp.write(code.encode("utf-8"))
print(f"{name} added...")
fp.close()
|
Add script to generate webhooks
import requests
URL = "https://stripe.com/docs/api/curl/sections?all_sections=1&version=2019-02-19&cacheControlVersion=4"
response = requests.get(URL)
data = response.json()
event_types = data["event_types"]["data"]["event_types"]
class_template = """class {class_name}Webhook(Webhook):
name = "{name}"
description = "{description}"
"""
header = """from .base import Webhook
"""
with open("pinax/stripe/webhooks/generated.py", "wb") as fp:
fp.write(header.encode("utf-8"))
for index, event_type in enumerate(event_types):
name = event_type["type"]
description = event_type["description"].replace('"', "'")
class_name = name.replace(".", " ").replace("_", " ").title().replace(" ", "")
code = class_template.format(
class_name=class_name,
name=name,
description=description
)
if index + 1 == len(event_types):
code = f"{code.strip()}\n"
fp.write(code.encode("utf-8"))
print(f"{name} added...")
fp.close()
|
<commit_before><commit_msg>Add script to generate webhooks<commit_after>import requests
URL = "https://stripe.com/docs/api/curl/sections?all_sections=1&version=2019-02-19&cacheControlVersion=4"
response = requests.get(URL)
data = response.json()
event_types = data["event_types"]["data"]["event_types"]
class_template = """class {class_name}Webhook(Webhook):
name = "{name}"
description = "{description}"
"""
header = """from .base import Webhook
"""
with open("pinax/stripe/webhooks/generated.py", "wb") as fp:
fp.write(header.encode("utf-8"))
for index, event_type in enumerate(event_types):
name = event_type["type"]
description = event_type["description"].replace('"', "'")
class_name = name.replace(".", " ").replace("_", " ").title().replace(" ", "")
code = class_template.format(
class_name=class_name,
name=name,
description=description
)
if index + 1 == len(event_types):
code = f"{code.strip()}\n"
fp.write(code.encode("utf-8"))
print(f"{name} added...")
fp.close()
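For a sense of what each loop iteration writes, here is the script's class_template (reused from above) rendered for one hypothetical event type; the name and description are invented placeholders, not values from the Stripe payload:

print(class_template.format(
    class_name="InvoicePaymentSucceeded",  # "invoice.payment_succeeded" title-cased with separators removed
    name="invoice.payment_succeeded",
    description="Occurs whenever an invoice payment attempt succeeds.",
))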
|
|
cebd5969fda658fd045c1228a8d28cc64fca103e
|
tests/test_ansi.py
|
tests/test_ansi.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Test the cprint function."""
from colorise.nix.color_functions import to_ansi
import pytest
@pytest.mark.skip_on_windows
def test_ansi():
assert to_ansi(34, '95') == '\x1b[34;95m'
assert to_ansi(0) == '\x1b[0m'
assert to_ansi() == ''
|
Test ansi escape sequence function
|
Test ansi escape sequence function
|
Python
|
bsd-3-clause
|
MisanthropicBit/colorise
|
Test ansi escape sequence function
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Test the cprint function."""
from colorise.nix.color_functions import to_ansi
import pytest
@pytest.mark.skip_on_windows
def test_ansi():
assert to_ansi(34, '95') == '\x1b[34;95m'
assert to_ansi(0) == '\x1b[0m'
assert to_ansi() == ''
|
<commit_before><commit_msg>Test ansi escape sequence function<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Test the cprint function."""
from colorise.nix.color_functions import to_ansi
import pytest
@pytest.mark.skip_on_windows
def test_ansi():
assert to_ansi(34, '95') == '\x1b[34;95m'
assert to_ansi(0) == '\x1b[0m'
assert to_ansi() == ''
|
Test ansi escape sequence function
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Test the cprint function."""
from colorise.nix.color_functions import to_ansi
import pytest
@pytest.mark.skip_on_windows
def test_ansi():
assert to_ansi(34, '95') == '\x1b[34;95m'
assert to_ansi(0) == '\x1b[0m'
assert to_ansi() == ''
|
<commit_before><commit_msg>Test ansi escape sequence function<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Test the cprint function."""
from colorise.nix.color_functions import to_ansi
import pytest
@pytest.mark.skip_on_windows
def test_ansi():
assert to_ansi(34, '95') == '\x1b[34;95m'
assert to_ansi(0) == '\x1b[0m'
assert to_ansi() == ''
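The expected strings follow the ANSI SGR convention, ESC [ codes m, with multiple codes joined by semicolons. A quick sketch of such a sequence in use (terminal support assumed):

print('\x1b[34m' + 'blue text' + '\x1b[0m')  # 34 sets a blue foreground, 0 resets attributes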
|
|
61747aa05f183e7b0712df08c81d9a181cd5abc5
|
tests/test_bbox.py
|
tests/test_bbox.py
|
import unittest
from coral import bbox
class TestBBox(unittest.TestCase):
def setUp(self):
self.a = bbox.BBox((20, 30), (50, 40))
self.b = self.a.scale(2)
self.c = self.b.translate(self.b.width() / 2, 0)
def test_width_height_area(self):
self.assertEqual(self.a.area(), self.a.width() * self.a.height())
def test_scale_area(self):
self.assertEqual(self.a.area() * 4, self.b.area())
def test_scale_center(self):
self.assertEqual(self.a.center(), self.b.center())
def test_translate_area_union(self):
self.assertEqual((self.b | self.c).area(), self.b.area() * 3 / 2)
def test_translate_area_intersection(self):
self.assertEqual((self.b & self.c).area(), self.b.area() / 2)
def test_haspoint(self):
self.assertTrue(self.a.has_point(self.a.center()))
half_width = self.a.width() / 2
half_height = self.a.height() / 2
for dx, dy in ((-half_width, 0), (0, -half_height), (-half_width, -half_height)):
self.assertTrue(self.a.has_point(self.a.translate(dx, dy).center()))
for dx, dy in ((half_width, 0), (0, half_height), (half_width, half_height)):
self.assertFalse(self.a.has_point(self.a.translate(dx, dy).center()))
def test_hasbbox(self):
self.assertFalse(self.a.has_bbox(self.b))
self.assertTrue(self.b.has_bbox(self.a))
def test_compare(self):
self.assertEqual(self.a, self.a.scale(1))
self.assertNotEqual(self.a, self.a.scale(1.2))
self.assertNotEqual(self.a, self.a.scale(0.8))
self.assertTrue(self.a < self.b)
if __name__ == "__main__":
unittest.main()
|
Add some tests for bbox.
|
Add some tests for bbox.
|
Python
|
mit
|
lecram/coral
|
Add some tests for bbox.
|
import unittest
from coral import bbox
class TestBBox(unittest.TestCase):
def setUp(self):
self.a = bbox.BBox((20, 30), (50, 40))
self.b = self.a.scale(2)
self.c = self.b.translate(self.b.width() / 2, 0)
def test_width_height_area(self):
self.assertEqual(self.a.area(), self.a.width() * self.a.height())
def test_scale_area(self):
self.assertEqual(self.a.area() * 4, self.b.area())
def test_scale_center(self):
self.assertEqual(self.a.center(), self.b.center())
def test_translate_area_union(self):
self.assertEqual((self.b | self.c).area(), self.b.area() * 3 / 2)
def test_translate_area_intersection(self):
self.assertEqual((self.b & self.c).area(), self.b.area() / 2)
def test_haspoint(self):
self.assertTrue(self.a.has_point(self.a.center()))
half_width = self.a.width() / 2
half_height = self.a.height() / 2
for dx, dy in ((-half_width, 0), (0, -half_height), (-half_width, -half_height)):
self.assertTrue(self.a.has_point(self.a.translate(dx, dy).center()))
for dx, dy in ((half_width, 0), (0, half_height), (half_width, half_height)):
self.assertFalse(self.a.has_point(self.a.translate(dx, dy).center()))
def test_hasbbox(self):
self.assertFalse(self.a.has_bbox(self.b))
self.assertTrue(self.b.has_bbox(self.a))
def test_compare(self):
self.assertEqual(self.a, self.a.scale(1))
self.assertNotEqual(self.a, self.a.scale(1.2))
self.assertNotEqual(self.a, self.a.scale(0.8))
self.assertTrue(self.a < self.b)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add some tests for bbox.<commit_after>
|
import unittest
from coral import bbox
class TestBBox(unittest.TestCase):
def setUp(self):
self.a = bbox.BBox((20, 30), (50, 40))
self.b = self.a.scale(2)
self.c = self.b.translate(self.b.width() / 2, 0)
def test_width_height_area(self):
self.assertEqual(self.a.area(), self.a.width() * self.a.height())
def test_scale_area(self):
self.assertEqual(self.a.area() * 4, self.b.area())
def test_scale_center(self):
self.assertEqual(self.a.center(), self.b.center())
def test_translate_area_union(self):
self.assertEqual((self.b | self.c).area(), self.b.area() * 3 / 2)
def test_translate_area_intersection(self):
self.assertEqual((self.b & self.c).area(), self.b.area() / 2)
def test_haspoint(self):
self.assertTrue(self.a.has_point(self.a.center()))
half_width = self.a.width() / 2
half_height = self.a.height() / 2
for dx, dy in ((-half_width, 0), (0, -half_height), (-half_width, -half_height)):
self.assertTrue(self.a.has_point(self.a.translate(dx, dy).center()))
for dx, dy in ((half_width, 0), (0, half_height), (half_width, half_height)):
self.assertFalse(self.a.has_point(self.a.translate(dx, dy).center()))
def test_hasbbox(self):
self.assertFalse(self.a.has_bbox(self.b))
self.assertTrue(self.b.has_bbox(self.a))
def test_compare(self):
self.assertEqual(self.a, self.a.scale(1))
self.assertNotEqual(self.a, self.a.scale(1.2))
self.assertNotEqual(self.a, self.a.scale(0.8))
self.assertTrue(self.a < self.b)
if __name__ == "__main__":
unittest.main()
|
Add some tests for bbox.
import unittest
from coral import bbox
class TestBBox(unittest.TestCase):
def setUp(self):
self.a = bbox.BBox((20, 30), (50, 40))
self.b = self.a.scale(2)
self.c = self.b.translate(self.b.width() / 2, 0)
def test_width_height_area(self):
self.assertEqual(self.a.area(), self.a.width() * self.a.height())
def test_scale_area(self):
self.assertEqual(self.a.area() * 4, self.b.area())
def test_scale_center(self):
self.assertEqual(self.a.center(), self.b.center())
def test_translate_area_union(self):
self.assertEqual((self.b | self.c).area(), self.b.area() * 3 / 2)
def test_translate_area_intersection(self):
self.assertEqual((self.b & self.c).area(), self.b.area() / 2)
def test_haspoint(self):
self.assertTrue(self.a.has_point(self.a.center()))
half_width = self.a.width() / 2
half_height = self.a.height() / 2
for dx, dy in ((-half_width, 0), (0, -half_height), (-half_width, -half_height)):
self.assertTrue(self.a.has_point(self.a.translate(dx, dy).center()))
for dx, dy in ((half_width, 0), (0, half_height), (half_width, half_height)):
self.assertFalse(self.a.has_point(self.a.translate(dx, dy).center()))
def test_hasbbox(self):
self.assertFalse(self.a.has_bbox(self.b))
self.assertTrue(self.b.has_bbox(self.a))
def test_compare(self):
self.assertEqual(self.a, self.a.scale(1))
self.assertNotEqual(self.a, self.a.scale(1.2))
self.assertNotEqual(self.a, self.a.scale(0.8))
self.assertTrue(self.a < self.b)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add some tests for bbox.<commit_after>import unittest
from coral import bbox
class TestBBox(unittest.TestCase):
def setUp(self):
self.a = bbox.BBox((20, 30), (50, 40))
self.b = self.a.scale(2)
self.c = self.b.translate(self.b.width() / 2, 0)
def test_width_height_area(self):
self.assertEqual(self.a.area(), self.a.width() * self.a.height())
def test_scale_area(self):
self.assertEqual(self.a.area() * 4, self.b.area())
def test_scale_center(self):
self.assertEqual(self.a.center(), self.b.center())
def test_translate_area_union(self):
self.assertEqual((self.b | self.c).area(), self.b.area() * 3 / 2)
def test_translate_area_intersection(self):
self.assertEqual((self.b & self.c).area(), self.b.area() / 2)
def test_haspoint(self):
self.assertTrue(self.a.has_point(self.a.center()))
half_width = self.a.width() / 2
half_height = self.a.height() / 2
for dx, dy in ((-half_width, 0), (0, -half_height), (-half_width, -half_height)):
self.assertTrue(self.a.has_point(self.a.translate(dx, dy).center()))
for dx, dy in ((half_width, 0), (0, half_height), (half_width, half_height)):
self.assertFalse(self.a.has_point(self.a.translate(dx, dy).center()))
def test_hasbbox(self):
self.assertFalse(self.a.has_bbox(self.b))
self.assertTrue(self.b.has_bbox(self.a))
def test_compare(self):
self.assertEqual(self.a, self.a.scale(1))
self.assertNotEqual(self.a, self.a.scale(1.2))
self.assertNotEqual(self.a, self.a.scale(0.8))
self.assertTrue(self.a < self.b)
if __name__ == "__main__":
unittest.main()
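The fixture numbers are easy to verify by hand. Assuming (20, 30) and (50, 40) are opposite corners, a spans 30 x 10, and the ratio assertions follow regardless of the exact constructor convention:

area_a = 30 * 10            # assumed corner-to-corner reading of BBox((20, 30), (50, 40))
area_b = 4 * area_a         # scaling both dimensions by 2 quadruples any area
union = area_b * 3 // 2     # shifting b by half its width leaves half of b overlapping
intersection = area_b // 2
assert (union, intersection) == (1800, 600)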
|
|
5f69bf0adeee796ce2d66b605f1e65c67bc791bb
|
mininet/test/test_util.py
|
mininet/test/test_util.py
|
#!/usr/bin/env python
"""Package: mininet
Test functions defined in mininet.util."""
import unittest
from mininet.util import quietRun
class testQuietRun( unittest.TestCase ):
"""Test quietRun that runs a command and returns its merged output from
    STDOUT and STDERR"""
@staticmethod
def getEchoCmd( n ):
"Return a command that will print n characters"
return "echo -n " + "x" * n
def testEmpty( self ):
"Run a command that prints nothing"
output = quietRun(testQuietRun.getEchoCmd( 0 ) )
self.assertEqual( 0, len( output ) )
def testOneRead( self ):
"""Run a command whose output is entirely read on the first call if
each call reads at most 1024 characters
"""
for n in [ 42, 1024 ]:
output = quietRun( testQuietRun.getEchoCmd( n ) )
self.assertEqual( n, len( output ) )
def testMultipleReads( self ):
"Run a command whose output is not entirely read on the first read"
for n in [ 1025, 4242 ]:
output = quietRun(testQuietRun.getEchoCmd( n ) )
self.assertEqual( n, len( output ) )
if __name__ == "__main__":
unittest.main()
|
Add unit tests for util
|
Add unit tests for util
|
Python
|
bsd-3-clause
|
mininet/mininet,mininet/mininet,mininet/mininet
|
Add unit tests for util
|
#!/usr/bin/env python
"""Package: mininet
Test functions defined in mininet.util."""
import unittest
from mininet.util import quietRun
class testQuietRun( unittest.TestCase ):
"""Test quietRun that runs a command and returns its merged output from
    STDOUT and STDERR"""
@staticmethod
def getEchoCmd( n ):
"Return a command that will print n characters"
return "echo -n " + "x" * n
def testEmpty( self ):
"Run a command that prints nothing"
output = quietRun(testQuietRun.getEchoCmd( 0 ) )
self.assertEqual( 0, len( output ) )
def testOneRead( self ):
"""Run a command whose output is entirely read on the first call if
each call reads at most 1024 characters
"""
for n in [ 42, 1024 ]:
output = quietRun( testQuietRun.getEchoCmd( n ) )
self.assertEqual( n, len( output ) )
def testMultipleReads( self ):
"Run a command whose output is not entirely read on the first read"
for n in [ 1025, 4242 ]:
output = quietRun(testQuietRun.getEchoCmd( n ) )
self.assertEqual( n, len( output ) )
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add unit tests for util<commit_after>
|
#!/usr/bin/env python
"""Package: mininet
Test functions defined in mininet.util."""
import unittest
from mininet.util import quietRun
class testQuietRun( unittest.TestCase ):
"""Test quietRun that runs a command and returns its merged output from
    STDOUT and STDERR"""
@staticmethod
def getEchoCmd( n ):
"Return a command that will print n characters"
return "echo -n " + "x" * n
def testEmpty( self ):
"Run a command that prints nothing"
output = quietRun(testQuietRun.getEchoCmd( 0 ) )
self.assertEqual( 0, len( output ) )
def testOneRead( self ):
"""Run a command whose output is entirely read on the first call if
each call reads at most 1024 characters
"""
for n in [ 42, 1024 ]:
output = quietRun( testQuietRun.getEchoCmd( n ) )
self.assertEqual( n, len( output ) )
def testMultipleReads( self ):
"Run a command whose output is not entirely read on the first read"
for n in [ 1025, 4242 ]:
output = quietRun(testQuietRun.getEchoCmd( n ) )
self.assertEqual( n, len( output ) )
if __name__ == "__main__":
unittest.main()
|
Add unit tests for util
#!/usr/bin/env python
"""Package: mininet
Test functions defined in mininet.util."""
import unittest
from mininet.util import quietRun
class testQuietRun( unittest.TestCase ):
"""Test quietRun that runs a command and returns its merged output from
    STDOUT and STDERR"""
@staticmethod
def getEchoCmd( n ):
"Return a command that will print n characters"
return "echo -n " + "x" * n
def testEmpty( self ):
"Run a command that prints nothing"
output = quietRun(testQuietRun.getEchoCmd( 0 ) )
self.assertEqual( 0, len( output ) )
def testOneRead( self ):
"""Run a command whose output is entirely read on the first call if
each call reads at most 1024 characters
"""
for n in [ 42, 1024 ]:
output = quietRun( testQuietRun.getEchoCmd( n ) )
self.assertEqual( n, len( output ) )
def testMultipleReads( self ):
"Run a command whose output is not entirely read on the first read"
for n in [ 1025, 4242 ]:
output = quietRun(testQuietRun.getEchoCmd( n ) )
self.assertEqual( n, len( output ) )
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add unit tests for util<commit_after>#!/usr/bin/env python
"""Package: mininet
Test functions defined in mininet.util."""
import unittest
from mininet.util import quietRun
class testQuietRun( unittest.TestCase ):
"""Test quietRun that runs a command and returns its merged output from
    STDOUT and STDERR"""
@staticmethod
def getEchoCmd( n ):
"Return a command that will print n characters"
return "echo -n " + "x" * n
def testEmpty( self ):
"Run a command that prints nothing"
output = quietRun(testQuietRun.getEchoCmd( 0 ) )
self.assertEqual( 0, len( output ) )
def testOneRead( self ):
"""Run a command whose output is entirely read on the first call if
each call reads at most 1024 characters
"""
for n in [ 42, 1024 ]:
output = quietRun( testQuietRun.getEchoCmd( n ) )
self.assertEqual( n, len( output ) )
def testMultipleReads( self ):
"Run a command whose output is not entirely read on the first read"
for n in [ 1025, 4242 ]:
output = quietRun(testQuietRun.getEchoCmd( n ) )
self.assertEqual( n, len( output ) )
if __name__ == "__main__":
unittest.main()
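A minimal illustration of the function under test, assuming the interface these tests exercise (a shell command in, its combined output out) and that echo -n is available to the shell quietRun invokes:

from mininet.util import quietRun

assert quietRun('echo -n hello') == 'hello'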
|
|
a8ce7fbfecaf7a55d8f31a6e4489d5b8a3fc894b
|
src/gevent_sqlite3.py
|
src/gevent_sqlite3.py
|
#!/usr/bin/env python3
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright 2017 - Luca Versari <veluca93@gmail.com>
"""This file can be imported instead of sqlite3 to make sure that
gevent and sqlite3 play nice together."""
import gevent.hub
import sqlite3
from functools import wraps
def _using_gevent_tp(method):
@wraps(method, ['__name__', '__doc__'])
def apply(*args, **kwargs):
return gevent.hub.get_hub().threadpool.apply(method, args, kwargs)
return apply
class Cursor(sqlite3.Cursor):
""" A greenlet-friendly wrapper around sqlite3.Cursor. """
for method in [sqlite3.Cursor.executemany,
sqlite3.Cursor.executescript,
sqlite3.Cursor.fetchone,
sqlite3.Cursor.fetchmany,
sqlite3.Cursor.fetchall,
sqlite3.Cursor.execute]:
setattr(Cursor, method.__name__, _using_gevent_tp(method))
class Connection(sqlite3.Connection):
""" A greenlet-friendly wrapper around sqlite3.Connection. """
def __init__(self, *args, **kwargs):
# Workaround gevent's thread id monkey patching
kwargs['check_same_thread'] = False
super(Connection, self).__init__(*args, **kwargs)
def cursor(self):
return Cursor(self)
for method in [sqlite3.Connection.commit,
sqlite3.Connection.rollback,
sqlite3.Connection.execute]:
setattr(Connection, method.__name__, _using_gevent_tp(method))
@wraps(sqlite3.connect)
def connect(*args, **kwargs):
kwargs['factory'] = Connection
return sqlite3.connect(*args, **kwargs)
|
Add a sqlite3 gevent wrapper
|
Add a sqlite3 gevent wrapper
|
Python
|
mpl-2.0
|
algorithm-ninja/territoriali-backend
|
Add a sqlite3 gevent wrapper
|
#!/usr/bin/env python3
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright 2017 - Luca Versari <veluca93@gmail.com>
"""This file can be imported instead of sqlite3 to make sure that
gevent and sqlite3 play nice together."""
import gevent.hub
import sqlite3
from functools import wraps
def _using_gevent_tp(method):
@wraps(method, ['__name__', '__doc__'])
def apply(*args, **kwargs):
return gevent.hub.get_hub().threadpool.apply(method, args, kwargs)
return apply
class Cursor(sqlite3.Cursor):
""" A greenlet-friendly wrapper around sqlite3.Cursor. """
for method in [sqlite3.Cursor.executemany,
sqlite3.Cursor.executescript,
sqlite3.Cursor.fetchone,
sqlite3.Cursor.fetchmany,
sqlite3.Cursor.fetchall,
sqlite3.Cursor.execute]:
setattr(Cursor, method.__name__, _using_gevent_tp(method))
class Connection(sqlite3.Connection):
""" A greenlet-friendly wrapper around sqlite3.Connection. """
def __init__(self, *args, **kwargs):
# Workaround gevent's thread id monkey patching
kwargs['check_same_thread'] = False
super(Connection, self).__init__(*args, **kwargs)
def cursor(self):
return Cursor(self)
for method in [sqlite3.Connection.commit,
sqlite3.Connection.rollback,
sqlite3.Connection.execute]:
setattr(Connection, method.__name__, _using_gevent_tp(method))
@wraps(sqlite3.connect)
def connect(*args, **kwargs):
kwargs['factory'] = Connection
return sqlite3.connect(*args, **kwargs)
|
<commit_before><commit_msg>Add a sqlite3 gevent wrapper<commit_after>
|
#!/usr/bin/env python3
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright 2017 - Luca Versari <veluca93@gmail.com>
"""This file can be imported instead of sqlite3 to make sure that
gevent and sqlite3 play nice together."""
import gevent.hub
import sqlite3
from functools import wraps
def _using_gevent_tp(method):
@wraps(method, ['__name__', '__doc__'])
def apply(*args, **kwargs):
return gevent.hub.get_hub().threadpool.apply(method, args, kwargs)
return apply
class Cursor(sqlite3.Cursor):
""" A greenlet-friendly wrapper around sqlite3.Cursor. """
for method in [sqlite3.Cursor.executemany,
sqlite3.Cursor.executescript,
sqlite3.Cursor.fetchone,
sqlite3.Cursor.fetchmany,
sqlite3.Cursor.fetchall,
sqlite3.Cursor.execute]:
setattr(Cursor, method.__name__, _using_gevent_tp(method))
class Connection(sqlite3.Connection):
""" A greenlet-friendly wrapper around sqlite3.Connection. """
def __init__(self, *args, **kwargs):
# Workaround gevent's thread id monkey patching
kwargs['check_same_thread'] = False
super(Connection, self).__init__(*args, **kwargs)
def cursor(self):
return Cursor(self)
for method in [sqlite3.Connection.commit,
sqlite3.Connection.rollback,
sqlite3.Connection.execute]:
setattr(Connection, method.__name__, _using_gevent_tp(method))
@wraps(sqlite3.connect)
def connect(*args, **kwargs):
kwargs['factory'] = Connection
return sqlite3.connect(*args, **kwargs)
|
Add a sqlite3 gevent wrapper
#!/usr/bin/env python3
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright 2017 - Luca Versari <veluca93@gmail.com>
"""This file can be imported instead of sqlite3 to make sure that
gevent and sqlite3 play nice together."""
import gevent.hub
import sqlite3
from functools import wraps
def _using_gevent_tp(method):
@wraps(method, ['__name__', '__doc__'])
def apply(*args, **kwargs):
return gevent.hub.get_hub().threadpool.apply(method, args, kwargs)
return apply
class Cursor(sqlite3.Cursor):
""" A greenlet-friendly wrapper around sqlite3.Cursor. """
for method in [sqlite3.Cursor.executemany,
sqlite3.Cursor.executescript,
sqlite3.Cursor.fetchone,
sqlite3.Cursor.fetchmany,
sqlite3.Cursor.fetchall,
sqlite3.Cursor.execute]:
setattr(Cursor, method.__name__, _using_gevent_tp(method))
class Connection(sqlite3.Connection):
""" A greenlet-friendly wrapper around sqlite3.Connection. """
def __init__(self, *args, **kwargs):
# Workaround gevent's thread id monkey patching
kwargs['check_same_thread'] = False
super(Connection, self).__init__(*args, **kwargs)
def cursor(self):
return Cursor(self)
for method in [sqlite3.Connection.commit,
sqlite3.Connection.rollback,
sqlite3.Connection.execute]:
setattr(Connection, method.__name__, _using_gevent_tp(method))
@wraps(sqlite3.connect)
def connect(*args, **kwargs):
kwargs['factory'] = Connection
return sqlite3.connect(*args, **kwargs)
|
<commit_before><commit_msg>Add a sqlite3 gevent wrapper<commit_after>#!/usr/bin/env python3
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright 2017 - Luca Versari <veluca93@gmail.com>
"""This file can be imported instead of sqlite3 to make sure that
gevent and sqlite3 play nice together."""
import gevent.hub
import sqlite3
from functools import wraps
def _using_gevent_tp(method):
@wraps(method, ['__name__', '__doc__'])
def apply(*args, **kwargs):
return gevent.hub.get_hub().threadpool.apply(method, args, kwargs)
return apply
class Cursor(sqlite3.Cursor):
""" A greenlet-friendly wrapper around sqlite3.Cursor. """
for method in [sqlite3.Cursor.executemany,
sqlite3.Cursor.executescript,
sqlite3.Cursor.fetchone,
sqlite3.Cursor.fetchmany,
sqlite3.Cursor.fetchall,
sqlite3.Cursor.execute]:
setattr(Cursor, method.__name__, _using_gevent_tp(method))
class Connection(sqlite3.Connection):
""" A greenlet-friendly wrapper around sqlite3.Connection. """
def __init__(self, *args, **kwargs):
# Workaround gevent's thread id monkey patching
kwargs['check_same_thread'] = False
super(Connection, self).__init__(*args, **kwargs)
def cursor(self):
return Cursor(self)
for method in [sqlite3.Connection.commit,
sqlite3.Connection.rollback,
sqlite3.Connection.execute]:
setattr(Connection, method.__name__, _using_gevent_tp(method))
@wraps(sqlite3.connect)
def connect(*args, **kwargs):
kwargs['factory'] = Connection
return sqlite3.connect(*args, **kwargs)
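Usage is meant to mirror the standard library. A sketch assuming the module is importable as gevent_sqlite3 (the actual import path depends on how src/ is packaged):

import gevent
import gevent_sqlite3 as sqlite3  # drop-in replacement for the stdlib module

def worker():
    conn = sqlite3.connect(':memory:')  # the factory kwarg routes to the wrapped Connection
    cur = conn.execute('SELECT 1')      # blocking call runs on the hub threadpool
    print(cur.fetchone())               # other greenlets keep running in the meantime

gevent.joinall([gevent.spawn(worker)])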
|
|
3d458090e3d1684e42b5dc9e9d30e268129dce58
|
course_discovery/apps/course_metadata/migrations/0155_auto_20190207_1546.py
|
course_discovery/apps/course_metadata/migrations/0155_auto_20190207_1546.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-02-07 15:46
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('course_metadata', '0154_course_entitlement_default_currency'),
]
operations = [
migrations.RemoveField(
model_name='degreecoursecurriculum',
name='course',
),
migrations.RemoveField(
model_name='degreecoursecurriculum',
name='curriculum',
),
migrations.RemoveField(
model_name='degreeprogramcurriculum',
name='curriculum',
),
migrations.RemoveField(
model_name='degreeprogramcurriculum',
name='program',
),
migrations.DeleteModel(
name='DegreeCourseCurriculum',
),
migrations.DeleteModel(
name='DegreeProgramCurriculum',
),
]
|
Add migration file for removing unused curriculum models
|
Add migration file for removing unused curriculum models
|
Python
|
agpl-3.0
|
edx/course-discovery,edx/course-discovery,edx/course-discovery,edx/course-discovery
|
Add migration file for removing unused curriculum models
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-02-07 15:46
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('course_metadata', '0154_course_entitlement_default_currency'),
]
operations = [
migrations.RemoveField(
model_name='degreecoursecurriculum',
name='course',
),
migrations.RemoveField(
model_name='degreecoursecurriculum',
name='curriculum',
),
migrations.RemoveField(
model_name='degreeprogramcurriculum',
name='curriculum',
),
migrations.RemoveField(
model_name='degreeprogramcurriculum',
name='program',
),
migrations.DeleteModel(
name='DegreeCourseCurriculum',
),
migrations.DeleteModel(
name='DegreeProgramCurriculum',
),
]
|
<commit_before><commit_msg>Add migration file for removing unused curriculum models<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-02-07 15:46
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('course_metadata', '0154_course_entitlement_default_currency'),
]
operations = [
migrations.RemoveField(
model_name='degreecoursecurriculum',
name='course',
),
migrations.RemoveField(
model_name='degreecoursecurriculum',
name='curriculum',
),
migrations.RemoveField(
model_name='degreeprogramcurriculum',
name='curriculum',
),
migrations.RemoveField(
model_name='degreeprogramcurriculum',
name='program',
),
migrations.DeleteModel(
name='DegreeCourseCurriculum',
),
migrations.DeleteModel(
name='DegreeProgramCurriculum',
),
]
|
Add migration file for removing unused curriculum models
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-02-07 15:46
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('course_metadata', '0154_course_entitlement_default_currency'),
]
operations = [
migrations.RemoveField(
model_name='degreecoursecurriculum',
name='course',
),
migrations.RemoveField(
model_name='degreecoursecurriculum',
name='curriculum',
),
migrations.RemoveField(
model_name='degreeprogramcurriculum',
name='curriculum',
),
migrations.RemoveField(
model_name='degreeprogramcurriculum',
name='program',
),
migrations.DeleteModel(
name='DegreeCourseCurriculum',
),
migrations.DeleteModel(
name='DegreeProgramCurriculum',
),
]
|
<commit_before><commit_msg>Add migration file for removing unused curriculum models<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-02-07 15:46
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('course_metadata', '0154_course_entitlement_default_currency'),
]
operations = [
migrations.RemoveField(
model_name='degreecoursecurriculum',
name='course',
),
migrations.RemoveField(
model_name='degreecoursecurriculum',
name='curriculum',
),
migrations.RemoveField(
model_name='degreeprogramcurriculum',
name='curriculum',
),
migrations.RemoveField(
model_name='degreeprogramcurriculum',
name='program',
),
migrations.DeleteModel(
name='DegreeCourseCurriculum',
),
migrations.DeleteModel(
name='DegreeProgramCurriculum',
),
]
|
|
e4166b3cf4f37f74f6c7e1be2641f556e5763a1a
|
evalQuadratic.py
|
evalQuadratic.py
|
def evalQuadratic( a, b, c, x ):
a = int ( a )
b = int ( b )
c = int ( c )
x = int ( x )
s = (a * (x ** 2)) + (b * x) + c
return s
a = input( "Enter a: " )
b = input( "Enter b: " )
c = input( "Enter c: " )
x = input( "Enter x: " )
print( "The answer of the quadratic equation is " + str( evalQuadratic( a, b, c, x ) ) )
|
Add the answer to the second question of Assignment 3
|
Add the answer to the second question of Assignment 3
|
Python
|
mit
|
SuyashD95/python-assignments
|
Add the answer to the second question of Assignment 3
|
def evalQuadratic( a, b, c, x ):
a = int ( a )
b = int ( b )
c = int ( c )
x = int ( x )
s = (a * (x ** 2)) + (b * x) + c
return s
a = input( "Enter a: " )
b = input( "Enter b: " )
c = input( "Enter c: " )
x = input( "Enter x: " )
print( "The answer of the quadratic equation is " + str( evalQuadratic( a, b, c, x ) ) )
|
<commit_before><commit_msg>Add the answer to the second question of Assignment 3<commit_after>
|
def evalQuadratic( a, b, c, x ):
a = int ( a )
b = int ( b )
c = int ( c )
x = int ( x )
s = (a * (x ** 2)) + (b * x) + c
return s
a = input( "Enter a: " )
b = input( "Enter b: " )
c = input( "Enter c: " )
x = input( "Enter x: " )
print( "The answer of the quadratic equation is " + str( evalQuadratic( a, b, c, x ) ) )
|
Add the answer to the second question of Assignment 3
def evalQuadratic( a, b, c, x ):
a = int ( a )
b = int ( b )
c = int ( c )
x = int ( x )
s = (a * (x ** 2)) + (b * x) + c
return s
a = input( "Enter a: " )
b = input( "Enter b: " )
c = input( "Enter c: " )
x = input( "Enter x: " )
print( "The answer of the quadratic equation is " + str( evalQuadratic( a, b, c, x ) ) )
|
<commit_before><commit_msg>Add the answer to the second question of Assignment 3<commit_after>def evalQuadratic( a, b, c, x ):
a = int ( a )
b = int ( b )
c = int ( c )
x = int ( x )
s = (a * (x ** 2)) + (b * x) + c
return s
a = input( "Enter a: " )
b = input( "Enter b: " )
c = input( "Enter c: " )
x = input( "Enter x: " )
print( "The answer of the quadratic equation is " + str( evalQuadratic( a, b, c, x ) ) )
|
|
92d35c82ce4843129668d5102fa185a02f6d5b7a
|
test/torch/tensors/test_parameter.py
|
test/torch/tensors/test_parameter.py
|
import random
import torch
import torch.nn.functional as F
from torch.nn import Parameter
import syft
from syft.frameworks.torch.tensors import LoggingTensor
class TestParameter(object):
def setUp(self):
hook = syft.TorchHook(torch, verbose=True)
self.me = hook.local_worker
self.me.is_client_worker = True
instance_id = str(int(10e10 * random.random()))
bob = syft.VirtualWorker(id=f"bob{instance_id}", hook=hook, is_client_worker=False)
alice = syft.VirtualWorker(id=f"alice{instance_id}", hook=hook, is_client_worker=False)
james = syft.VirtualWorker(id=f"james{instance_id}", hook=hook, is_client_worker=False)
bob.add_workers([alice, james])
alice.add_workers([bob, james])
james.add_workers([bob, alice])
self.hook = hook
self.bob = bob
self.alice = alice
self.james = james
def test_param_on_pointer(self):
"""
"""
self.setUp()
tensor = torch.tensor([1.0, -1.0, 3.0, 4.0])
ptr = tensor.send(self.bob)
param = Parameter(ptr)
local_param = param.get()
assert (local_param.data == tensor).all()
def test_param_send_get(self):
"""
"""
self.setUp()
tensor = torch.tensor([1.0, -1.0, 3.0, 4.0])
param = Parameter(data=tensor)
param_ptr = param.send(self.bob)
param_back = param_ptr.get()
assert (param_back.data == tensor).all()
def test_param_remote_binary_method(self):
"""
"""
self.setUp()
tensor = torch.tensor([1.0, -1.0, 3.0, 4.0])
param = Parameter(data=tensor)
param_ptr = param.send(self.bob)
param_double_ptr = param_ptr + param_ptr
param_double_back = param_double_ptr.get()
double_tensor = tensor + tensor
assert (param_double_back.data == double_tensor).all()
|
Add tests for send / get / remote op on parameters
|
Add tests for send / get / remote op on parameters
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
Add tests for send / get / remote op on parameters
|
import random
import torch
import torch.nn.functional as F
from torch.nn import Parameter
import syft
from syft.frameworks.torch.tensors import LoggingTensor
class TestParameter(object):
def setUp(self):
hook = syft.TorchHook(torch, verbose=True)
self.me = hook.local_worker
self.me.is_client_worker = True
instance_id = str(int(10e10 * random.random()))
bob = syft.VirtualWorker(id=f"bob{instance_id}", hook=hook, is_client_worker=False)
alice = syft.VirtualWorker(id=f"alice{instance_id}", hook=hook, is_client_worker=False)
james = syft.VirtualWorker(id=f"james{instance_id}", hook=hook, is_client_worker=False)
bob.add_workers([alice, james])
alice.add_workers([bob, james])
james.add_workers([bob, alice])
self.hook = hook
self.bob = bob
self.alice = alice
self.james = james
def test_param_on_pointer(self):
"""
"""
self.setUp()
tensor = torch.tensor([1.0, -1.0, 3.0, 4.0])
ptr = tensor.send(self.bob)
param = Parameter(ptr)
local_param = param.get()
assert (local_param.data == tensor).all()
def test_param_send_get(self):
"""
"""
self.setUp()
tensor = torch.tensor([1.0, -1.0, 3.0, 4.0])
param = Parameter(data=tensor)
param_ptr = param.send(self.bob)
param_back = param_ptr.get()
assert (param_back.data == tensor).all()
def test_param_remote_binary_method(self):
"""
"""
self.setUp()
tensor = torch.tensor([1.0, -1.0, 3.0, 4.0])
param = Parameter(data=tensor)
param_ptr = param.send(self.bob)
param_double_ptr = param_ptr + param_ptr
param_double_back = param_double_ptr.get()
double_tensor = tensor + tensor
assert (param_double_back.data == double_tensor).all()
|
<commit_before><commit_msg>Add tests for send / get / remote op on parameters<commit_after>
|
import random
import torch
import torch.nn.functional as F
from torch.nn import Parameter
import syft
from syft.frameworks.torch.tensors import LoggingTensor
class TestParameter(object):
def setUp(self):
hook = syft.TorchHook(torch, verbose=True)
self.me = hook.local_worker
self.me.is_client_worker = True
instance_id = str(int(10e10 * random.random()))
bob = syft.VirtualWorker(id=f"bob{instance_id}", hook=hook, is_client_worker=False)
alice = syft.VirtualWorker(id=f"alice{instance_id}", hook=hook, is_client_worker=False)
james = syft.VirtualWorker(id=f"james{instance_id}", hook=hook, is_client_worker=False)
bob.add_workers([alice, james])
alice.add_workers([bob, james])
james.add_workers([bob, alice])
self.hook = hook
self.bob = bob
self.alice = alice
self.james = james
def test_param_on_pointer(self):
"""
"""
self.setUp()
tensor = torch.tensor([1.0, -1.0, 3.0, 4.0])
ptr = tensor.send(self.bob)
param = Parameter(ptr)
local_param = param.get()
assert (local_param.data == tensor).all()
def test_param_send_get(self):
"""
"""
self.setUp()
tensor = torch.tensor([1.0, -1.0, 3.0, 4.0])
param = Parameter(data=tensor)
param_ptr = param.send(self.bob)
param_back = param_ptr.get()
assert (param_back.data == tensor).all()
def test_param_remote_binary_method(self):
"""
"""
self.setUp()
tensor = torch.tensor([1.0, -1.0, 3.0, 4.0])
param = Parameter(data=tensor)
param_ptr = param.send(self.bob)
param_double_ptr = param_ptr + param_ptr
param_double_back = param_double_ptr.get()
double_tensor = tensor + tensor
assert (param_double_back.data == double_tensor).all()
|
Add tests for send / get / remote op on parameters
import random
import torch
import torch.nn.functional as F
from torch.nn import Parameter
import syft
from syft.frameworks.torch.tensors import LoggingTensor
class TestParameter(object):
def setUp(self):
hook = syft.TorchHook(torch, verbose=True)
self.me = hook.local_worker
self.me.is_client_worker = True
instance_id = str(int(10e10 * random.random()))
bob = syft.VirtualWorker(id=f"bob{instance_id}", hook=hook, is_client_worker=False)
alice = syft.VirtualWorker(id=f"alice{instance_id}", hook=hook, is_client_worker=False)
james = syft.VirtualWorker(id=f"james{instance_id}", hook=hook, is_client_worker=False)
bob.add_workers([alice, james])
alice.add_workers([bob, james])
james.add_workers([bob, alice])
self.hook = hook
self.bob = bob
self.alice = alice
self.james = james
def test_param_on_pointer(self):
"""
"""
self.setUp()
tensor = torch.tensor([1.0, -1.0, 3.0, 4.0])
ptr = tensor.send(self.bob)
param = Parameter(ptr)
local_param = param.get()
assert (local_param.data == tensor).all()
def test_param_send_get(self):
"""
"""
self.setUp()
tensor = torch.tensor([1.0, -1.0, 3.0, 4.0])
param = Parameter(data=tensor)
param_ptr = param.send(self.bob)
param_back = param_ptr.get()
assert (param_back.data == tensor).all()
def test_param_remote_binary_method(self):
"""
"""
self.setUp()
tensor = torch.tensor([1.0, -1.0, 3.0, 4.0])
param = Parameter(data=tensor)
param_ptr = param.send(self.bob)
param_double_ptr = param_ptr + param_ptr
param_double_back = param_double_ptr.get()
double_tensor = tensor + tensor
assert (param_double_back.data == double_tensor).all()
|
<commit_before><commit_msg>Add tests for send / get / remote op on parameters<commit_after>import random
import torch
import torch.nn.functional as F
from torch.nn import Parameter
import syft
from syft.frameworks.torch.tensors import LoggingTensor
class TestParameter(object):
def setUp(self):
hook = syft.TorchHook(torch, verbose=True)
self.me = hook.local_worker
self.me.is_client_worker = True
instance_id = str(int(10e10 * random.random()))
bob = syft.VirtualWorker(id=f"bob{instance_id}", hook=hook, is_client_worker=False)
alice = syft.VirtualWorker(id=f"alice{instance_id}", hook=hook, is_client_worker=False)
james = syft.VirtualWorker(id=f"james{instance_id}", hook=hook, is_client_worker=False)
bob.add_workers([alice, james])
alice.add_workers([bob, james])
james.add_workers([bob, alice])
self.hook = hook
self.bob = bob
self.alice = alice
self.james = james
def test_param_on_pointer(self):
"""
"""
self.setUp()
tensor = torch.tensor([1.0, -1.0, 3.0, 4.0])
ptr = tensor.send(self.bob)
param = Parameter(ptr)
local_param = param.get()
assert (local_param.data == tensor).all()
def test_param_send_get(self):
"""
"""
self.setUp()
tensor = torch.tensor([1.0, -1.0, 3.0, 4.0])
param = Parameter(data=tensor)
param_ptr = param.send(self.bob)
param_back = param_ptr.get()
assert (param_back.data == tensor).all()
def test_param_remote_binary_method(self):
"""
"""
self.setUp()
tensor = torch.tensor([1.0, -1.0, 3.0, 4.0])
param = Parameter(data=tensor)
param_ptr = param.send(self.bob)
param_double_ptr = param_ptr + param_ptr
param_double_back = param_double_ptr.get()
double_tensor = tensor + tensor
assert (param_double_back.data == double_tensor).all()
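The round trip the last test relies on, reduced to a sketch (assumes a bob VirtualWorker created as in setUp, with torch and Parameter imported as above):

tensor = torch.tensor([1.0, -1.0, 3.0, 4.0])
param_ptr = Parameter(data=tensor).send(bob)  # the parameter now lives on bob's worker
doubled = (param_ptr + param_ptr).get()       # the addition happens remotely; get() fetches the result
assert (doubled.data == tensor + tensor).all()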
|
|
4453d0158abec35a741f2fb5dcfcd6fa1fd3cd20
|
django/website/contacts/tests/test_views_activation.py
|
django/website/contacts/tests/test_views_activation.py
|
from contacts.views.activation import ResetPassword
from django.conf import settings
def test_reset_password_subject_contains_site_name():
assert '{0}: password recovery'.format(settings.SITE_NAME) == ResetPassword().get_subject()
|
Add test for ResetPassword email subject
|
Add test for ResetPassword email subject
|
Python
|
agpl-3.0
|
daniell/kashana,aptivate/kashana,aptivate/alfie,daniell/kashana,aptivate/kashana,aptivate/kashana,aptivate/alfie,aptivate/alfie,aptivate/alfie,daniell/kashana,aptivate/kashana,daniell/kashana
|
Add test for ResetPassword email subject
|
from contacts.views.activation import ResetPassword
from django.conf import settings
def test_reset_password_subject_contains_site_name():
assert '{0}: password recovery'.format(settings.SITE_NAME) == ResetPassword().get_subject()
|
<commit_before><commit_msg>Add test for ResetPassword email subject<commit_after>
|
from contacts.views.activation import ResetPassword
from django.conf import settings
def test_reset_password_subject_contains_site_name():
assert '{0}: password recovery'.format(settings.SITE_NAME) == ResetPassword().get_subject()
|
Add test for ResetPassword email subject
from contacts.views.activation import ResetPassword
from django.conf import settings
def test_reset_password_subject_contains_site_name():
assert '{0}: password recovery'.format(settings.SITE_NAME) == ResetPassword().get_subject()
|
<commit_before><commit_msg>Add test for ResetPassword email subject<commit_after>from contacts.views.activation import ResetPassword
from django.conf import settings
def test_reset_password_subject_contains_site_name():
assert '{0}: password recovery'.format(settings.SITE_NAME) == ResetPassword().get_subject()
|
|
25889cd86d8c6a58793660d52dbdef3562f12b70
|
tests/test_stack_operations.py
|
tests/test_stack_operations.py
|
import pytest
from thinglang.execution.errors import UnknownVariable
from thinglang.runner import run
def test_stack_resolution_in_block():
assert run("""
thing Program
does start
number i = 0
Output.write("outside before, i =", i)
if true
Output.write("inside before, i =", i)
i = 10
Output.write("inside after, i =", i)
Output.write("outside after, i =", i)
""").output == """
outside before, i = 0
inside before, i = 0
inside after, i = 10
outside after, i = 10""".strip()
|
Add test for general scoping case in nested blocks
|
Add test for general scoping case in nested blocks
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
Add test for general scoping case in nested blocks
|
import pytest
from thinglang.execution.errors import UnknownVariable
from thinglang.runner import run
def test_stack_resolution_in_block():
assert run("""
thing Program
does start
number i = 0
Output.write("outside before, i =", i)
if true
Output.write("inside before, i =", i)
i = 10
Output.write("inside after, i =", i)
Output.write("outside after, i =", i)
""").output == """
outside before, i = 0
inside before, i = 0
inside after, i = 10
outside after, i = 10""".strip()
|
<commit_before><commit_msg>Add test for general scoping case in nested blocks<commit_after>
|
import pytest
from thinglang.execution.errors import UnknownVariable
from thinglang.runner import run
def test_stack_resolution_in_block():
assert run("""
thing Program
does start
number i = 0
Output.write("outside before, i =", i)
if true
Output.write("inside before, i =", i)
i = 10
Output.write("inside after, i =", i)
Output.write("outside after, i =", i)
""").output == """
outside before, i = 0
inside before, i = 0
inside after, i = 10
outside after, i = 10""".strip()
|
Add test for general scoping case in nested blocks
import pytest
from thinglang.execution.errors import UnknownVariable
from thinglang.runner import run
def test_stack_resolution_in_block():
assert run("""
thing Program
does start
number i = 0
Output.write("outside before, i =", i)
if true
Output.write("inside before, i =", i)
i = 10
Output.write("inside after, i =", i)
Output.write("outside after, i =", i)
""").output == """
outside before, i = 0
inside before, i = 0
inside after, i = 10
outside after, i = 10""".strip()
|
<commit_before><commit_msg>Add test for general scoping case in nested blocks<commit_after>import pytest
from thinglang.execution.errors import UnknownVariable
from thinglang.runner import run
def test_stack_resolution_in_block():
assert run("""
thing Program
does start
number i = 0
Output.write("outside before, i =", i)
if true
Output.write("inside before, i =", i)
i = 10
Output.write("inside after, i =", i)
Output.write("outside after, i =", i)
""").output == """
outside before, i = 0
inside before, i = 0
inside after, i = 10
outside after, i = 10""".strip()
|
|
cf7e8ec08410ce4567d7fc32f7c37c8a639e2039
|
learn.py
|
learn.py
|
import RPi.GPIO as gpio
import time
class move:
def __init__(self, RF,RB,LF,LB,SLEEP):
self.RF = RF
self.RB = RB
self.LF = LF
self.LB = LB
self.SLEEP = SLEEP
def getRF(self):
return self.RF
def getRB(self):
return self.RB
def getLF(self):
return self.LF
def getLB(self):
return self.LB
def getSleep(self):
return self.SLEEP
class gpioHelp:
def __init__(self):
gpio.setwarnings(False)
gpio.setmode(gpio.BCM)
gpio.setup(22, gpio.OUT)
gpio.setup(4, gpio.OUT)
gpio.setup(17, gpio.OUT)
gpio.setup(27, gpio.OUT)
def Move(self,Move):
        # the accessors must be called; gpio.output expects the pin level, not the bound method
        gpio.output(4,Move.getRB())
        gpio.output(17,Move.getLB())
        gpio.output(22,Move.getRF())
        gpio.output(27,Move.getLF())
        time.sleep(Move.getSleep())
moveHELP = gpioHelp()
def moveForward(t):
    for x in range (0,100):
        d = move(gpio.HIGH,gpio.LOW,gpio.HIGH,gpio.LOW,10)
        moveHELP.Move(d)
|
Add new Class Based Movement
|
Add new Class Based Movement
|
Python
|
apache-2.0
|
aateichman/Vision
|
Add new Class Based Movement
|
import RPi.GPIO as gpio
import time
class move:
def __init__(self, RF,RB,LF,LB,SLEEP):
self.RF = RF
self.RB = RB
self.LF = LF
self.LB = LB
self.SLEEP = SLEEP
def getRF(self):
return self.RF
def getRB(self):
return self.RB
def getLF(self):
return self.LF
def getLB(self):
return self.LB
def getSleep(self):
return self.SLEEP
class gpioHelp:
def __init__(self):
gpio.setwarnings(False)
gpio.setmode(gpio.BCM)
gpio.setup(22, gpio.OUT)
gpio.setup(4, gpio.OUT)
gpio.setup(17, gpio.OUT)
gpio.setup(27, gpio.OUT)
def Move(self,Move):
        # the accessors must be called; gpio.output expects the pin level, not the bound method
        gpio.output(4,Move.getRB())
        gpio.output(17,Move.getLB())
        gpio.output(22,Move.getRF())
        gpio.output(27,Move.getLF())
        time.sleep(Move.getSleep())
moveHELP = gpioHelp()
def moveForward(t):
    for x in range (0,100):
        d = move(gpio.HIGH,gpio.LOW,gpio.HIGH,gpio.LOW,10)
        moveHELP.Move(d)
|
<commit_before><commit_msg>Add new Class Based Movement<commit_after>
|
import RPi.GPIO as gpio
import time
class move:
def __init__(self, RF,RB,LF,LB,SLEEP):
self.RF = RF
self.RB = RB
self.LF = LF
self.LB = LB
self.SLEEP = SLEEP
def getRF(self):
return self.RF
def getRB(self):
return self.RB
def getLF(self):
return self.LF
def getLB(self):
return self.LB
def getSleep(self):
return self.SLEEP
class gpioHelp:
def __init__(self):
gpio.setwarnings(False)
gpio.setmode(gpio.BCM)
gpio.setup(22, gpio.OUT)
gpio.setup(4, gpio.OUT)
gpio.setup(17, gpio.OUT)
gpio.setup(27, gpio.OUT)
def Move(self,Move):
        # the accessors must be called; gpio.output expects the pin level, not the bound method
        gpio.output(4,Move.getRB())
        gpio.output(17,Move.getLB())
        gpio.output(22,Move.getRF())
        gpio.output(27,Move.getLF())
        time.sleep(Move.getSleep())
moveHELP = gpioHelp()
def moveForward(t):
    for x in range (0,100):
        d = move(gpio.HIGH,gpio.LOW,gpio.HIGH,gpio.LOW,10)
        moveHELP.Move(d)
|
Add new Class Based Movement
import RPi.GPIO as gpio
import time
class move:
def __init__(self, RF,RB,LF,LB,SLEEP):
self.RF = RF
self.RB = RB
self.LF = LF
self.LB = LB
self.SLEEP = SLEEP
def getRF(self):
return self.RF
def getRB(self):
return self.RB
def getLF(self):
return self.LF
def getLB(self):
return self.LB
def getSleep(self):
return self.SLEEP
class gpioHelp:
def __init__(self):
gpio.setwarnings(False)
gpio.setmode(gpio.BCM)
gpio.setup(22, gpio.OUT)
gpio.setup(4, gpio.OUT)
gpio.setup(17, gpio.OUT)
gpio.setup(27, gpio.OUT)
def Move(self,Move):
        # the accessors must be called; gpio.output expects the pin level, not the bound method
        gpio.output(4,Move.getRB())
        gpio.output(17,Move.getLB())
        gpio.output(22,Move.getRF())
        gpio.output(27,Move.getLF())
        time.sleep(Move.getSleep())
moveHELP = gpioHelp()
def moveForward(t):
    for x in range (0,100):
        d = move(gpio.HIGH,gpio.LOW,gpio.HIGH,gpio.LOW,10)
        moveHELP.Move(d)
|
<commit_before><commit_msg>Add new Class Based Movement<commit_after>import RPi.GPIO as gpio
import time
class move:
def __init__(self, RF,RB,LF,LB,SLEEP):
self.RF = RF
self.RB = RB
self.LF = LF
self.LB = LB
self.SLEEP = SLEEP
def getRF(self):
return self.RF
def getRB(self):
return self.RB
def getLF(self):
return self.LF
def getLB(self):
return self.LB
def getSleep(self):
return self.SLEEP
class gpioHelp:
def __init__(self):
gpio.setwarnings(False)
gpio.setmode(gpio.BCM)
gpio.setup(22, gpio.OUT)
gpio.setup(4, gpio.OUT)
gpio.setup(17, gpio.OUT)
gpio.setup(27, gpio.OUT)
def Move(self,Move):
        # the accessors must be called; gpio.output expects the pin level, not the bound method
        gpio.output(4,Move.getRB())
        gpio.output(17,Move.getLB())
        gpio.output(22,Move.getRF())
        gpio.output(27,Move.getLF())
        time.sleep(Move.getSleep())
moveHELP = gpioHelp()
def moveForward(t):
    for x in range (0,100):
        d = move(gpio.HIGH,gpio.LOW,gpio.HIGH,gpio.LOW,10)
        moveHELP.Move(d)
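With the accessor calls fixed, driving the motors reduces to building a move and handing it to the module-level helper. A short sketch (the 0.5-second pulse length is an arbitrary choice, not from the original):

forward = move(gpio.HIGH, gpio.LOW, gpio.HIGH, gpio.LOW, 0.5)  # RF/LF high, RB/LB low, 0.5 s pulse
moveHELP.Move(forward)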
|
|
b082b72fc0c6b297571131cbc23c78f62b1aa96b
|
tests/unit/asyncio/test_asyncio_repr.py
|
tests/unit/asyncio/test_asyncio_repr.py
|
from butter.asyncio.eventfd import Eventfd_async
from butter.asyncio.fanotify import Fanotify_async
from butter.asyncio.inotify import Inotify_async
from butter.asyncio.signalfd import Signalfd_async
from butter.asyncio.timerfd import Timerfd_async
from collections import namedtuple
import pytest
import sys
class Mock_fd_obj(object):
def __init__(self, fd):
self._fd = fd
@pytest.fixture(params=[(Eventfd_async, '_eventfd' ),
(Fanotify_async, '_fanotify'),
(Inotify_async, '_inotify' ),
(Signalfd_async, '_signalfd'),
(Timerfd_async, '_timerfd' )])
def obj(request):
Obj, sub_obj_name = request.param
o = Obj.__new__(Obj)
o._value = 3 # needed for eventfd
sub_obj = Mock_fd_obj(1) #fd=1
setattr(o, sub_obj_name, sub_obj)
return o
@pytest.fixture(params=[(Eventfd_async, '_eventfd' ),
(Fanotify_async, '_fanotify'),
(Inotify_async, '_inotify' ),
(Signalfd_async, '_signalfd'),
(Timerfd_async, '_timerfd' )])
def obj_closed(request):
Obj, sub_obj_name = request.param
o = Obj.__new__(Obj)
o._value = 3 # needed for eventfd
sub_obj = Mock_fd_obj(None)
setattr(o, sub_obj_name, sub_obj)
return o
@pytest.mark.skipif(sys.version_info < (3,4), reason="requires python3.4/asyncio")
@pytest.mark.repr
@pytest.mark.unit
@pytest.mark.asyncio
def test_repr_name(obj):
assert obj.__class__.__name__ in repr(obj), "Instance's representation does not contain its own name"
@pytest.mark.skipif(sys.version_info < (3,4), reason="requires python3.4/asyncio")
@pytest.mark.repr
@pytest.mark.unit
@pytest.mark.asyncio
def test_repr_fd(obj):
    assert 'fd=1' in repr(obj), "Instance does not list its own fd (used for easy identification)"
@pytest.mark.skipif(sys.version_info < (3,4), reason="requires python3.4/asyncio")
@pytest.mark.repr
@pytest.mark.unit
@pytest.mark.asyncio
def test_repr_fd_closed(obj_closed):
assert 'fd=closed' in repr(obj_closed), "Instance does not indicate it is closed"
|
Test async objects for __repr__ as well
|
Test async objects for __repr__ as well
|
Python
|
bsd-3-clause
|
arkaitzj/python-butter
|
Test async objects for __repr__ as well
|
from butter.asyncio.eventfd import Eventfd_async
from butter.asyncio.fanotify import Fanotify_async
from butter.asyncio.inotify import Inotify_async
from butter.asyncio.signalfd import Signalfd_async
from butter.asyncio.timerfd import Timerfd_async
from collections import namedtuple
import pytest
import sys
class Mock_fd_obj(object):
def __init__(self, fd):
self._fd = fd
@pytest.fixture(params=[(Eventfd_async, '_eventfd' ),
(Fanotify_async, '_fanotify'),
(Inotify_async, '_inotify' ),
(Signalfd_async, '_signalfd'),
(Timerfd_async, '_timerfd' )])
def obj(request):
Obj, sub_obj_name = request.param
o = Obj.__new__(Obj)
o._value = 3 # needed for eventfd
sub_obj = Mock_fd_obj(1) #fd=1
setattr(o, sub_obj_name, sub_obj)
return o
@pytest.fixture(params=[(Eventfd_async, '_eventfd' ),
(Fanotify_async, '_fanotify'),
(Inotify_async, '_inotify' ),
(Signalfd_async, '_signalfd'),
(Timerfd_async, '_timerfd' )])
def obj_closed(request):
Obj, sub_obj_name = request.param
o = Obj.__new__(Obj)
o._value = 3 # needed for eventfd
sub_obj = Mock_fd_obj(None)
setattr(o, sub_obj_name, sub_obj)
return o
@pytest.mark.skipif(sys.version_info < (3,4), reason="requires python3.4/asyncio")
@pytest.mark.repr
@pytest.mark.unit
@pytest.mark.asyncio
def test_repr_name(obj):
assert obj.__class__.__name__ in repr(obj), "Instance's representation does not contain its own name"
@pytest.mark.skipif(sys.version_info < (3,4), reason="requires python3.4/asyncio")
@pytest.mark.repr
@pytest.mark.unit
@pytest.mark.asyncio
def test_repr_fd(obj):
    assert 'fd=1' in repr(obj), "Instance does not list its own fd (used for easy identification)"
@pytest.mark.skipif(sys.version_info < (3,4), reason="requires python3.4/asyncio")
@pytest.mark.repr
@pytest.mark.unit
@pytest.mark.asyncio
def test_repr_fd_closed(obj_closed):
assert 'fd=closed' in repr(obj_closed), "Instance does not indicate it is closed"
|
<commit_before><commit_msg>Test async objects for __repr__ as well<commit_after>
|
from butter.asyncio.eventfd import Eventfd_async
from butter.asyncio.fanotify import Fanotify_async
from butter.asyncio.inotify import Inotify_async
from butter.asyncio.signalfd import Signalfd_async
from butter.asyncio.timerfd import Timerfd_async
from collections import namedtuple
import pytest
import sys
class Mock_fd_obj(object):
def __init__(self, fd):
self._fd = fd
@pytest.fixture(params=[(Eventfd_async, '_eventfd' ),
(Fanotify_async, '_fanotify'),
(Inotify_async, '_inotify' ),
(Signalfd_async, '_signalfd'),
(Timerfd_async, '_timerfd' )])
def obj(request):
Obj, sub_obj_name = request.param
o = Obj.__new__(Obj)
o._value = 3 # needed for eventfd
sub_obj = Mock_fd_obj(1) #fd=1
setattr(o, sub_obj_name, sub_obj)
return o
@pytest.fixture(params=[(Eventfd_async, '_eventfd' ),
(Fanotify_async, '_fanotify'),
(Inotify_async, '_inotify' ),
(Signalfd_async, '_signalfd'),
(Timerfd_async, '_timerfd' )])
def obj_closed(request):
Obj, sub_obj_name = request.param
o = Obj.__new__(Obj)
o._value = 3 # needed for eventfd
sub_obj = Mock_fd_obj(None)
setattr(o, sub_obj_name, sub_obj)
return o
@pytest.mark.skipif(sys.version_info < (3,4), reason="requires python3.4/asyncio")
@pytest.mark.repr
@pytest.mark.unit
@pytest.mark.asyncio
def test_repr_name(obj):
assert obj.__class__.__name__ in repr(obj), "Instance's representation does not contain its own name"
@pytest.mark.skipif(sys.version_info < (3,4), reason="requires python3.4/asyncio")
@pytest.mark.repr
@pytest.mark.unit
@pytest.mark.asyncio
def test_repr_fd(obj):
    assert 'fd=1' in repr(obj), "Instance does not list its own fd (used for easy identification)"
@pytest.mark.skipif(sys.version_info < (3,4), reason="requires python3.4/asyncio")
@pytest.mark.repr
@pytest.mark.unit
@pytest.mark.asyncio
def test_repr_fd_closed(obj_closed):
assert 'fd=closed' in repr(obj_closed), "Instance does not indicate it is closed"
|
Test async objects for __repr__ as wellfrom butter.asyncio.eventfd import Eventfd_async
from butter.asyncio.fanotify import Fanotify_async
from butter.asyncio.inotify import Inotify_async
from butter.asyncio.signalfd import Signalfd_async
from butter.asyncio.timerfd import Timerfd_async
from collections import namedtuple
import pytest
import sys
class Mock_fd_obj(object):
def __init__(self, fd):
self._fd = fd
@pytest.fixture(params=[(Eventfd_async, '_eventfd' ),
(Fanotify_async, '_fanotify'),
(Inotify_async, '_inotify' ),
(Signalfd_async, '_signalfd'),
(Timerfd_async, '_timerfd' )])
def obj(request):
Obj, sub_obj_name = request.param
o = Obj.__new__(Obj)
o._value = 3 # needed for eventfd
sub_obj = Mock_fd_obj(1) #fd=1
setattr(o, sub_obj_name, sub_obj)
return o
@pytest.fixture(params=[(Eventfd_async, '_eventfd' ),
(Fanotify_async, '_fanotify'),
(Inotify_async, '_inotify' ),
(Signalfd_async, '_signalfd'),
(Timerfd_async, '_timerfd' )])
def obj_closed(request):
Obj, sub_obj_name = request.param
o = Obj.__new__(Obj)
o._value = 3 # needed for eventfd
sub_obj = Mock_fd_obj(None)
setattr(o, sub_obj_name, sub_obj)
return o
@pytest.mark.skipif(sys.version_info < (3,4), reason="requires python3.4/asyncio")
@pytest.mark.repr
@pytest.mark.unit
@pytest.mark.asyncio
def test_repr_name(obj):
assert obj.__class__.__name__ in repr(obj), "Instance's representation does not contain its own name"
@pytest.mark.skipif(sys.version_info < (3,4), reason="requires python3.4/asyncio")
@pytest.mark.repr
@pytest.mark.unit
@pytest.mark.asyncio
def test_repr_fd(obj):
    assert 'fd=1' in repr(obj), "Instance does not list its own fd (used for easy identification)"
@pytest.mark.skipif(sys.version_info < (3,4), reason="requires python3.4/asyncio")
@pytest.mark.repr
@pytest.mark.unit
@pytest.mark.asyncio
def test_repr_fd_closed(obj_closed):
assert 'fd=closed' in repr(obj_closed), "Instance does not indicate it is closed"
|
<commit_before><commit_msg>Test async objects for __repr__ as well<commit_after>from butter.asyncio.eventfd import Eventfd_async
from butter.asyncio.fanotify import Fanotify_async
from butter.asyncio.inotify import Inotify_async
from butter.asyncio.signalfd import Signalfd_async
from butter.asyncio.timerfd import Timerfd_async
from collections import namedtuple
import pytest
import sys
class Mock_fd_obj(object):
def __init__(self, fd):
self._fd = fd
@pytest.fixture(params=[(Eventfd_async, '_eventfd' ),
(Fanotify_async, '_fanotify'),
(Inotify_async, '_inotify' ),
(Signalfd_async, '_signalfd'),
(Timerfd_async, '_timerfd' )])
def obj(request):
Obj, sub_obj_name = request.param
o = Obj.__new__(Obj)
o._value = 3 # needed for eventfd
sub_obj = Mock_fd_obj(1) #fd=1
setattr(o, sub_obj_name, sub_obj)
return o
@pytest.fixture(params=[(Eventfd_async, '_eventfd' ),
(Fanotify_async, '_fanotify'),
(Inotify_async, '_inotify' ),
(Signalfd_async, '_signalfd'),
(Timerfd_async, '_timerfd' )])
def obj_closed(request):
Obj, sub_obj_name = request.param
o = Obj.__new__(Obj)
o._value = 3 # needed for eventfd
sub_obj = Mock_fd_obj(None)
setattr(o, sub_obj_name, sub_obj)
return o
@pytest.mark.skipif(sys.version_info < (3,4), reason="requires python3.4/asyncio")
@pytest.mark.repr
@pytest.mark.unit
@pytest.mark.asyncio
def test_repr_name(obj):
assert obj.__class__.__name__ in repr(obj), "Instance's representation does not contain its own name"
@pytest.mark.skipif(sys.version_info < (3,4), reason="requires python3.4/asyncio")
@pytest.mark.repr
@pytest.mark.unit
@pytest.mark.asyncio
def test_repr_fd(obj):
    assert 'fd=1' in repr(obj), "Instance does not list its own fd (used for easy identification)"
@pytest.mark.skipif(sys.version_info < (3,4), reason="requires python3.4/asyncio")
@pytest.mark.repr
@pytest.mark.unit
@pytest.mark.asyncio
def test_repr_fd_closed(obj_closed):
assert 'fd=closed' in repr(obj_closed), "Instance does not indicate it is closed"
|
|
926c095fbc1068658ed61195fe42b02d0211e013
|
mzalendo/core/management/commands/core_fix_ward_names.py
|
mzalendo/core/management/commands/core_fix_ward_names.py
|
import re
from django.core.management.base import NoArgsCommand, CommandError
from django.template.defaultfilters import slugify
from optparse import make_option
from core.models import PlaceKind, Place
class Command(NoArgsCommand):
help = 'Standardize the form of ward names with regard to / and - separators'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
for ward in Place.objects.filter(kind=PlaceKind.objects.get(slug='ward')):
# print "ward is:", ward
new_version = re.sub(r'(\w) *([/-]) *(\w)', '\\1 \\2 \\3', ward.name)
if new_version != ward.name:
if options['commit']:
print "changing:", ward.name, "to", new_version
ward.name = new_version
ward.slug = 'ward-' + slugify(ward.name)
ward.save()
else:
print "would change:", ward.name, "to", new_version, "if --commit were specified"
|
Add a script to fix the Ward names + slugs in the database
|
Add a script to fix the Ward names + slugs in the database
The ward names imported from "Final Constituencies and Wards Description.pdf"
have strange formatting where the ward names are separated
by a '/' or a '-', where they have random whitespace after them
and none before. This creates problems for matching names against
them consistently, and they look ugly when presented on the site.
This script makes '/' or '-' separated names consistently have one
space on either side, and updates the slug.
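For illustration, a minimal sketch of the substitution at work (the ward
names below are made up for the example, not taken from the PDF):

import re

# Same pattern and replacement as the management command uses.
for name in ["Kapsoya/Kimumu", "Moi's Bridge -Matunda"]:
    print(re.sub(r'(\w) *([/-]) *(\w)', '\\1 \\2 \\3', name))
# -> Kapsoya / Kimumu
# -> Moi's Bridge - Matunda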
|
Python
|
agpl-3.0
|
hzj123/56th,patricmutwiri/pombola,hzj123/56th,patricmutwiri/pombola,geoffkilpin/pombola,geoffkilpin/pombola,mysociety/pombola,ken-muturi/pombola,ken-muturi/pombola,geoffkilpin/pombola,ken-muturi/pombola,mysociety/pombola,ken-muturi/pombola,mysociety/pombola,patricmutwiri/pombola,ken-muturi/pombola,patricmutwiri/pombola,hzj123/56th,geoffkilpin/pombola,mysociety/pombola,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,patricmutwiri/pombola,mysociety/pombola,geoffkilpin/pombola,hzj123/56th,hzj123/56th,mysociety/pombola,hzj123/56th
|
Add a script to fix the Ward names + slugs in the database
The ward names imported from "Final Constituencies and Wards Description.pdf"
have strange formatting where the ward names are separated
by a '/' or a '-', where they have random whitespace after them
and none before. This creates problems for matching names against
them consistently, and they look ugly when presented on the site.
This script makes '/' or '-' separated names consistently have one
space on either side, and updates the slug.
|
import re
from django.core.management.base import NoArgsCommand, CommandError
from django.template.defaultfilters import slugify
from optparse import make_option
from core.models import PlaceKind, Place
class Command(NoArgsCommand):
help = 'Standardize the form of ward names with regard to / and - separators'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
for ward in Place.objects.filter(kind=PlaceKind.objects.get(slug='ward')):
# print "ward is:", ward
new_version = re.sub(r'(\w) *([/-]) *(\w)', '\\1 \\2 \\3', ward.name)
if new_version != ward.name:
if options['commit']:
print "changing:", ward.name, "to", new_version
ward.name = new_version
ward.slug = 'ward-' + slugify(ward.name)
ward.save()
else:
print "would change:", ward.name, "to", new_version, "if --commit were specified"
|
<commit_before><commit_msg>Add a script to fix the Ward names + slugs in the database
The ward names imported from "Final Constituencies and Wards Description.pdf"
have strange formatting where the ward names are separated
by a '/' or a '-', where they have random whitespace after them
and none before. This creates problems for matching names against
them consistently, and they look ugly when presented on the site.
This script makes '/' or '-' separated names consistently have one
space on either side, and updates the slug.<commit_after>
|
import re
from django.core.management.base import NoArgsCommand, CommandError
from django.template.defaultfilters import slugify
from optparse import make_option
from core.models import PlaceKind, Place
class Command(NoArgsCommand):
help = 'Standardize the form of ward names with regard to / and - separators'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
for ward in Place.objects.filter(kind=PlaceKind.objects.get(slug='ward')):
# print "ward is:", ward
new_version = re.sub(r'(\w) *([/-]) *(\w)', '\\1 \\2 \\3', ward.name)
if new_version != ward.name:
if options['commit']:
print "changing:", ward.name, "to", new_version
ward.name = new_version
ward.slug = 'ward-' + slugify(ward.name)
ward.save()
else:
print "would change:", ward.name, "to", new_version, "if --commit were specified"
|
Add a script to fix the Ward names + slugs in the database
The ward names imported from "Final Constituencies and Wards Description.pdf"
have strange formatting where the ward names are separated
by a '/' or a '-', where they have random whitespace after them
and none before. This creates problems for matching names against
them consistently, and they look ugly when presented on the site.
This script makes '/' or '-' separated names consistently have one
space on either side, and updates the slug.import re
from django.core.management.base import NoArgsCommand, CommandError
from django.template.defaultfilters import slugify
from optparse import make_option
from core.models import PlaceKind, Place
class Command(NoArgsCommand):
help = 'Standardize the form of ward names with regard to / and - separators'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
for ward in Place.objects.filter(kind=PlaceKind.objects.get(slug='ward')):
# print "ward is:", ward
new_version = re.sub(r'(\w) *([/-]) *(\w)', '\\1 \\2 \\3', ward.name)
if new_version != ward.name:
if options['commit']:
print "changing:", ward.name, "to", new_version
ward.name = new_version
ward.slug = 'ward-' + slugify(ward.name)
ward.save()
else:
print "would change:", ward.name, "to", new_version, "if --commit were specified"
|
<commit_before><commit_msg>Add a script to fix the Ward names + slugs in the database
The ward names imported from "Final Constituencies and Wards Description.pdf"
have strange formatting where the ward names are separated
by a '/' or a '-', where they have random whitespace after them
and none before. This creates problems for matching names against
them consistently, and they look ugly when presented on the site.
This script makes '/' or '-' separated names consistently have one
space on either side, and updates the slug.<commit_after>import re
from django.core.management.base import NoArgsCommand, CommandError
from django.template.defaultfilters import slugify
from optparse import make_option
from core.models import PlaceKind, Place
class Command(NoArgsCommand):
help = 'Standardize the form of ward names with regard to / and - separators'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
for ward in Place.objects.filter(kind=PlaceKind.objects.get(slug='ward')):
# print "ward is:", ward
new_version = re.sub(r'(\w) *([/-]) *(\w)', '\\1 \\2 \\3', ward.name)
if new_version != ward.name:
if options['commit']:
print "changing:", ward.name, "to", new_version
ward.name = new_version
ward.slug = 'ward-' + slugify(ward.name)
ward.save()
else:
print "would change:", ward.name, "to", new_version, "if --commit were specified"
|
|
e19d8b0deaf5c7d3d98d25ed42f932ee0281f728
|
project_euler/test/tests.py
|
project_euler/test/tests.py
|
# Hack to allow import of ProjectEulerAnswers while it is still at top level.
from os import path
import sys
sys.path.append(path.dirname(path.dirname(path.dirname(path.abspath(__file__)))))
import unittest
import ProjectEulerAnswers as pea
class TestEuler(unittest.TestCase):
def test_prob1(self):
self.assertEqual(pea.prob1(10), 23)
def test_prob2(self):
first_ten = (1, 2, 3, 5, 8, 13, 21, 34, 55, 89)
even_sum = sum(i for i in first_ten if i % 2 == 0)
self.assertEqual(pea.prob2(90), even_sum)
def test_prob3(self):
self.assertEqual(pea.prob3(13195), 29)
def test_prob4(self):
self.assertEqual(pea.prob4(2), 9009)
if __name__ == '__main__':
unittest.main()
|
Add test cases for the first 4 problems
|
Add test cases for the first 4 problems
|
Python
|
mit
|
tofu-rocketry/project-euler
|
Add test cases for the first 4 problems
|
# Hack to allow import of ProjectEulerAnswers while it is still at top level.
from os import path
import sys
sys.path.append(path.dirname(path.dirname(path.dirname(path.abspath(__file__)))))
import unittest
import ProjectEulerAnswers as pea
class TestEuler(unittest.TestCase):
def test_prob1(self):
self.assertEqual(pea.prob1(10), 23)
def test_prob2(self):
first_ten = (1, 2, 3, 5, 8, 13, 21, 34, 55, 89)
even_sum = sum(i for i in first_ten if i % 2 == 0)
self.assertEqual(pea.prob2(90), even_sum)
def test_prob3(self):
self.assertEqual(pea.prob3(13195), 29)
def test_prob4(self):
self.assertEqual(pea.prob4(2), 9009)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add test cases for the first 4 problems<commit_after>
|
# Hack to allow import of ProjectEulerAnswers while it is still at top level.
from os import path
import sys
sys.path.append(path.dirname(path.dirname(path.dirname(path.abspath(__file__)))))
import unittest
import ProjectEulerAnswers as pea
class TestEuler(unittest.TestCase):
def test_prob1(self):
self.assertEqual(pea.prob1(10), 23)
def test_prob2(self):
first_ten = (1, 2, 3, 5, 8, 13, 21, 34, 55, 89)
even_sum = sum(i for i in first_ten if i % 2 == 0)
self.assertEqual(pea.prob2(90), even_sum)
def test_prob3(self):
self.assertEqual(pea.prob3(13195), 29)
def test_prob4(self):
self.assertEqual(pea.prob4(2), 9009)
if __name__ == '__main__':
unittest.main()
|
Add test cases for the first 4 problems# Hack to allow import of ProjectEulerAnswers while it is still at top level.
from os import path
import sys
sys.path.append(path.dirname(path.dirname(path.dirname(path.abspath(__file__)))))
import unittest
import ProjectEulerAnswers as pea
class TestEuler(unittest.TestCase):
def test_prob1(self):
self.assertEqual(pea.prob1(10), 23)
def test_prob2(self):
first_ten = (1, 2, 3, 5, 8, 13, 21, 34, 55, 89)
even_sum = sum(i for i in first_ten if i % 2 == 0)
self.assertEqual(pea.prob2(90), even_sum)
def test_prob3(self):
self.assertEqual(pea.prob3(13195), 29)
def test_prob4(self):
self.assertEqual(pea.prob4(2), 9009)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add test cases for the first 4 problems<commit_after># Hack to allow import of ProjectEulerAnswers while it is still at top level.
from os import path
import sys
sys.path.append(path.dirname(path.dirname(path.dirname(path.abspath(__file__)))))
import unittest
import ProjectEulerAnswers as pea
class TestEuler(unittest.TestCase):
def test_prob1(self):
self.assertEqual(pea.prob1(10), 23)
def test_prob2(self):
first_ten = (1, 2, 3, 5, 8, 13, 21, 34, 55, 89)
even_sum = sum(i for i in first_ten if i % 2 == 0)
self.assertEqual(pea.prob2(90), even_sum)
def test_prob3(self):
self.assertEqual(pea.prob3(13195), 29)
def test_prob4(self):
self.assertEqual(pea.prob4(2), 9009)
if __name__ == '__main__':
unittest.main()
|
|
5a0ec237878512c408dd392c20b440033aed402b
|
tests/mock_config.py
|
tests/mock_config.py
|
from scoring_engine.config_loader import ConfigLoader
class MockConfig(object):
def __init__(self, location):
self.file_location = location
@property
def config(self):
return ConfigLoader(self.file_location)
@property
def checks_location(self):
return 'scoring_engine/checks'
|
from scoring_engine.config_loader import ConfigLoader
class MockConfig(object):
def __init__(self, location):
self.file_location = location
@property
def config(self):
return ConfigLoader(self.file_location)
|
Remove unnecessary config mock property
|
Remove unnecessary config mock property
|
Python
|
mit
|
pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine
|
from scoring_engine.config_loader import ConfigLoader
class MockConfig(object):
def __init__(self, location):
self.file_location = location
@property
def config(self):
return ConfigLoader(self.file_location)
@property
def checks_location(self):
return 'scoring_engine/checks'
Remove unnecessary config mock property
|
from scoring_engine.config_loader import ConfigLoader
class MockConfig(object):
def __init__(self, location):
self.file_location = location
@property
def config(self):
return ConfigLoader(self.file_location)
|
<commit_before>from scoring_engine.config_loader import ConfigLoader
class MockConfig(object):
def __init__(self, location):
self.file_location = location
@property
def config(self):
return ConfigLoader(self.file_location)
@property
def checks_location(self):
return 'scoring_engine/checks'
<commit_msg>Remove unnecessary config mock property<commit_after>
|
from scoring_engine.config_loader import ConfigLoader
class MockConfig(object):
def __init__(self, location):
self.file_location = location
@property
def config(self):
return ConfigLoader(self.file_location)
|
from scoring_engine.config_loader import ConfigLoader
class MockConfig(object):
def __init__(self, location):
self.file_location = location
@property
def config(self):
return ConfigLoader(self.file_location)
@property
def checks_location(self):
return 'scoring_engine/checks'
Remove unnecessary config mock propertyfrom scoring_engine.config_loader import ConfigLoader
class MockConfig(object):
def __init__(self, location):
self.file_location = location
@property
def config(self):
return ConfigLoader(self.file_location)
|
<commit_before>from scoring_engine.config_loader import ConfigLoader
class MockConfig(object):
def __init__(self, location):
self.file_location = location
@property
def config(self):
return ConfigLoader(self.file_location)
@property
def checks_location(self):
return 'scoring_engine/checks'
<commit_msg>Remove unnecessary config mock property<commit_after>from scoring_engine.config_loader import ConfigLoader
class MockConfig(object):
def __init__(self, location):
self.file_location = location
@property
def config(self):
return ConfigLoader(self.file_location)
|
2ca3da2ffe97c1b77f8713d19819f44bf88728ed
|
corehq/apps/hqadmin/management/commands/report_code_metrics.py
|
corehq/apps/hqadmin/management/commands/report_code_metrics.py
|
from django.core.management.base import BaseCommand
from dimagi.ext.couchdbkit import Document
from corehq.util.metrics import metrics_gauge
class Command(BaseCommand):
help = "Display a variety of code-quality metrics, optionally sending them to datadog"
def add_arguments(self, parser):
parser.add_argument(
'--datadog',
action='store_true',
default=False,
help='Record these metrics in datadog',
)
def handle(self, **options):
self.datadog = options['datadog']
self.show_couch_docs_remaining()
def show_couch_docs_remaining(self):
def all_subclasses(cls):
return set(cls.__subclasses__()).union([
s for c in cls.__subclasses__() for s in all_subclasses(c)
])
num_remaining = len(all_subclasses(Document))
self.stdout.write(f"CouchDB models remaining: {num_remaining}")
if self.datadog:
metrics_gauge("commcare.gtd.num_couch_models", num_remaining)
|
Add management command to report computed stats
|
Add management command to report computed stats
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
Add management command to report computed stats
|
from django.core.management.base import BaseCommand
from dimagi.ext.couchdbkit import Document
from corehq.util.metrics import metrics_gauge
class Command(BaseCommand):
help = "Display a variety of code-quality metrics, optionally sending them to datadog"
def add_arguments(self, parser):
parser.add_argument(
'--datadog',
action='store_true',
default=False,
help='Record these metrics in datadog',
)
def handle(self, **options):
self.datadog = options['datadog']
self.show_couch_docs_remaining()
def show_couch_docs_remaining(self):
def all_subclasses(cls):
return set(cls.__subclasses__()).union([
s for c in cls.__subclasses__() for s in all_subclasses(c)
])
num_remaining = len(all_subclasses(Document))
self.stdout.write(f"CouchDB models remaining: {num_remaining}")
if self.datadog:
metrics_gauge("commcare.gtd.num_couch_models", num_remaining)
|
<commit_before><commit_msg>Add management command to report computed stats<commit_after>
|
from django.core.management.base import BaseCommand
from dimagi.ext.couchdbkit import Document
from corehq.util.metrics import metrics_gauge
class Command(BaseCommand):
help = "Display a variety of code-quality metrics, optionally sending them to datadog"
def add_arguments(self, parser):
parser.add_argument(
'--datadog',
action='store_true',
default=False,
help='Record these metrics in datadog',
)
def handle(self, **options):
self.datadog = options['datadog']
self.show_couch_docs_remaining()
def show_couch_docs_remaining(self):
def all_subclasses(cls):
return set(cls.__subclasses__()).union([
s for c in cls.__subclasses__() for s in all_subclasses(c)
])
num_remaining = len(all_subclasses(Document))
self.stdout.write(f"CouchDB models remaining: {num_remaining}")
if self.datadog:
metrics_gauge("commcare.gtd.num_couch_models", num_remaining)
|
Add management command to report computed statsfrom django.core.management.base import BaseCommand
from dimagi.ext.couchdbkit import Document
from corehq.util.metrics import metrics_gauge
class Command(BaseCommand):
help = "Display a variety of code-quality metrics, optionally sending them to datadog"
def add_arguments(self, parser):
parser.add_argument(
'--datadog',
action='store_true',
default=False,
help='Record these metrics in datadog',
)
def handle(self, **options):
self.datadog = options['datadog']
self.show_couch_docs_remaining()
def show_couch_docs_remaining(self):
def all_subclasses(cls):
return set(cls.__subclasses__()).union([
s for c in cls.__subclasses__() for s in all_subclasses(c)
])
num_remaining = len(all_subclasses(Document))
self.stdout.write(f"CouchDB models remaining: {num_remaining}")
if self.datadog:
metrics_gauge("commcare.gtd.num_couch_models", num_remaining)
|
<commit_before><commit_msg>Add management command to report computed stats<commit_after>from django.core.management.base import BaseCommand
from dimagi.ext.couchdbkit import Document
from corehq.util.metrics import metrics_gauge
class Command(BaseCommand):
help = "Display a variety of code-quality metrics, optionally sending them to datadog"
def add_arguments(self, parser):
parser.add_argument(
'--datadog',
action='store_true',
default=False,
help='Record these metrics in datadog',
)
def handle(self, **options):
self.datadog = options['datadog']
self.show_couch_docs_remaining()
def show_couch_docs_remaining(self):
def all_subclasses(cls):
return set(cls.__subclasses__()).union([
s for c in cls.__subclasses__() for s in all_subclasses(c)
])
num_remaining = len(all_subclasses(Document))
self.stdout.write(f"CouchDB models remaining: {num_remaining}")
if self.datadog:
metrics_gauge("commcare.gtd.num_couch_models", num_remaining)
|
|
cfa1a738da0d8bff2ab20f7070eab9e6ea967c49
|
examples/cheapest_destinations.py
|
examples/cheapest_destinations.py
|
'''
Finds the cheapest fares filed to a given city
Example:
    python cheapest_destinations.py YTO
Result:
FLL 165.62
MCO 165.62
JFK 172.36
LGA 191.12
'''
import datetime
import json
import sys
sys.path.append('..')
import sabre_dev_studio
import sabre_dev_studio.sabre_exceptions as sabre_exceptions
'''
requires config.json in the same directory for api authentication
{
"sabre_client_id": -----,
"sabre_client_secret": -----
}
'''
def read_config():
raw_data = open('config.json').read()
data = json.loads(raw_data)
client_secret = data['sabre_client_secret']
client_id = data['sabre_client_id']
return (client_id, client_secret)
def set_up_client():
# Read from config
client_id, client_secret = read_config()
sds = sabre_dev_studio.SabreDevStudio()
sds.set_credentials(client_id, client_secret)
sds.authenticate()
return sds
def main():
if (len(sys.argv) < 2):
print('Please specify IATA city or airport code as a command-line argument')
elif (len(sys.argv[1]) != 3):
print('IATA city or airport code must be 3 letters long')
else:
client = set_up_client()
city = sys.argv[1]
resp = client.flights_to(city)
data = resp[0]
for city in data:
print(city.origin_location + '\t' + str(city.lowest_fare.fare))
if __name__ == '__main__':
main()
|
Add first example: cheapest destinations
|
Add first example: cheapest destinations
|
Python
|
mit
|
Jamil/sabre_dev_studio
|
Add first example: cheapest destinations
|
'''
Finds the cheapest fares filed to a given city
Example:
    python cheapest_destinations.py YTO
Result:
FLL 165.62
MCO 165.62
JFK 172.36
LGA 191.12
'''
import datetime
import json
import sys
sys.path.append('..')
import sabre_dev_studio
import sabre_dev_studio.sabre_exceptions as sabre_exceptions
'''
requires config.json in the same directory for api authentication
{
"sabre_client_id": -----,
"sabre_client_secret": -----
}
'''
def read_config():
raw_data = open('config.json').read()
data = json.loads(raw_data)
client_secret = data['sabre_client_secret']
client_id = data['sabre_client_id']
return (client_id, client_secret)
def set_up_client():
# Read from config
client_id, client_secret = read_config()
sds = sabre_dev_studio.SabreDevStudio()
sds.set_credentials(client_id, client_secret)
sds.authenticate()
return sds
def main():
if (len(sys.argv) < 2):
print('Please specify IATA city or airport code as a command-line argument')
elif (len(sys.argv[1]) != 3):
print('IATA city or airport code must be 3 letters long')
else:
client = set_up_client()
city = sys.argv[1]
resp = client.flights_to(city)
data = resp[0]
for city in data:
print(city.origin_location + '\t' + str(city.lowest_fare.fare))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add first example: cheapest destinations<commit_after>
|
'''
Finds the cheapest fares filed to a given city
Example:
    python cheapest_destinations.py YTO
Result:
FLL 165.62
MCO 165.62
JFK 172.36
LGA 191.12
'''
import datetime
import json
import sys
sys.path.append('..')
import sabre_dev_studio
import sabre_dev_studio.sabre_exceptions as sabre_exceptions
'''
requires config.json in the same directory for api authentication
{
"sabre_client_id": -----,
"sabre_client_secret": -----
}
'''
def read_config():
raw_data = open('config.json').read()
data = json.loads(raw_data)
client_secret = data['sabre_client_secret']
client_id = data['sabre_client_id']
return (client_id, client_secret)
def set_up_client():
# Read from config
client_id, client_secret = read_config()
sds = sabre_dev_studio.SabreDevStudio()
sds.set_credentials(client_id, client_secret)
sds.authenticate()
return sds
def main():
if (len(sys.argv) < 2):
print('Please specify IATA city or airport code as a command-line argument')
elif (len(sys.argv[1]) != 3):
print('IATA city or airport code must be 3 letters long')
else:
client = set_up_client()
city = sys.argv[1]
resp = client.flights_to(city)
data = resp[0]
for city in data:
print(city.origin_location + '\t' + str(city.lowest_fare.fare))
if __name__ == '__main__':
main()
|
Add first example: cheapest destinations'''
Finds the cheapest fares filed to a given city
Example:
    python cheapest_destinations.py YTO
Result:
FLL 165.62
MCO 165.62
JFK 172.36
LGA 191.12
'''
import datetime
import json
import sys
sys.path.append('..')
import sabre_dev_studio
import sabre_dev_studio.sabre_exceptions as sabre_exceptions
'''
requires config.json in the same directory for api authentication
{
"sabre_client_id": -----,
"sabre_client_secret": -----
}
'''
def read_config():
raw_data = open('config.json').read()
data = json.loads(raw_data)
client_secret = data['sabre_client_secret']
client_id = data['sabre_client_id']
return (client_id, client_secret)
def set_up_client():
# Read from config
client_id, client_secret = read_config()
sds = sabre_dev_studio.SabreDevStudio()
sds.set_credentials(client_id, client_secret)
sds.authenticate()
return sds
def main():
if (len(sys.argv) < 2):
print('Please specify IATA city or airport code as a command-line argument')
elif (len(sys.argv[1]) != 3):
print('IATA city or airport code must be 3 letters long')
else:
client = set_up_client()
city = sys.argv[1]
resp = client.flights_to(city)
data = resp[0]
for city in data:
print(city.origin_location + '\t' + str(city.lowest_fare.fare))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add first example: cheapest destinations<commit_after>'''
Finds the cheapest fares filed to a given city
Example:
    python cheapest_destinations.py YTO
Result:
FLL 165.62
MCO 165.62
JFK 172.36
LGA 191.12
'''
import datetime
import json
import sys
sys.path.append('..')
import sabre_dev_studio
import sabre_dev_studio.sabre_exceptions as sabre_exceptions
'''
requires config.json in the same directory for api authentication
{
"sabre_client_id": -----,
"sabre_client_secret": -----
}
'''
def read_config():
raw_data = open('config.json').read()
data = json.loads(raw_data)
client_secret = data['sabre_client_secret']
client_id = data['sabre_client_id']
return (client_id, client_secret)
def set_up_client():
# Read from config
client_id, client_secret = read_config()
sds = sabre_dev_studio.SabreDevStudio()
sds.set_credentials(client_id, client_secret)
sds.authenticate()
return sds
def main():
if (len(sys.argv) < 2):
print('Please specify IATA city or airport code as a command-line argument')
elif (len(sys.argv[1]) != 3):
print('IATA city or airport code must be 3 letters long')
else:
client = set_up_client()
city = sys.argv[1]
resp = client.flights_to(city)
data = resp[0]
for city in data:
print(city.origin_location + '\t' + str(city.lowest_fare.fare))
if __name__ == '__main__':
main()
|
|
7bafcf68f419a87f996960fb9be304468bc5154a
|
tests/test_synonym.py
|
tests/test_synonym.py
|
import sqlalchemy as sa
from sqlalchemy.ext.hybrid import hybrid_property
from wtforms_alchemy import ModelForm
from tests import ModelFormTestCase
class TestSynonym(ModelFormTestCase):
def test_synonym_returning_column_property(self):
class ModelTest(self.base):
__tablename__ = 'model_test'
id = sa.Column(sa.Integer, primary_key=True)
_test_column = sa.Column('test_column', sa.Integer, nullable=False)
@hybrid_property
def test_column(self):
                return self._test_column * 2
@test_column.setter
def test_column(self, value):
self._test_column = value
test_column = sa.orm.synonym(
'_test_column', descriptor='test_column'
)
class ModelTestForm(ModelForm):
class Meta:
model = ModelTest
not_null_str_validator = None
not_null_validator = None
include = ('test_column', )
exclude = ('_test_column', )
form = ModelTestForm()
assert form.test_column
|
Add tests for SA synonym
|
Add tests for SA synonym
|
Python
|
bsd-3-clause
|
quantus/wtforms-alchemy,kelvinhammond/wtforms-alchemy,williamwu0220/wtforms-alchemy
|
Add tests for SA synonym
|
import sqlalchemy as sa
from sqlalchemy.ext.hybrid import hybrid_property
from wtforms_alchemy import ModelForm
from tests import ModelFormTestCase
class TestSynonym(ModelFormTestCase):
def test_synonym_returning_column_property(self):
class ModelTest(self.base):
__tablename__ = 'model_test'
id = sa.Column(sa.Integer, primary_key=True)
_test_column = sa.Column('test_column', sa.Integer, nullable=False)
@hybrid_property
def test_column(self):
                return self._test_column * 2
@test_column.setter
def test_column(self, value):
self._test_column = value
test_column = sa.orm.synonym(
'_test_column', descriptor='test_column'
)
class ModelTestForm(ModelForm):
class Meta:
model = ModelTest
not_null_str_validator = None
not_null_validator = None
include = ('test_column', )
exclude = ('_test_column', )
form = ModelTestForm()
assert form.test_column
|
<commit_before><commit_msg>Add tests for SA synonym<commit_after>
|
import sqlalchemy as sa
from sqlalchemy.ext.hybrid import hybrid_property
from wtforms_alchemy import ModelForm
from tests import ModelFormTestCase
class TestSynonym(ModelFormTestCase):
def test_synonym_returning_column_property(self):
class ModelTest(self.base):
__tablename__ = 'model_test'
id = sa.Column(sa.Integer, primary_key=True)
_test_column = sa.Column('test_column', sa.Integer, nullable=False)
@hybrid_property
def test_column(self):
                return self._test_column * 2
@test_column.setter
def test_column(self, value):
self._test_column = value
test_column = sa.orm.synonym(
'_test_column', descriptor='test_column'
)
class ModelTestForm(ModelForm):
class Meta:
model = ModelTest
not_null_str_validator = None
not_null_validator = None
include = ('test_column', )
exclude = ('_test_column', )
form = ModelTestForm()
assert form.test_column
|
Add tests for SA synonymimport sqlalchemy as sa
from sqlalchemy.ext.hybrid import hybrid_property
from wtforms_alchemy import ModelForm
from tests import ModelFormTestCase
class TestSynonym(ModelFormTestCase):
def test_synonym_returning_column_property(self):
class ModelTest(self.base):
__tablename__ = 'model_test'
id = sa.Column(sa.Integer, primary_key=True)
_test_column = sa.Column('test_column', sa.Integer, nullable=False)
@hybrid_property
def test_column(self):
                return self._test_column * 2
@test_column.setter
def test_column(self, value):
self._test_column = value
test_column = sa.orm.synonym(
'_test_column', descriptor='test_column'
)
class ModelTestForm(ModelForm):
class Meta:
model = ModelTest
not_null_str_validator = None
not_null_validator = None
include = ('test_column', )
exclude = ('_test_column', )
form = ModelTestForm()
assert form.test_column
|
<commit_before><commit_msg>Add tests for SA synonym<commit_after>import sqlalchemy as sa
from sqlalchemy.ext.hybrid import hybrid_property
from wtforms_alchemy import ModelForm
from tests import ModelFormTestCase
class TestSynonym(ModelFormTestCase):
def test_synonym_returning_column_property(self):
class ModelTest(self.base):
__tablename__ = 'model_test'
id = sa.Column(sa.Integer, primary_key=True)
_test_column = sa.Column('test_column', sa.Integer, nullable=False)
@hybrid_property
def test_column(self):
                return self._test_column * 2
@test_column.setter
def test_column(self, value):
self._test_column = value
test_column = sa.orm.synonym(
'_test_column', descriptor='test_column'
)
class ModelTestForm(ModelForm):
class Meta:
model = ModelTest
not_null_str_validator = None
not_null_validator = None
include = ('test_column', )
exclude = ('_test_column', )
form = ModelTestForm()
assert form.test_column
|
|
a846eadb0cdf02ace459e300ae4da7710754263f
|
IPython/__main__.py
|
IPython/__main__.py
|
# encoding: utf-8
"""Terminal-based IPython entry point.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2012, IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from IPython.frontend.terminal.ipapp import launch_new_instance
launch_new_instance()
|
Allow starting IPython as `python -m IPython`.
|
Allow starting IPython as `python -m IPython`.
Closes #2541.
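As background, a minimal sketch of the stdlib mechanism this relies on
(`somepkg` is a hypothetical name; only the IPython file in this commit
is real): `python -m somepkg` imports the package and then executes its
`__main__` submodule with `__name__` set to `'__main__'`.

# somepkg/__main__.py -- hypothetical package illustrating the -m hook
import sys

def main(argv=None):
    # A real entry point would dispatch on argv; this only shows the hook fires.
    args = argv if argv is not None else sys.argv[1:]
    print("somepkg invoked as a module with args:", args)

main()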
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
Allow starting IPython as `python -m IPython`.
Closes #2541.
|
# encoding: utf-8
"""Terminal-based IPython entry point.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2012, IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from IPython.frontend.terminal.ipapp import launch_new_instance
launch_new_instance()
|
<commit_before><commit_msg>Allow starting IPython as `python -m IPython`.
Closes #2541.<commit_after>
|
# encoding: utf-8
"""Terminal-based IPython entry point.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2012, IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from IPython.frontend.terminal.ipapp import launch_new_instance
launch_new_instance()
|
Allow starting IPython as `python -m IPython`.
Closes #2541.# encoding: utf-8
"""Terminal-based IPython entry point.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2012, IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from IPython.frontend.terminal.ipapp import launch_new_instance
launch_new_instance()
|
<commit_before><commit_msg>Allow starting IPython as `python -m IPython`.
Closes #2541.<commit_after># encoding: utf-8
"""Terminal-based IPython entry point.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2012, IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from IPython.frontend.terminal.ipapp import launch_new_instance
launch_new_instance()
|
|
3e9c3fb3ba1a05a751f2587a24d163f9c9fca7cb
|
twitterproj/fisher.py
|
twitterproj/fisher.py
|
"""
Fisher information matrix of a Dirichlet distribution.
"""
from __future__ import division
import numpy as np
import scipy.linalg
from scipy.special import polygamma
import time
import twitterproj
import io
from operator import itemgetter
def fisher_information(counts):
"""
Calculates the Fisher information matrix of a Dirichlet distribution.
Parameters
----------
counts : array-like, shape (n,)
The counts for the Dirichlet distribution.
Returns
-------
fim : array-like, shape (n, n)
The Fisher information matrix.
"""
counts = np.asarray(counts)
fim = np.diag(polygamma(1, counts)) - polygamma(1, counts.sum())
return fim
def hashtag_countycounts(hashtags, collection, prior=1/2):
"""
Return the counts of a hashtag for each county.
"""
total = collection.find().count()
counts = np.zeros((total, len(hashtags))) + prior
for i, doc in enumerate(twitterproj.hashtag_counts__counties()):
row = np.array([doc['counts'].get(ht, 0) for ht in hashtags])
counts[i] += row
return counts.transpose()
def top_hashtags(n, sortkey, collection, extract=True):
docs = list(collection.find().sort(sortkey, -1).limit(n))
if extract:
ht = [doc['_id'] for doc in docs]
else:
ht = docs
return ht
def pipeline(N, hashtags, norm):
db = twitterproj.connect()
collection = db.grids.counties.bot_filtered
x = hashtag_countycounts(hashtags, collection)
scores = [norm(x[i]) for i in range(len(x))]
return scores
def operator_norm(counts):
M = fisher_information(counts)
largest_eig = (M.shape[0] - 1, M.shape[0] - 1)
eigs = scipy.linalg.eigh(M, eigvals_only=True, eigvals=largest_eig)
return eigs[0]
def frobenius_norm(counts):
n = len(counts)
pgsum = polygamma(1, counts.sum())
A = (n**2 - n) * pgsum**2
B = polygamma(1, counts) - polygamma(1, counts.sum())
B = (B**2).sum()
return np.sqrt(A + B)
|
Add functions to calculate Fisher information.
|
Add functions to calculate Fisher information.
|
Python
|
unlicense
|
chebee7i/twitter,chebee7i/twitter,chebee7i/twitter
|
Add functions to calculate Fisher information.
|
"""
Fisher information matrix of a Dirichlet distribution.
"""
from __future__ import division
import numpy as np
import scipy.linalg
from scipy.special import polygamma
import time
import twitterproj
import io
from operator import itemgetter
def fisher_information(counts):
"""
Calculates the Fisher information matrix of a Dirichlet distribution.
Parameters
----------
counts : array-like, shape (n,)
The counts for the Dirichlet distribution.
Returns
-------
fim : array-like, shape (n, n)
The Fisher information matrix.
"""
counts = np.asarray(counts)
fim = np.diag(polygamma(1, counts)) - polygamma(1, counts.sum())
return fim
def hashtag_countycounts(hashtags, collection, prior=1/2):
"""
Return the counts of a hashtag for each county.
"""
total = collection.find().count()
counts = np.zeros((total, len(hashtags))) + prior
for i, doc in enumerate(twitterproj.hashtag_counts__counties()):
row = np.array([doc['counts'].get(ht, 0) for ht in hashtags])
counts[i] += row
return counts.transpose()
def top_hashtags(n, sortkey, collection, extract=True):
docs = list(collection.find().sort(sortkey, -1).limit(n))
if extract:
ht = [doc['_id'] for doc in docs]
else:
ht = docs
return ht
def pipeline(N, hashtags, norm):
db = twitterproj.connect()
collection = db.grids.counties.bot_filtered
x = hashtag_countycounts(hashtags, collection)
scores = [norm(x[i]) for i in range(len(x))]
return scores
def operator_norm(counts):
M = fisher_information(counts)
largest_eig = (M.shape[0] - 1, M.shape[0] - 1)
eigs = scipy.linalg.eigh(M, eigvals_only=True, eigvals=largest_eig)
return eigs[0]
def frobenius_norm(counts):
n = len(counts)
pgsum = polygamma(1, counts.sum())
A = (n**2 - n) * pgsum**2
B = polygamma(1, counts) - polygamma(1, counts.sum())
B = (B**2).sum()
return np.sqrt(A + B)
|
<commit_before><commit_msg>Add functions to calculate Fisher information.<commit_after>
|
"""
Fisher information matrix of a Dirichlet distribution.
"""
from __future__ import division
import numpy as np
import scipy.linalg
from scipy.special import polygamma
import time
import twitterproj
import io
from operator import itemgetter
def fisher_information(counts):
"""
Calculates the Fisher information matrix of a Dirichlet distribution.
Parameters
----------
counts : array-like, shape (n,)
The counts for the Dirichlet distribution.
Returns
-------
fim : array-like, shape (n, n)
The Fisher information matrix.
"""
counts = np.asarray(counts)
fim = np.diag(polygamma(1, counts)) - polygamma(1, counts.sum())
return fim
def hashtag_countycounts(hashtags, collection, prior=1/2):
"""
Return the counts of a hashtag for each county.
"""
total = collection.find().count()
counts = np.zeros((total, len(hashtags))) + prior
for i, doc in enumerate(twitterproj.hashtag_counts__counties()):
row = np.array([doc['counts'].get(ht, 0) for ht in hashtags])
counts[i] += row
return counts.transpose()
def top_hashtags(n, sortkey, collection, extract=True):
docs = list(collection.find().sort(sortkey, -1).limit(n))
if extract:
ht = [doc['_id'] for doc in docs]
else:
ht = docs
return ht
def pipeline(N, hashtags, norm):
db = twitterproj.connect()
collection = db.grids.counties.bot_filtered
x = hashtag_countycounts(hashtags, collection)
scores = [norm(x[i]) for i in range(len(x))]
return scores
def operator_norm(counts):
M = fisher_information(counts)
largest_eig = (M.shape[0] - 1, M.shape[0] - 1)
eigs = scipy.linalg.eigh(M, eigvals_only=True, eigvals=largest_eig)
return eigs[0]
def frobenius_norm(counts):
n = len(counts)
pgsum = polygamma(1, counts.sum())
A = (n**2 - n) * pgsum**2
B = polygamma(1, counts) - polygamma(1, counts.sum())
B = (B**2).sum()
return np.sqrt(A + B)
|
Add functions to calculate Fisher information."""
Fisher information matrix of a Dirichlet distribution.
"""
from __future__ import division
import numpy as np
import scipy.linalg
from scipy.special import polygamma
import time
import twitterproj
import io
from operator import itemgetter
def fisher_information(counts):
"""
Calculates the Fisher information matrix of a Dirichlet distribution.
Parameters
----------
counts : array-like, shape (n,)
The counts for the Dirichlet distribution.
Returns
-------
fim : array-like, shape (n, n)
The Fisher information matrix.
"""
counts = np.asarray(counts)
fim = np.diag(polygamma(1, counts)) - polygamma(1, counts.sum())
return fim
def hashtag_countycounts(hashtags, collection, prior=1/2):
"""
Return the counts of a hashtag for each county.
"""
total = collection.find().count()
counts = np.zeros((total, len(hashtags))) + prior
for i, doc in enumerate(twitterproj.hashtag_counts__counties()):
row = np.array([doc['counts'].get(ht, 0) for ht in hashtags])
counts[i] += row
return counts.transpose()
def top_hashtags(n, sortkey, collection, extract=True):
docs = list(collection.find().sort(sortkey, -1).limit(n))
if extract:
ht = [doc['_id'] for doc in docs]
else:
ht = docs
return ht
def pipeline(N, hashtags, norm):
db = twitterproj.connect()
collection = db.grids.counties.bot_filtered
x = hashtag_countycounts(hashtags, collection)
scores = [norm(x[i]) for i in range(len(x))]
return scores
def operator_norm(counts):
M = fisher_information(counts)
largest_eig = (M.shape[0] - 1, M.shape[0] - 1)
eigs = scipy.linalg.eigh(M, eigvals_only=True, eigvals=largest_eig)
return eigs[0]
def frobenius_norm(counts):
n = len(counts)
pgsum = polygamma(1, counts.sum())
A = (n**2 - n) * pgsum**2
B = polygamma(1, counts) - polygamma(1, counts.sum())
B = (B**2).sum()
return np.sqrt(A + B)
|
<commit_before><commit_msg>Add functions to calculate Fisher information.<commit_after>"""
Fisher information matrix of a Dirichlet distribution.
"""
from __future__ import division
import numpy as np
import scipy.linalg
from scipy.special import polygamma
import time
import twitterproj
import io
from operator import itemgetter
def fisher_information(counts):
"""
Calculates the Fisher information matrix of a Dirichlet distribution.
Parameters
----------
counts : array-like, shape (n,)
The counts for the Dirichlet distribution.
Returns
-------
fim : array-like, shape (n, n)
The Fisher information matrix.
"""
counts = np.asarray(counts)
fim = np.diag(polygamma(1, counts)) - polygamma(1, counts.sum())
return fim
def hashtag_countycounts(hashtags, collection, prior=1/2):
"""
Return the counts of a hashtag for each county.
"""
total = collection.find().count()
counts = np.zeros((total, len(hashtags))) + prior
for i, doc in enumerate(twitterproj.hashtag_counts__counties()):
row = np.array([doc['counts'].get(ht, 0) for ht in hashtags])
counts[i] += row
return counts.transpose()
def top_hashtags(n, sortkey, collection, extract=True):
docs = list(collection.find().sort(sortkey, -1).limit(n))
if extract:
ht = [doc['_id'] for doc in docs]
else:
ht = docs
return ht
def pipeline(N, hashtags, norm):
db = twitterproj.connect()
collection = db.grids.counties.bot_filtered
x = hashtag_countycounts(hashtags, collection)
scores = [norm(x[i]) for i in range(len(x))]
return scores
def operator_norm(counts):
M = fisher_information(counts)
largest_eig = (M.shape[0] - 1, M.shape[0] - 1)
eigs = scipy.linalg.eigh(M, eigvals_only=True, eigvals=largest_eig)
return eigs[0]
def frobenius_norm(counts):
n = len(counts)
pgsum = polygamma(1, counts.sum())
A = (n**2 - n) * pgsum**2
B = polygamma(1, counts) - polygamma(1, counts.sum())
B = (B**2).sum()
return np.sqrt(A + B)
|
|
20f1dfd80dc6090c2b82ec0847315585d2ecf26b
|
udiskie/automount.py
|
udiskie/automount.py
|
"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self.mounter = mounter
def device_added(self, udevice):
self.mounter.add_device(udevice)
def media_added(self, udevice):
self.mounter.add_device(udevice)
# Automount LUKS cleartext holders after they have been unlocked.
# Why doesn't this work in device_added?
def device_unlocked(self, udevice):
self.mounter.add_device(udevice.luks_cleartext_holder)
|
"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self.mounter = mounter
def device_added(self, udevice):
self.mounter.add_device(udevice)
def media_added(self, udevice):
self.mounter.add_device(udevice)
|
Revert "Automount LUKS devices when they have been unlocked"
|
Revert "Automount LUKS devices when they have been unlocked"
This reverts commit d67f8c7284e8d3bff2e7c81711a9fece952aea46.
|
Python
|
mit
|
khardix/udiskie,coldfix/udiskie,mathstuf/udiskie,coldfix/udiskie,pstray/udiskie,pstray/udiskie
|
"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self.mounter = mounter
def device_added(self, udevice):
self.mounter.add_device(udevice)
def media_added(self, udevice):
self.mounter.add_device(udevice)
# Automount LUKS cleartext holders after they have been unlocked.
# Why doesn't this work in device_added?
def device_unlocked(self, udevice):
self.mounter.add_device(udevice.luks_cleartext_holder)
Revert "Automount LUKS devices when they have been unlocked"
This reverts commit d67f8c7284e8d3bff2e7c81711a9fece952aea46.
|
"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self.mounter = mounter
def device_added(self, udevice):
self.mounter.add_device(udevice)
def media_added(self, udevice):
self.mounter.add_device(udevice)
|
<commit_before>"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self.mounter = mounter
def device_added(self, udevice):
self.mounter.add_device(udevice)
def media_added(self, udevice):
self.mounter.add_device(udevice)
# Automount LUKS cleartext holders after they have been unlocked.
# Why doesn't this work in device_added?
def device_unlocked(self, udevice):
self.mounter.add_device(udevice.luks_cleartext_holder)
<commit_msg>Revert "Automount LUKS devices when they have been unlocked"
This reverts commit d67f8c7284e8d3bff2e7c81711a9fece952aea46.<commit_after>
|
"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self.mounter = mounter
def device_added(self, udevice):
self.mounter.add_device(udevice)
def media_added(self, udevice):
self.mounter.add_device(udevice)
|
"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self.mounter = mounter
def device_added(self, udevice):
self.mounter.add_device(udevice)
def media_added(self, udevice):
self.mounter.add_device(udevice)
# Automount LUKS cleartext holders after they have been unlocked.
# Why doesn't this work in device_added?
def device_unlocked(self, udevice):
self.mounter.add_device(udevice.luks_cleartext_holder)
Revert "Automount LUKS devices when they have been unlocked"
This reverts commit d67f8c7284e8d3bff2e7c81711a9fece952aea46."""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self.mounter = mounter
def device_added(self, udevice):
self.mounter.add_device(udevice)
def media_added(self, udevice):
self.mounter.add_device(udevice)
|
<commit_before>"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self.mounter = mounter
def device_added(self, udevice):
self.mounter.add_device(udevice)
def media_added(self, udevice):
self.mounter.add_device(udevice)
# Automount LUKS cleartext holders after they have been unlocked.
# Why doesn't this work in device_added?
def device_unlocked(self, udevice):
self.mounter.add_device(udevice.luks_cleartext_holder)
<commit_msg>Revert "Automount LUKS devices when they have been unlocked"
This reverts commit d67f8c7284e8d3bff2e7c81711a9fece952aea46.<commit_after>"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self.mounter = mounter
def device_added(self, udevice):
self.mounter.add_device(udevice)
def media_added(self, udevice):
self.mounter.add_device(udevice)
|
e63f4e7b42b85e25fc89a31d0a622b19e01a1227
|
tests/test_strings.py
|
tests/test_strings.py
|
"""Tests for string searches"""
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
|
Add a smoke test for negated phrase matches.
|
Add a smoke test for negated phrase matches.
|
Python
|
mit
|
gartung/dxr,jay-z007/dxr,kleintom/dxr,pelmers/dxr,gartung/dxr,KiemVM/Mozilla--dxr,jbradberry/dxr,srenatus/dxr,pelmers/dxr,erikrose/dxr,pelmers/dxr,gartung/dxr,erikrose/dxr,bozzmob/dxr,pombredanne/dxr,kleintom/dxr,kleintom/dxr,bozzmob/dxr,bozzmob/dxr,kleintom/dxr,jbradberry/dxr,jbradberry/dxr,gartung/dxr,srenatus/dxr,KiemVM/Mozilla--dxr,kleintom/dxr,jbradberry/dxr,nrc/dxr,jay-z007/dxr,gartung/dxr,KiemVM/Mozilla--dxr,nrc/dxr,pelmers/dxr,jay-z007/dxr,pombredanne/dxr,pombredanne/dxr,bozzmob/dxr,jay-z007/dxr,bozzmob/dxr,pombredanne/dxr,kleintom/dxr,srenatus/dxr,erikrose/dxr,gartung/dxr,srenatus/dxr,kleintom/dxr,KiemVM/Mozilla--dxr,jay-z007/dxr,srenatus/dxr,nrc/dxr,pelmers/dxr,jbradberry/dxr,jay-z007/dxr,pelmers/dxr,nrc/dxr,jbradberry/dxr,bozzmob/dxr,nrc/dxr,jay-z007/dxr,erikrose/dxr,nrc/dxr,srenatus/dxr,pombredanne/dxr,gartung/dxr,pombredanne/dxr,KiemVM/Mozilla--dxr,KiemVM/Mozilla--dxr,pombredanne/dxr,pelmers/dxr,jbradberry/dxr,bozzmob/dxr,erikrose/dxr
|
Add a smoke test for negated phrase matches.
|
"""Tests for string searches"""
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
|
<commit_before><commit_msg>Add a smoke test for negated phrase matches.<commit_after>
|
"""Tests for string searches"""
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
|
Add a smoke test for negated phrase matches."""Tests for string searches"""
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
|
<commit_before><commit_msg>Add a smoke test for negated phrase matches.<commit_after>"""Tests for string searches"""
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
|
|
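The smoke test in the record above only asserts that a query like void -"int" returns no results without crashing. To make the semantics of a negated phrase concrete, here is a deliberately simplified matcher (a sketch under stated assumptions — whitespace-splitting the query means multi-word phrases are not handled, and this is not DXR's query engine):

def matches(line, query):
    # A line matches if it contains every positive term
    # and none of the negated "phrases".
    positives, negated = [], []
    for term in query.split():
        if term.startswith('-"') and term.endswith('"'):
            negated.append(term[2:-1])
        else:
            positives.append(term)
    return (all(p in line for p in positives) and
            not any(n in line for n in negated))

assert matches('void main_idea() {', 'void -"int"')
assert not matches('void f(int x) {', 'void -"int"')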
6676d3fdc7a50f7b694d557a1f3daff154fbd221
|
setup.py
|
setup.py
|
#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from setuptools import setup
setup(
name='Nereid',
version='2.8.0.2',
url='http://nereid.openlabs.co.in/docs/',
license='GPLv3',
author='Openlabs Technologies & Consulting (P) Limited',
author_email='info@openlabs.co.in',
description='Tryton - Web Framework',
long_description=__doc__,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Tryton',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
'distribute',
'flask',
'wtforms',
'wtforms-recaptcha',
'babel',
'speaklater',
'Flask-Babel',
],
packages=[
'nereid',
'nereid.contrib',
'nereid.tests',
],
package_dir={
'nereid': 'nereid',
'nereid.contrib': 'nereid/contrib',
'nereid.tests': 'tests',
},
zip_safe=False,
platforms='any',
)
|
#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from setuptools import setup
setup(
name='Nereid',
version='2.8.0.2',
url='http://nereid.openlabs.co.in/docs/',
license='GPLv3',
author='Openlabs Technologies & Consulting (P) Limited',
author_email='info@openlabs.co.in',
description='Tryton - Web Framework',
long_description=__doc__,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Tryton',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
'distribute',
'flask<0.10',
'wtforms',
'wtforms-recaptcha',
'babel',
'speaklater',
'Flask-Babel',
],
packages=[
'nereid',
'nereid.contrib',
'nereid.tests',
],
package_dir={
'nereid': 'nereid',
'nereid.contrib': 'nereid/contrib',
'nereid.tests': 'tests',
},
zip_safe=False,
platforms='any',
)
|
Use a version of flask < 0.10
|
Use a version of flask < 0.10
* Flask 0.10 has a bunch of changes which break nereid
|
Python
|
bsd-3-clause
|
fulfilio/nereid,prakashpp/nereid,usudaysingh/nereid,prakashpp/nereid,riteshshrv/nereid,riteshshrv/nereid,usudaysingh/nereid,fulfilio/nereid
|
#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from setuptools import setup
setup(
name='Nereid',
version='2.8.0.2',
url='http://nereid.openlabs.co.in/docs/',
license='GPLv3',
author='Openlabs Technologies & Consulting (P) Limited',
author_email='info@openlabs.co.in',
description='Tryton - Web Framework',
long_description=__doc__,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Tryton',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
'distribute',
'flask',
'wtforms',
'wtforms-recaptcha',
'babel',
'speaklater',
'Flask-Babel',
],
packages=[
'nereid',
'nereid.contrib',
'nereid.tests',
],
package_dir={
'nereid': 'nereid',
'nereid.contrib': 'nereid/contrib',
'nereid.tests': 'tests',
},
zip_safe=False,
platforms='any',
)
Use a version of flask < 0.10
* Flask 0.10 has a bunch of changes which break nereid
|
#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from setuptools import setup
setup(
name='Nereid',
version='2.8.0.2',
url='http://nereid.openlabs.co.in/docs/',
license='GPLv3',
author='Openlabs Technologies & Consulting (P) Limited',
author_email='info@openlabs.co.in',
description='Tryton - Web Framework',
long_description=__doc__,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Tryton',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
'distribute',
'flask<0.10',
'wtforms',
'wtforms-recaptcha',
'babel',
'speaklater',
'Flask-Babel',
],
packages=[
'nereid',
'nereid.contrib',
'nereid.tests',
],
package_dir={
'nereid': 'nereid',
'nereid.contrib': 'nereid/contrib',
'nereid.tests': 'tests',
},
zip_safe=False,
platforms='any',
)
|
<commit_before>#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from setuptools import setup
setup(
name='Nereid',
version='2.8.0.2',
url='http://nereid.openlabs.co.in/docs/',
license='GPLv3',
author='Openlabs Technologies & Consulting (P) Limited',
author_email='info@openlabs.co.in',
description='Tryton - Web Framework',
long_description=__doc__,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Tryton',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
'distribute',
'flask',
'wtforms',
'wtforms-recaptcha',
'babel',
'speaklater',
'Flask-Babel',
],
packages=[
'nereid',
'nereid.contrib',
'nereid.tests',
],
package_dir={
'nereid': 'nereid',
'nereid.contrib': 'nereid/contrib',
'nereid.tests': 'tests',
},
zip_safe=False,
platforms='any',
)
<commit_msg>Use a version of flask < 0.10
* Flask 0.10 has a bunch of changes which break nereid<commit_after>
|
#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from setuptools import setup
setup(
name='Nereid',
version='2.8.0.2',
url='http://nereid.openlabs.co.in/docs/',
license='GPLv3',
author='Openlabs Technologies & Consulting (P) Limited',
author_email='info@openlabs.co.in',
description='Tryton - Web Framework',
long_description=__doc__,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Tryton',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
'distribute',
'flask<0.10',
'wtforms',
'wtforms-recaptcha',
'babel',
'speaklater',
'Flask-Babel',
],
packages=[
'nereid',
'nereid.contrib',
'nereid.tests',
],
package_dir={
'nereid': 'nereid',
'nereid.contrib': 'nereid/contrib',
'nereid.tests': 'tests',
},
zip_safe=False,
platforms='any',
)
|
#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from setuptools import setup
setup(
name='Nereid',
version='2.8.0.2',
url='http://nereid.openlabs.co.in/docs/',
license='GPLv3',
author='Openlabs Technologies & Consulting (P) Limited',
author_email='info@openlabs.co.in',
description='Tryton - Web Framework',
long_description=__doc__,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Tryton',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
'distribute',
'flask',
'wtforms',
'wtforms-recaptcha',
'babel',
'speaklater',
'Flask-Babel',
],
packages=[
'nereid',
'nereid.contrib',
'nereid.tests',
],
package_dir={
'nereid': 'nereid',
'nereid.contrib': 'nereid/contrib',
'nereid.tests': 'tests',
},
zip_safe=False,
platforms='any',
)
Use a version of flask < 0.10
* Flask 0.10 has a bunch of changes which break nereid#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from setuptools import setup
setup(
name='Nereid',
version='2.8.0.2',
url='http://nereid.openlabs.co.in/docs/',
license='GPLv3',
author='Openlabs Technologies & Consulting (P) Limited',
author_email='info@openlabs.co.in',
description='Tryton - Web Framework',
long_description=__doc__,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Tryton',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
'distribute',
'flask<0.10',
'wtforms',
'wtforms-recaptcha',
'babel',
'speaklater',
'Flask-Babel',
],
packages=[
'nereid',
'nereid.contrib',
'nereid.tests',
],
package_dir={
'nereid': 'nereid',
'nereid.contrib': 'nereid/contrib',
'nereid.tests': 'tests',
},
zip_safe=False,
platforms='any',
)
|
<commit_before>#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from setuptools import setup
setup(
name='Nereid',
version='2.8.0.2',
url='http://nereid.openlabs.co.in/docs/',
license='GPLv3',
author='Openlabs Technologies & Consulting (P) Limited',
author_email='info@openlabs.co.in',
description='Tryton - Web Framework',
long_description=__doc__,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Tryton',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
'distribute',
'flask',
'wtforms',
'wtforms-recaptcha',
'babel',
'speaklater',
'Flask-Babel',
],
packages=[
'nereid',
'nereid.contrib',
'nereid.tests',
],
package_dir={
'nereid': 'nereid',
'nereid.contrib': 'nereid/contrib',
'nereid.tests': 'tests',
},
zip_safe=False,
platforms='any',
)
<commit_msg>Use a version of flask < 0.10
* Flask 0.10 has a bunch of changes which break nereid<commit_after>#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from setuptools import setup
setup(
name='Nereid',
version='2.8.0.2',
url='http://nereid.openlabs.co.in/docs/',
license='GPLv3',
author='Openlabs Technologies & Consulting (P) Limited',
author_email='info@openlabs.co.in',
description='Tryton - Web Framework',
long_description=__doc__,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Tryton',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
'distribute',
'flask<0.10',
'wtforms',
'wtforms-recaptcha',
'babel',
'speaklater',
'Flask-Babel',
],
packages=[
'nereid',
'nereid.contrib',
'nereid.tests',
],
package_dir={
'nereid': 'nereid',
'nereid.contrib': 'nereid/contrib',
'nereid.tests': 'tests',
},
zip_safe=False,
platforms='any',
)
|
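The pin in the record above caps flask below 0.10 because 0.10 introduced changes that break nereid. The same specifier can be evaluated programmatically; a sketch using the packaging library (the candidate versions are illustrative):

from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet('<0.10')  # the constraint added in setup.py above
for candidate in ('0.9', '0.10', '0.12.2'):
    print(candidate, 'ok' if Version(candidate) in spec else 'rejected')
# 0.9 ok / 0.10 rejected / 0.12.2 rejected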
0e8dce6d960a8dc2b2521160fd543529a17efd2c
|
projects/relational_memory/pooling_test.py
|
projects/relational_memory/pooling_test.py
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2018, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""TODO"""
import numpy as np
from htmresearch.algorithms.column_pooler import ColumnPooler
from nupic.algorithms.knn_classifier import KNNClassifier
def train(pooler, classifier, objs):
for label, obj in objs:
pooler.reset()
np.random.shuffle(obj)
for feature in obj:
sortedFeature = np.sort(feature)
print "A"
poolerOutput = pooler.compute(feedforwardInput=sortedFeature,
learn=True,
predictedInput=sortedFeature)
print "B"
classifierInput = np.zeros(4096, dtype=np.int32)
classifierInput[poolerOutput] = 1
classifier.learn(classifierInput, label)
def test(pooler, classifier, objs):
pass
def run():
numObjects = 100
objSize = 10
allIndices = np.array(xrange(1024), dtype=np.int32)
objs = [
(
label,
[np.random.choice(allIndices, 20) for _ in xrange(objSize)]
)
for label in xrange(numObjects)
]
pooler = ColumnPooler(
inputWidth=1024,
)
classifier = KNNClassifier(k=1, distanceMethod="rawOverlap")
train(pooler, classifier, objs)
test(pooler, classifier, objs)
if __name__ == "__main__":
run()
|
Add ColumnPooler test for debugging
|
Add ColumnPooler test for debugging
|
Python
|
agpl-3.0
|
neuroidss/nupic.research,numenta/htmresearch,neuroidss/nupic.research,numenta/htmresearch,subutai/htmresearch,neuroidss/nupic.research,numenta/htmresearch,subutai/htmresearch,numenta/htmresearch,subutai/htmresearch,neuroidss/nupic.research,numenta/htmresearch,subutai/htmresearch,neuroidss/nupic.research,numenta/htmresearch,subutai/htmresearch,numenta/htmresearch,neuroidss/nupic.research,neuroidss/nupic.research,subutai/htmresearch,subutai/htmresearch,numenta/htmresearch,subutai/htmresearch,neuroidss/nupic.research
|
Add ColumnPooler test for debugging
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2018, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""TODO"""
import numpy as np
from htmresearch.algorithms.column_pooler import ColumnPooler
from nupic.algorithms.knn_classifier import KNNClassifier
def train(pooler, classifier, objs):
for label, obj in objs:
pooler.reset()
np.random.shuffle(obj)
for feature in obj:
sortedFeature = np.sort(feature)
print "A"
poolerOutput = pooler.compute(feedforwardInput=sortedFeature,
learn=True,
predictedInput=sortedFeature)
print "B"
classifierInput = np.zeros(4096, dtype=np.int32)
classifierInput[poolerOutput] = 1
classifier.learn(classifierInput, label)
def test(pooler, classifier, objs):
pass
def run():
numObjects = 100
objSize = 10
allIndices = np.array(xrange(1024), dtype=np.int32)
objs = [
(
label,
[np.random.choice(allIndices, 20) for _ in xrange(objSize)]
)
for label in xrange(numObjects)
]
pooler = ColumnPooler(
inputWidth=1024,
)
classifier = KNNClassifier(k=1, distanceMethod="rawOverlap")
train(pooler, classifier, objs)
test(pooler, classifier, objs)
if __name__ == "__main__":
run()
|
<commit_before><commit_msg>Add ColumnPooler test for debugging<commit_after>
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2018, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""TODO"""
import numpy as np
from htmresearch.algorithms.column_pooler import ColumnPooler
from nupic.algorithms.knn_classifier import KNNClassifier
def train(pooler, classifier, objs):
for label, obj in objs:
pooler.reset()
np.random.shuffle(obj)
for feature in obj:
sortedFeature = np.sort(feature)
print "A"
poolerOutput = pooler.compute(feedforwardInput=sortedFeature,
learn=True,
predictedInput=sortedFeature)
print "B"
classifierInput = np.zeros(4096, dtype=np.int32)
classifierInput[poolerOutput] = 1
classifier.learn(classifierInput, label)
def test(pooler, classifier, objs):
pass
def run():
numObjects = 100
objSize = 10
allIndices = np.array(xrange(1024), dtype=np.int32)
objs = [
(
label,
[np.random.choice(allIndices, 20) for _ in xrange(objSize)]
)
for label in xrange(numObjects)
]
pooler = ColumnPooler(
inputWidth=1024,
)
classifier = KNNClassifier(k=1, distanceMethod="rawOverlap")
train(pooler, classifier, objs)
test(pooler, classifier, objs)
if __name__ == "__main__":
run()
|
Add ColumnPooler test for debugging# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2018, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""TODO"""
import numpy as np
from htmresearch.algorithms.column_pooler import ColumnPooler
from nupic.algorithms.knn_classifier import KNNClassifier
def train(pooler, classifier, objs):
for label, obj in objs:
pooler.reset()
np.random.shuffle(obj)
for feature in obj:
sortedFeature = np.sort(feature)
print "A"
poolerOutput = pooler.compute(feedforwardInput=sortedFeature,
learn=True,
predictedInput=sortedFeature)
print "B"
classifierInput = np.zeros(4096, dtype=np.int32)
classifierInput[poolerOutput] = 1
classifier.learn(classifierInput, label)
def test(pooler, classifier, objs):
pass
def run():
numObjects = 100
objSize = 10
allIndices = np.array(xrange(1024), dtype=np.int32)
objs = [
(
label,
[np.random.choice(allIndices, 20) for _ in xrange(objSize)]
)
for label in xrange(numObjects)
]
pooler = ColumnPooler(
inputWidth=1024,
)
classifier = KNNClassifier(k=1, distanceMethod="rawOverlap")
train(pooler, classifier, objs)
test(pooler, classifier, objs)
if __name__ == "__main__":
run()
|
<commit_before><commit_msg>Add ColumnPooler test for debugging<commit_after># ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2018, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""TODO"""
import numpy as np
from htmresearch.algorithms.column_pooler import ColumnPooler
from nupic.algorithms.knn_classifier import KNNClassifier
def train(pooler, classifier, objs):
for label, obj in objs:
pooler.reset()
np.random.shuffle(obj)
for feature in obj:
sortedFeature = np.sort(feature)
print "A"
poolerOutput = pooler.compute(feedforwardInput=sortedFeature,
learn=True,
predictedInput=sortedFeature)
print "B"
classifierInput = np.zeros(4096, dtype=np.int32)
classifierInput[poolerOutput] = 1
classifier.learn(classifierInput, label)
def test(pooler, classifier, objs):
pass
def run():
numObjects = 100
objSize = 10
allIndices = np.array(xrange(1024), dtype=np.int32)
objs = [
(
label,
[np.random.choice(allIndices, 20) for _ in xrange(objSize)]
)
for label in xrange(numObjects)
]
pooler = ColumnPooler(
inputWidth=1024,
)
classifier = KNNClassifier(k=1, distanceMethod="rawOverlap")
train(pooler, classifier, objs)
test(pooler, classifier, objs)
if __name__ == "__main__":
run()
|
|
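The debug script in the record above feeds ColumnPooler output into a KNNClassifier configured with distanceMethod="rawOverlap", i.e. nearest-neighbour matching by shared active bits between sparse binary vectors. That core idea is easy to show standalone (a sketch, not NuPIC's implementation; the width and sparsity below are arbitrary):

import numpy as np

def raw_overlap(a, b):
    # Overlap of two binary vectors = count of shared active bits.
    return int(np.dot(a, b))

rng = np.random.RandomState(42)
width, active = 1024, 20

def random_sdr():
    v = np.zeros(width, dtype=np.int32)
    v[rng.choice(width, active, replace=False)] = 1
    return v

a, b = random_sdr(), random_sdr()
print(raw_overlap(a, a))  # 20: a vector fully overlaps itself
print(raw_overlap(a, b))  # near 0: random sparse vectors barely overlap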
eb09556cec13f2c80f57c7619d09d85d7fc29f32
|
setup.py
|
setup.py
|
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='2.1.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"oic==0.8.4.0",
"pyop==1.0.0",
"pyjwkest==1.1.5",
"pysaml2==4.0.3",
"requests==2.9.1",
"PyYAML==3.11",
"gunicorn==19.4.1",
"Werkzeug==0.11.2",
"click==6.6"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.4",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
|
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='2.1.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"oic>=0.8.4.0",
"pyop==1.0.0",
"pyjwkest==1.1.5",
"pysaml2==4.0.3",
"requests",
"PyYAML",
"gunicorn",
"Werkzeug",
"click"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.4",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
|
Update dependencies to make it installable.
|
Update dependencies to make it installable.
|
Python
|
apache-2.0
|
irtnog/SATOSA,irtnog/SATOSA,SUNET/SATOSA,its-dirg/SATOSA,SUNET/SATOSA
|
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='2.1.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"oic==0.8.4.0",
"pyop==1.0.0",
"pyjwkest==1.1.5",
"pysaml2==4.0.3",
"requests==2.9.1",
"PyYAML==3.11",
"gunicorn==19.4.1",
"Werkzeug==0.11.2",
"click==6.6"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.4",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
Update dependencies to make it installable.
|
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='2.1.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"oic>=0.8.4.0",
"pyop==1.0.0",
"pyjwkest==1.1.5",
"pysaml2==4.0.3",
"requests",
"PyYAML",
"gunicorn",
"Werkzeug",
"click"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.4",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
|
<commit_before>"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='2.1.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"oic==0.8.4.0",
"pyop==1.0.0",
"pyjwkest==1.1.5",
"pysaml2==4.0.3",
"requests==2.9.1",
"PyYAML==3.11",
"gunicorn==19.4.1",
"Werkzeug==0.11.2",
"click==6.6"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.4",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
<commit_msg>Update dependencies to make it installable.<commit_after>
|
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='2.1.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"oic>=0.8.4.0",
"pyop==1.0.0",
"pyjwkest==1.1.5",
"pysaml2==4.0.3",
"requests",
"PyYAML",
"gunicorn",
"Werkzeug",
"click"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.4",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
|
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='2.1.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"oic==0.8.4.0",
"pyop==1.0.0",
"pyjwkest==1.1.5",
"pysaml2==4.0.3",
"requests==2.9.1",
"PyYAML==3.11",
"gunicorn==19.4.1",
"Werkzeug==0.11.2",
"click==6.6"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.4",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
Update dependencies to make it installable."""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='2.1.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"oic>=0.8.4.0",
"pyop==1.0.0",
"pyjwkest==1.1.5",
"pysaml2==4.0.3",
"requests",
"PyYAML",
"gunicorn",
"Werkzeug",
"click"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.4",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
|
<commit_before>"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='2.1.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"oic==0.8.4.0",
"pyop==1.0.0",
"pyjwkest==1.1.5",
"pysaml2==4.0.3",
"requests==2.9.1",
"PyYAML==3.11",
"gunicorn==19.4.1",
"Werkzeug==0.11.2",
"click==6.6"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.4",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
<commit_msg>Update dependencies to make it installable.<commit_after>"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='2.1.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"oic>=0.8.4.0",
"pyop==1.0.0",
"pyjwkest==1.1.5",
"pysaml2==4.0.3",
"requests",
"PyYAML",
"gunicorn",
"Werkzeug",
"click"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.4",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
|
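Loosening the pins in the record above matters because SATOSA is a library, not an application: exact pins in install_requires collide with other packages' requirements. A small illustration of such a conflict (a sketch; the versions are made up):

from packaging.specifiers import SpecifierSet

ours = SpecifierSet('==2.9.1')    # the old exact pin on requests
theirs = SpecifierSet('>=2.10')   # some other dependency's need
combined = ours & theirs
print(any(v in combined for v in ('2.9.1', '2.10', '2.18.4')))  # False: unsatisfiable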
d4ad071a80bdbbc7e1ecc278800191bdf33f95c2
|
problem_1.py
|
problem_1.py
|
sum = 0
for i in range(1, 1000):
if i % 3 == 0 or i % 5 == 0:
sum += i
#print("i:", i)
print("Result:", sum)
|
Solve problem 1 in Python
|
Solve problem 1 in Python
|
Python
|
mit
|
sirodoht/project-euler,sirodoht/project-euler,sirodoht/project-euler
|
Solve problem 1 in Python
|
sum = 0
for i in range(1, 1000):
if i % 3 == 0 or i % 5 == 0:
sum += i
#print("i:", i)
print("Result:", sum)
|
<commit_before><commit_msg>Solve problem 1 in Python<commit_after>
|
sum = 0
for i in range(1, 1000):
if i % 3 == 0 or i % 5 == 0:
sum += i
#print("i:", i)
print("Result:", sum)
|
Solve problem 1 in Pythonsum = 0
for i in range(1, 1000):
if i % 3 == 0 or i % 5 == 0:
sum += i
#print("i:", i)
print("Result:", sum)
|
<commit_before><commit_msg>Solve problem 1 in Python<commit_after>sum = 0
for i in range(1, 1000):
if i % 3 == 0 or i % 5 == 0:
sum += i
#print("i:", i)
print("Result:", sum)
|
|
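The brute-force loop in the record above can be cross-checked in O(1) with inclusion-exclusion: add the multiples of 3 and of 5, then subtract the multiples of 15, which were counted twice. Each piece is k times a triangular number:

def triangular(n):
    return n * (n + 1) // 2

def sum_multiples_below(limit):
    # multiples of 3 + multiples of 5 - multiples of 15 (inclusion-exclusion)
    return (3 * triangular((limit - 1) // 3)
            + 5 * triangular((limit - 1) // 5)
            - 15 * triangular((limit - 1) // 15))

print(sum_multiples_below(1000))  # 233168, matching the brute-force loop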
0ebf0ecf1b4591960cd8b56a68eabf71fe85329d
|
tasks.py
|
tasks.py
|
from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, coverage, integration, watch_tests
from invocations.packaging import vendorize, release
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
test, coverage, integration, vendorize, release, www, docs, sites,
watch_docs, watch_tests
)
ns.configure({
'tests': {
'logformat': LOG_FORMAT,
},
'packaging': {
'sign': True,
'wheel': True,
# Because of PyYAML's dual source nonsense =/
'dual_wheels': True,
},
})
|
from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, coverage, integration, watch_tests
from invocations.packaging import vendorize, release
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
test, coverage, integration, vendorize, release, www, docs, sites,
watch_docs, watch_tests
)
ns.configure({
'tests': {
'logformat': LOG_FORMAT,
'package': 'invoke',
},
'packaging': {
'sign': True,
'wheel': True,
# Because of PyYAML's dual source nonsense =/
'dual_wheels': True,
},
})
|
Make sure test watcher picks up source code file changes
|
Make sure test watcher picks up source code file changes
|
Python
|
bsd-2-clause
|
mkusz/invoke,mkusz/invoke,pyinvoke/invoke,pyinvoke/invoke
|
from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, coverage, integration, watch_tests
from invocations.packaging import vendorize, release
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
test, coverage, integration, vendorize, release, www, docs, sites,
watch_docs, watch_tests
)
ns.configure({
'tests': {
'logformat': LOG_FORMAT,
},
'packaging': {
'sign': True,
'wheel': True,
# Because of PyYAML's dual source nonsense =/
'dual_wheels': True,
},
})
Make sure test watcher picks up source code file changes
|
from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, coverage, integration, watch_tests
from invocations.packaging import vendorize, release
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
test, coverage, integration, vendorize, release, www, docs, sites,
watch_docs, watch_tests
)
ns.configure({
'tests': {
'logformat': LOG_FORMAT,
'package': 'invoke',
},
'packaging': {
'sign': True,
'wheel': True,
# Because of PyYAML's dual source nonsense =/
'dual_wheels': True,
},
})
|
<commit_before>from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, coverage, integration, watch_tests
from invocations.packaging import vendorize, release
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
test, coverage, integration, vendorize, release, www, docs, sites,
watch_docs, watch_tests
)
ns.configure({
'tests': {
'logformat': LOG_FORMAT,
},
'packaging': {
'sign': True,
'wheel': True,
# Because of PyYAML's dual source nonsense =/
'dual_wheels': True,
},
})
<commit_msg>Make sure test watcher picks up source code file changes<commit_after>
|
from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, coverage, integration, watch_tests
from invocations.packaging import vendorize, release
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
test, coverage, integration, vendorize, release, www, docs, sites,
watch_docs, watch_tests
)
ns.configure({
'tests': {
'logformat': LOG_FORMAT,
'package': 'invoke',
},
'packaging': {
'sign': True,
'wheel': True,
# Because of PyYAML's dual source nonsense =/
'dual_wheels': True,
},
})
|
from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, coverage, integration, watch_tests
from invocations.packaging import vendorize, release
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
test, coverage, integration, vendorize, release, www, docs, sites,
watch_docs, watch_tests
)
ns.configure({
'tests': {
'logformat': LOG_FORMAT,
},
'packaging': {
'sign': True,
'wheel': True,
# Because of PyYAML's dual source nonsense =/
'dual_wheels': True,
},
})
Make sure test watcher picks up source code file changesfrom invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, coverage, integration, watch_tests
from invocations.packaging import vendorize, release
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
test, coverage, integration, vendorize, release, www, docs, sites,
watch_docs, watch_tests
)
ns.configure({
'tests': {
'logformat': LOG_FORMAT,
'package': 'invoke',
},
'packaging': {
'sign': True,
'wheel': True,
# Because of PyYAML's dual source nonsense =/
'dual_wheels': True,
},
})
|
<commit_before>from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, coverage, integration, watch_tests
from invocations.packaging import vendorize, release
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
test, coverage, integration, vendorize, release, www, docs, sites,
watch_docs, watch_tests
)
ns.configure({
'tests': {
'logformat': LOG_FORMAT,
},
'packaging': {
'sign': True,
'wheel': True,
# Because of PyYAML's dual source nonsense =/
'dual_wheels': True,
},
})
<commit_msg>Make sure test watcher picks up source code file changes<commit_after>from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, coverage, integration, watch_tests
from invocations.packaging import vendorize, release
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
test, coverage, integration, vendorize, release, www, docs, sites,
watch_docs, watch_tests
)
ns.configure({
'tests': {
'logformat': LOG_FORMAT,
'package': 'invoke',
},
'packaging': {
'sign': True,
'wheel': True,
# Because of PyYAML's dual source nonsense =/
'dual_wheels': True,
},
})
|
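The one-line fix in the record above adds 'package': 'invoke' under the tests config so the watcher monitors the source package in addition to the test files. The precise behaviour belongs to the invocations helpers; as a generic sketch of mtime-polling file watching (not invocations' implementation):

import os
import time

def snapshot(root):
    # Map every .py file under `root` to its last-modified time.
    mtimes = {}
    for dirpath, _, filenames in os.walk(root):
        for name in filenames:
            if name.endswith('.py'):
                path = os.path.join(dirpath, name)
                mtimes[path] = os.stat(path).st_mtime
    return mtimes

def changed(before, after):
    return [p for p, t in after.items() if before.get(p) != t]

before = snapshot('.')
time.sleep(1)  # ... edit a file in the meantime ...
print(changed(before, snapshot('.')))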
319342a34feab6781986efb30aa69b2623460654
|
ircstat/config.py
|
ircstat/config.py
|
# Copyright 2013 John Reese
# Licensed under the MIT license
import json
from os import path
def read_config(filepath, defaults=None):
"""Read configuration from a given file path, by executing the contents as
python code within a sandboxed set of globals. Default values may be
passed in as a dictionary to pre-populate the set of locals. The returned
value is the resulting dictionary of local values after executing the
configuration file."""
filepath = path.realpath(filepath)
if not path.isfile(filepath):
raise ValueError('config path "%s" does not exist' % filepath)
g = {"__builtins__": {}}
config = {}
if defaults is not None:
config.update(defaults)
with open(filepath) as fh:
data = fh.read()
data = compile(data, filepath, 'exec')
exec(data, g, config)
return config
|
Add module for reading sandboxed python from a file
|
Add module for reading sandboxed python from a file
|
Python
|
mit
|
jreese/ircstat,jreese/ircstat
|
Add module for reading sandboxed python from a file
|
# Copyright 2013 John Reese
# Licensed under the MIT license
import json
from os import path
def read_config(filepath, defaults=None):
"""Read configuration from a given file path, by executing the contents as
python code within a sandboxed set of globals. Default values may be
passed in as a dictionary to pre-populate the set of locals. The returned
value is the resulting dictionary of local values after executing the
configuration file."""
filepath = path.realpath(filepath)
if not path.isfile(filepath):
raise ValueError('config path "%s" does not exist' % filepath)
g = {"__builtins__": {}}
config = {}
if defaults is not None:
config.update(defaults)
with open(filepath) as fh:
data = fh.read()
data = compile(data, filepath, 'exec')
exec(data, g, config)
return config
|
<commit_before><commit_msg>Add module for reading sandboxed python from a file<commit_after>
|
# Copyright 2013 John Reese
# Licensed under the MIT license
import json
from os import path
def read_config(filepath, defaults=None):
"""Read configuration from a given file path, by executing the contents as
python code within a sandboxed set of globals. Default values may be
passed in as a dictionary to pre-populate the set of locals. The returned
value is the resulting dictionary of local values after executing the
configuration file."""
filepath = path.realpath(filepath)
if not path.isfile(filepath):
raise ValueError('config path "%s" does not exist' % filepath)
g = {"__builtins__": {}}
config = {}
if defaults is not None:
config.update(defaults)
with open(filepath) as fh:
data = fh.read()
data = compile(data, filepath, 'exec')
exec(data, g, config)
return config
|
Add module for reading sandboxed python from a file# Copyright 2013 John Reese
# Licensed under the MIT license
import json
from os import path
def read_config(filepath, defaults=None):
"""Read configuration from a given file path, by executing the contents as
python code within a sandboxed set of globals. Default values may be
passed in as a dictionary to pre-populate the set of locals. The returned
value is the resulting dictionary of local values after executing the
configuration file."""
filepath = path.realpath(filepath)
if not path.isfile(filepath):
raise ValueError('config path "%s" does not exist' % filepath)
g = {"__builtins__": {}}
config = {}
if defaults is not None:
config.update(defaults)
with open(filepath) as fh:
data = fh.read()
data = compile(data, filepath, 'exec')
exec(data, g, config)
return config
|
<commit_before><commit_msg>Add module for reading sandboxed python from a file<commit_after># Copyright 2013 John Reese
# Licensed under the MIT license
import json
from os import path
def read_config(filepath, defaults=None):
"""Read configuration from a given file path, by executing the contents as
python code within a sandboxed set of globals. Default values may be
passed in as a dictionary to pre-populate the set of locals. The returned
value is the resulting dictionary of local values after executing the
configuration file."""
filepath = path.realpath(filepath)
if not path.isfile(filepath):
raise ValueError('config path "%s" does not exist' % filepath)
g = {"__builtins__": {}}
config = {}
if defaults is not None:
config.update(defaults)
with open(filepath) as fh:
data = fh.read()
data = compile(data, filepath, 'exec')
exec(data, g, config)
return config
|
|
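Two observations on the read_config shown above: emptying __builtins__ blocks casual use of builtins inside the config file, but exec of arbitrary Python is still not a hard security boundary; and the defaults dictionary doubles as the exec locals, so assignments in the config file simply overwrite the defaults. A self-contained usage sketch that inlines the same exec pattern (the file contents and keys are invented for the demo):

import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.py', delete=False) as fh:
    fh.write('nick = "ircstat"\nchannels = ["#python"]\n')
    path = fh.name

config = {'nick': 'nobody', 'channels': []}  # defaults, as in read_config
with open(path) as fh:
    code = compile(fh.read(), path, 'exec')
exec(code, {'__builtins__': {}}, config)
print(config['nick'], config['channels'])  # ircstat ['#python']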
351ec5ef16a131d69054d05cef7666890a7d8888
|
tests/test_cat2cohort.py
|
tests/test_cat2cohort.py
|
"""Unit tests for cat2cohort."""
import unittest
from cat2cohort import api_url
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
pass
|
Add unittests module for cat2cohort
|
Add unittests module for cat2cohort
I want to have unit tests for the methods in cat2cohort.
Add empty module implementing unittest.
|
Python
|
mit
|
danmichaelo/wm_metrics,danmichaelo/wm_metrics,danmichaelo/wm_metrics,danmichaelo/wm_metrics,Commonists/wm_metrics,Commonists/wm_metrics,Commonists/wm_metrics,Commonists/wm_metrics
|
Add unittests module for cat2cohort
I want to have unit tests for the methods in cat2cohort.
Add empty module implementing unittest.
|
"""Unit tests for cat2cohort."""
import unittest
from cat2cohort import api_url
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
pass
|
<commit_before><commit_msg>Add unittests module for cat2cohort
I want to have unit tests for the methods in cat2cohort.
Add empty module implementing unittest.<commit_after>
|
"""Unit tests for cat2cohort."""
import unittest
from cat2cohort import api_url
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
pass
|
Add unittests module for cat2cohort
I want to have unit tests for the methods in cat2cohort.
Add empty module implementing unittest."""Unit tests for cat2cohort."""
import unittest
from cat2cohort import api_url
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
pass
|
<commit_before><commit_msg>Add unittests module for cat2cohort
I want to have unit tests for the methods in cat2cohort.
Add empty module implementing unittest.<commit_after>"""Unit tests for cat2cohort."""
import unittest
from cat2cohort import api_url
class TestCat2Cohort(unittest.TestCase):
"""Test methods from Cat2Cohort."""
pass
|
|
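The scaffold in the record above is deliberately empty; the api_url import hints at the first thing to cover. Fleshing it out would look roughly like this (the assertion is a placeholder assumption — the real test would pin api_url's actual value or behaviour):

import unittest
from cat2cohort import api_url

class TestCat2Cohort(unittest.TestCase):
    """Test methods from Cat2Cohort."""

    def test_api_url_exists(self):
        # Placeholder: replace with the expected value once known.
        self.assertIsNotNone(api_url)

if __name__ == '__main__':
    unittest.main()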
edc0bf262e449d3b5ff2c9fb602f21536d7ba985
|
tests/test_sample_seed.py
|
tests/test_sample_seed.py
|
import numpy as np
from SALib.sample import fast_sampler, finite_diff, latin, saltelli
from SALib.sample.morris import sample as morris_sampler
def problem_setup():
N=1
problem = {'num_vars': 3,
'names': ['x1','x2','x3'],
'bounds': [
[0,1],
[0,1],
[0,1]
]
}
return N, problem
def test_morris_sample_seed():
N, problem = problem_setup()
sample1 = morris_sampler(problem, N, seed=None)
sample2 = morris_sampler(problem, N, seed=123)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_saltelli_sample_seed():
N, problem = problem_setup()
sample1 = saltelli.sample(problem, N, calc_second_order=False, skip_values=1000)
sample2 = saltelli.sample(problem, N, calc_second_order=False, skip_values=1001)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_fast_sample_seed():
_, problem = problem_setup()
sample1 = fast_sampler.sample(problem, 65, seed=None)
sample2 = fast_sampler.sample(problem, 65, seed=123)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_finite_diff_sample_seed():
N, problem = problem_setup()
sample1 = finite_diff.sample(problem, N, skip_values=1001)
sample2 = finite_diff.sample(problem, N, skip_values=1002)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_latin_sample_seed():
N, problem = problem_setup()
sample1 = latin.sample(problem, N, seed=None)
sample2 = latin.sample(problem, N, seed=123)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
|
Add tests to ensure seed values result in different samples
|
Add tests to ensure seed values result in different samples
|
Python
|
mit
|
jdherman/SALib,jdherman/SALib,SALib/SALib
|
Add tests to ensure seed values result in different samples
|
import numpy as np
from SALib.sample import fast_sampler, finite_diff, latin, saltelli
from SALib.sample.morris import sample as morris_sampler
def problem_setup():
N=1
problem = {'num_vars': 3,
'names': ['x1','x2','x3'],
'bounds': [
[0,1],
[0,1],
[0,1]
]
}
return N, problem
def test_morris_sample_seed():
N, problem = problem_setup()
sample1 = morris_sampler(problem, N, seed=None)
sample2 = morris_sampler(problem, N, seed=123)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_saltelli_sample_seed():
N, problem = problem_setup()
sample1 = saltelli.sample(problem, N, calc_second_order=False, skip_values=1000)
sample2 = saltelli.sample(problem, N, calc_second_order=False, skip_values=1001)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_fast_sample_seed():
_, problem = problem_setup()
sample1 = fast_sampler.sample(problem, 65, seed=None)
sample2 = fast_sampler.sample(problem, 65, seed=123)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_finite_diff_sample_seed():
N, problem = problem_setup()
sample1 = finite_diff.sample(problem, N, skip_values=1001)
sample2 = finite_diff.sample(problem, N, skip_values=1002)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_latin_sample_seed():
N, problem = problem_setup()
sample1 = latin.sample(problem, N, seed=None)
sample2 = latin.sample(problem, N, seed=123)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
|
<commit_before><commit_msg>Add tests to ensure seed values result in different samples<commit_after>
|
import numpy as np
from SALib.sample import fast_sampler, finite_diff, latin, saltelli
from SALib.sample.morris import sample as morris_sampler
def problem_setup():
N=1
problem = {'num_vars': 3,
'names': ['x1','x2','x3'],
'bounds': [
[0,1],
[0,1],
[0,1]
]
}
return N, problem
def test_morris_sample_seed():
N, problem = problem_setup()
sample1 = morris_sampler(problem, N, seed=None)
sample2 = morris_sampler(problem, N, seed=123)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_saltelli_sample_seed():
N, problem = problem_setup()
sample1 = saltelli.sample(problem, N, calc_second_order=False, skip_values=1000)
sample2 = saltelli.sample(problem, N, calc_second_order=False, skip_values=1001)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_fast_sample_seed():
_, problem = problem_setup()
sample1 = fast_sampler.sample(problem, 65, seed=None)
sample2 = fast_sampler.sample(problem, 65, seed=123)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_finite_diff_sample_seed():
N, problem = problem_setup()
sample1 = finite_diff.sample(problem, N, skip_values=1001)
sample2 = finite_diff.sample(problem, N, skip_values=1002)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_latin_sample_seed():
N, problem = problem_setup()
sample1 = latin.sample(problem, N, seed=None)
sample2 = latin.sample(problem, N, seed=123)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
|
Add tests to ensure seed values result in different samplesimport numpy as np
from SALib.sample import fast_sampler, finite_diff, latin, saltelli
from SALib.sample.morris import sample as morris_sampler
def problem_setup():
N=1
problem = {'num_vars': 3,
'names': ['x1','x2','x3'],
'bounds': [
[0,1],
[0,1],
[0,1]
]
}
return N, problem
def test_morris_sample_seed():
N, problem = problem_setup()
sample1 = morris_sampler(problem, N, seed=None)
sample2 = morris_sampler(problem, N, seed=123)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_saltelli_sample_seed():
N, problem = problem_setup()
sample1 = saltelli.sample(problem, N, calc_second_order=False, skip_values=1000)
sample2 = saltelli.sample(problem, N, calc_second_order=False, skip_values=1001)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_fast_sample_seed():
_, problem = problem_setup()
sample1 = fast_sampler.sample(problem, 65, seed=None)
sample2 = fast_sampler.sample(problem, 65, seed=123)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_finite_diff_sample_seed():
N, problem = problem_setup()
sample1 = finite_diff.sample(problem, N, skip_values=1001)
sample2 = finite_diff.sample(problem, N, skip_values=1002)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_latin_sample_seed():
N, problem = problem_setup()
sample1 = latin.sample(problem, N, seed=None)
sample2 = latin.sample(problem, N, seed=123)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
|
<commit_before><commit_msg>Add tests to ensure seed values result in different samples<commit_after>import numpy as np
from SALib.sample import fast_sampler, finite_diff, latin, saltelli
from SALib.sample.morris import sample as morris_sampler
def problem_setup():
N=1
problem = {'num_vars': 3,
'names': ['x1','x2','x3'],
'bounds': [
[0,1],
[0,1],
[0,1]
]
}
return N, problem
def test_morris_sample_seed():
N, problem = problem_setup()
sample1 = morris_sampler(problem, N, seed=None)
sample2 = morris_sampler(problem, N, seed=123)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_saltelli_sample_seed():
N, problem = problem_setup()
sample1 = saltelli.sample(problem, N, calc_second_order=False, skip_values=1000)
sample2 = saltelli.sample(problem, N, calc_second_order=False, skip_values=1001)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_fast_sample_seed():
_, problem = problem_setup()
sample1 = fast_sampler.sample(problem, 65, seed=None)
sample2 = fast_sampler.sample(problem, 65, seed=123)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_finite_diff_sample_seed():
N, problem = problem_setup()
sample1 = finite_diff.sample(problem, N, skip_values=1001)
sample2 = finite_diff.sample(problem, N, skip_values=1002)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
def test_latin_sample_seed():
N, problem = problem_setup()
sample1 = latin.sample(problem, N, seed=None)
sample2 = latin.sample(problem, N, seed=123)
np.testing.assert_equal(np.any(np.not_equal(sample1,sample2)), True)
|
|
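The tests in the record above check that different seeds (or skip values) yield different samples; the complementary property — the same seed reproduces the same sample — is the other half of the contract. A generic numpy sketch of both, reusing the record's assertion style:

import numpy as np

a = np.random.RandomState(123).uniform(size=(4, 3))
b = np.random.RandomState(123).uniform(size=(4, 3))
c = np.random.RandomState(124).uniform(size=(4, 3))

np.testing.assert_array_equal(a, b)  # same seed, identical sample
np.testing.assert_equal(np.any(np.not_equal(a, c)), True)  # different seed differs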
9746e6f16dc0190dfd10c0499eec260a26d0c9f6
|
alembic/versions/72a56f6f1148_fix_foreign_key_cascades.py
|
alembic/versions/72a56f6f1148_fix_foreign_key_cascades.py
|
revision = '72a56f6f1148'
down_revision = 'be28e555a2da'
branch_labels = None
depends_on = None
import alembic
import sqlalchemy
def upgrade():
# Remove cascading deletes from quote FKs
alembic.op.drop_constraint("quotes_game_id_fkey", "quotes", "foreignkey")
alembic.op.drop_constraint("quotes_show_id_fkey", "quotes", "foreignkey")
alembic.op.create_foreign_key(
None, 'quotes', 'games', ['game_id'], ['id'], onupdate="CASCADE", ondelete="SET NULL")
alembic.op.create_foreign_key(
None, 'quotes', 'shows', ['show_id'], ['id'], onupdate="CASCADE", ondelete="SET NULL")
def downgrade():
alembic.op.drop_constraint("quotes_game_id_fkey", "quotes", "foreignkey")
alembic.op.drop_constraint("quotes_show_id_fkey", "quotes", "foreignkey")
alembic.op.create_foreign_key(
None, 'quotes', 'games', ['game_id'], ['id'], onupdate="CASCADE", ondelete="CASCADE")
alembic.op.create_foreign_key(
None, 'quotes', 'shows', ['show_id'], ['id'], onupdate="CASCADE", ondelete="CASCADE")
|
Remove cascading deletes from quote foreign keys
|
Remove cascading deletes from quote foreign keys
|
Python
|
apache-2.0
|
mrphlip/lrrbot,andreasots/lrrbot,andreasots/lrrbot,andreasots/lrrbot,mrphlip/lrrbot,mrphlip/lrrbot
|
Remove cascading deletes from quote foreign keys
|
revision = '72a56f6f1148'
down_revision = 'be28e555a2da'
branch_labels = None
depends_on = None
import alembic
import sqlalchemy
def upgrade():
# Remove cascading deletes from quote FKs
alembic.op.drop_constraint("quotes_game_id_fkey", "quotes", "foreignkey")
alembic.op.drop_constraint("quotes_show_id_fkey", "quotes", "foreignkey")
alembic.op.create_foreign_key(
None, 'quotes', 'games', ['game_id'], ['id'], onupdate="CASCADE", ondelete="SET NULL")
alembic.op.create_foreign_key(
None, 'quotes', 'shows', ['show_id'], ['id'], onupdate="CASCADE", ondelete="SET NULL")
def downgrade():
alembic.op.drop_constraint("quotes_game_id_fkey", "quotes", "foreignkey")
alembic.op.drop_constraint("quotes_show_id_fkey", "quotes", "foreignkey")
alembic.op.create_foreign_key(
None, 'quotes', 'games', ['game_id'], ['id'], onupdate="CASCADE", ondelete="CASCADE")
alembic.op.create_foreign_key(
None, 'quotes', 'shows', ['show_id'], ['id'], onupdate="CASCADE", ondelete="CASCADE")
|
<commit_before><commit_msg>Remove cascading deletes from quote foreign keys<commit_after>
|
revision = '72a56f6f1148'
down_revision = 'be28e555a2da'
branch_labels = None
depends_on = None
import alembic
import sqlalchemy
def upgrade():
# Remove cascading deletes from quote FKs
alembic.op.drop_constraint("quotes_game_id_fkey", "quotes", "foreignkey")
alembic.op.drop_constraint("quotes_show_id_fkey", "quotes", "foreignkey")
alembic.op.create_foreign_key(
None, 'quotes', 'games', ['game_id'], ['id'], onupdate="CASCADE", ondelete="SET NULL")
alembic.op.create_foreign_key(
None, 'quotes', 'shows', ['show_id'], ['id'], onupdate="CASCADE", ondelete="SET NULL")
def downgrade():
alembic.op.drop_constraint("quotes_game_id_fkey", "quotes", "foreignkey")
alembic.op.drop_constraint("quotes_show_id_fkey", "quotes", "foreignkey")
alembic.op.create_foreign_key(
None, 'quotes', 'games', ['game_id'], ['id'], onupdate="CASCADE", ondelete="CASCADE")
alembic.op.create_foreign_key(
None, 'quotes', 'shows', ['show_id'], ['id'], onupdate="CASCADE", ondelete="CASCADE")
|
Remove cascading deletes from quote foreign keysrevision = '72a56f6f1148'
down_revision = 'be28e555a2da'
branch_labels = None
depends_on = None
import alembic
import sqlalchemy
def upgrade():
# Remove cascading deletes from quote FKs
alembic.op.drop_constraint("quotes_game_id_fkey", "quotes", "foreignkey")
alembic.op.drop_constraint("quotes_show_id_fkey", "quotes", "foreignkey")
alembic.op.create_foreign_key(
None, 'quotes', 'games', ['game_id'], ['id'], onupdate="CASCADE", ondelete="SET NULL")
alembic.op.create_foreign_key(
None, 'quotes', 'shows', ['show_id'], ['id'], onupdate="CASCADE", ondelete="SET NULL")
def downgrade():
alembic.op.drop_constraint("quotes_game_id_fkey", "quotes", "foreignkey")
alembic.op.drop_constraint("quotes_show_id_fkey", "quotes", "foreignkey")
alembic.op.create_foreign_key(
None, 'quotes', 'games', ['game_id'], ['id'], onupdate="CASCADE", ondelete="CASCADE")
alembic.op.create_foreign_key(
None, 'quotes', 'shows', ['show_id'], ['id'], onupdate="CASCADE", ondelete="CASCADE")
|
<commit_before><commit_msg>Remove cascading deletes from quote foreign keys<commit_after>revision = '72a56f6f1148'
down_revision = 'be28e555a2da'
branch_labels = None
depends_on = None
import alembic
import sqlalchemy
def upgrade():
# Remove cascading deletes from quote FKs
alembic.op.drop_constraint("quotes_game_id_fkey", "quotes", "foreignkey")
alembic.op.drop_constraint("quotes_show_id_fkey", "quotes", "foreignkey")
alembic.op.create_foreign_key(
None, 'quotes', 'games', ['game_id'], ['id'], onupdate="CASCADE", ondelete="SET NULL")
alembic.op.create_foreign_key(
None, 'quotes', 'shows', ['show_id'], ['id'], onupdate="CASCADE", ondelete="SET NULL")
def downgrade():
alembic.op.drop_constraint("quotes_game_id_fkey", "quotes", "foreignkey")
alembic.op.drop_constraint("quotes_show_id_fkey", "quotes", "foreignkey")
alembic.op.create_foreign_key(
None, 'quotes', 'games', ['game_id'], ['id'], onupdate="CASCADE", ondelete="CASCADE")
alembic.op.create_foreign_key(
None, 'quotes', 'shows', ['show_id'], ['id'], onupdate="CASCADE", ondelete="CASCADE")
|
|
cb12e5da17a115ea751df604158992af7c0d6573
|
rbtools/utils/console.py
|
rbtools/utils/console.py
|
import os
import subprocess
from distutils.util import strtobool
from rbtools.utils.filesystem import make_tempfile
def confirm(question):
"""Interactively prompt for a Yes/No answer.
Accepted values (case-insensitive) depend on distutils.util.strtobool():
'Yes' values: y, yes, t, true, on, 1
'No' values: n, no, f, false, off, 0
"""
while True:
try:
answer = raw_input("%s [Yes/No]: " % question).lower()
return strtobool(answer)
except ValueError:
print '%s is not a valid answer.' % answer
def edit_text(content):
"""Allows a user to edit a block of text and returns the saved result.
The environment's default text editor is used if available, otherwise
vim is used.
"""
tempfile = make_tempfile(content.encode('utf8'))
editor = os.environ.get('EDITOR', 'vim')
subprocess.call([editor, tempfile])
f = open(tempfile)
result = f.read()
f.close()
return result.decode('utf8')
|
import os
import subprocess
from distutils.util import strtobool
from rbtools.utils.filesystem import make_tempfile
def confirm(question):
"""Interactively prompt for a Yes/No answer.
Accepted values (case-insensitive) depend on distutils.util.strtobool():
'Yes' values: y, yes, t, true, on, 1
'No' values: n, no, f, false, off, 0
"""
while True:
try:
answer = raw_input("%s [Yes/No]: " % question).lower()
return strtobool(answer)
except ValueError:
print '%s is not a valid answer.' % answer
def edit_text(content):
"""Allows a user to edit a block of text and returns the saved result.
The environment's default text editor is used if available, otherwise
vi is used.
"""
tempfile = make_tempfile(content.encode('utf8'))
editor = os.environ.get('VISUAL') or os.environ.get('EDITOR') or 'vi'
try:
subprocess.call([editor, tempfile])
except OSError:
print 'No editor found. Set EDITOR environment variable or install vi.'
raise
f = open(tempfile)
result = f.read()
f.close()
return result.decode('utf8')
|
Clean handling of lack of $EDITOR
|
Clean handling of lack of $EDITOR
Problem: If EDITOR is not set, and vim is not installed, rbt post exits with
just "CRITICAL: [Errno 2] No such file or directory" as an error message, which
does not indicate where the problem lies nor how to solve it.
vim is not installed by default on many distributions, or is installed as "vi".
Moreover, EDITOR is traditionally used for a line editor (remember those?),
with VISUAL being the environment variable of choice for a full-screen or GUI
editor. If neither is set, we should fall back to the editor most likely to be
installed in the base distribution.
Solution: Prefer $VISUAL over $EDITOR over "vi". Offer a helpful error message
on failure.
Reviewed at https://reviews.reviewboard.org/r/5790/
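A minimal standalone sketch of the fallback chain described above (illustrative only; it mirrors the one-line change in the code below):

import os

editor = os.environ.get('VISUAL') or os.environ.get('EDITOR') or 'vi'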
|
Python
|
mit
|
davidt/rbtools,haosdent/rbtools,halvorlu/rbtools,davidt/rbtools,halvorlu/rbtools,reviewboard/rbtools,datjwu/rbtools,beol/rbtools,reviewboard/rbtools,haosdent/rbtools,datjwu/rbtools,reviewboard/rbtools,beol/rbtools,haosdent/rbtools,datjwu/rbtools,beol/rbtools,halvorlu/rbtools,davidt/rbtools
|
import os
import subprocess
from distutils.util import strtobool
from rbtools.utils.filesystem import make_tempfile
def confirm(question):
"""Interactively prompt for a Yes/No answer.
Accepted values (case-insensitive) depend on distutils.util.strtobool():
'Yes' values: y, yes, t, true, on, 1
'No' values: n, no, f, false, off, 0
"""
while True:
try:
answer = raw_input("%s [Yes/No]: " % question).lower()
return strtobool(answer)
except ValueError:
print '%s is not a valid answer.' % answer
def edit_text(content):
"""Allows a user to edit a block of text and returns the saved result.
The environment's default text editor is used if available, otherwise
vim is used.
"""
tempfile = make_tempfile(content.encode('utf8'))
editor = os.environ.get('EDITOR', 'vim')
subprocess.call([editor, tempfile])
f = open(tempfile)
result = f.read()
f.close()
return result.decode('utf8')
Clean handling of lack of $EDITOR
Problem: If EDITOR is not set, and vim is not installed, rbt post exits with
just "CRITICAL: [Errno 2] No such file or directory" as an error message, which
does not indicate where the problem lies nor how to solve it.
vim is not installed by default on many distributions, or is installed as "vi".
Moreover, EDITOR is traditionally used for a line editor (remember those?),
with VISUAL being the environment variable of choice for a full-screen or GUI
editor. If neither is set, we should fall back to the editor most likely to be
installed in the base distribution.
Solution: Prefer $VISUAL over $EDITOR over "vi". Offer a helpful error message
on failure.
Reviewed at https://reviews.reviewboard.org/r/5790/
|
import os
import subprocess
from distutils.util import strtobool
from rbtools.utils.filesystem import make_tempfile
def confirm(question):
"""Interactively prompt for a Yes/No answer.
Accepted values (case-insensitive) depend on distutils.util.strtobool():
'Yes' values: y, yes, t, true, on, 1
'No' values: n, no, f, false, off, 0
"""
while True:
try:
answer = raw_input("%s [Yes/No]: " % question).lower()
return strtobool(answer)
except ValueError:
print '%s is not a valid answer.' % answer
def edit_text(content):
"""Allows a user to edit a block of text and returns the saved result.
The environment's default text editor is used if available, otherwise
vi is used.
"""
tempfile = make_tempfile(content.encode('utf8'))
editor = os.environ.get('VISUAL') or os.environ.get('EDITOR') or 'vi'
try:
subprocess.call([editor, tempfile])
except OSError:
print 'No editor found. Set EDITOR environment variable or install vi.'
raise
f = open(tempfile)
result = f.read()
f.close()
return result.decode('utf8')
|
<commit_before>import os
import subprocess
from distutils.util import strtobool
from rbtools.utils.filesystem import make_tempfile
def confirm(question):
"""Interactively prompt for a Yes/No answer.
Accepted values (case-insensitive) depend on distutils.util.strtobool():
'Yes' values: y, yes, t, true, on, 1
'No' values: n, no, f, false, off, 0
"""
while True:
try:
answer = raw_input("%s [Yes/No]: " % question).lower()
return strtobool(answer)
except ValueError:
print '%s is not a valid answer.' % answer
def edit_text(content):
"""Allows a user to edit a block of text and returns the saved result.
The environment's default text editor is used if available, otherwise
vim is used.
"""
tempfile = make_tempfile(content.encode('utf8'))
editor = os.environ.get('EDITOR', 'vim')
subprocess.call([editor, tempfile])
f = open(tempfile)
result = f.read()
f.close()
return result.decode('utf8')
<commit_msg>Clean handling of lack of $EDITOR
Problem: If EDITOR is not set, and vim is not installed, rbt post exits with
just "CRITICAL: [Errno 2] No such file or directory" as an error message, which
does not indicate where the problem lies nor how to solve it.
vim is not installed by default on many distributions, or is installed as "vi".
Moreover, EDITOR is traditionally used for a line editor (remember those?),
with VISUAL being the environment variable of choice for a full-screen or GUI
editor. If neither is set, we should fall back to the editor most likely to be
installed in the base distribution.
Solution: Prefer $VISUAL over $EDITOR over "vi". Offer a helpful error message
on failure.
Reviewed at https://reviews.reviewboard.org/r/5790/<commit_after>
|
import os
import subprocess
from distutils.util import strtobool
from rbtools.utils.filesystem import make_tempfile
def confirm(question):
"""Interactively prompt for a Yes/No answer.
Accepted values (case-insensitive) depend on distutils.util.strtobool():
'Yes' values: y, yes, t, true, on, 1
'No' values: n, no, f, false, off, 0
"""
while True:
try:
answer = raw_input("%s [Yes/No]: " % question).lower()
return strtobool(answer)
except ValueError:
print '%s is not a valid answer.' % answer
def edit_text(content):
"""Allows a user to edit a block of text and returns the saved result.
The environment's default text editor is used if available, otherwise
vi is used.
"""
tempfile = make_tempfile(content.encode('utf8'))
editor = os.environ.get('VISUAL') or os.environ.get('EDITOR') or 'vi'
try:
subprocess.call([editor, tempfile])
except OSError:
print 'No editor found. Set EDITOR environment variable or install vi.'
raise
f = open(tempfile)
result = f.read()
f.close()
return result.decode('utf8')
|
import os
import subprocess
from distutils.util import strtobool
from rbtools.utils.filesystem import make_tempfile
def confirm(question):
"""Interactively prompt for a Yes/No answer.
Accepted values (case-insensitive) depend on distutils.util.strtobool():
'Yes' values: y, yes, t, true, on, 1
'No' values: n, no, f, false, off, 0
"""
while True:
try:
answer = raw_input("%s [Yes/No]: " % question).lower()
return strtobool(answer)
except ValueError:
print '%s is not a valid answer.' % answer
def edit_text(content):
"""Allows a user to edit a block of text and returns the saved result.
The environment's default text editor is used if available, otherwise
vim is used.
"""
tempfile = make_tempfile(content.encode('utf8'))
editor = os.environ.get('EDITOR', 'vim')
subprocess.call([editor, tempfile])
f = open(tempfile)
result = f.read()
f.close()
return result.decode('utf8')
Clean handling of lack of $EDITOR
Problem: If EDITOR is not set, and vim is not installed, rbt post exits with
just "CRITICAL: [Errno 2] No such file or directory" as an error message, which
does not indicate where the problem lies nor how to solve it.
vim is not installed by default on many distributions, or is installed as "vi".
Moreover, EDITOR is traditionally used for a line editor (remember those?),
with VISUAL being the environment variable of choice for a full-screen or GUI
editor. If neither is set, we should fall back to the editor most likely to be
installed in the base distribution.
Solution: Prefer $VISUAL over $EDITOR over "vi". Offer a helpful error message
on failure.
Reviewed at https://reviews.reviewboard.org/r/5790/import os
import subprocess
from distutils.util import strtobool
from rbtools.utils.filesystem import make_tempfile
def confirm(question):
"""Interactively prompt for a Yes/No answer.
Accepted values (case-insensitive) depend on distutils.util.strtobool():
'Yes' values: y, yes, t, true, on, 1
'No' values: n, no, f, false, off, 0
"""
while True:
try:
answer = raw_input("%s [Yes/No]: " % question).lower()
return strtobool(answer)
except ValueError:
print '%s is not a valid answer.' % answer
def edit_text(content):
"""Allows a user to edit a block of text and returns the saved result.
The environment's default text editor is used if available, otherwise
vi is used.
"""
tempfile = make_tempfile(content.encode('utf8'))
editor = os.environ.get('VISUAL') or os.environ.get('EDITOR') or 'vi'
try:
subprocess.call([editor, tempfile])
except OSError:
print 'No editor found. Set EDITOR environment variable or install vi.'
raise
f = open(tempfile)
result = f.read()
f.close()
return result.decode('utf8')
|
<commit_before>import os
import subprocess
from distutils.util import strtobool
from rbtools.utils.filesystem import make_tempfile
def confirm(question):
"""Interactively prompt for a Yes/No answer.
Accepted values (case-insensitive) depend on distutils.util.strtobool():
'Yes' values: y, yes, t, true, on, 1
'No' values: n, no, f, false, off, 0
"""
while True:
try:
answer = raw_input("%s [Yes/No]: " % question).lower()
return strtobool(answer)
except ValueError:
print '%s is not a valid answer.' % answer
def edit_text(content):
"""Allows a user to edit a block of text and returns the saved result.
The environment's default text editor is used if available, otherwise
vim is used.
"""
tempfile = make_tempfile(content.encode('utf8'))
editor = os.environ.get('EDITOR', 'vim')
subprocess.call([editor, tempfile])
f = open(tempfile)
result = f.read()
f.close()
return result.decode('utf8')
<commit_msg>Clean handling of lack of $EDITOR
Problem: If EDITOR is not set, and vim is not installed, rbt post exits with
just "CRITICAL: [Errno 2] No such file or directory" as an error message, which
does not indicate where the problem lies nor how to solve it.
vim is not installed by default on many distributions, or is installed as "vi".
Moreover, EDITOR is traditionally used for a line editor (remember those?),
with VISUAL being the environment variable of choice for a full-screen or GUI
editor. If neither is set, we should fall back to the editor most likely to be
installed in the base distribution.
Solution: Prefer $VISUAL over $EDITOR over "vi". Offer a helpful error message
on failure.
Reviewed at https://reviews.reviewboard.org/r/5790/<commit_after>import os
import subprocess
from distutils.util import strtobool
from rbtools.utils.filesystem import make_tempfile
def confirm(question):
"""Interactively prompt for a Yes/No answer.
Accepted values (case-insensitive) depend on distutils.util.strtobool():
'Yes' values: y, yes, t, true, on, 1
'No' values: n, no, f, false, off, 0
"""
while True:
try:
answer = raw_input("%s [Yes/No]: " % question).lower()
return strtobool(answer)
except ValueError:
print '%s is not a valid answer.' % answer
def edit_text(content):
"""Allows a user to edit a block of text and returns the saved result.
The environment's default text editor is used if available, otherwise
vi is used.
"""
tempfile = make_tempfile(content.encode('utf8'))
editor = os.environ.get('VISUAL') or os.environ.get('EDITOR') or 'vi'
try:
subprocess.call([editor, tempfile])
except OSError:
print 'No editor found. Set EDITOR environment variable or install vi.'
raise
f = open(tempfile)
result = f.read()
f.close()
return result.decode('utf8')
|
ed8514a2d9f60bad6bea8174ef5263164f9f4857
|
test/antTest.py
|
test/antTest.py
|
"""
Code based on:
https://github.com/mvillalba/python-ant/blob/develop/demos/ant.core/03-basicchannel.py
in the python-ant repository and
https://github.com/tomwardill/developerhealth
by Tom Wardill
"""
import sys
import time
from ant.core import driver, node, event, message, log
from ant.core.constants import CHANNEL_TYPE_TWOWAY_RECEIVE, TIMEOUT_NEVER
class HRM(event.EventCallback):
def __init__(self, serial, netkey):
self.serial = serial
self.netkey = netkey
self.antnode = None
self.channel = None
def start(self):
print("starting node")
self._start_antnode()
self._setup_channel()
self.channel.registerCallback(self)
print("start listening for hr events")
def stop(self):
if self.channel:
self.channel.close()
self.channel.unassign()
if self.antnode:
self.antnode.stop()
def __enter__(self):
return self
def __exit__(self, type_, value, traceback):
self.stop()
def _start_antnode(self):
stick = driver.USB2Driver(self.serial)
self.antnode = node.Node(stick)
self.antnode.start()
def _setup_channel(self):
key = node.NetworkKey('N:ANT+', self.netkey)
self.antnode.setNetworkKey(0, key)
self.channel = self.antnode.getFreeChannel()
self.channel.name = 'C:HRM'
self.channel.assign('N:ANT+', CHANNEL_TYPE_TWOWAY_RECEIVE)
self.channel.setID(120, 0, 0)
self.channel.setSearchTimeout(TIMEOUT_NEVER)
self.channel.setPeriod(8070)
self.channel.setFrequency(57)
self.channel.open()
def process(self, msg):
if isinstance(msg, message.ChannelBroadcastDataMessage):
print("heart rate is {}".format(ord(msg.payload[-1])))
# currentHR = format(ord(msg.payload[-1]))
SERIAL = '/dev/ttyUSB0'
NETKEY = 'B9A521FBBD72C345'.decode('hex')
with HRM(serial=SERIAL, netkey=NETKEY) as hrm:
hrm.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
sys.exit(0)
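# For reference (the interpretation below is an assumption that the values
# follow the standard ANT+ heart-rate profile; the numbers themselves come
# from the calls above):
#   setID(120, 0, 0)  - device type 120 is a heart-rate monitor; device
#                       number 0 acts as a wildcard when searching
#   setPeriod(8070)   - 32768 / 8070 gives roughly a 4 Hz message rate
#   setFrequency(57)  - RF channel 57, i.e. 2457 MHz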
|
Test file that works 4 me. Adding some info on the doc in a second.
|
Test file that works 4 me. Adding some info on the doc in a second.
|
Python
|
mit
|
jtoumey/powerBasedCC
|
Test file that works 4 me. Adding some info on the doc in a second.
|
"""
Code based on:
https://github.com/mvillalba/python-ant/blob/develop/demos/ant.core/03-basicchannel.py
in the python-ant repository and
https://github.com/tomwardill/developerhealth
by Tom Wardill
"""
import sys
import time
from ant.core import driver, node, event, message, log
from ant.core.constants import CHANNEL_TYPE_TWOWAY_RECEIVE, TIMEOUT_NEVER
class HRM(event.EventCallback):
def __init__(self, serial, netkey):
self.serial = serial
self.netkey = netkey
self.antnode = None
self.channel = None
def start(self):
print("starting node")
self._start_antnode()
self._setup_channel()
self.channel.registerCallback(self)
print("start listening for hr events")
def stop(self):
if self.channel:
self.channel.close()
self.channel.unassign()
if self.antnode:
self.antnode.stop()
def __enter__(self):
return self
def __exit__(self, type_, value, traceback):
self.stop()
def _start_antnode(self):
stick = driver.USB2Driver(self.serial)
self.antnode = node.Node(stick)
self.antnode.start()
def _setup_channel(self):
key = node.NetworkKey('N:ANT+', self.netkey)
self.antnode.setNetworkKey(0, key)
self.channel = self.antnode.getFreeChannel()
self.channel.name = 'C:HRM'
self.channel.assign('N:ANT+', CHANNEL_TYPE_TWOWAY_RECEIVE)
self.channel.setID(120, 0, 0)
self.channel.setSearchTimeout(TIMEOUT_NEVER)
self.channel.setPeriod(8070)
self.channel.setFrequency(57)
self.channel.open()
def process(self, msg):
if isinstance(msg, message.ChannelBroadcastDataMessage):
print("heart rate is {}".format(ord(msg.payload[-1])))
# currentHR = format(ord(msg.payload[-1]))
SERIAL = '/dev/ttyUSB0'
NETKEY = 'B9A521FBBD72C345'.decode('hex')
with HRM(serial=SERIAL, netkey=NETKEY) as hrm:
hrm.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
sys.exit(0)
|
<commit_before><commit_msg>Test file that works 4 me. Adding some info on the doc in a second.<commit_after>
|
"""
Code based on:
https://github.com/mvillalba/python-ant/blob/develop/demos/ant.core/03-basicchannel.py
in the python-ant repository and
https://github.com/tomwardill/developerhealth
by Tom Wardill
"""
import sys
import time
from ant.core import driver, node, event, message, log
from ant.core.constants import CHANNEL_TYPE_TWOWAY_RECEIVE, TIMEOUT_NEVER
class HRM(event.EventCallback):
def __init__(self, serial, netkey):
self.serial = serial
self.netkey = netkey
self.antnode = None
self.channel = None
def start(self):
print("starting node")
self._start_antnode()
self._setup_channel()
self.channel.registerCallback(self)
print("start listening for hr events")
def stop(self):
if self.channel:
self.channel.close()
self.channel.unassign()
if self.antnode:
self.antnode.stop()
def __enter__(self):
return self
def __exit__(self, type_, value, traceback):
self.stop()
def _start_antnode(self):
stick = driver.USB2Driver(self.serial)
self.antnode = node.Node(stick)
self.antnode.start()
def _setup_channel(self):
key = node.NetworkKey('N:ANT+', self.netkey)
self.antnode.setNetworkKey(0, key)
self.channel = self.antnode.getFreeChannel()
self.channel.name = 'C:HRM'
self.channel.assign('N:ANT+', CHANNEL_TYPE_TWOWAY_RECEIVE)
self.channel.setID(120, 0, 0)
self.channel.setSearchTimeout(TIMEOUT_NEVER)
self.channel.setPeriod(8070)
self.channel.setFrequency(57)
self.channel.open()
def process(self, msg):
if isinstance(msg, message.ChannelBroadcastDataMessage):
print("heart rate is {}".format(ord(msg.payload[-1])))
# currentHR = format(ord(msg.payload[-1]))
SERIAL = '/dev/ttyUSB0'
NETKEY = 'B9A521FBBD72C345'.decode('hex')
with HRM(serial=SERIAL, netkey=NETKEY) as hrm:
hrm.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
sys.exit(0)
|
Test file that works 4 me. Adding some info on the doc in a second."""
Code based on:
https://github.com/mvillalba/python-ant/blob/develop/demos/ant.core/03-basicchannel.py
in the python-ant repository and
https://github.com/tomwardill/developerhealth
by Tom Wardill
"""
import sys
import time
from ant.core import driver, node, event, message, log
from ant.core.constants import CHANNEL_TYPE_TWOWAY_RECEIVE, TIMEOUT_NEVER
class HRM(event.EventCallback):
def __init__(self, serial, netkey):
self.serial = serial
self.netkey = netkey
self.antnode = None
self.channel = None
def start(self):
print("starting node")
self._start_antnode()
self._setup_channel()
self.channel.registerCallback(self)
print("start listening for hr events")
def stop(self):
if self.channel:
self.channel.close()
self.channel.unassign()
if self.antnode:
self.antnode.stop()
def __enter__(self):
return self
def __exit__(self, type_, value, traceback):
self.stop()
def _start_antnode(self):
stick = driver.USB2Driver(self.serial)
self.antnode = node.Node(stick)
self.antnode.start()
def _setup_channel(self):
key = node.NetworkKey('N:ANT+', self.netkey)
self.antnode.setNetworkKey(0, key)
self.channel = self.antnode.getFreeChannel()
self.channel.name = 'C:HRM'
self.channel.assign('N:ANT+', CHANNEL_TYPE_TWOWAY_RECEIVE)
self.channel.setID(120, 0, 0)
self.channel.setSearchTimeout(TIMEOUT_NEVER)
self.channel.setPeriod(8070)
self.channel.setFrequency(57)
self.channel.open()
def process(self, msg):
if isinstance(msg, message.ChannelBroadcastDataMessage):
print("heart rate is {}".format(ord(msg.payload[-1])))
# currentHR = format(ord(msg.payload[-1]))
SERIAL = '/dev/ttyUSB0'
NETKEY = 'B9A521FBBD72C345'.decode('hex')
with HRM(serial=SERIAL, netkey=NETKEY) as hrm:
hrm.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
sys.exit(0)
|
<commit_before><commit_msg>Test file that works 4 me. Adding some info on the doc in a second.<commit_after>"""
Code based on:
https://github.com/mvillalba/python-ant/blob/develop/demos/ant.core/03-basicchannel.py
in the python-ant repository and
https://github.com/tomwardill/developerhealth
by Tom Wardill
"""
import sys
import time
from ant.core import driver, node, event, message, log
from ant.core.constants import CHANNEL_TYPE_TWOWAY_RECEIVE, TIMEOUT_NEVER
class HRM(event.EventCallback):
def __init__(self, serial, netkey):
self.serial = serial
self.netkey = netkey
self.antnode = None
self.channel = None
def start(self):
print("starting node")
self._start_antnode()
self._setup_channel()
self.channel.registerCallback(self)
print("start listening for hr events")
def stop(self):
if self.channel:
self.channel.close()
self.channel.unassign()
if self.antnode:
self.antnode.stop()
def __enter__(self):
return self
def __exit__(self, type_, value, traceback):
self.stop()
def _start_antnode(self):
stick = driver.USB2Driver(self.serial)
self.antnode = node.Node(stick)
self.antnode.start()
def _setup_channel(self):
key = node.NetworkKey('N:ANT+', self.netkey)
self.antnode.setNetworkKey(0, key)
self.channel = self.antnode.getFreeChannel()
self.channel.name = 'C:HRM'
self.channel.assign('N:ANT+', CHANNEL_TYPE_TWOWAY_RECEIVE)
self.channel.setID(120, 0, 0)
self.channel.setSearchTimeout(TIMEOUT_NEVER)
self.channel.setPeriod(8070)
self.channel.setFrequency(57)
self.channel.open()
def process(self, msg):
if isinstance(msg, message.ChannelBroadcastDataMessage):
print("heart rate is {}".format(ord(msg.payload[-1])))
# currentHR = format(ord(msg.payload[-1]))
SERIAL = '/dev/ttyUSB0'
NETKEY = 'B9A521FBBD72C345'.decode('hex')
with HRM(serial=SERIAL, netkey=NETKEY) as hrm:
hrm.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
sys.exit(0)
|
|
328eef0c145c1efbaedd9453d515955012d1a975
|
backend/scripts/projsize.py
|
backend/scripts/projsize.py
|
#!/usr/bin/env python
import rethinkdb as r
from optparse import OptionParser
def compute_project_size(project_id, conn):
total = 0
for f in r.table('project2datafile').get_all(project_id, index="project_id").eq_join('datafile_id', r.table(
'datafiles')).zip().run(conn):
total = total + f['size']
print "Total size %s" % sizeof_fmt(total)
def sizeof_fmt(num, suffix='B'):
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-P", "--port", dest="port", type="int", help="rethinkdb port", default=30815)
parser.add_option("-p", "--project-id", dest="project_id", type="string", help="project id")
(options, args) = parser.parse_args()
conn = r.connect('localhost', options.port, db="materialscommons")
compute_project_size(options.project_id, conn)
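# Hand-worked spot checks of sizeof_fmt (illustrative, not part of the
# commit):
#   sizeof_fmt(0)            -> "0.0B"
#   sizeof_fmt(1536)         -> "1.5KiB"
#   sizeof_fmt(5 * 1024**3)  -> "5.0GiB"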
|
Add script for computing total project size.
|
Add script for computing total project size.
|
Python
|
mit
|
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
|
Add script for computing total project size.
|
#!/usr/bin/env python
import rethinkdb as r
from optparse import OptionParser
def compute_project_size(project_id, conn):
total = 0
for f in r.table('project2datafile').get_all(project_id, index="project_id").eq_join('datafile_id', r.table(
'datafiles')).zip().run(conn):
total = total + f['size']
print "Total size %s" % sizeof_fmt(total)
def sizeof_fmt(num, suffix='B'):
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-P", "--port", dest="port", type="int", help="rethinkdb port", default=30815)
parser.add_option("-p", "--project-id", dest="project_id", type="string", help="project id")
(options, args) = parser.parse_args()
conn = r.connect('localhost', options.port, db="materialscommons")
compute_project_size(options.project_id, conn)
|
<commit_before><commit_msg>Add script for computing total project size.<commit_after>
|
#!/usr/bin/env python
import rethinkdb as r
from optparse import OptionParser
def compute_project_size(project_id, conn):
total = 0
for f in r.table('project2datafile').get_all(project_id, index="project_id").eq_join('datafile_id', r.table(
'datafiles')).zip().run(conn):
total = total + f['size']
print "Total size %s" % sizeof_fmt(total)
def sizeof_fmt(num, suffix='B'):
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-P", "--port", dest="port", type="int", help="rethinkdb port", default=30815)
parser.add_option("-p", "--project-id", dest="project_id", type="string", help="project id")
(options, args) = parser.parse_args()
conn = r.connect('localhost', options.port, db="materialscommons")
compute_project_size(options.project_id, conn)
|
Add script for computing total project size.#!/usr/bin/env python
import rethinkdb as r
from optparse import OptionParser
def compute_project_size(project_id, conn):
total = 0
for f in r.table('project2datafile').get_all(project_id, index="project_id").eq_join('datafile_id', r.table(
'datafiles')).zip().run(conn):
total = total + f['size']
print "Total size %s" % sizeof_fmt(total)
def sizeof_fmt(num, suffix='B'):
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-P", "--port", dest="port", type="int", help="rethinkdb port", default=30815)
parser.add_option("-p", "--project-id", dest="project_id", type="string", help="project id")
(options, args) = parser.parse_args()
conn = r.connect('localhost', options.port, db="materialscommons")
compute_project_size(options.project_id, conn)
|
<commit_before><commit_msg>Add script for computing total project size.<commit_after>#!/usr/bin/env python
import rethinkdb as r
from optparse import OptionParser
def compute_project_size(project_id, conn):
total = 0
for f in r.table('project2datafile').get_all(project_id, index="project_id").eq_join('datafile_id', r.table(
'datafiles')).zip().run(conn):
total = total + f['size']
print "Total size %s" % sizeof_fmt(total)
def sizeof_fmt(num, suffix='B'):
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-P", "--port", dest="port", type="int", help="rethinkdb port", default=30815)
parser.add_option("-p", "--project-id", dest="project_id", type="string", help="project id")
(options, args) = parser.parse_args()
conn = r.connect('localhost', options.port, db="materialscommons")
compute_project_size(options.project_id, conn)
|
|
d3aa2c658ae3ba624b209a1d583fa97137ab2e23
|
chaco/tests/text_plot_1d_test_case.py
|
chaco/tests/text_plot_1d_test_case.py
|
import unittest
from numpy import alltrue, arange
from enable.compiled_path import CompiledPath
# Chaco imports
from chaco.api import (ArrayDataSource, DataRange1D, LinearMapper,
PlotGraphicsContext)
from chaco.text_plot_1d import TextPlot1D
class TextPlot1DTest(unittest.TestCase):
def setUp(self):
self.size = (250, 250)
data_source = ArrayDataSource(arange(10))
text_data = ArrayDataSource(['one', 'two', 'three', 'four', 'five',
'six', 'seven', 'eight', 'nine', 'ten'])
index_range = DataRange1D()
index_range.add(data_source)
index_mapper = LinearMapper(range=index_range)
self.scatterplot = TextPlot1D(
index=data_source,
index_mapper=index_mapper,
value=text_data,
border_visible=False,
)
self.scatterplot.outer_bounds = list(self.size)
def test_scatter_1d(self):
gc = PlotGraphicsContext(self.size)
gc.render_component(self.scatterplot)
actual = gc.bmp_array[:, :, :]
self.assertFalse(alltrue(actual == 255))
def test_scatter_1d_rotated(self):
self.scatterplot.text_rotate_angle = 45
gc = PlotGraphicsContext(self.size)
gc.render_component(self.scatterplot)
actual = gc.bmp_array[:, :, :]
self.assertFalse(alltrue(actual == 255))
|
Add text plot 1d test case.
|
Add text plot 1d test case.
|
Python
|
bsd-3-clause
|
tommy-u/chaco,tommy-u/chaco,tommy-u/chaco
|
Add text plot 1d test case.
|
import unittest
from numpy import alltrue, arange
from enable.compiled_path import CompiledPath
# Chaco imports
from chaco.api import (ArrayDataSource, DataRange1D, LinearMapper,
PlotGraphicsContext)
from chaco.text_plot_1d import TextPlot1D
class TextPlot1DTest(unittest.TestCase):
def setUp(self):
self.size = (250, 250)
data_source = ArrayDataSource(arange(10))
text_data = ArrayDataSource(['one', 'two', 'three', 'four', 'five',
'six', 'seven', 'eight', 'nine', 'ten'])
index_range = DataRange1D()
index_range.add(data_source)
index_mapper = LinearMapper(range=index_range)
self.scatterplot = TextPlot1D(
index=data_source,
index_mapper=index_mapper,
value=text_data,
border_visible=False,
)
self.scatterplot.outer_bounds = list(self.size)
def test_scatter_1d(self):
gc = PlotGraphicsContext(self.size)
gc.render_component(self.scatterplot)
actual = gc.bmp_array[:, :, :]
self.assertFalse(alltrue(actual == 255))
def test_scatter_1d_rotated(self):
self.scatterplot.text_rotate_angle = 45
gc = PlotGraphicsContext(self.size)
gc.render_component(self.scatterplot)
actual = gc.bmp_array[:, :, :]
self.assertFalse(alltrue(actual == 255))
|
<commit_before><commit_msg>Add text plot 1d test case.<commit_after>
|
import unittest
from numpy import alltrue, arange
from enable.compiled_path import CompiledPath
# Chaco imports
from chaco.api import (ArrayDataSource, DataRange1D, LinearMapper,
PlotGraphicsContext)
from chaco.text_plot_1d import TextPlot1D
class TextPlot1DTest(unittest.TestCase):
def setUp(self):
self.size = (250, 250)
data_source = ArrayDataSource(arange(10))
text_data = ArrayDataSource(['one', 'two', 'three', 'four', 'five',
'six', 'seven', 'eight', 'nine', 'ten'])
index_range = DataRange1D()
index_range.add(data_source)
index_mapper = LinearMapper(range=index_range)
self.scatterplot = TextPlot1D(
index=data_source,
index_mapper=index_mapper,
value=text_data,
border_visible=False,
)
self.scatterplot.outer_bounds = list(self.size)
def test_scatter_1d(self):
gc = PlotGraphicsContext(self.size)
gc.render_component(self.scatterplot)
actual = gc.bmp_array[:, :, :]
self.assertFalse(alltrue(actual == 255))
def test_scatter_1d_rotated(self):
self.scatterplot.text_rotate_angle = 45
gc = PlotGraphicsContext(self.size)
gc.render_component(self.scatterplot)
actual = gc.bmp_array[:, :, :]
self.assertFalse(alltrue(actual == 255))
|
Add text plot 1d test case.import unittest
from numpy import alltrue, arange
from enable.compiled_path import CompiledPath
# Chaco imports
from chaco.api import (ArrayDataSource, DataRange1D, LinearMapper,
PlotGraphicsContext)
from chaco.text_plot_1d import TextPlot1D
class TextPlot1DTest(unittest.TestCase):
def setUp(self):
self.size = (250, 250)
data_source = ArrayDataSource(arange(10))
text_data = ArrayDataSource(['one', 'two', 'three', 'four', 'five',
'six', 'seven', 'eight', 'nine', 'ten'])
index_range = DataRange1D()
index_range.add(data_source)
index_mapper = LinearMapper(range=index_range)
self.scatterplot = TextPlot1D(
index=data_source,
index_mapper=index_mapper,
value=text_data,
border_visible=False,
)
self.scatterplot.outer_bounds = list(self.size)
def test_scatter_1d(self):
gc = PlotGraphicsContext(self.size)
gc.render_component(self.scatterplot)
actual = gc.bmp_array[:, :, :]
self.assertFalse(alltrue(actual == 255))
def test_scatter_1d_rotated(self):
self.scatterplot.text_rotate_angle = 45
gc = PlotGraphicsContext(self.size)
gc.render_component(self.scatterplot)
actual = gc.bmp_array[:, :, :]
self.assertFalse(alltrue(actual == 255))
|
<commit_before><commit_msg>Add text plot 1d test case.<commit_after>import unittest
from numpy import alltrue, arange
from enable.compiled_path import CompiledPath
# Chaco imports
from chaco.api import (ArrayDataSource, DataRange1D, LinearMapper,
PlotGraphicsContext)
from chaco.text_plot_1d import TextPlot1D
class TextPlot1DTest(unittest.TestCase):
def setUp(self):
self.size = (250, 250)
data_source = ArrayDataSource(arange(10))
text_data = ArrayDataSource(['one', 'two', 'three', 'four', 'five',
'six', 'seven', 'eight', 'nine', 'ten'])
index_range = DataRange1D()
index_range.add(data_source)
index_mapper = LinearMapper(range=index_range)
self.scatterplot = TextPlot1D(
index=data_source,
index_mapper=index_mapper,
value=text_data,
border_visible=False,
)
self.scatterplot.outer_bounds = list(self.size)
def test_scatter_1d(self):
gc = PlotGraphicsContext(self.size)
gc.render_component(self.scatterplot)
actual = gc.bmp_array[:, :, :]
self.assertFalse(alltrue(actual == 255))
def test_scatter_1d_rotated(self):
self.scatterplot.text_rotate_angle = 45
gc = PlotGraphicsContext(self.size)
gc.render_component(self.scatterplot)
actual = gc.bmp_array[:, :, :]
self.assertFalse(alltrue(actual == 255))
|
|
2a579b7df30546e642d87b70417ecf8a1a9590e0
|
axelrod/random_.py
|
axelrod/random_.py
|
import random
def random_choice(p=0.5):
"""
Return 'C' with probability `p`, else return 'D'
Emulates Python's random.choice(['C', 'D']), which is not consistent
between Python 2.7 and Python 3.4"""
r = random.random()
if r < p:
return 'C'
return 'D'
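# Illustrative use (hypothetical, not from the commit): a biased coin that
# cooperates roughly 70% of the time.
#   moves = [random_choice(0.7) for _ in range(10)]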
|
Add missing file to git
|
Add missing file to git
|
Python
|
mit
|
ranjinidas/Axelrod,marcharper/Axelrod,marcharper/Axelrod,ranjinidas/Axelrod
|
Add missing file to git
|
import random
def random_choice(p=0.5):
"""
Return 'C' with probability `p`, else return 'D'
Emulates Python's random.choice(['C', 'D']), which is not consistent
between Python 2.7 and Python 3.4"""
r = random.random()
if r < p:
return 'C'
return 'D'
|
<commit_before><commit_msg>Add missing file to git<commit_after>
|
import random
def random_choice(p=0.5):
"""
Return 'C' with probability `p`, else return 'D'
Emulates Python's random.choice(['C', 'D']), which is not consistent
between Python 2.7 and Python 3.4"""
r = random.random()
if r < p:
return 'C'
return 'D'
|
Add missing file to gitimport random
def random_choice(p=0.5):
"""
Return 'C' with probability `p`, else return 'D'
Emulates Python's random.choice(['C', 'D']), which is not consistent
between Python 2.7 and Python 3.4"""
r = random.random()
if r < p:
return 'C'
return 'D'
|
<commit_before><commit_msg>Add missing file to git<commit_after>import random
def random_choice(p=0.5):
"""
Return 'C' with probability `p`, else return 'D'
Emulates Python's random.choice(['C', 'D']), which is not consistent
between Python 2.7 and Python 3.4"""
r = random.random()
if r < p:
return 'C'
return 'D'
|
|
ef3dc09af13bcb98667797a649cc9a2ff8af34ae
|
registrations/migrations/0005_subscriptionrequest_metadata.py
|
registrations/migrations/0005_subscriptionrequest_metadata.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-03-30 13:57
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('registrations', '0004_auto_20160323_1258'),
]
operations = [
migrations.AddField(
model_name='subscriptionrequest',
name='metadata',
field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True),
),
]
|
Add metadata to subscription request for welcome message setting
|
Add metadata to subscription request for welcome message setting
|
Python
|
bsd-3-clause
|
praekelt/hellomama-registration,praekelt/hellomama-registration
|
Add metadata to subscription request for welcome message setting
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-03-30 13:57
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('registrations', '0004_auto_20160323_1258'),
]
operations = [
migrations.AddField(
model_name='subscriptionrequest',
name='metadata',
field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True),
),
]
|
<commit_before><commit_msg>Add metadata to subscription request for welcome message setting<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-03-30 13:57
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('registrations', '0004_auto_20160323_1258'),
]
operations = [
migrations.AddField(
model_name='subscriptionrequest',
name='metadata',
field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True),
),
]
|
Add metadata to subscription request for welcome message setting# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-03-30 13:57
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('registrations', '0004_auto_20160323_1258'),
]
operations = [
migrations.AddField(
model_name='subscriptionrequest',
name='metadata',
field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True),
),
]
|
<commit_before><commit_msg>Add metadata to subscription request for welcome message setting<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-03-30 13:57
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('registrations', '0004_auto_20160323_1258'),
]
operations = [
migrations.AddField(
model_name='subscriptionrequest',
name='metadata',
field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True),
),
]
|
|
c7136b117696b10664a7b6427f5293813bd1d3b0
|
Time.py
|
Time.py
|
def _DoRough(time, big, bigname, little, littlename):
b = int(time / big / little)
l = int(((time + little / 2) / little) % big)
# print "b =", b, "l =", l, "time =", time, "/", b * big * little + l * little
t = str(b) + " " + bigname
if b > 1:
t += "s"
if l != 0:
t += " " + str(l) + " " + littlename
if l > 1:
t += "s"
return t
# Return a string roughly describing the time difference handed in.
def RoughAge(time):
if time < 60*60:
return _DoRough(time, 60, "minute", 1, "second")
if time < 24*60*60:
return _DoRough(time, 60, "hour", 60, "minute")
if time < 7*24*60*60:
return _DoRough(time, 24, "day", 60*60, "hour")
if time < 365*24*60*60:
return _DoRough(time, 7, "week", 24*60*60, "day")
# yes, this measure of a year is fairly crap :-)
return _DoRough(time, 52, "year", 7*24*60*60, "week")
return "I dunno"
if __name__ == '__main__':
import random
r = random.SystemRandom()
for x in range(100):
y = r.randrange(100000000)
print y, RoughAge(y)
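# Hand-worked spot check (illustrative): 90061 seconds is a day and a bit,
# so RoughAge takes the day/hour branch; b = 90061 // 86400 = 1 and the
# rounded remainder is one hour, giving "1 day 1 hour".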
|
Add rough time so when Felix adds time since last play, he can use this to show it.
|
Add rough time so when Felix adds time since last play, he can use
this to show it.
|
Python
|
bsd-3-clause
|
erbridge/NQr,erbridge/NQr,erbridge/NQr
|
Add rough time so when Felix adds time since last play, he can use
this to show it.
|
def _DoRough(time, big, bigname, little, littlename):
b = int(time / big / little)
l = int(((time + little / 2) / little) % big)
# print "b =", b, "l =", l, "time =", time, "/", b * big * little + l * little
t = str(b) + " " + bigname
if b > 1:
t += "s"
if l != 0:
t += " " + str(l) + " " + littlename
if l > 1:
t += "s"
return t
# Return a string roughly describing the time difference handed in.
def RoughAge(time):
if time < 60*60:
return _DoRough(time, 60, "minute", 1, "second")
if time < 24*60*60:
return _DoRough(time, 60, "hour", 60, "minute")
if time < 7*24*60*60:
return _DoRough(time, 24, "day", 60*60, "hour")
if time < 365*24*60*60:
return _DoRough(time, 7, "week", 24*60*60, "day")
# yes, this measure of a year is fairly crap :-)
return _DoRough(time, 52, "year", 7*24*60*60, "week")
return "I dunno"
if __name__ == '__main__':
import random
r = random.SystemRandom()
for x in range(100):
y = r.randrange(100000000)
print y, RoughAge(y)
|
<commit_before><commit_msg>Add rough time so when Felix adds time since last play, he can use
this to show it.<commit_after>
|
def _DoRough(time, big, bigname, little, littlename):
b = int(time / big / little)
l = int(((time + little / 2) / little) % big)
# print "b =", b, "l =", l, "time =", time, "/", b * big * little + l * little
t = str(b) + " " + bigname
if b > 1:
t += "s"
if l != 0:
t += " " + str(l) + " " + littlename
if l > 1:
t += "s"
return t
# Return a string roughly describing the time difference handed in.
def RoughAge(time):
if time < 60*60:
return _DoRough(time, 60, "minute", 1, "second")
if time < 24*60*60:
return _DoRough(time, 60, "hour", 60, "minute")
if time < 7*24*60*60:
return _DoRough(time, 24, "day", 60*60, "hour")
if time < 365*24*60*60:
return _DoRough(time, 7, "week", 24*60*60, "day")
# yes, this measure of a year is fairly crap :-)
return _DoRough(time, 52, "year", 7*24*60*60, "week")
return "I dunno"
if __name__ == '__main__':
import random
r = random.SystemRandom()
for x in range(100):
y = r.randrange(100000000)
print y, RoughAge(y)
|
Add rough time so when Felix adds time since last play, he can use
this to show it.def _DoRough(time, big, bigname, little, littlename):
b = int(time / big / little)
l = int(((time + little / 2) / little) % big)
# print "b =", b, "l =", l, "time =", time, "/", b * big * little + l * little
t = str(b) + " " + bigname
if b > 1:
t += "s"
if l != 0:
t += " " + str(l) + " " + littlename
if l > 1:
t += "s"
return t
# Return a string roughly describing the time difference handed in.
def RoughAge(time):
if time < 60*60:
return _DoRough(time, 60, "minute", 1, "second")
if time < 24*60*60:
return _DoRough(time, 60, "hour", 60, "minute")
if time < 7*24*60*60:
return _DoRough(time, 24, "day", 60*60, "hour")
if time < 365*24*60*60:
return _DoRough(time, 7, "week", 24*60*60, "day")
# yes, this measure of a year is fairly crap :-)
return _DoRough(time, 52, "year", 7*24*60*60, "week")
return "I dunno"
if __name__ == '__main__':
import random
r = random.SystemRandom()
for x in range(100):
y = r.randrange(100000000)
print y, RoughAge(y)
|
<commit_before><commit_msg>Add rough time so when Felix adds time since last play, he can use
this to show it.<commit_after>def _DoRough(time, big, bigname, little, littlename):
b = int(time / big / little)
l = int(((time + little / 2) / little) % big)
# print "b =", b, "l =", l, "time =", time, "/", b * big * little + l * little
t = str(b) + " " + bigname
if b > 1:
t += "s"
if l != 0:
t += " " + str(l) + " " + littlename
if l > 1:
t += "s"
return t
# Return a string roughly describing the time difference handed in.
def RoughAge(time):
if time < 60*60:
return _DoRough(time, 60, "minute", 1, "second")
if time < 24*60*60:
return _DoRough(time, 60, "hour", 60, "minute")
if time < 7*24*60*60:
return _DoRough(time, 24, "day", 60*60, "hour")
if time < 365*24*60*60:
return _DoRough(time, 7, "week", 24*60*60, "day")
# yes, this measure of a year is fairly crap :-)
return _DoRough(time, 52, "year", 7*24*60*60, "week")
return "I dunno"
if __name__ == '__main__':
import random
r = random.SystemRandom()
for x in range(100):
y = r.randrange(100000000)
print y, RoughAge(y)
|
|
0df2f253dd2f5059b76cfa5527b9705375c9c617
|
utils.py
|
utils.py
|
import numpy as np
def param_correction(start, params, order):
pos = np.round(start.copy())
oldpos = np.round(start.copy())
pos_not_rounded = np.round(start.copy())
for i in range(len(params)):
param = np.reshape(params[i], [order, len(start)], order='F')
pos += np.sum(np.round(param), axis=0)
pos_not_rounded += np.sum(param, axis=0)
diff = pos - pos_not_rounded
param[0] = np.round(param[0]) - np.round(diff)
pos = oldpos.copy() + np.sum(param, axis=0)
oldpos = pos.copy()
params[i] = np.reshape(np.round(param.T), [1, len(start)*order], order='C')
return params
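# Hypothetical smoke test (shapes assumed from the reshape calls above: each
# entry of `params` carries order * len(start) coefficients):
#   start = np.array([0.0, 0.0])
#   params = [np.array([0.4, 0.2, 0.3, 0.1])]  # order=2, two coordinates
#   params = param_correction(start, params, order=2)
# The rounded coefficients are nudged so that the accumulated end position
# stays consistent with plain rounding of the unrounded trajectory.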
|
Create function for parameter correction.
|
Create function for parameter correction.
|
Python
|
mit
|
petroolg/robo-spline
|
Create function for parameter correction.
|
import numpy as np
def param_correction(start, params, order):
pos = np.round(start.copy())
oldpos = np.round(start.copy())
pos_not_rounded = np.round(start.copy())
for i in range(len(params)):
param = np.reshape(params[i], [order, len(start)], order='F')
pos += np.sum(np.round(param), axis=0)
pos_not_rounded += np.sum(param, axis=0)
diff = pos - pos_not_rounded
param[0] = np.round(param[0]) - np.round(diff)
pos = oldpos.copy() + np.sum(param, axis=0)
oldpos = pos.copy()
params[i] = np.reshape(np.round(param.T), [1, len(start)*order], order='C')
return params
|
<commit_before><commit_msg>Create function for parameter correction.<commit_after>
|
import numpy as np
def param_correction(start, params, order):
pos = np.round(start.copy())
oldpos = np.round(start.copy())
pos_not_rounded = np.round(start.copy())
for i in range(len(params)):
param = np.reshape(params[i], [order, len(start)], order='F')
pos += np.sum(np.round(param), axis=0)
pos_not_rounded += np.sum(param, axis=0)
diff = pos - pos_not_rounded
param[0] = np.round(param[0]) - np.round(diff)
pos = oldpos.copy() + np.sum(param, axis=0)
oldpos = pos.copy()
params[i] = np.reshape(np.round(param.T), [1, len(start)*order], order='C')
return params
|
Create function for parameter correction.import numpy as np
def param_correction(start, params, order):
pos = np.round(start.copy())
oldpos = np.round(start.copy())
pos_not_rounded = np.round(start.copy())
for i in range(len(params)):
param = np.reshape(params[i], [order, len(start)], order='F')
pos += np.sum(np.round(param), axis=0)
pos_not_rounded += np.sum(param, axis=0)
diff = pos - pos_not_rounded
param[0] = np.round(param[0]) - np.round(diff)
pos = oldpos.copy() + np.sum(param, axis=0)
oldpos = pos.copy()
params[i] = np.reshape(np.round(param.T), [1, len(start)*order], order='C')
return params
|
<commit_before><commit_msg>Create function for parameter correction.<commit_after>import numpy as np
def param_correction(start, params, order):
pos = np.round(start.copy())
oldpos = np.round(start.copy())
pos_not_rounded = np.round(start.copy())
for i in range(len(params)):
param = np.reshape(params[i], [order, len(start)], order='F')
pos += np.sum(np.round(param), axis=0)
pos_not_rounded += np.sum(param, axis=0)
diff = pos - pos_not_rounded
param[0] = np.round(param[0]) - np.round(diff)
pos = oldpos.copy() + np.sum(param, axis=0)
oldpos = pos.copy()
params[i] = np.reshape(np.round(param.T), [1, len(start)*order], order='C')
return params
|
|
0c1a8445262920bda55f220bb82ab845f50d6585
|
tests/providers/__init__.py
|
tests/providers/__init__.py
|
# Allow out-of-tree auth submodules.
from pkgutil import extend_path
from inbox.util.misc import register_backends
__path__ = extend_path(__path__, __name__)
module_registry = register_backends(__name__, __path__)
|
Add stub for test auth providers
|
Add stub for test auth providers
|
Python
|
agpl-3.0
|
Eagles2F/sync-engine,PriviPK/privipk-sync-engine,EthanBlackburn/sync-engine,jobscore/sync-engine,wakermahmud/sync-engine,Eagles2F/sync-engine,nylas/sync-engine,closeio/nylas,EthanBlackburn/sync-engine,closeio/nylas,Eagles2F/sync-engine,EthanBlackburn/sync-engine,Eagles2F/sync-engine,PriviPK/privipk-sync-engine,EthanBlackburn/sync-engine,wakermahmud/sync-engine,ErinCall/sync-engine,ErinCall/sync-engine,ErinCall/sync-engine,PriviPK/privipk-sync-engine,ErinCall/sync-engine,gale320/sync-engine,PriviPK/privipk-sync-engine,jobscore/sync-engine,closeio/nylas,wakermahmud/sync-engine,wakermahmud/sync-engine,Eagles2F/sync-engine,nylas/sync-engine,nylas/sync-engine,gale320/sync-engine,PriviPK/privipk-sync-engine,gale320/sync-engine,nylas/sync-engine,EthanBlackburn/sync-engine,gale320/sync-engine,closeio/nylas,gale320/sync-engine,wakermahmud/sync-engine,jobscore/sync-engine,ErinCall/sync-engine,jobscore/sync-engine
|
Add stub for test auth providers
|
# Allow out-of-tree auth submodules.
from pkgutil import extend_path
from inbox.util.misc import register_backends
__path__ = extend_path(__path__, __name__)
module_registry = register_backends(__name__, __path__)
|
<commit_before><commit_msg>Add stub for test auth providers<commit_after>
|
# Allow out-of-tree auth submodules.
from pkgutil import extend_path
from inbox.util.misc import register_backends
__path__ = extend_path(__path__, __name__)
module_registry = register_backends(__name__, __path__)
|
Add stub for test auth providers# Allow out-of-tree auth submodules.
from pkgutil import extend_path
from inbox.util.misc import register_backends
__path__ = extend_path(__path__, __name__)
module_registry = register_backends(__name__, __path__)
|
<commit_before><commit_msg>Add stub for test auth providers<commit_after># Allow out-of-tree auth submodules.
from pkgutil import extend_path
from inbox.util.misc import register_backends
__path__ = extend_path(__path__, __name__)
module_registry = register_backends(__name__, __path__)
|
|
125e2e67d71a70e29d36c6538d805bbb50008e46
|
gravity_waves/mixedoperators.py
|
gravity_waves/mixedoperators.py
|
from __future__ import absolute_import, print_function, division
from firedrake import *
from function_spaces import generate_function_spaces
from vertical_normal import VerticalNormal
class MixedOperator(object):
"""A class describing the operator of the velocity-pressure sub-system
of the mixed (Velocity-Pressure-Buoyancy) gravity wave system.
This is obtained after eliminating the buoyancy unknown to arrive
at a Helmholtz-like saddle point system.
"""
def __init__(self, mesh, dt, c, N):
"""Constructor for the MixedSubSystem class.
:arg mesh: An Earth-like extruded mesh.
:arg dt: A positive real number denoting the time step size.
:arg c: A positive real number denoting the speed of sound.
:arg N: A positive real number denoting the buoyancy frequency.
"""
super(MixedSubSystem, self).__init__()
W2, W3, Wb = generate_function_spaces(mesh, degree=1)
self._mesh = mesh
self._hdiv_space = W2
self._L2_space = W3
self._Wb = Wb
self._dt = dt
self._c = c
self._N = N
# Constants from eliminating buoyancy
self._omega_N2 = Constant((0.5*dt*N) ** 2)
self._dt_half = Constant(0.5*dt)
self._dt_half_N2 = Constant(0.5*dt*N**2)
self._dt_half_c2 = Constant(0.5*dt*c**2)
self._W2W3 = W2 * W3
u, p = TrialFunctions(self._W2W3)
v, q = TestFunctions(self._W2W3)
self._khat = VerticalNormal(self._mesh)
# Modified velocity mass matrix
Mutilde = (dot(v, u) + self._omega_N2 * dot(v, self._khat.khat) *
dot(self._khat.khat, u)) * dx
# Off-diagonal blocks
Qt = (-self._dt_half * div(v) * p) * dx
Q = self._dt_half_c2 * q * div(u) * dx
# Pressure mass matrix
Mp = p * q * dx
self._bilinear_form = Mutilde + Qt + Q + Mp
# Boundary conditions
self._bcs = [DirichletBC(self._hdiv_space, 0.0, "bottom"),
DirichletBC(self._hdiv_space, 0.0, "top")]
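# Schematically (a sketch inferred from the bilinear form assembled above,
# not stated in the source), the discrete velocity-pressure system has the
# saddle-point structure
#
#   [ Mutilde           -dt/2 * D^T ] [u]   [r_u]
#   [ dt*c^2/2 * D       Mp         ] [p] = [r_p]
#
# where Mutilde is the buoyancy-modified velocity mass matrix, D the weak
# divergence and Mp the pressure mass matrix.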
|
Add draft of the mixed operator for the gravity wave problem
|
Add draft of the mixed operator for the gravity wave problem
|
Python
|
mit
|
thomasgibson/firedrake-hybridization
|
Add draft of the mixed operator for the gravity wave problem
|
from __future__ import absolute_import, print_function, division
from firedrake import *
from function_spaces import generate_function_spaces
from vertical_normal import VerticalNormal
class MixedOperator(object):
"""A class describing the operator of the velocity-pressure sub-system
of the mixed (Velocity-Pressure-Buoyancy) gravity wave system.
This is obtained after eliminating the buoyancy unknown to arrive
at a Helmholtz-like saddle point system.
"""
def __init__(self, mesh, dt, c, N):
"""Constructor for the MixedSubSystem class.
:arg mesh: An Earth-like extruded mesh.
:arg dt: A positive real number denoting the time step size.
:arg c: A positive real number denoting the speed of sound.
:arg N: A positive real number denoting the buoyancy frequency.
"""
super(MixedSubSystem, self).__init__()
W2, W3, Wb = generate_function_spaces(mesh, degree=1)
self._mesh = mesh
self._hdiv_space = W2
self._L2_space = W3
self._Wb = Wb
self._dt = dt
self._c = c
self._N = N
# Constants from eliminating buoyancy
self._omega_N2 = Constant((0.5*dt*N) ** 2)
self._dt_half = Constant(0.5*dt)
self._dt_half_N2 = Constant(0.5*dt*N**2)
self._dt_half_c2 = Constant(0.5*dt*c**2)
self._W2W3 = W2 * W3
u, p = TrialFunctions(self._W2W3)
v, q = TestFunctions(self._W2W3)
self._khat = VerticalNormal(self._mesh)
# Modified velocity mass matrix
Mutilde = (dot(v, u) + self._omega_N2 * dot(v, self._khat.khat) *
dot(self._khat.khat, u)) * dx
# Off-diagonal blocks
Qt = (-self._dt_half * div(v) * p) * dx
Q = self._dt_half_c2 * q * div(u) * dx
# Pressure mass matrix
Mp = p * q * dx
self._bilinear_form = Mutilde + Qt + Q + Mp
# Boundary conditions
self._bcs = [DirichletBC(self._hdiv_space, 0.0, "bottom"),
DirichletBC(self._hdiv_space, 0.0, "top")]
|
<commit_before><commit_msg>Add draft of the mixed operator for the gravity wave problem<commit_after>
|
from __future__ import absolute_import, print_function, division
from firedrake import *
from function_spaces import generate_function_spaces
from vertical_normal import VerticalNormal
class MixedOperator(object):
"""A class describing the operator of the velocity-pressure sub-system
of the mixed (Velocity-Pressure-Buoyancy) gravity wave system.
This is obtained after eliminating the buoyancy unknown to arrive
at a Helmholtz-like saddle point system.
"""
def __init__(self, mesh, dt, c, N):
"""Constructor for the MixedSubSystem class.
:arg mesh: An Earth-like extruded mesh.
:arg dt: A positive real number denoting the time step size.
:arg c: A positive real number denoting the speed of sound.
:arg N: A positive real number denoting the buoyancy frequency.
"""
super(MixedOperator, self).__init__()
W2, W3, Wb = generate_function_spaces(mesh, degree=1)
self._mesh = mesh
self._hdiv_space = W2
self._L2_space = W3
self._Wb = Wb
self._dt = dt
self._c = c
self._N = N
# Constants from eliminating buoyancy
self._omega_N2 = Constant((0.5*dt*N) ** 2)
self._dt_half = Constant(0.5*dt)
self._dt_half_N2 = Constant(0.5*dt*N**2)
self._dt_half_c2 = Constant(0.5*dt*c**2)
self._W2W3 = W2 * W3
u, p = TrialFunctions(self._W2W3)
v, q = TestFunctions(self._W2W3)
self._khat = VerticalNormal(self._mesh)
# Modified velocity mass matrix
Mutilde = (dot(v, u) + self._omega_N2 * dot(v, self._khat.khat) *
dot(self._khat.khat, u)) * dx
# Off-diagonal blocks
Qt = (-self._dt_half * div(v) * p) * dx
Q = self._dt_half_c2 * q * div(u) * dx
# Pressure mass matrix
Mp = p * q * dx
self._bilinear_form = Mutilde + Qt + Q + Mp
# Boundary conditions
self._bcs = [DirichletBC(self._hdiv_space, 0.0, "bottom"),
DirichletBC(self._hdiv_space, 0.0, "top")]
|
Add draft of the mixed operator for the gravity wave problemfrom __future__ import absolute_import, print_function, division
from firedrake import *
from function_spaces import generate_function_spaces
from vertical_normal import VerticalNormal
class MixedOperator(object):
"""A class describing the operator of the velocity-pressure sub-system
of the mixed (Velocity-Pressure-Buoyancy) gravity wave system.
This is obtained after eliminating the buoyancy unknown to arrive
at a Helmholtz-like saddle point system.
"""
def __init__(self, mesh, dt, c, N):
"""Constructor for the MixedSubSystem class.
:arg mesh: An Earth-like extruded mesh.
:arg dt: A positive real number denoting the time step size.
:arg c: A positive real number denoting the speed of sound.
:arg N: A positive real number denoting the buoyancy frequency.
"""
super(MixedOperator, self).__init__()
W2, W3, Wb = generate_function_spaces(mesh, degree=1)
self._mesh = mesh
self._hdiv_space = W2
self._L2_space = W3
self._Wb = Wb
self._dt = dt
self._c = c
self._N = N
# Constants from eliminating buoyancy
self._omega_N2 = Constant((0.5*dt*N) ** 2)
self._dt_half = Constant(0.5*dt)
self._dt_half_N2 = Constant(0.5*dt*N**2)
self._dt_half_c2 = Constant(0.5*dt*c**2)
self._W2W3 = W2 * W3
u, p = TrialFunctions(self._W2W3)
v, q = TestFunctions(self._W2W3)
self._khat = VerticalNormal(self._mesh)
# Modified velocity mass matrix
Mutilde = (dot(v, u) + self._omega_N2 * dot(v, self._khat.khat) *
dot(self._khat.khat, u)) * dx
# Off-diagonal blocks
Qt = (-self._dt_half * div(v) * p) * dx
Q = self._dt_half_c2 * q * div(u) * dx
# Pressure mass matrix
Mp = p * q * dx
self._bilinear_form = Mutilde + Qt + Q + Mp
# Boundary conditions
self._bcs = [DirichletBC(self._hdiv_space, 0.0, "bottom"),
DirichletBC(self._hdiv_space, 0.0, "top")]
|
<commit_before><commit_msg>Add draft of the mixed operator for the gravity wave problem<commit_after>from __future__ import absolute_import, print_function, division
from firedrake import *
from function_spaces import generate_function_spaces
from vertical_normal import VerticalNormal
class MixedOperator(object):
"""A class describing the operator of the velocity-pressure sub-system
of the mixed (Velocity-Pressure-Buoyancy) gravity wave system.
This is obtained after eliminating the buoyancy unknown to arrive
at a Helmholtz-like saddle point system.
"""
def __init__(self, mesh, dt, c, N):
"""Constructor for the MixedSubSystem class.
:arg mesh: An Earth-like extruded mesh.
:arg dt: A positive real number denoting the time step size.
:arg c: A positive real number denoting the speed of sound.
:arg N: A positive real number denoting the buoyancy frequency.
"""
super(MixedOperator, self).__init__()
W2, W3, Wb = generate_function_spaces(mesh, degree=1)
self._mesh = mesh
self._hdiv_space = W2
self._L2_space = W3
self._Wb = Wb
self._dt = dt
self._c = c
self._N = N
# Constants from eliminating buoyancy
self._omega_N2 = Constant((0.5*dt*N) ** 2)
self._dt_half = Constant(0.5*dt)
self._dt_half_N2 = Constant(0.5*dt*N**2)
self._dt_half_c2 = Constant(0.5*dt*c**2)
self._W2W3 = W2 * W3
u, p = TrialFunctions(self._W2W3)
v, q = TestFunctions(self._W2W3)
self._khat = VerticalNormal(self._mesh)
# Modified velocity mass matrix
Mutilde = (dot(v, u) + self._omega_N2 * dot(v, self._khat.khat) *
dot(self._khat.khat, u)) * dx
# Off-diagonal blocks
Qt = (-self._dt_half * div(v) * p) * dx
Q = self._dt_half_c2 * q * div(u) * dx
# Pressure mass matrix
Mp = p * q * dx
self._bilinear_form = Mutilde + Qt + Q + Mp
# Boundary conditions
self._bcs = [DirichletBC(self._hdiv_space, 0.0, "bottom"),
DirichletBC(self._hdiv_space, 0.0, "top")]
|
|
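A note on the `_omega_N2` factor assembled into `Mutilde`. A sketch of where it comes from, assuming the standard linearised Boussinesq gravity wave system and an implicit midpoint discretisation (the commit does not spell out this derivation):

\partial_t u + \nabla p = b\,\hat{k}, \qquad \partial_t b + N^2\,\hat{k}\cdot u = 0, \qquad \partial_t p + c^2\,\nabla\cdot u = 0

The midpoint buoyancy update b^{n+1} = b^n - \tfrac{\Delta t}{2} N^2\,\hat{k}\cdot(u^n + u^{n+1}) can be substituted into the velocity update, which moves the buoyancy coupling onto the velocity block:

(v,\, u^{n+1}) + \left(\tfrac{\Delta t\, N}{2}\right)^2 (v\cdot\hat{k})(\hat{k}\cdot u^{n+1}) + \dots

This is the weak form that `Mutilde` assembles, with \omega_N^2 = (0.5\,\Delta t\, N)^2 matching `self._omega_N2` above.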
d3da3bdd178de4cbda5b42db11bf17cab73056e5
|
python/opencv/opencv_2/read_image.py
|
python/opencv/opencv_2/read_image.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
OpenCV - Read image: read an image given in arguments
Required: opencv library (Debian: aptitude install python-opencv)
See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#display-an-image
O'Reilly's book "Learning OpenCV" (first edition) p.17 (OpenCV for C)
"""
from __future__ import print_function
import cv2 as cv
import argparse
def main():
# Parse the program options (get the path of the image file to read)
parser = argparse.ArgumentParser(description='An opencv snippet.')
parser.add_argument("--infile", "-i", help="The picture file to read", required=True, metavar="FILE")
args = parser.parse_args()
infile_str = args.infile
# OpenCV
# imread_flags is a flag which specifies the way the image should be read:
# - cv.IMREAD_COLOR loads a color image. Any transparency of the image will be neglected. It is the default flag.
# - cv.IMREAD_GRAYSCALE loads the image in grayscale mode
# - cv.IMREAD_UNCHANGED loads the image as such, including the alpha channel
imread_flags = cv.IMREAD_UNCHANGED
img_np_array = cv.imread(infile_str, imread_flags)
print(type(img_np_array))
print(img_np_array)
if __name__ == '__main__':
main()
|
Add a snippet (Python OpenCV).
|
Add a snippet (Python OpenCV).
|
Python
|
mit
|
jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets
|
Add a snippet (Python OpenCV).
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
OpenCV - Read image: read an image given in arguments
Required: opencv library (Debian: aptitude install python-opencv)
See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#display-an-image
O'Reilly's book "Learning OpenCV" (first edition) p.17 (OpenCV for C)
"""
from __future__ import print_function
import cv2 as cv
import argparse
def main():
# Parse the program options (get the path of the image file to read)
parser = argparse.ArgumentParser(description='An opencv snippet.')
parser.add_argument("--infile", "-i", help="The picture file to read", required=True, metavar="FILE")
args = parser.parse_args()
infile_str = args.infile
# OpenCV
# imread_flags is a flag which specifies the way the image should be read:
# - cv.IMREAD_COLOR loads a color image. Any transparency of the image will be neglected. It is the default flag.
# - cv.IMREAD_GRAYSCALE loads the image in grayscale mode
# - cv.IMREAD_UNCHANGED loads the image as such, including the alpha channel
imread_flags = cv.IMREAD_UNCHANGED
img_np_array = cv.imread(infile_str, imread_flags)
print(type(img_np_array))
print(img_np_array)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a snippet (Python OpenCV).<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
OpenCV - Read image: read an image given in arguments
Required: opencv library (Debian: aptitude install python-opencv)
See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#display-an-image
O'Reilly's book "Learning OpenCV" (first edition) p.17 (OpenCV for C)
"""
from __future__ import print_function
import cv2 as cv
import argparse
def main():
# Parse the program options (get the path of the image file to read)
parser = argparse.ArgumentParser(description='An opencv snippet.')
parser.add_argument("--infile", "-i", help="The picture file to read", required=True, metavar="FILE")
args = parser.parse_args()
infile_str = args.infile
# OpenCV
# imread_flags is a flag which specifies the way the image should be read:
# - cv.IMREAD_COLOR loads a color image. Any transparency of the image will be neglected. It is the default flag.
# - cv.IMREAD_GRAYSCALE loads the image in grayscale mode
# - cv.IMREAD_UNCHANGED loads the image as such, including the alpha channel
imread_flags = cv.IMREAD_UNCHANGED
img_np_array = cv.imread(infile_str, imread_flags)
print(type(img_np_array))
print(img_np_array)
if __name__ == '__main__':
main()
|
Add a snippet (Python OpenCV).#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
OpenCV - Read image: read an image given in arguments
Required: opencv library (Debian: aptitude install python-opencv)
See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#display-an-image
O'Reilly's book "Learning OpenCV" (first edition) p.17 (OpenCV for C)
"""
from __future__ import print_function
import cv2 as cv
import argparse
def main():
# Parse the program options (get the path of the image file to read)
parser = argparse.ArgumentParser(description='An opencv snippet.')
parser.add_argument("--infile", "-i", help="The picture file to read", required=True, metavar="FILE")
args = parser.parse_args()
infile_str = args.infile
# OpenCV
# imread_flags is a flag which specifies the way the image should be read:
# - cv.IMREAD_COLOR loads a color image. Any transparency of the image will be neglected. It is the default flag.
# - cv.IMREAD_GRAYSCALE loads the image in grayscale mode
# - cv.IMREAD_UNCHANGED loads the image as such, including the alpha channel
imread_flags = cv.IMREAD_UNCHANGED
img_np_array = cv.imread(infile_str, imread_flags)
print(type(img_np_array))
print(img_np_array)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a snippet (Python OpenCV).<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
OpenCV - Read image: read an image given in arguments
Required: opencv library (Debian: aptitude install python-opencv)
See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#display-an-image
O'Reilly's book "Learning OpenCV" (first edition) p.17 (OpenCV for C)
"""
from __future__ import print_function
import cv2 as cv
import argparse
def main():
# Parse the program options (get the path of the image file to read)
parser = argparse.ArgumentParser(description='An opencv snippet.')
parser.add_argument("--infile", "-i", help="The picture file to read", required=True, metavar="FILE")
args = parser.parse_args()
infile_str = args.infile
# OpenCV
# imread_flags is a flag which specifies the way the image should be read:
# - cv.IMREAD_COLOR loads a color image. Any transparency of the image will be neglected. It is the default flag.
# - cv.IMREAD_GRAYSCALE loads the image in grayscale mode
# - cv.IMREAD_UNCHANGED loads the image as such, including the alpha channel
imread_flags = cv.IMREAD_UNCHANGED
img_np_array = cv.imread(infile_str, imread_flags)
print(type(img_np_array))
print(img_np_array)
if __name__ == '__main__':
main()
|
|
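One behaviour worth guarding against when reusing this snippet: cv2.imread does not raise on a missing or unreadable file, it returns None. A minimal extension sketch (the file name is illustrative, not part of the commit):

import cv2 as cv

img = cv.imread("example.png", cv.IMREAD_UNCHANGED)
if img is None:  # imread signals failure by returning None, not by raising
    raise IOError("cannot read image: example.png")
print(img.shape)  # (rows, cols) for grayscale, (rows, cols, channels) otherwise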
fcbed0346d9f5abd1c4e2f6b98f28818a73d9ec5
|
users/migrations/0010_add_fields_to_users_applications.py
|
users/migrations/0010_add_fields_to_users_applications.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-08 10:36
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('users', '0009_add_ad_groups'),
]
operations = [
migrations.AddField(
model_name='application',
name='created',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AddField(
model_name='application',
name='updated',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='application',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False),
),
]
|
Add missing migration to users
|
Add missing migration to users
|
Python
|
mit
|
mikkokeskinen/tunnistamo,mikkokeskinen/tunnistamo
|
Add missing migration to users
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-08 10:36
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('users', '0009_add_ad_groups'),
]
operations = [
migrations.AddField(
model_name='application',
name='created',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AddField(
model_name='application',
name='updated',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='application',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False),
),
]
|
<commit_before><commit_msg>Add missing migration to users<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-08 10:36
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('users', '0009_add_ad_groups'),
]
operations = [
migrations.AddField(
model_name='application',
name='created',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AddField(
model_name='application',
name='updated',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='application',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False),
),
]
|
Add missing migration to users# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-08 10:36
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('users', '0009_add_ad_groups'),
]
operations = [
migrations.AddField(
model_name='application',
name='created',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AddField(
model_name='application',
name='updated',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='application',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False),
),
]
|
<commit_before><commit_msg>Add missing migration to users<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-08 10:36
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('users', '0009_add_ad_groups'),
]
operations = [
migrations.AddField(
model_name='application',
name='created',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AddField(
model_name='application',
name='updated',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='application',
name='id',
field=models.BigAutoField(primary_key=True, serialize=False),
),
]
|
|
23be51d959763235df8ab44b7dc90047c33002c3
|
tests/test_game_parser.py
|
tests/test_game_parser.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import requests
from lxml import html
from parsers.game_parser import GameParser
def test_2013_centre_bell():
url = "http://www.nhl.com/scores/htmlreports/20132014/ES020001.HTM"
game_id = str(os.path.splitext(os.path.basename(url))[0][2:])
gp = GameParser(game_id, get_data(url))
gp.load_data()
attendance, venue = gp.retrieve_game_attendance_venue()
assert attendance == 21273
assert venue == "Centre Bell"
def get_data(url):
r = requests.get(url)
return html.fromstring(r.text)
|
Add initial version of game parser test script
|
Add initial version of game parser test script
|
Python
|
mit
|
leaffan/pynhldb
|
Add initial version of game parser test script
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import requests
from lxml import html
from parsers.game_parser import GameParser
def test_2013_centre_bell():
url = "http://www.nhl.com/scores/htmlreports/20132014/ES020001.HTM"
game_id = str(os.path.splitext(os.path.basename(url))[0][2:])
gp = GameParser(game_id, get_data(url))
gp.load_data()
attendance, venue = gp.retrieve_game_attendance_venue()
assert attendance == 21273
assert venue == "Centre Bell"
def get_data(url):
r = requests.get(url)
return html.fromstring(r.text)
|
<commit_before><commit_msg>Add initial version of game parser test script<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import requests
from lxml import html
from parsers.game_parser import GameParser
def test_2013_centre_bell():
url = "http://www.nhl.com/scores/htmlreports/20132014/ES020001.HTM"
game_id = str(os.path.splitext(os.path.basename(url))[0][2:])
gp = GameParser(game_id, get_data(url))
gp.load_data()
attendance, venue = gp.retrieve_game_attendance_venue()
assert attendance == 21273
assert venue == "Centre Bell"
def get_data(url):
r = requests.get(url)
return html.fromstring(r.text)
|
Add initial version of game parser test script#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import requests
from lxml import html
from parsers.game_parser import GameParser
def test_2013_centre_bell():
url = "http://www.nhl.com/scores/htmlreports/20132014/ES020001.HTM"
game_id = str(os.path.splitext(os.path.basename(url))[0][2:])
gp = GameParser(game_id, get_data(url))
gp.load_data()
attendance, venue = gp.retrieve_game_attendance_venue()
assert attendance == 21273
assert venue == "Centre Bell"
def get_data(url):
r = requests.get(url)
return html.fromstring(r.text)
|
<commit_before><commit_msg>Add initial version of game parser test script<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import requests
from lxml import html
from parsers.game_parser import GameParser
def test_2013_centre_bell():
url = "http://www.nhl.com/scores/htmlreports/20132014/ES020001.HTM"
game_id = str(os.path.splitext(os.path.basename(url))[0][2:])
gp = GameParser(game_id, get_data(url))
gp.load_data()
attendance, venue = gp.retrieve_game_attendance_venue()
assert attendance == 21273
assert venue == "Centre Bell"
def get_data(url):
r = requests.get(url)
return html.fromstring(r.text)
|
|
0267f8a61e067c7b1a92cb307de36c477f79b455
|
snippets/feature_generation.py
|
snippets/feature_generation.py
|
from sklearn.preprocessing import PolynomialFeatures
from sklearn.decomposition import PCA
import numpy as np
X = np.arange(9).reshape(3, 3)
print X
poly = PolynomialFeatures(2)
print poly.fit_transform(X)
poly = PolynomialFeatures(interaction_only=True)
print poly.fit_transform(X)
pca = PCA(n_components=1)
print pca.fit_transform(X)
|
Add snippet with code to generate new features
|
Add snippet with code to generate new features
|
Python
|
mit
|
davidgasquez/kaggle-airbnb
|
Add snippet with code to generate new features
|
from sklearn.preprocessing import PolynomialFeatures
from sklearn.decomposition import PCA
import numpy as np
X = np.arange(9).reshape(3, 3)
print X
poly = PolynomialFeatures(2)
print poly.fit_transform(X)
poly = PolynomialFeatures(interaction_only=True)
print poly.fit_transform(X)
pca = PCA(n_components=1)
print pca.fit_transform(X)
|
<commit_before><commit_msg>Add snippet with code to generate new features<commit_after>
|
from sklearn.preprocessing import PolynomialFeatures
from sklearn.decomposition import PCA
import numpy as np
X = np.arange(9).reshape(3, 3)
print X
poly = PolynomialFeatures(2)
print poly.fit_transform(X)
poly = PolynomialFeatures(interaction_only=True)
print poly.fit_transform(X)
pca = PCA(n_components=1)
print pca.fit_transform(X)
|
Add snippet with code to generate new featuresfrom sklearn.preprocessing import PolynomialFeatures
from sklearn.decomposition import PCA
import numpy as np
X = np.arange(9).reshape(3, 3)
print X
poly = PolynomialFeatures(2)
print poly.fit_transform(X)
poly = PolynomialFeatures(interaction_only=True)
print poly.fit_transform(X)
pca = PCA(n_components=1)
print pca.fit_transform(X)
|
<commit_before><commit_msg>Add snippet with code to generate new features<commit_after>from sklearn.preprocessing import PolynomialFeatures
from sklearn.decomposition import PCA
import numpy as np
X = np.arange(9).reshape(3, 3)
print X
poly = PolynomialFeatures(2)
print poly.fit_transform(X)
poly = PolynomialFeatures(interaction_only=True)
print poly.fit_transform(X)
pca = PCA(n_components=1)
print pca.fit_transform(X)
|
|
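To make the snippet's output concrete: for a 3-feature input, PolynomialFeatures(2) emits the bias column, the 3 original features, and all degree-2 monomials (10 columns in total), while interaction_only=True keeps only the bias, the features, and the cross terms (7 columns). A small worked trace of the first row:

from sklearn.preprocessing import PolynomialFeatures
import numpy as np

X = np.arange(9).reshape(3, 3)  # first row is [0, 1, 2]
# Columns: [1, x0, x1, x2, x0^2, x0*x1, x0*x2, x1^2, x1*x2, x2^2]
print(PolynomialFeatures(2).fit_transform(X)[0])
# -> [1. 0. 1. 2. 0. 0. 0. 1. 2. 4.]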
e308575d9723c90d3a15e5e8de45b0232c5d0b75
|
parse_ast.py
|
parse_ast.py
|
"""Parse python code into the abstract syntax tree and represent as JSON"""
from __future__ import print_function
import ast
from itertools import chain
import json
import sys
def dictify(obj):
if hasattr(obj, "__dict__"):
result = {k: dictify(v) for k, v in chain(obj.__dict__.items(), [("classname", obj.__class__.__name__)])}
return result
elif isinstance(obj, list):
return [dictify(x) for x in obj]
else:
return obj
def parse_file(filename):
with open(filename) as f:
source = f.read()
return ast.parse(source, filename=filename, mode="exec")
def main(args):
if len(args) != 1 or args[0].lower() in ("help", "h", "-h", "--help"):
print(__doc__)
else:
filename = args[0]
ast_node = parse_file(filename)
ast_dict = dictify(ast_node)
ast_json = json.dumps(ast_dict, sort_keys=True, indent=4, separators=(',', ': '))
print(ast_json)
if __name__ == "__main__":
main(sys.argv[1:])
|
Add basic ast to json converter
|
Add basic ast to json converter
|
Python
|
mit
|
RishiRamraj/wensleydale
|
Add basic ast to json converter
|
"""Parse python code into the abstract syntax tree and represent as JSON"""
from __future__ import print_function
import ast
from itertools import chain
import json
import sys
def dictify(obj):
if hasattr(obj, "__dict__"):
result = {k: dictify(v) for k, v in chain(obj.__dict__.items(), [("classname", obj.__class__.__name__)])}
return result
elif isinstance(obj, list):
return [dictify(x) for x in obj]
else:
return obj
def parse_file(filename):
with open(filename) as f:
source = f.read()
return ast.parse(source, filename=filename, mode="exec")
def main(args):
if len(args) != 1 or args[0].lower() in ("help", "h", "-h", "--help"):
print(__doc__)
else:
filename = args[0]
ast_node = parse_file(filename)
ast_dict = dictify(ast_node)
ast_json = json.dumps(ast_dict, sort_keys=True, indent=4, separators=(',', ': '))
print(ast_json)
if __name__ == "__main__":
main(sys.argv[1:])
|
<commit_before><commit_msg>Add basic ast to json converter<commit_after>
|
"""Parse python code into the abstract syntax tree and represent as JSON"""
from __future__ import print_function
import ast
from itertools import chain
import json
import sys
def dictify(obj):
if hasattr(obj, "__dict__"):
result = {k: dictify(v) for k, v in chain(obj.__dict__.items(), [("classname", obj.__class__.__name__)])}
return result
elif isinstance(obj, list):
return [dictify(x) for x in obj]
else:
return obj
def parse_file(filename):
with open(filename) as f:
source = f.read()
return ast.parse(source, filename=filename, mode="exec")
def main(args):
if len(args) != 1 or args[0].lower() in ("help", "h", "-h", "--help"):
print(__doc__)
else:
filename = args[0]
ast_node = parse_file(filename)
ast_dict = dictify(ast_node)
ast_json = json.dumps(ast_dict, sort_keys=True, indent=4, separators=(',', ': '))
print(ast_json)
if __name__ == "__main__":
main(sys.argv[1:])
|
Add basic ast to json converter"""Parse python code into the abstract syntax tree and represent as JSON"""
from __future__ import print_function
import ast
from itertools import chain
import json
import sys
def dictify(obj):
if hasattr(obj, "__dict__"):
result = {k: dictify(v) for k, v in chain(obj.__dict__.items(), [("classname", obj.__class__.__name__)])}
return result
elif isinstance(obj, list):
return [dictify(x) for x in obj]
else:
return obj
def parse_file(filename):
with open(filename) as f:
source = f.read()
return ast.parse(source, filename=filename, mode="exec")
def main(args):
if len(args) != 1 or args[0].lower() in ("help", "h", "-h", "--help"):
print(__doc__)
else:
filename = args[0]
ast_node = parse_file(filename)
ast_dict = dictify(ast_node)
ast_json = json.dumps(ast_dict, sort_keys=True, indent=4, separators=(',', ': '))
print(ast_json)
if __name__ == "__main__":
main(sys.argv[1:])
|
<commit_before><commit_msg>Add basic ast to json converter<commit_after>"""Parse python code into the abstract syntax tree and represent as JSON"""
from __future__ import print_function
import ast
from itertools import chain
import json
import sys
def dictify(obj):
if hasattr(obj, "__dict__"):
result = {k: dictify(v) for k, v in chain(obj.__dict__.items(), [("classname", obj.__class__.__name__)])}
return result
elif isinstance(obj, list):
return [dictify(x) for x in obj]
else:
return obj
def parse_file(filename):
with open(filename) as f:
source = f.read()
return ast.parse(source, filename=filename, mode="exec")
def main(args):
if len(args) != 1 or args[0].lower() in ("help", "h", "-h", "--help"):
print(__doc__)
else:
filename = args[0]
ast_node = parse_file(filename)
ast_dict = dictify(ast_node)
ast_json = json.dumps(ast_dict, sort_keys=True, indent=4, separators=(',', ': '))
print(ast_json)
if __name__ == "__main__":
main(sys.argv[1:])
|
|
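A minimal usage sketch, assuming the script above is saved as parse_ast.py and importable (the __main__ guard makes the import side-effect free):

import ast
from parse_ast import dictify

tree = ast.parse("x = 1")
d = dictify(tree)
print(d["classname"])             # -> Module
print(d["body"][0]["classname"])  # -> Assign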
ef901f36d8eb8a3da1c747e64a79bd2fbad4878d
|
dev/clean_parse_tables.py
|
dev/clean_parse_tables.py
|
#!/usr/bin/env python
"""
A utility to fix PLY-generated lex and yacc tables to be Python 2 and
3 compatible.
"""
import os
for root, dirs, files in os.walk(os.path.join(os.path.dirname(__file__), '..')):
for fname in files:
if not (fname.endswith('lextab.py') or fname.endswith('parsetab.py')):
continue
path = os.path.join(root, fname)
with open(path, 'rb') as fd:
lines = fd.readlines()
with open(path, 'wb') as fd:
fd.write("# Licensed under a 3-clause BSD style license - see LICENSE.rst\n")
fd.write("from __future__ import (absolute_import, division, print_function, unicode_literals)\n")
fd.write('\n')
lines = [x.replace("u'", "'").replace('u"', '"') for x in lines]
lines = [x for x in lines if not (fname in x and x[0] == '#')]
fd.write(''.join(lines))
|
Add tool to fix up parsing tables
|
Add tool to fix up parsing tables
|
Python
|
bsd-3-clause
|
lpsinger/astropy,funbaker/astropy,tbabej/astropy,larrybradley/astropy,DougBurke/astropy,mhvk/astropy,DougBurke/astropy,larrybradley/astropy,MSeifert04/astropy,MSeifert04/astropy,AustereCuriosity/astropy,kelle/astropy,mhvk/astropy,mhvk/astropy,StuartLittlefair/astropy,AustereCuriosity/astropy,StuartLittlefair/astropy,kelle/astropy,mhvk/astropy,joergdietrich/astropy,astropy/astropy,larrybradley/astropy,tbabej/astropy,lpsinger/astropy,kelle/astropy,astropy/astropy,bsipocz/astropy,AustereCuriosity/astropy,MSeifert04/astropy,larrybradley/astropy,larrybradley/astropy,saimn/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,joergdietrich/astropy,dhomeier/astropy,pllim/astropy,mhvk/astropy,kelle/astropy,funbaker/astropy,astropy/astropy,joergdietrich/astropy,pllim/astropy,StuartLittlefair/astropy,bsipocz/astropy,DougBurke/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,lpsinger/astropy,stargaser/astropy,StuartLittlefair/astropy,pllim/astropy,saimn/astropy,DougBurke/astropy,aleksandr-bakanov/astropy,tbabej/astropy,dhomeier/astropy,AustereCuriosity/astropy,saimn/astropy,StuartLittlefair/astropy,funbaker/astropy,kelle/astropy,aleksandr-bakanov/astropy,astropy/astropy,dhomeier/astropy,dhomeier/astropy,tbabej/astropy,stargaser/astropy,stargaser/astropy,pllim/astropy,dhomeier/astropy,astropy/astropy,saimn/astropy,saimn/astropy,bsipocz/astropy,stargaser/astropy,bsipocz/astropy,pllim/astropy,funbaker/astropy,tbabej/astropy,lpsinger/astropy,lpsinger/astropy,AustereCuriosity/astropy,MSeifert04/astropy
|
Add tool to fix up parsing tables
|
#!/usr/bin/env python
"""
A utility to fix PLY-generated lex and yacc tables to be Python 2 and
3 compatible.
"""
import os
for root, dirs, files in os.walk(os.path.join(os.path.dirname(__file__), '..')):
for fname in files:
if not (fname.endswith('lextab.py') or fname.endswith('parsetab.py')):
continue
path = os.path.join(root, fname)
with open(path, 'rb') as fd:
lines = fd.readlines()
with open(path, 'wb') as fd:
fd.write("# Licensed under a 3-clause BSD style license - see LICENSE.rst\n")
fd.write("from __future__ import (absolute_import, division, print_function, unicode_literals)\n")
fd.write('\n')
lines = [x.replace("u'", "'").replace('u"', '"') for x in lines]
lines = [x for x in lines if not (fname in x and x[0] == '#')]
fd.write(''.join(lines))
|
<commit_before><commit_msg>Add tool to fix up parsing tables<commit_after>
|
#!/usr/bin/env python
"""
A utility to fix PLY-generated lex and yacc tables to be Python 2 and
3 compatible.
"""
import os
for root, dirs, files in os.walk(os.path.join(os.path.dirname(__file__), '..')):
for fname in files:
if not (fname.endswith('lextab.py') or fname.endswith('parsetab.py')):
continue
path = os.path.join(root, fname)
with open(path, 'rb') as fd:
lines = fd.readlines()
with open(path, 'wb') as fd:
fd.write("# Licensed under a 3-clause BSD style license - see LICENSE.rst\n")
fd.write("from __future__ import (absolute_import, division, print_function, unicode_literals)\n")
fd.write('\n')
lines = [x.replace("u'", "'").replace('u"', '"') for x in lines]
lines = [x for x in lines if not (fname in x and x[0] == '#')]
fd.write(''.join(lines))
|
Add tool to fix up parsing tables#!/usr/bin/env python
"""
A utility to fix PLY-generated lex and yacc tables to be Python 2 and
3 compatible.
"""
import os
for root, dirs, files in os.walk(os.path.join(os.path.dirname(__file__), '..')):
for fname in files:
if not (fname.endswith('lextab.py') or fname.endswith('parsetab.py')):
continue
path = os.path.join(root, fname)
with open(path, 'rb') as fd:
lines = fd.readlines()
with open(path, 'wb') as fd:
fd.write("# Licensed under a 3-clause BSD style license - see LICENSE.rst\n")
fd.write("from __future__ import (absolute_import, division, print_function, unicode_literals)\n")
fd.write('\n')
lines = [x.replace("u'", "'").replace('u"', '"') for x in lines]
lines = [x for x in lines if not (fname in x and x[0] == '#')]
fd.write(''.join(lines))
|
<commit_before><commit_msg>Add tool to fix up parsing tables<commit_after>#!/usr/bin/env python
"""
A utility to fix PLY-generated lex and yacc tables to be Python 2 and
3 compatible.
"""
import os
for root, dirs, files in os.walk(os.path.join(os.path.dirname(__file__), '..')):
for fname in files:
if not (fname.endswith('lextab.py') or fname.endswith('parsetab.py')):
continue
path = os.path.join(root, fname)
with open(path, 'rb') as fd:
lines = fd.readlines()
with open(path, 'wb') as fd:
fd.write("# Licensed under a 3-clause BSD style license - see LICENSE.rst\n")
fd.write("from __future__ import (absolute_import, division, print_function, unicode_literals)\n")
fd.write('\n')
lines = [x.replace("u'", "'").replace('u"', '"') for x in lines]
lines = [x for x in lines if not (fname in x and x[0] == '#')]
fd.write(''.join(lines))
|
|
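The rewrite itself is a plain textual substitution plus a header filter. For example, a PLY-generated line of the form

_lextokens = {u'NAME': 1}

becomes

_lextokens = {'NAME': 1}

and any comment line that names the table file (PLY's own signature header) is dropped, so the license and future-import preamble written above it stays first. The _lextokens name is illustrative of typical PLY output rather than taken from this commit.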
374278b45dd8be76ef897881f61144ae79318827
|
utils/test_sims.py
|
utils/test_sims.py
|
import unittest
import random
import math
import sims
TRIALS = 1000 # number of trials to run per test case
MAX_DEGREE = 1.5 # max degree of freedom; refer to https://en.wikipedia.org/wiki/Pearson's_chi-squared_test
class TestCoinSim(unittest.TestCase):
def runTest(self):
# bad input
self.assertRaises(Exception, sims.CoinSim, 0, 1, 1) # bad radius
self.assertRaises(Exception, sims.CoinSim, -1, 1, 1) # bad radius
self.assertRaises(Exception, sims.CoinSim, 1, 0, 1) # bad gap
self.assertRaises(Exception, sims.CoinSim, 1, -1, 1) # bad gap
self.assertRaises(Exception, sims.CoinSim, 1, 1, 0) # bad length
self.assertRaises(Exception, sims.CoinSim, 1, 1, -1) # bad length
# if radius*2 >= gap length, it should always hit
hits = sims.CoinSim(1, 2, TRIALS).run_trials()
self.assertEquals(hits, TRIALS, "while radius*2 == gap, coin does not always hit")
hits = sims.CoinSim(1, 1.5, TRIALS).run_trials()
self.assertEquals(hits, TRIALS, "while radius*2 > gap, coin does not always hit")
degrees = []
for i in range(1000):
radius = random.random()
gap = random.random()
while radius*2 >= gap:
radius = random.random()
gap = random.random()
# if radius*2 < gap length
# average degree of freedom should be < MAX_DEGREE
else:
hits = sims.CoinSim(radius, gap, TRIALS).run_trials()
pred_hits = self._predict_hits(radius, gap, TRIALS)
# refer to https://en.wikipedia.org/wiki/Pearson%27s_chi-squared_test
# below equation was simplified after substituting original
chi_squared = 2*((hits - pred_hits)**2)/pred_hits
degrees.append(chi_squared)
avg_degree = sum(degrees)/len(degrees)
self.assertTrue(avg_degree < MAX_DEGREE, "while diameter < gap, average degree of freedom is %f > %f" % (avg_degree, MAX_DEGREE))
def _predict_hits(self, radius, gap, trials):
pred_hits = self._predict_prob(radius, gap)*trials
return pred_hits
def _predict_prob(self, radius, gap):
pred_prob = float(radius*2)/gap
if pred_prob > 1.0:
return 1.0
return pred_prob
if __name__ == '__main__':
unittest.main()
|
Add test suite for coin simulation.
|
Add test suite for coin simulation.
|
Python
|
mit
|
wei2912/bce-simulation,wei2912/bce-simulation,wei2912/bce-simulation,wei2912/bce-simulation
|
Add test suite for coin simulation.
|
import unittest
import random
import math
import sims
TRIALS = 1000 # number of trials to run per test case
MAX_DEGREE = 1.5 # max degree of freedom; refer to https://en.wikipedia.org/wiki/Pearson's_chi-squared_test
class TestCoinSim(unittest.TestCase):
def runTest(self):
# bad input
self.assertRaises(Exception, sims.CoinSim, 0, 1, 1) # bad radius
self.assertRaises(Exception, sims.CoinSim, -1, 1, 1) # bad radius
self.assertRaises(Exception, sims.CoinSim, 1, 0, 1) # bad gap
self.assertRaises(Exception, sims.CoinSim, 1, -1, 1) # bad gap
self.assertRaises(Exception, sims.CoinSim, 1, 1, 0) # bad length
self.assertRaises(Exception, sims.CoinSim, 1, 1, -1) # bad length
# if radius*2 >= gap length, it should always hit
hits = sims.CoinSim(1, 2, TRIALS).run_trials()
self.assertEquals(hits, TRIALS, "while radius*2 == gap, coin does not always hit")
hits = sims.CoinSim(1, 1.5, TRIALS).run_trials()
self.assertEquals(hits, TRIALS, "while radius*2 > gap, coin does not always hit")
degrees = []
for i in range(1000):
radius = random.random()
gap = random.random()
while radius*2 >= gap:
radius = random.random()
gap = random.random()
# if radius*2 < gap length
# average degree of freedom should be < MAX_DEGREE
else:
hits = sims.CoinSim(radius, gap, TRIALS).run_trials()
pred_hits = self._predict_hits(radius, gap, TRIALS)
# refer to https://en.wikipedia.org/wiki/Pearson%27s_chi-squared_test
# below equation was simplified after substituting original
chi_squared = 2*((hits - pred_hits)**2)/pred_hits
degrees.append(chi_squared)
avg_degree = sum(degrees)/len(degrees)
self.assertTrue(avg_degree < MAX_DEGREE, "while diameter < gap, average degree of freedom is %f > %f" % (avg_degree, MAX_DEGREE))
def _predict_hits(self, radius, gap, trials):
pred_hits = self._predict_prob(radius, gap)*trials
return pred_hits
def _predict_prob(self, radius, gap):
pred_prob = float(radius*2)/gap
if pred_prob > 1.0:
return 1.0
return pred_prob
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add test suite for coin simulation.<commit_after>
|
import unittest
import random
import math
import sims
TRIALS = 1000 # number of trials to run per test case
MAX_DEGREE = 1.5 # max degree of freedom; refer to https://en.wikipedia.org/wiki/Pearson's_chi-squared_test
class TestCoinSim(unittest.TestCase):
def runTest(self):
# bad input
self.assertRaises(Exception, sims.CoinSim, 0, 1, 1) # bad radius
self.assertRaises(Exception, sims.CoinSim, -1, 1, 1) # bad radius
self.assertRaises(Exception, sims.CoinSim, 1, 0, 1) # bad gap
self.assertRaises(Exception, sims.CoinSim, 1, -1, 1) # bad gap
self.assertRaises(Exception, sims.CoinSim, 1, 1, 0) # bad length
self.assertRaises(Exception, sims.CoinSim, 1, 1, -1) # bad length
# if radius*2 >= gap length, it should always hit
hits = sims.CoinSim(1, 2, TRIALS).run_trials()
self.assertEquals(hits, TRIALS, "while radius*2 == gap, coin does not always hit")
hits = sims.CoinSim(1, 1.5, TRIALS).run_trials()
self.assertEquals(hits, TRIALS, "while radius*2 > gap, coin does not always hit")
degrees = []
for i in range(1000):
radius = random.random()
gap = random.random()
while radius*2 >= gap:
radius = random.random()
gap = random.random()
# if radius*2 < gap length
# average degree of freedom should be < MAX_DEGREE
else:
hits = sims.CoinSim(radius, gap, TRIALS).run_trials()
pred_hits = self._predict_hits(radius, gap, TRIALS)
# refer to https://en.wikipedia.org/wiki/Pearson%27s_chi-squared_test
# below equation was simplified after substituting original
chi_squared = 2*((hits - pred_hits)**2)/pred_hits
degrees.append(chi_squared)
avg_degree = sum(degrees)/len(degrees)
self.assertTrue(avg_degree < MAX_DEGREE, "while diameter < gap, average degree of freedom is %f > %f" % (avg_degree, MAX_DEGREE))
def _predict_hits(self, radius, gap, trials):
pred_hits = self._predict_prob(radius, gap)*trials
return pred_hits
def _predict_prob(self, radius, gap):
pred_prob = float(radius*2)/gap
if pred_prob > 1.0:
return 1.0
return pred_prob
if __name__ == '__main__':
unittest.main()
|
Add test suite for coin simulation.import unittest
import random
import math
import sims
TRIALS = 1000 # number of trials to run per test case
MAX_DEGREE = 1.5 # max degree of freedom; refer to https://en.wikipedia.org/wiki/Pearson's_chi-squared_test
class TestCoinSim(unittest.TestCase):
def runTest(self):
# bad input
self.assertRaises(Exception, sims.CoinSim, 0, 1, 1) # bad radius
self.assertRaises(Exception, sims.CoinSim, -1, 1, 1) # bad radius
self.assertRaises(Exception, sims.CoinSim, 1, 0, 1) # bad gap
self.assertRaises(Exception, sims.CoinSim, 1, -1, 1) # bad gap
self.assertRaises(Exception, sims.CoinSim, 1, 1, 0) # bad length
self.assertRaises(Exception, sims.CoinSim, 1, 1, -1) # bad length
# if radius*2 >= gap length, it should always hit
hits = sims.CoinSim(1, 2, TRIALS).run_trials()
self.assertEquals(hits, TRIALS, "while radius*2 == gap, coin does not always hit")
hits = sims.CoinSim(1, 1.5, TRIALS).run_trials()
self.assertEquals(hits, TRIALS, "while radius*2 > gap, coin does not always hit")
degrees = []
for i in range(1000):
radius = random.random()
gap = random.random()
while radius*2 >= gap:
radius = random.random()
gap = random.random()
# if radius*2 < gap length
# average degree of freedom should be < MAX_DEGREE
else:
hits = sims.CoinSim(radius, gap, TRIALS).run_trials()
pred_hits = self._predict_hits(radius, gap, TRIALS)
# refer to https://en.wikipedia.org/wiki/Pearson%27s_chi-squared_test
# below equation was simplified after substituting original
chi_squared = 2*((hits - pred_hits)**2)/pred_hits
degrees.append(chi_squared)
avg_degree = sum(degrees)/len(degrees)
self.assertTrue(avg_degree < MAX_DEGREE, "while diameter < gap, average degree of freedom is %f > %f" % (avg_degree, MAX_DEGREE))
def _predict_hits(self, radius, gap, trials):
pred_hits = self._predict_prob(radius, gap)*trials
return pred_hits
def _predict_prob(self, radius, gap):
pred_prob = float(radius*2)/gap
if pred_prob > 1.0:
return 1.0
return pred_prob
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add test suite for coin simulation.<commit_after>import unittest
import random
import math
import sims
TRIALS = 1000 # number of trials to run per test case
MAX_DEGREE = 1.5 # max degree of freedom; refer to https://en.wikipedia.org/wiki/Pearson's_chi-squared_test
class TestCoinSim(unittest.TestCase):
def runTest(self):
# bad input
self.assertRaises(Exception, sims.CoinSim, 0, 1, 1) # bad radius
self.assertRaises(Exception, sims.CoinSim, -1, 1, 1) # bad radius
self.assertRaises(Exception, sims.CoinSim, 1, 0, 1) # bad gap
self.assertRaises(Exception, sims.CoinSim, 1, -1, 1) # bad gap
self.assertRaises(Exception, sims.CoinSim, 1, 1, 0) # bad length
self.assertRaises(Exception, sims.CoinSim, 1, 1, -1) # bad length
# if radius*2 >= gap length, it should always hit
hits = sims.CoinSim(1, 2, TRIALS).run_trials()
self.assertEquals(hits, TRIALS, "while radius*2 == gap, coin does not always hit")
hits = sims.CoinSim(1, 1.5, TRIALS).run_trials()
self.assertEquals(hits, TRIALS, "while radius*2 > gap, coin does not always hit")
degrees = []
for i in range(1000):
radius = random.random()
gap = random.random()
while radius*2 >= gap:
radius = random.random()
gap = random.random()
# if radius*2 < gap length
# average degree of freedom should be < MAX_DEGREE
else:
hits = sims.CoinSim(radius, gap, TRIALS).run_trials()
pred_hits = self._predict_hits(radius, gap, TRIALS)
# refer to https://en.wikipedia.org/wiki/Pearson%27s_chi-squared_test
# below equation was simplified after substituting original
chi_squared = 2*((hits - pred_hits)**2)/pred_hits
degrees.append(chi_squared)
avg_degree = sum(degrees)/len(degrees)
self.assertTrue(avg_degree < MAX_DEGREE, "while diameter < gap, average degree of freedom is %f > %f" % (avg_degree, MAX_DEGREE))
def _predict_hits(self, radius, gap, trials):
pred_hits = self._predict_prob(radius, gap)*trials
return pred_hits
def _predict_prob(self, radius, gap):
pred_prob = float(radius*2)/gap
if pred_prob > 1.0:
return 1.0
return pred_prob
if __name__ == '__main__':
unittest.main()
|
|
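For reference, the quantity this test averages is the per-run Pearson statistic rather than a degree of freedom. With T trials, observed hits O, and expected hits E, the two-category (hit/miss) form is

\chi^2 = \frac{(O - E)^2}{E} + \frac{(O - E)^2}{T - E} = (O - E)^2\left(\frac{1}{E} + \frac{1}{T - E}\right)

which reduces to the code's 2(O - E)^2 / E exactly when E = T/2; for other expected probabilities the simplified form only approximates the full statistic.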
4afb6229c89237079966ab91bb4d2372818a7f44
|
greedy/fractional_knapsack/python/fractional_knapsack.py
|
greedy/fractional_knapsack/python/fractional_knapsack.py
|
def FractionalKnapsack(capacity, values, weights):
rel_value = [val / weight for val, weight in zip(values, weights)]
sorted_items = [i for _, i in sorted(zip(rel_value, range(len(rel_value))), reverse=True)]  # highest value/weight first
carry_items = []
while capacity > 0 and len(sorted_items) > 0:
item = sorted_items.pop(0)
weight = weights[item]
new_capacity = capacity - weight
if new_capacity >= 0:
carry_items.append((item, 1))
else:
carry_items.append((item, capacity / weight))
capacity = new_capacity
return carry_items
# Example execution:
# FractionalKnapsack(5, [55,2,3], [4, 1, 1])
# Returns a list like [(item_number, proportion_of_item),...]
|
Add fractional knapsack greedy algorithm
|
Add fractional knapsack greedy algorithm
Implemented in Python
|
Python
|
cc0-1.0
|
ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms
|
Add fractional knapsack greedy algorithm
Implemented in Python
|
def FractionalKnapsack(capacity, values, weights):
rel_value = [val / weight for val, weight in zip(values, weights)]
sorted_items = [i for _, i in sorted(zip(rel_value, range(len(rel_value))), reverse=True)]  # highest value/weight first
carry_items = []
while capacity > 0 and len(sorted_items) > 0:
item = sorted_items.pop(0)
weight = weights[item]
new_capacity = capacity - weight
if new_capacity >= 0:
carry_items.append((item, 1))
else:
carry_items.append((item, capacity / weight))
capacity = new_capacity
return carry_items
# Example execution:
# FractionalKnapsack(5, [55,2,3], [4, 1, 1])
# Returns a list like [(item_number, proportion_of_item),...]
|
<commit_before><commit_msg>Add fractional knapsack greedy algorithm
Implemented in Python<commit_after>
|
def FractionalKnapsack(capacity, values, weights):
rel_value = [val / weight for val, weight in zip(values, weights)]
sorted_items = [i for _, i in sorted(zip(rel_value, range(len(rel_value))), reverse=True)]  # highest value/weight first
carry_items = []
while capacity > 0 and len(sorted_items) > 0:
item = sorted_items.pop(0)
weight = weights[item]
new_capacity = capacity - weight
if new_capacity >= 0:
carry_items.append((item, 1))
else:
carry_items.append((item, capacity / weight))
capacity = new_capacity
return carry_items
# Example execution:
# FractionalKnapsack(5, [55,2,3], [4, 1, 1])
# Returns a list like [(item_number, proportion_of_item),...]
|
Add fractional knapsack greedy algorithm
Implemented in Pythondef FractionalKnapsack(capacity, values, weights):
rel_value = [val / weight for val, weight in zip(values, weights)]
sorted_items = [i for _, i in sorted(zip(rel_value, range(len(rel_value))), reverse=True)]  # highest value/weight first
carry_items = []
while capacity > 0 and len(sorted_items) > 0:
item = sorted_items.pop(0)
weight = weights[item]
new_capacity = capacity - weight
if new_capacity >= 0:
carry_items.append((item, 1))
else:
carry_items.append((item, capacity / weight))
capacity = new_capacity
return carry_items
# Example execution:
# FractionalKnapsack(5, [55,2,3], [4, 1, 1])
# Returns a list like [(item_number, proportion_of_item),...]
|
<commit_before><commit_msg>Add fractional knapsack greedy algorithm
Implemented in Python<commit_after>def FractionalKnapsack(capacity, values, weights):
rel_value = [val / weight for val, weight in zip(values, weights)]
sorted_items = [i for _,i in sorted(zip(rel_value, range(len(rel_value))))]
carry_items = []
while capacity > 0 and len(sorted_items) > 0:
item = sorted_items.pop(0)
weight = weights[item]
new_capacity = capacity - weight
if new_capacity >= 0:
carry_items.append((item, 1))
else:
carry_items.append((item, capacity / weight))
capacity = new_capacity
return carry_items
# Example execution:
# FractionalKnapsack(5, [55,2,3], [4, 1, 1])
# Returns a list like [(item_number, proportion_of_item),...]
|
|
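A worked trace of the call in the closing comment, with the descending value-density sort:

# FractionalKnapsack(5, [55, 2, 3], [4, 1, 1])
# value/weight ratios: [13.75, 2.0, 3.0] -> visit items in order 0, 2, 1
# item 0 fits whole (capacity 5 -> 1), item 2 fits whole (capacity 1 -> 0),
# the loop stops at zero capacity, so the result is [(0, 1), (2, 1)], value 58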
7617826e2b9a01e9a377d6fa2c8f64768a184704
|
nodeconductor/structure/tests/unittests/test_serializer.py
|
nodeconductor/structure/tests/unittests/test_serializer.py
|
from urlparse import urlparse
from django.contrib.auth import get_user_model
from django.core.urlresolvers import resolve
from django.test import TestCase
from rest_framework.test import APIRequestFactory
from .. import factories as structure_factories
from ...serializers import BasicUserSerializer
User = get_user_model()
class UUIDSerializerTest(TestCase):
def setUp(self):
factory = APIRequestFactory()
request = factory.get('/users/')
context = {'request': request}
user = structure_factories.UserFactory()
serializer = BasicUserSerializer(instance=user, context=context)
self.data = serializer.data
def test_url_and_uuid_do_not_contain_hyphenation(self):
path = urlparse(self.data['url']).path
match = resolve(path)
self.assertEqual(match.url_name, 'user-detail')
value = match.kwargs.get('uuid')
self.assertEqual(value, self.data['uuid'])
self.assertTrue('-' not in value)
|
Add UUID serialization test case (NC-1214)
|
Add UUID serialization test case (NC-1214)
|
Python
|
mit
|
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
|
Add UUID serialization test case (NC-1214)
|
from urlparse import urlparse
from django.contrib.auth import get_user_model
from django.core.urlresolvers import resolve
from django.test import TestCase
from rest_framework.test import APIRequestFactory
from .. import factories as structure_factories
from ...serializers import BasicUserSerializer
User = get_user_model()
class UUIDSerializerTest(TestCase):
def setUp(self):
factory = APIRequestFactory()
request = factory.get('/users/')
context = {'request': request}
user = structure_factories.UserFactory()
serializer = BasicUserSerializer(instance=user, context=context)
self.data = serializer.data
def test_url_and_uuid_do_not_contain_hyphenation(self):
path = urlparse(self.data['url']).path
match = resolve(path)
self.assertEqual(match.url_name, 'user-detail')
value = match.kwargs.get('uuid')
self.assertEqual(value, self.data['uuid'])
self.assertTrue('-' not in value)
|
<commit_before><commit_msg>Add UUID serialization test case (NC-1214)<commit_after>
|
from urlparse import urlparse
from django.contrib.auth import get_user_model
from django.core.urlresolvers import resolve
from django.test import TestCase
from rest_framework.test import APIRequestFactory
from .. import factories as structure_factories
from ...serializers import BasicUserSerializer
User = get_user_model()
class UUIDSerializerTest(TestCase):
def setUp(self):
factory = APIRequestFactory()
request = factory.get('/users/')
context = {'request': request}
user = structure_factories.UserFactory()
serializer = BasicUserSerializer(instance=user, context=context)
self.data = serializer.data
def test_url_and_uuid_do_not_contain_hyphenation(self):
path = urlparse(self.data['url']).path
match = resolve(path)
self.assertEqual(match.url_name, 'user-detail')
value = match.kwargs.get('uuid')
self.assertEqual(value, self.data['uuid'])
self.assertTrue('-' not in value)
|
Add UUID serialization test case (NC-1214)from urlparse import urlparse
from django.contrib.auth import get_user_model
from django.core.urlresolvers import resolve
from django.test import TestCase
from rest_framework.test import APIRequestFactory
from .. import factories as structure_factories
from ...serializers import BasicUserSerializer
User = get_user_model()
class UUIDSerializerTest(TestCase):
def setUp(self):
factory = APIRequestFactory()
request = factory.get('/users/')
context = {'request': request}
user = structure_factories.UserFactory()
serializer = BasicUserSerializer(instance=user, context=context)
self.data = serializer.data
def test_url_and_uuid_do_not_contain_hyphenation(self):
path = urlparse(self.data['url']).path
match = resolve(path)
self.assertEqual(match.url_name, 'user-detail')
value = match.kwargs.get('uuid')
self.assertEqual(value, self.data['uuid'])
self.assertTrue('-' not in value)
|
<commit_before><commit_msg>Add UUID serialization test case (NC-1214)<commit_after>from urlparse import urlparse
from django.contrib.auth import get_user_model
from django.core.urlresolvers import resolve
from django.test import TestCase
from rest_framework.test import APIRequestFactory
from .. import factories as structure_factories
from ...serializers import BasicUserSerializer
User = get_user_model()
class UUIDSerializerTest(TestCase):
def setUp(self):
factory = APIRequestFactory()
request = factory.get('/users/')
context = {'request': request}
user = structure_factories.UserFactory()
serializer = BasicUserSerializer(instance=user, context=context)
self.data = serializer.data
def test_url_and_uuid_do_not_contain_hyphenation(self):
path = urlparse(self.data['url']).path
match = resolve(path)
self.assertEqual(match.url_name, 'user-detail')
value = match.kwargs.get('uuid')
self.assertEqual(value, self.data['uuid'])
self.assertTrue('-' not in value)
|
|
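The hyphen-free value the test asserts on corresponds to the hex form of a UUID rather than its canonical string form; a quick sketch of the distinction (assuming the serializer emits the hex form, which is what the assertions imply):

import uuid

u = uuid.uuid4()
print(str(u))  # canonical form: 8-4-4-4-12 hex groups joined by hyphens
print(u.hex)   # the same 32 hex digits with no hyphens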
4afbd87d3d6d0c4953f4c41b3c37abf1fc3be0f1
|
acapi/tests/test_database.py
|
acapi/tests/test_database.py
|
""" Tests the database class. """
import requests_mock
from . import BaseTest
from ..resources import Database, BackupList
@requests_mock.Mocker()
class TestDatabase(BaseTest):
"""Tests the Acquia Cloud API db class."""
def test_backups(self, mocker):
""" Test create call. """
json = [
{
"checksum": "042f31bebd595b6f2c84b3532d4f1a3b",
"completed": "1331110381",
"deleted": "0",
"id": "22",
"name": "mysite",
"path": "backups/dev-mysite-mysitedev-2012-03-07.sql.gz",
"started": "1331110381",
"type": "daily",
"link": "http://mysite.devcloud.acquia-sites.com/AH_DOWNLOAD?t=1342468716&prod=7386761671e68e517a74b7b790ef74d8a8fba7336dbc891cfef133bd29a7b238&d=/mnt/files/mysite.prod/backups/prod-mysite-mysite-2012-07-15.sql.gz"
},
]
mocker.register_uri(
'GET',
'https://cloudapi.acquia.com/v1/sites/prod:mysite/envs/dev/dbs/mysite/backups.json',
json=json
)
backups = self.client.site('mysite').environment('dev').db('mysite').backups()
self.assertIsInstance(backups, BackupList)
def test_copy(self, mocker):
""" Test database copy call. """
mocker.register_uri(
'POST',
'https://cloudapi.acquia.com/v1/sites/prod:mysite/dbs/mysite/db-copy/dev/staging.json',
json=self.generate_task_dictionary(1210, 'waiting', False),
)
mocker.register_uri(
'GET',
'https://cloudapi.acquia.com/v1/sites/prod:mysite/tasks/1210.json',
json=self.generate_task_dictionary(1210, 'done', True),
)
db = self.client.site('mysite').environment('dev').db('mysite').copy('staging')
self.assertIsInstance(db, Database)
|
Add test coverage for database resource
|
Add test coverage for database resource
|
Python
|
mit
|
skwashd/python-acquia-cloud
|
Add test coverage for database resource
|
""" Tests the database class. """
import requests_mock
from . import BaseTest
from ..resources import Database, BackupList
@requests_mock.Mocker()
class TestDatabase(BaseTest):
"""Tests the Acquia Cloud API db class."""
def test_backups(self, mocker):
""" Test create call. """
json = [
{
"checksum": "042f31bebd595b6f2c84b3532d4f1a3b",
"completed": "1331110381",
"deleted": "0",
"id": "22",
"name": "mysite",
"path": "backups/dev-mysite-mysitedev-2012-03-07.sql.gz",
"started": "1331110381",
"type": "daily",
"link": "http://mysite.devcloud.acquia-sites.com/AH_DOWNLOAD?t=1342468716&prod=7386761671e68e517a74b7b790ef74d8a8fba7336dbc891cfef133bd29a7b238&d=/mnt/files/mysite.prod/backups/prod-mysite-mysite-2012-07-15.sql.gz"
},
]
mocker.register_uri(
'GET',
'https://cloudapi.acquia.com/v1/sites/prod:mysite/envs/dev/dbs/mysite/backups.json',
json=json
)
backups = self.client.site('mysite').environment('dev').db('mysite').backups()
self.assertIsInstance(backups, BackupList)
def test_copy(self, mocker):
""" Test database copy call. """
mocker.register_uri(
'POST',
'https://cloudapi.acquia.com/v1/sites/prod:mysite/dbs/mysite/db-copy/dev/staging.json',
json=self.generate_task_dictionary(1210, 'waiting', False),
)
mocker.register_uri(
'GET',
'https://cloudapi.acquia.com/v1/sites/prod:mysite/tasks/1210.json',
json=self.generate_task_dictionary(1210, 'done', True),
)
db = self.client.site('mysite').environment('dev').db('mysite').copy('staging')
self.assertIsInstance(db, Database)
|
<commit_before><commit_msg>Add test coverage for database resource<commit_after>
|
""" Tests the database class. """
import requests_mock
from . import BaseTest
from ..resources import Database, BackupList
@requests_mock.Mocker()
class TestDatabase(BaseTest):
"""Tests the Acquia Cloud API db class."""
def test_backups(self, mocker):
""" Test create call. """
json = [
{
"checksum": "042f31bebd595b6f2c84b3532d4f1a3b",
"completed": "1331110381",
"deleted": "0",
"id": "22",
"name": "mysite",
"path": "backups/dev-mysite-mysitedev-2012-03-07.sql.gz",
"started": "1331110381",
"type": "daily",
"link": "http://mysite.devcloud.acquia-sites.com/AH_DOWNLOAD?t=1342468716&prod=7386761671e68e517a74b7b790ef74d8a8fba7336dbc891cfef133bd29a7b238&d=/mnt/files/mysite.prod/backups/prod-mysite-mysite-2012-07-15.sql.gz"
},
]
mocker.register_uri(
'GET',
'https://cloudapi.acquia.com/v1/sites/prod:mysite/envs/dev/dbs/mysite/backups.json',
json=json
)
backups = self.client.site('mysite').environment('dev').db('mysite').backups()
self.assertIsInstance(backups, BackupList)
def test_copy(self, mocker):
""" Test database copy call. """
mocker.register_uri(
'POST',
'https://cloudapi.acquia.com/v1/sites/prod:mysite/dbs/mysite/db-copy/dev/staging.json',
json=self.generate_task_dictionary(1210, 'waiting', False),
)
mocker.register_uri(
'GET',
'https://cloudapi.acquia.com/v1/sites/prod:mysite/tasks/1210.json',
json=self.generate_task_dictionary(1210, 'done', True),
)
db = self.client.site('mysite').environment('dev').db('mysite').copy('staging')
self.assertIsInstance(db, Database)
|
Add test coverage for database resource""" Tests the database class. """
import requests_mock
from . import BaseTest
from ..resources import Database, BackupList
@requests_mock.Mocker()
class TestDatabase(BaseTest):
"""Tests the Acquia Cloud API db class."""
def test_backups(self, mocker):
""" Test create call. """
json = [
{
"checksum": "042f31bebd595b6f2c84b3532d4f1a3b",
"completed": "1331110381",
"deleted": "0",
"id": "22",
"name": "mysite",
"path": "backups/dev-mysite-mysitedev-2012-03-07.sql.gz",
"started": "1331110381",
"type": "daily",
"link": "http://mysite.devcloud.acquia-sites.com/AH_DOWNLOAD?t=1342468716&prod=7386761671e68e517a74b7b790ef74d8a8fba7336dbc891cfef133bd29a7b238&d=/mnt/files/mysite.prod/backups/prod-mysite-mysite-2012-07-15.sql.gz"
},
]
mocker.register_uri(
'GET',
'https://cloudapi.acquia.com/v1/sites/prod:mysite/envs/dev/dbs/mysite/backups.json',
json=json
)
backups = self.client.site('mysite').environment('dev').db('mysite').backups()
self.assertIsInstance(backups, BackupList)
def test_copy(self, mocker):
""" Test database copy call. """
mocker.register_uri(
'POST',
'https://cloudapi.acquia.com/v1/sites/prod:mysite/dbs/mysite/db-copy/dev/staging.json',
json=self.generate_task_dictionary(1210, 'waiting', False),
)
mocker.register_uri(
'GET',
'https://cloudapi.acquia.com/v1/sites/prod:mysite/tasks/1210.json',
json=self.generate_task_dictionary(1210, 'done', True),
)
db = self.client.site('mysite').environment('dev').db('mysite').copy('staging')
self.assertIsInstance(db, Database)
|
<commit_before><commit_msg>Add test coverage for database resource<commit_after>""" Tests the database class. """
import requests_mock
from . import BaseTest
from ..resources import Database, BackupList
@requests_mock.Mocker()
class TestDatabase(BaseTest):
"""Tests the Acquia Cloud API db class."""
def test_backups(self, mocker):
""" Test create call. """
json = [
{
"checksum": "042f31bebd595b6f2c84b3532d4f1a3b",
"completed": "1331110381",
"deleted": "0",
"id": "22",
"name": "mysite",
"path": "backups/dev-mysite-mysitedev-2012-03-07.sql.gz",
"started": "1331110381",
"type": "daily",
"link": "http://mysite.devcloud.acquia-sites.com/AH_DOWNLOAD?t=1342468716&prod=7386761671e68e517a74b7b790ef74d8a8fba7336dbc891cfef133bd29a7b238&d=/mnt/files/mysite.prod/backups/prod-mysite-mysite-2012-07-15.sql.gz"
},
]
mocker.register_uri(
'GET',
'https://cloudapi.acquia.com/v1/sites/prod:mysite/envs/dev/dbs/mysite/backups.json',
json=json
)
backups = self.client.site('mysite').environment('dev').db('mysite').backups()
self.assertIsInstance(backups, BackupList)
def test_copy(self, mocker):
""" Test database copy call. """
mocker.register_uri(
'POST',
'https://cloudapi.acquia.com/v1/sites/prod:mysite/dbs/mysite/db-copy/dev/staging.json',
json=self.generate_task_dictionary(1210, 'waiting', False),
)
mocker.register_uri(
'GET',
'https://cloudapi.acquia.com/v1/sites/prod:mysite/tasks/1210.json',
json=self.generate_task_dictionary(1210, 'done', True),
)
db = self.client.site('mysite').environment('dev').db('mysite').copy('staging')
self.assertIsInstance(db, Database)
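For readers unfamiliar with the mocking pattern above, here is a minimal self-contained sketch of how requests_mock intercepts matching HTTP calls; the task payload shape below is an assumption, standing in for whatever the generate_task_dictionary helper actually builds:

import requests
import requests_mock

# register_uri() patches the requests transport, so any matching call made
# inside the context receives the canned JSON instead of hitting the network.
with requests_mock.Mocker() as m:
    m.register_uri(
        'GET',
        'https://cloudapi.acquia.com/v1/sites/prod:mysite/tasks/1210.json',
        json={'id': 1210, 'state': 'done', 'completed': True},  # assumed shape
    )
    resp = requests.get('https://cloudapi.acquia.com/v1/sites/prod:mysite/tasks/1210.json')
    assert resp.json()['state'] == 'done'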
|
|
3a0efca1a48563a50e634eeb3401b43b7e6b2da7
|
ash/PRESUBMIT.py
|
ash/PRESUBMIT.py
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for ash.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
def CheckChangeOnUpload(input_api, output_api):
results = []
results += input_api.canned_checks.CheckPatchFormatted(input_api, output_api)
return results
|
Enable presubmit warning for "git cl format"
|
Enable presubmit warning for "git cl format"
BUG=None
Review URL: https://codereview.chromium.org/835683004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#311332}
|
Python
|
bsd-3-clause
|
PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,ltilve/chromium,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,ltilve/chromium,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,fujunwei/chromium-crosswalk,ltilve/chromium,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,ltilve/chromium,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,ltilve/chromium,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,ltilve/chromium,Fireblend/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ltilve/chromium,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ltilve/chromium,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk
|
Enable presubmit warning for "git cl format"
BUG=None
Review URL: https://codereview.chromium.org/835683004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#311332}
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for ash.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
def CheckChangeOnUpload(input_api, output_api):
results = []
results += input_api.canned_checks.CheckPatchFormatted(input_api, output_api)
return results
|
<commit_before><commit_msg>Enable presubmit warning for "git cl format"
BUG=None
Review URL: https://codereview.chromium.org/835683004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#311332}<commit_after>
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for ash.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
def CheckChangeOnUpload(input_api, output_api):
results = []
results += input_api.canned_checks.CheckPatchFormatted(input_api, output_api)
return results
|
Enable presubmit warning for "git cl format"
BUG=None
Review URL: https://codereview.chromium.org/835683004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#311332}# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for ash.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
def CheckChangeOnUpload(input_api, output_api):
results = []
results += input_api.canned_checks.CheckPatchFormatted(input_api, output_api)
return results
|
<commit_before><commit_msg>Enable presubmit warning for "git cl format"
BUG=None
Review URL: https://codereview.chromium.org/835683004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#311332}<commit_after># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for ash.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
def CheckChangeOnUpload(input_api, output_api):
results = []
results += input_api.canned_checks.CheckPatchFormatted(input_api, output_api)
return results
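A hypothetical companion hook, not part of this commit: repositories that want the same warning at commit time typically mirror the upload check, since depot_tools invokes CheckChangeOnCommit as a separate entry point.

def CheckChangeOnCommit(input_api, output_api):
    # Same canned check as on upload; kept deliberately in sync.
    return input_api.canned_checks.CheckPatchFormatted(input_api, output_api)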
|
|
8fe69746a631054387019662d7aea37879909da7
|
src/Mapping.py
|
src/Mapping.py
|
import enum
import logging
import json
import cv2
import os
from pynput import keyboard
from src import screen, helper
logger = logging.getLogger(__name__)
class Mapper():
def __init__(self):
self.screenshot_dir = os.path.join(helper.get_home_folder(), '.dolphin-emu', 'ScreenShots')
self.file_name="log.json"
self.output_dir = helper.get_output_folder()
self.image_dir = os.path.join(helper.get_output_folder(), "images")
def mapping(self):
num_files = len(os.listdir(self.image_dir))
count = 1
state_map=dict()
with open(os.path.join(self.output_dir, self.file_name), 'r') as f:
log=json.load(f)
while count <= num_files:
file_name = "{}.png".format(count)
ima = cv2.imread(os.path.join(self.image_dir, file_name))
ima_tuple = ima.tobytes()  # raw image bytes: numpy arrays are not hashable dict keys
key_map_boolean = log['data'][count]['presses']
key_map_numeric = dict()
# Convert "true" to 1 and "false" to 0
for k in key_map_boolean:
if key_map_boolean[k] == "true":
key_map_numeric[k] = 1
elif key_map_boolean[k] == "false":
key_map_numeric[k] = 0
# Check if the state already exists in the state map
if ima_tuple in state_map:
for k in state_map[ima_tuple]:
if key_map_numeric[k] == 1:
state_map[ima_tuple][k] += 1
else:
state_map[ima_tuple] = key_map_numeric
count += 1
test = Mapper()
test.mapping()
'''
with open(os.path.join(helper.get_output_folder(), "log.json"), 'r') as f:
log=json.load(f)
print(iter(log['data'][1]['presses']))
'''
|
Add rough state mapping class
|
Add rough state mapping class
|
Python
|
mit
|
ENPH-479/dolphin-env-api,ENPH-479/dolphin-env-api,ENPH-479/dolphin-env-api,ENPH-479/dolphin-env-api
|
Add rough state mapping class
|
import enum
import logging
import json
import cv2
import os
from pynput import keyboard
from src import screen, helper
logger = logging.getLogger(__name__)
class Mapper():
def __init__(self):
self.screenshot_dir = os.path.join(helper.get_home_folder(), '.dolphin-emu', 'ScreenShots')
self.file_name="log.json"
self.output_dir = helper.get_output_folder()
self.image_dir = os.path.join(helper.get_output_folder(), "images")
def mapping(self):
num_files = len(os.listdir(self.image_dir))
count = 1
state_map=dict()
with open(os.path.join(self.output_dir, self.file_name), 'r') as f:
log=json.load(f)
while count <= num_files:
file_name = "{}.png".format(count)
ima = cv2.imread(os.path.join(self.image_dir, file_name))
ima_tuple = ima.tobytes()  # raw image bytes: numpy arrays are not hashable dict keys
key_map_boolean = log['data'][count]['presses']
key_map_numeric = dict()
# Convert "true" to 1 and "false" to 0
for k in key_map_boolean:
if key_map_boolean[k] == "true":
key_map_numeric[k] = 1
elif key_map_boolean[k] == "false":
key_map_numeric[k] = 0
# Check if the state already exists in the state map
if ima_tuple in state_map:
for k in state_map[ima_tuple]:
if key_map_numeric[k] == 1:
state_map[ima_tuple][k] += 1
else:
state_map[ima_tuple] = key_map_numeric
count += 1
test = Mapper()
test.mapping()
'''
with open(os.path.join(helper.get_output_folder(), "log.json"), 'r') as f:
log=json.load(f)
print(iter(log['data'][1]['presses']))
'''
|
<commit_before><commit_msg>Add rough state mapping class<commit_after>
|
import enum
import logging
import json
import cv2
import os
from pynput import keyboard
from src import screen, helper
logger = logging.getLogger(__name__)
class Mapper():
def __init__(self):
self.screenshot_dir = os.path.join(helper.get_home_folder(), '.dolphin-emu', 'ScreenShots')
self.file_name="log.json"
self.output_dir = helper.get_output_folder()
self.image_dir = os.path.join(helper.get_output_folder(), "images")
def mapping(self):
num_files = len(os.listdir(self.image_dir))
count = 1
state_map=dict()
with open(os.path.join(self.output_dir, self.file_name), 'r') as f:
log=json.load(f)
while count <= num_files:
file_name = "{}.png".format(count)
ima = cv2.imread(os.path.join(self.image_dir, file_name))
ima_tuple = ima.tobytes()  # raw image bytes: numpy arrays are not hashable dict keys
key_map_boolean = log['data'][count]['presses']
key_map_numeric = dict()
# Convert "true" to 1 and "false" to 0
for k in key_map_boolean:
if key_map_boolean[k] == "true":
key_map_numeric[k] = 1
elif key_map_boolean[k] == "false":
key_map_numeric[k] = 0
# Check if the state already exists in the state map
if ima_tuple in state_map:
for k in state_map[ima_tuple]:
if key_map_numeric[k] == 1:
state_map[ima_tuple][k] += 1
else:
state_map[ima_tuple] = key_map_numeric
count += 1
test = Mapper()
test.mapping()
'''
with open(os.path.join(helper.get_output_folder(), "log.json"), 'r') as f:
log=json.load(f)
print(iter(log['data'][1]['presses']))
'''
|
Add rough state mapping classimport enum
import logging
import json
import cv2
import os
from pynput import keyboard
from src import screen, helper
logger = logging.getLogger(__name__)
class Mapper():
def __init__(self):
self.screenshot_dir = os.path.join(helper.get_home_folder(), '.dolphin-emu', 'ScreenShots')
self.file_name="log.json"
self.output_dir = helper.get_output_folder()
self.image_dir = os.path.join(helper.get_output_folder(), "images")
def mapping(self):
num_files = len(os.listdir(self.image_dir))
count = 1
state_map=dict()
with open(os.path.join(self.output_dir, self.file_name), 'r') as f:
log=json.load(f)
while count <= num_files:
file_name = "{}.png".format(count)
ima = cv2.imread(os.path.join(self.image_dir, file_name))
ima_tuple = ima.tobytes()  # raw image bytes: numpy arrays are not hashable dict keys
key_map_boolean = log['data'][count]['presses']
key_map_numeric = dict()
# Convert "true" to 1 and "false" to 0
for k in key_map_boolean:
if key_map_boolean[k] == "true":
key_map_numeric[k] = 1
elif key_map_boolean[k] == "false":
key_map_numeric[k] = 0
# Check if the state already exists in the state map
if ima_tuple in state_map:
for k in state_map[ima_tuple]:
if key_map_numeric[k] == 1:
state_map[ima_tuple][k] += 1
else:
state_map[ima_tuple] = key_map_numeric
count += 1
test = Mapper()
test.mapping()
'''
with open(os.path.join(helper.get_output_folder(), "log.json"), 'r') as f:
log=json.load(f)
print(iter(log['data'][1]['presses']))
'''
|
<commit_before><commit_msg>Add rough state mapping class<commit_after>import enum
import logging
import json
import cv2
import os
from pynput import keyboard
from src import screen, helper
logger = logging.getLogger(__name__)
class Mapper():
def __init__(self):
self.screenshot_dir = os.path.join(helper.get_home_folder(), '.dolphin-emu', 'ScreenShots')
self.file_name="log.json"
self.output_dir = helper.get_output_folder()
self.image_dir = os.path.join(helper.get_output_folder(), "images")
def mapping(self):
num_files = len(os.listdir(self.image_dir))
count = 1
state_map=dict()
with open(os.path.join(self.output_dir, self.file_name), 'r') as f:
log=json.load(f)
while count <= num_files:
file_name = "{}.png".format(count)
ima = cv2.imread(os.path.join(self.image_dir, file_name))
ima_tuple = ima.tobytes()  # raw image bytes: numpy arrays are not hashable dict keys
key_map_boolean = log['data'][count]['presses']
key_map_numeric = dict()
# Convert "true" to 1 and "false" to 0
for k in key_map_boolean:
if key_map_boolean[k] == "true":
key_map_numeric[k] = 1
elif key_map_boolean[k] == "false":
key_map_numeric[k] = 0
# Check if the state already exists in the state map
if ima_tuple in state_map:
for k in state_map[ima_tuple]:
if key_map_numeric[k] == 1:
state_map[ima_tuple][k] += 1
else:
state_map[ima_tuple] = key_map_numeric
count += 1
test = Mapper()
test.mapping()
'''
with open(os.path.join(helper.get_output_folder(), "log.json"), 'r') as f:
log=json.load(f)
print(iter(log['data'][1]['presses']))
'''
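One subtlety in the mapping loop above: numpy arrays (what cv2.imread returns) are not hashable, which is why the dict key is built from raw bytes. A minimal sketch of a slightly safer key, hypothetical and not part of the commit, that also folds in the shape so equal byte strings from differently shaped frames cannot collide:

import numpy as np

def image_key(ima):
    # shape plus raw bytes uniquely identifies the pixel grid
    return (ima.shape, ima.tobytes())

a = np.zeros((2, 3), dtype=np.uint8)
b = np.zeros((3, 2), dtype=np.uint8)
assert a.tobytes() == b.tobytes()      # same bytes...
assert image_key(a) != image_key(b)    # ...but distinct keys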
|
|
e54785f1a7aa8aff1d652d3caef36f8aa04cd91f
|
tests/unit_tests/test_urr_capture.py
|
tests/unit_tests/test_urr_capture.py
|
import openmc
import pytest
@pytest.fixture
def th232_model():
# URR boundaries for Th232
e_min, e_max = 4000.0, 100000.0
model = openmc.model.Model()
th232 = openmc.Material()
th232.add_nuclide('Th232', 1.0)
surf = openmc.Sphere(r=100.0, boundary_type='reflective')
cell = openmc.Cell(fill=th232, region=-surf)
model.geometry = openmc.Geometry([cell])
model.settings.particles = 100
model.settings.batches = 10
model.settings.run_mode = 'fixed source'
energies = openmc.stats.Uniform(e_min, e_max)
model.settings.source = openmc.Source(energy=energies)
tally = openmc.Tally(name='rates')
tally.filters = [openmc.EnergyFilter([e_min, e_max])]
tally.scores = ['(n,gamma)', 'absorption', 'fission']
model.tallies.append(tally)
return model
def test_urr_capture(run_in_tmpdir, th232_model):
# Export and run model
th232_model.export_to_xml()
openmc.run()
# Get reaction rates from tally
with openmc.StatePoint('statepoint.10.h5') as sp:
t = sp.get_tally(name='rates')
ngamma, absorption, fission = t.mean.flatten()
# In URR, the (n,gamma) rate should be equal to absorption - fission
assert ngamma == pytest.approx(absorption - fission)
|
Add (currently failing) test with URR (n,gamma) reaction rate check
|
Add (currently failing) test with URR (n,gamma) reaction rate check
|
Python
|
mit
|
amandalund/openmc,amandalund/openmc,walshjon/openmc,amandalund/openmc,walshjon/openmc,walshjon/openmc,shikhar413/openmc,amandalund/openmc,shikhar413/openmc,shikhar413/openmc,shikhar413/openmc,walshjon/openmc
|
Add (currently failing) test with URR (n,gamma) reaction rate check
|
import openmc
import pytest
@pytest.fixture
def th232_model():
# URR boundaries for Th232
e_min, e_max = 4000.0, 100000.0
model = openmc.model.Model()
th232 = openmc.Material()
th232.add_nuclide('Th232', 1.0)
surf = openmc.Sphere(r=100.0, boundary_type='reflective')
cell = openmc.Cell(fill=th232, region=-surf)
model.geometry = openmc.Geometry([cell])
model.settings.particles = 100
model.settings.batches = 10
model.settings.run_mode = 'fixed source'
energies = openmc.stats.Uniform(e_min, e_max)
model.settings.source = openmc.Source(energy=energies)
tally = openmc.Tally(name='rates')
tally.filters = [openmc.EnergyFilter([e_min, e_max])]
tally.scores = ['(n,gamma)', 'absorption', 'fission']
model.tallies.append(tally)
return model
def test_urr_capture(run_in_tmpdir, th232_model):
# Export and run model
th232_model.export_to_xml()
openmc.run()
# Get reaction rates from tally
with openmc.StatePoint('statepoint.10.h5') as sp:
t = sp.get_tally(name='rates')
ngamma, absorption, fission = t.mean.flatten()
# In URR, the (n,gamma) rate should be equal to absorption - fission
assert ngamma == pytest.approx(absorption - fission)
|
<commit_before><commit_msg>Add (currently failing) test with URR (n,gamma) reaction rate check<commit_after>
|
import openmc
import pytest
@pytest.fixture
def th232_model():
# URR boundaries for Th232
e_min, e_max = 4000.0, 100000.0
model = openmc.model.Model()
th232 = openmc.Material()
th232.add_nuclide('Th232', 1.0)
surf = openmc.Sphere(r=100.0, boundary_type='reflective')
cell = openmc.Cell(fill=th232, region=-surf)
model.geometry = openmc.Geometry([cell])
model.settings.particles = 100
model.settings.batches = 10
model.settings.run_mode = 'fixed source'
energies = openmc.stats.Uniform(e_min, e_max)
model.settings.source = openmc.Source(energy=energies)
tally = openmc.Tally(name='rates')
tally.filters = [openmc.EnergyFilter([e_min, e_max])]
tally.scores = ['(n,gamma)', 'absorption', 'fission']
model.tallies.append(tally)
return model
def test_urr_capture(run_in_tmpdir, th232_model):
# Export and run model
th232_model.export_to_xml()
openmc.run()
# Get reaction rates from tally
with openmc.StatePoint('statepoint.10.h5') as sp:
t = sp.get_tally(name='rates')
ngamma, absorption, fission = t.mean.flatten()
# In URR, the (n,gamma) rate should be equal to absorption - fission
assert ngamma == pytest.approx(absorption - fission)
|
Add (currently failing) test with URR (n,gamma) reaction rate checkimport openmc
import pytest
@pytest.fixture
def th232_model():
# URR boundaries for Th232
e_min, e_max = 4000.0, 100000.0
model = openmc.model.Model()
th232 = openmc.Material()
th232.add_nuclide('Th232', 1.0)
surf = openmc.Sphere(r=100.0, boundary_type='reflective')
cell = openmc.Cell(fill=th232, region=-surf)
model.geometry = openmc.Geometry([cell])
model.settings.particles = 100
model.settings.batches = 10
model.settings.run_mode = 'fixed source'
energies = openmc.stats.Uniform(e_min, e_max)
model.settings.source = openmc.Source(energy=energies)
tally = openmc.Tally(name='rates')
tally.filters = [openmc.EnergyFilter([e_min, e_max])]
tally.scores = ['(n,gamma)', 'absorption', 'fission']
model.tallies.append(tally)
return model
def test_urr_capture(run_in_tmpdir, th232_model):
# Export and run model
th232_model.export_to_xml()
openmc.run()
# Get reaction rates from tally
with openmc.StatePoint('statepoint.10.h5') as sp:
t = sp.get_tally(name='rates')
ngamma, absorption, fission = t.mean.flatten()
# In URR, the (n,gamma) rate should be equal to absorption - fission
assert ngamma == pytest.approx(absorption - fission)
|
<commit_before><commit_msg>Add (currently failing) test with URR (n,gamma) reaction rate check<commit_after>import openmc
import pytest
@pytest.fixture
def th232_model():
# URR boundaries for Th232
e_min, e_max = 4000.0, 100000.0
model = openmc.model.Model()
th232 = openmc.Material()
th232.add_nuclide('Th232', 1.0)
surf = openmc.Sphere(r=100.0, boundary_type='reflective')
cell = openmc.Cell(fill=th232, region=-surf)
model.geometry = openmc.Geometry([cell])
model.settings.particles = 100
model.settings.batches = 10
model.settings.run_mode = 'fixed source'
energies = openmc.stats.Uniform(e_min, e_max)
model.settings.source = openmc.Source(energy=energies)
tally = openmc.Tally(name='rates')
tally.filters = [openmc.EnergyFilter([e_min, e_max])]
tally.scores = ['(n,gamma)', 'absorption', 'fission']
model.tallies.append(tally)
return model
def test_urr_capture(run_in_tmpdir, th232_model):
# Export and run model
th232_model.export_to_xml()
openmc.run()
# Get reaction rates from tally
with openmc.StatePoint('statepoint.10.h5') as sp:
t = sp.get_tally(name='rates')
ngamma, absorption, fission = t.mean.flatten()
# In URR, the (n,gamma) rate should be equal to absorption - fission
assert ngamma == pytest.approx(absorption - fission)
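A note on the tolerance the final assertion implies: pytest.approx defaults to a relative tolerance of 1e-6, so the stochastically tallied sums must agree very tightly. A standalone illustration with made-up numbers (not OpenMC output):

import pytest

assert 0.0124999999 == pytest.approx(0.0125)    # within the default rel=1e-6
assert not (0.0126 == pytest.approx(0.0125))    # ~0.8% apart, so it fails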
|
|
da4151a0e83e6738361b23edb2fda3ee0e386391
|
localflavor/br/models.py
|
localflavor/br/models.py
|
from django.utils.translation import ugettext_lazy as _
from django.db.models.fields import CharField
from .br_states import STATE_CHOICES
class BRStateField(CharField):
"""
A model field for states of Brazil
"""
description = _("BR. state (two uppercase letters)")
def __init__(self, *args, **kwargs):
kwargs['choices'] = STATE_CHOICES
kwargs['max_length'] = 2
super(BRStateField, self).__init__(*args, **kwargs)
|
Add a model field for states of Brazil
|
Add a model field for states of Brazil
|
Python
|
bsd-3-clause
|
zarelit/django-localflavor,rsalmaso/django-localflavor,maisim/django-localflavor,infoxchange/django-localflavor,M157q/django-localflavor,agustin380/django-localflavor,django/django-localflavor,thor/django-localflavor,jieter/django-localflavor
|
Add a model field for states of Brazil
|
from django.utils.translation import ugettext_lazy as _
from django.db.models.fields import CharField
from .br_states import STATE_CHOICES
class BRStateField(CharField):
"""
A model field for states of Brazil
"""
description = _("BR. state (two uppercase letters)")
def __init__(self, *args, **kwargs):
kwargs['choices'] = STATE_CHOICES
kwargs['max_length'] = 2
super(BRStateField, self).__init__(*args, **kwargs)
|
<commit_before><commit_msg>Add a model field for states of Brazil<commit_after>
|
from django.utils.translation import ugettext_lazy as _
from django.db.models.fields import CharField
from .br_states import STATE_CHOICES
class BRStateField(CharField):
"""
A model field for states of Brazil
"""
description = _("BR. state (two uppercase letters)")
def __init__(self, *args, **kwargs):
kwargs['choices'] = STATE_CHOICES
kwargs['max_length'] = 2
super(BRStateField, self).__init__(*args, **kwargs)
|
Add a model field for states of Brazilfrom django.utils.translation import ugettext_lazy as _
from django.db.models.fields import CharField
from .br_states import STATE_CHOICES
class BRStateField(CharField):
"""
A model field for states of Brazil
"""
description = _("BR. state (two uppercase letters)")
def __init__(self, *args, **kwargs):
kwargs['choices'] = STATE_CHOICES
kwargs['max_length'] = 2
super(BRStateField, self).__init__(*args, **kwargs)
|
<commit_before><commit_msg>Add a model field for states of Brazil<commit_after>from django.utils.translation import ugettext_lazy as _
from django.db.models.fields import CharField
from .br_states import STATE_CHOICES
class BRStateField(CharField):
"""
A model field for states of Brazil
"""
description = _("BR. state (two uppercase letters)")
def __init__(self, *args, **kwargs):
kwargs['choices'] = STATE_CHOICES
kwargs['max_length'] = 2
super(BRStateField, self).__init__(*args, **kwargs)
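Hypothetical usage, not part of the commit: the field drops into a model like any CharField, with choices and max_length already filled in.

from django.db import models
from localflavor.br.models import BRStateField

class Address(models.Model):
    city = models.CharField(max_length=100)
    state = BRStateField()  # renders as a two-letter state choice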
|
|
a4cd1c644d7b0636e0debc3a44df9d81a6fa7ce7
|
app/main/views/register.py
|
app/main/views/register.py
|
from datetime import datetime
from flask import render_template, redirect, jsonify
from app.main import main
from app.main.dao import users_dao
from app.main.forms import RegisterUserForm
from app.models import User
@main.route("/register", methods=['GET'])
def render_register():
return render_template('register.html', form=RegisterUserForm())
@main.route('/register', methods=['POST'])
def process_register():
form = RegisterUserForm()
if form.validate_on_submit():
user = User(name=form.name.data,
email_address=form.email_address.data,
mobile_number=form.mobile_number.data,
password=form.password.data,
created_at=datetime.now(),
role_id=1)
try:
users_dao.insert_user(user)
return redirect('/two-factor')
except Exception as e:
return jsonify(database_error=e.message), 400
else:
return jsonify(form.errors), 400
|
from datetime import datetime
from flask import render_template, redirect, jsonify
from app.main import main
from app.main.dao import users_dao
from app.main.forms import RegisterUserForm
from app.models import User
@main.route("/register", methods=['GET'])
def render_register():
return render_template('register.html', form=RegisterUserForm())
@main.route('/register', methods=['POST'])
def process_register():
form = RegisterUserForm()
if form.validate_on_submit():
user = User(name=form.name.data,
email_address=form.email_address.data,
mobile_number=form.mobile_number.data,
password=form.password.data,
created_at=datetime.now(),
role_id=1)
try:
users_dao.insert_user(user)
return redirect('/two-factor')
except Exception as e:
return jsonify(database_error='encountered database error'), 400
else:
return jsonify(form.errors), 400
|
Change to a generic message for database errors.
|
108536374: Change to a generic message for database errors.
Need a story to handle db exceptions in the dao layer
|
Python
|
mit
|
alphagov/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin
|
from datetime import datetime
from flask import render_template, redirect, jsonify
from app.main import main
from app.main.dao import users_dao
from app.main.forms import RegisterUserForm
from app.models import User
@main.route("/register", methods=['GET'])
def render_register():
return render_template('register.html', form=RegisterUserForm())
@main.route('/register', methods=['POST'])
def process_register():
form = RegisterUserForm()
if form.validate_on_submit():
user = User(name=form.name.data,
email_address=form.email_address.data,
mobile_number=form.mobile_number.data,
password=form.password.data,
created_at=datetime.now(),
role_id=1)
try:
users_dao.insert_user(user)
return redirect('/two-factor')
except Exception as e:
return jsonify(database_error=e.message), 400
else:
return jsonify(form.errors), 400
108536374: Change to a generic message for database errors.
Need a story to handle db exceptions in the dao layer
|
from datetime import datetime
from flask import render_template, redirect, jsonify
from app.main import main
from app.main.dao import users_dao
from app.main.forms import RegisterUserForm
from app.models import User
@main.route("/register", methods=['GET'])
def render_register():
return render_template('register.html', form=RegisterUserForm())
@main.route('/register', methods=['POST'])
def process_register():
form = RegisterUserForm()
if form.validate_on_submit():
user = User(name=form.name.data,
email_address=form.email_address.data,
mobile_number=form.mobile_number.data,
password=form.password.data,
created_at=datetime.now(),
role_id=1)
try:
users_dao.insert_user(user)
return redirect('/two-factor')
except Exception as e:
return jsonify(database_error='encountered database error'), 400
else:
return jsonify(form.errors), 400
|
<commit_before>from datetime import datetime
from flask import render_template, redirect, jsonify
from app.main import main
from app.main.dao import users_dao
from app.main.forms import RegisterUserForm
from app.models import User
@main.route("/register", methods=['GET'])
def render_register():
return render_template('register.html', form=RegisterUserForm())
@main.route('/register', methods=['POST'])
def process_register():
form = RegisterUserForm()
if form.validate_on_submit():
user = User(name=form.name.data,
email_address=form.email_address.data,
mobile_number=form.mobile_number.data,
password=form.password.data,
created_at=datetime.now(),
role_id=1)
try:
users_dao.insert_user(user)
return redirect('/two-factor')
except Exception as e:
return jsonify(database_error=e.message), 400
else:
return jsonify(form.errors), 400
<commit_msg>108536374: Change to a generic message for database errors.
Need a story to handle db exceptions in the dao layer<commit_after>
|
from datetime import datetime
from flask import render_template, redirect, jsonify
from app.main import main
from app.main.dao import users_dao
from app.main.forms import RegisterUserForm
from app.models import User
@main.route("/register", methods=['GET'])
def render_register():
return render_template('register.html', form=RegisterUserForm())
@main.route('/register', methods=['POST'])
def process_register():
form = RegisterUserForm()
if form.validate_on_submit():
user = User(name=form.name.data,
email_address=form.email_address.data,
mobile_number=form.mobile_number.data,
password=form.password.data,
created_at=datetime.now(),
role_id=1)
try:
users_dao.insert_user(user)
return redirect('/two-factor')
except Exception as e:
return jsonify(database_error='encountered database error'), 400
else:
return jsonify(form.errors), 400
|
from datetime import datetime
from flask import render_template, redirect, jsonify
from app.main import main
from app.main.dao import users_dao
from app.main.forms import RegisterUserForm
from app.models import User
@main.route("/register", methods=['GET'])
def render_register():
return render_template('register.html', form=RegisterUserForm())
@main.route('/register', methods=['POST'])
def process_register():
form = RegisterUserForm()
if form.validate_on_submit():
user = User(name=form.name.data,
email_address=form.email_address.data,
mobile_number=form.mobile_number.data,
password=form.password.data,
created_at=datetime.now(),
role_id=1)
try:
users_dao.insert_user(user)
return redirect('/two-factor')
except Exception as e:
return jsonify(database_error=e.message), 400
else:
return jsonify(form.errors), 400
108536374: Change to a generic message for database errors.
Need a story to handle db exceptions in the dao layerfrom datetime import datetime
from flask import render_template, redirect, jsonify
from app.main import main
from app.main.dao import users_dao
from app.main.forms import RegisterUserForm
from app.models import User
@main.route("/register", methods=['GET'])
def render_register():
return render_template('register.html', form=RegisterUserForm())
@main.route('/register', methods=['POST'])
def process_register():
form = RegisterUserForm()
if form.validate_on_submit():
user = User(name=form.name.data,
email_address=form.email_address.data,
mobile_number=form.mobile_number.data,
password=form.password.data,
created_at=datetime.now(),
role_id=1)
try:
users_dao.insert_user(user)
return redirect('/two-factor')
except Exception as e:
return jsonify(database_error='encountered database error'), 400
else:
return jsonify(form.errors), 400
|
<commit_before>from datetime import datetime
from flask import render_template, redirect, jsonify
from app.main import main
from app.main.dao import users_dao
from app.main.forms import RegisterUserForm
from app.models import User
@main.route("/register", methods=['GET'])
def render_register():
return render_template('register.html', form=RegisterUserForm())
@main.route('/register', methods=['POST'])
def process_register():
form = RegisterUserForm()
if form.validate_on_submit():
user = User(name=form.name.data,
email_address=form.email_address.data,
mobile_number=form.mobile_number.data,
password=form.password.data,
created_at=datetime.now(),
role_id=1)
try:
users_dao.insert_user(user)
return redirect('/two-factor')
except Exception as e:
return jsonify(database_error=e.message), 400
else:
return jsonify(form.errors), 400
<commit_msg>108536374: Change to a generic message for database errors.
Need a story to handle db exceptions in the dao layer<commit_after>from datetime import datetime
from flask import render_template, redirect, jsonify
from app.main import main
from app.main.dao import users_dao
from app.main.forms import RegisterUserForm
from app.models import User
@main.route("/register", methods=['GET'])
def render_register():
return render_template('register.html', form=RegisterUserForm())
@main.route('/register', methods=['POST'])
def process_register():
form = RegisterUserForm()
if form.validate_on_submit():
user = User(name=form.name.data,
email_address=form.email_address.data,
mobile_number=form.mobile_number.data,
password=form.password.data,
created_at=datetime.now(),
role_id=1)
try:
users_dao.insert_user(user)
return redirect('/two-factor')
except Exception as e:
return jsonify(database_error='encountered database error'), 400
else:
return jsonify(form.errors), 400
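A sketch of one possible follow-up for the dao-layer story, assumed rather than taken from the commit: keep the generic client-facing message but record the real cause server-side. This fragment would replace the try/except in process_register, with current_app imported from flask:

    try:
        users_dao.insert_user(user)
        return redirect('/two-factor')
    except Exception:
        current_app.logger.exception('insert_user failed')  # full traceback to the server log
        return jsonify(database_error='encountered database error'), 400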
|
045e8b0dabd265e2f8cf0cbf26a5d912345a4533
|
h2o-py/tests/testdir_parser/pyunit_PUBDEV_5705_drop_columns_parser_svmlight_large.py
|
h2o-py/tests/testdir_parser/pyunit_PUBDEV_5705_drop_columns_parser_svmlight_large.py
|
from __future__ import print_function
import sys
sys.path.insert(1, "../../")
import h2o
from tests import pyunit_utils
import os
def test_parser_svmlight_column_skip():
# generate a big frame
nrow = 10000
ncol = 10
seed = 12345
original_frame = h2o.create_frame(rows=nrow, cols=ncol, real_fraction=0.5, integer_fraction=0.5, missing_fraction=0,
has_response=True, seed=seed)
results_path = pyunit_utils.locate("results")
svmfile = os.path.join(results_path, 'out_large.svm')
# write h2o frame to svm format
pyunit_utils.write_H2OFrame_2_SVMLight(svmfile, original_frame)
# check if frame uploaded/imported from svm file is equal to original frame
svm_frame_uploaded = h2o.upload_file(svmfile)
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_uploaded, prob=1, returnResult=True),\
"Frame uploaded from svm file is not the same as original"
svm_frame_imported = h2o.import_file(svmfile)
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_imported, prob=1, returnResult=True), \
"Frame imported from svm file is not the same as original"
# test with null skipped_column list
svm_frame_uploaded_skipped_nothing = h2o.upload_file(svmfile, skipped_columns=[])
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_uploaded_skipped_nothing, prob=1,
returnResult=True),\
"Frame uploaded from svm file with empty skipped_columns parameter is not the same as original"
svm_frame_imported_skipped_nothing = h2o.import_file(svmfile, skipped_columns=[])
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_imported_skipped_nothing, prob=1,
returnResult=True),\
"Frame imported from svm file with empty skipped_columns parameter is not the same as original"
if __name__ == "__main__":
pyunit_utils.standalone_test(test_parser_svmlight_column_skip)
else:
test_parser_svmlight_column_skip()
|
Fix svm drop columns test - add test that missing skipped_columns or skipped_columns = [] should work correctly and give the same frame as original one
|
Fix svm drop columns test - add test that missing skipped_columns or skipped_columns = [] should work correctly and give the same frame as original one
|
Python
|
apache-2.0
|
h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3
|
Fix svm drop columns test - add test that missing skipped_columns or skipped_columns = [] should work correctly and give the same frame as original one
|
from __future__ import print_function
import sys
sys.path.insert(1, "../../")
import h2o
from tests import pyunit_utils
import os
def test_parser_svmlight_column_skip():
# generate a big frame
nrow = 10000
ncol = 10
seed = 12345
original_frame = h2o.create_frame(rows=nrow, cols=ncol, real_fraction=0.5, integer_fraction=0.5, missing_fraction=0,
has_response=True, seed=seed)
results_path = pyunit_utils.locate("results")
svmfile = os.path.join(results_path, 'out_large.svm')
# write h2o frame to svm format
pyunit_utils.write_H2OFrame_2_SVMLight(svmfile, original_frame)
# check if frame uploaded/imported from svm file is equal to original frame
svm_frame_uploaded = h2o.upload_file(svmfile)
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_uploaded, prob=1, returnResult=True),\
"Frame uploaded from svm file is not the same as original"
svm_frame_imported = h2o.import_file(svmfile)
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_imported, prob=1, returnResult=True), \
"Frame imported from svm file is not the same as original"
# test with null skipped_column list
svm_frame_uploaded_skipped_nothing = h2o.upload_file(svmfile, skipped_columns=[])
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_uploaded_skipped_nothing, prob=1,
returnResult=True),\
"Frame uploaded from svm file with empty skipped_columns parameter is not the same as original"
svm_frame_imported_skipped_nothing = h2o.import_file(svmfile, skipped_columns=[])
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_imported_skipped_nothing, prob=1,
returnResult=True),\
"Frame imported from svm file with empty skipped_columns parameter is not the same as original"
if __name__ == "__main__":
pyunit_utils.standalone_test(test_parser_svmlight_column_skip)
else:
test_parser_svmlight_column_skip()
|
<commit_before><commit_msg>Fix svm drop columns test - add test that missing skipped_columns or skipped_columns = [] should work correctly and give the same frame as original one<commit_after>
|
from __future__ import print_function
import sys
sys.path.insert(1, "../../")
import h2o
from tests import pyunit_utils
import os
def test_parser_svmlight_column_skip():
# generate a big frame
nrow = 10000
ncol = 10
seed = 12345
original_frame = h2o.create_frame(rows=nrow, cols=ncol, real_fraction=0.5, integer_fraction=0.5, missing_fraction=0,
has_response=True, seed=seed)
results_path = pyunit_utils.locate("results")
svmfile = os.path.join(results_path, 'out_large.svm')
# write h2o frame to svm format
pyunit_utils.write_H2OFrame_2_SVMLight(svmfile, original_frame)
# check if frame uploaded/imported from svm file is equal to original frame
svm_frame_uploaded = h2o.upload_file(svmfile)
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_uploaded, prob=1, returnResult=True),\
"Frame uploaded from svm file is not the same as original"
svm_frame_imported = h2o.import_file(svmfile)
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_imported, prob=1, returnResult=True), \
"Frame imported from svm file is not the same as original"
# test with null skipped_column list
svm_frame_uploaded_skipped_nothing = h2o.upload_file(svmfile, skipped_columns=[])
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_uploaded_skipped_nothing, prob=1,
returnResult=True),\
"Frame uploaded from svm file with empty skipped_columns parameter is not the same as original"
svm_frame_imported_skipped_nothing = h2o.import_file(svmfile, skipped_columns=[])
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_imported_skipped_nothing, prob=1,
returnResult=True),\
"Frame imported from svm file with empty skipped_columns parameter is not the same as original"
if __name__ == "__main__":
pyunit_utils.standalone_test(test_parser_svmlight_column_skip)
else:
test_parser_svmlight_column_skip()
|
Fix svm drop columns test - add test that missing skipped_columns or skipped_columns = [] should work correctly and give the same frame as original onefrom __future__ import print_function
import sys
sys.path.insert(1, "../../")
import h2o
from tests import pyunit_utils
import os
def test_parser_svmlight_column_skip():
# generate a big frame
nrow = 10000
ncol = 10
seed = 12345
original_frame = h2o.create_frame(rows=nrow, cols=ncol, real_fraction=0.5, integer_fraction=0.5, missing_fraction=0,
has_response=True, seed=seed)
results_path = pyunit_utils.locate("results")
svmfile = os.path.join(results_path, 'out_large.svm')
# write h2o frame to svm format
pyunit_utils.write_H2OFrame_2_SVMLight(svmfile, original_frame)
# check if frame uploaded/imported from svm file is equal to original frame
svm_frame_uploaded = h2o.upload_file(svmfile)
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_uploaded, prob=1, returnResult=True),\
"Frame uploaded from svm file is not the same as original"
svm_frame_imported = h2o.import_file(svmfile)
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_imported, prob=1, returnResult=True), \
"Frame imported from svm file is not the same as original"
# test with null skipped_column list
svm_frame_uploaded_skipped_nothing = h2o.upload_file(svmfile, skipped_columns=[])
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_uploaded_skipped_nothing, prob=1,
returnResult=True),\
"Frame uploaded from svm file with empty skipped_columns parameter is not the same as original"
svm_frame_imported_skipped_nothing = h2o.import_file(svmfile, skipped_columns=[])
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_imported_skipped_nothing, prob=1,
returnResult=True),\
"Frame imported from svm file with empty skipped_columns parameter is not the same as original"
if __name__ == "__main__":
pyunit_utils.standalone_test(test_parser_svmlight_column_skip)
else:
test_parser_svmlight_column_skip()
|
<commit_before><commit_msg>Fix svm drop columns test - add test that missing skipped_columns or skipped_columns = [] should work correctly and give the same frame as original one<commit_after>from __future__ import print_function
import sys
sys.path.insert(1, "../../")
import h2o
from tests import pyunit_utils
import os
def test_parser_svmlight_column_skip():
# generate a big frame
nrow = 10000
ncol = 10
seed = 12345
original_frame = h2o.create_frame(rows=nrow, cols=ncol, real_fraction=0.5, integer_fraction=0.5, missing_fraction=0,
has_response=True, seed=seed)
results_path = pyunit_utils.locate("results")
svmfile = os.path.join(results_path, 'out_large.svm')
# write h2o frame to svm format
pyunit_utils.write_H2OFrame_2_SVMLight(svmfile, original_frame)
# check if frame uploaded/imported from svm file is equal to original frame
svm_frame_uploaded = h2o.upload_file(svmfile)
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_uploaded, prob=1, returnResult=True),\
"Frame uploaded from svm file is not the same as original"
svm_frame_imported = h2o.import_file(svmfile)
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_imported, prob=1, returnResult=True), \
"Frame imported from svm file is not the same as original"
# test with null skipped_column list
svm_frame_uploaded_skipped_nothing = h2o.upload_file(svmfile, skipped_columns=[])
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_uploaded_skipped_nothing, prob=1,
returnResult=True),\
"Frame uploaded from svm file with empty skipped_columns parameter is not the same as original"
svm_frame_imported_skipped_nothing = h2o.import_file(svmfile, skipped_columns=[])
assert pyunit_utils.compare_frames_local_svm(original_frame, svm_frame_imported_skipped_nothing, prob=1,
returnResult=True),\
"Frame imported from svm file with empty skipped_columns parameter is not the same as original"
if __name__ == "__main__":
pyunit_utils.standalone_test(test_parser_svmlight_column_skip)
else:
test_parser_svmlight_column_skip()
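For reference, the row format the test round-trips through; a tiny illustrative parser (not from the test suite), assuming standard 1-based SVMLight feature indices:

def parse_svmlight_line(line):
    # "<label> <idx>:<value> <idx>:<value> ..."
    label, *pairs = line.split()
    features = {int(i): float(v) for i, v in (p.split(':') for p in pairs)}
    return float(label), features

assert parse_svmlight_line("1.0 1:0.5 3:-2.0") == (1.0, {1: 0.5, 3: -2.0})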
|
|
32f4c67cda624f1840b1ab92d1d1afc826f13dd5
|
examples/plot_gmm_pdf.py
|
examples/plot_gmm_pdf.py
|
"""
=================================
Gaussian Mixture Model Ellipsoids
=================================
Plot the confidence ellipsoids of a mixture of two gaussians.
"""
import numpy as np
from scikits.learn import gmm
import itertools
import pylab as pl
import matplotlib as mpl
import matplotlib.pyplot as plt
n, m = 300, 2
# generate random sample, two components
np.random.seed(0)
C = np.array([[0., -0.7], [3.5, .7]])
X_train = np.r_[np.dot(np.random.randn(n, 2), C),
np.random.randn(n, 2) + np.array([20, 20])]
clf = gmm.GMM(2, cvtype='full')
clf.fit(X_train)
x = np.linspace(-20.0, 30.0)
y = np.linspace(-20.0, 40.0)
X, Y = np.meshgrid(x, y)
XX = np.c_[X.ravel(), Y.ravel()]
Z = np.log(-clf.eval(XX)[0])
Z = Z.reshape(X.shape)
CS = pl.contour(X, Y, Z)
CB = plt.colorbar(CS, shrink=0.8, extend='both')
pl.scatter(X_train[:, 0], X_train[:, 1], .8)
pl.axis('tight')
pl.show()
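As the commit message warns, eval()'s return type was in flux; for comparison only (not part of this commit), the modern scikit-learn equivalent of the density evaluation, reusing X_train and XX from the example above, would be roughly:

from sklearn.mixture import GaussianMixture

gm = GaussianMixture(n_components=2, covariance_type='full').fit(X_train)
Z = -gm.score_samples(XX).reshape(X.shape)  # per-sample negative log-likelihood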
|
Add an example with probability distribution estimates using GMM.
|
Add an example with probability distribution estimates using GMM.
This is a work in progress. Also, the .eval() function from GMM
might very likely change it's return type in the future.
|
Python
|
bsd-3-clause
|
mayblue9/scikit-learn,qifeigit/scikit-learn,rrohan/scikit-learn,shusenl/scikit-learn,jseabold/scikit-learn,huobaowangxi/scikit-learn,frank-tancf/scikit-learn,shangwuhencc/scikit-learn,Srisai85/scikit-learn,RPGOne/scikit-learn,appapantula/scikit-learn,raghavrv/scikit-learn,hdmetor/scikit-learn,yask123/scikit-learn,xyguo/scikit-learn,costypetrisor/scikit-learn,giorgiop/scikit-learn,nrhine1/scikit-learn,mfjb/scikit-learn,siutanwong/scikit-learn,zorroblue/scikit-learn,Jimmy-Morzaria/scikit-learn,Srisai85/scikit-learn,hainm/scikit-learn,huzq/scikit-learn,michigraber/scikit-learn,Clyde-fare/scikit-learn,ominux/scikit-learn,hainm/scikit-learn,kashif/scikit-learn,abhishekgahlot/scikit-learn,mattilyra/scikit-learn,YinongLong/scikit-learn,IshankGulati/scikit-learn,alexeyum/scikit-learn,AIML/scikit-learn,yunfeilu/scikit-learn,cdegroc/scikit-learn,manhhomienbienthuy/scikit-learn,rsivapr/scikit-learn,ishanic/scikit-learn,aewhatley/scikit-learn,anntzer/scikit-learn,lucidfrontier45/scikit-learn,quheng/scikit-learn,aetilley/scikit-learn,belltailjp/scikit-learn,roxyboy/scikit-learn,ashhher3/scikit-learn,heli522/scikit-learn,mlyundin/scikit-learn,mojoboss/scikit-learn,luo66/scikit-learn,AlexRobson/scikit-learn,kagayakidan/scikit-learn,huzq/scikit-learn,kevin-intel/scikit-learn,smartscheduling/scikit-learn-categorical-tree,manashmndl/scikit-learn,shusenl/scikit-learn,LohithBlaze/scikit-learn,Lawrence-Liu/scikit-learn,jzt5132/scikit-learn,aetilley/scikit-learn,mrshu/scikit-learn,cauchycui/scikit-learn,hitszxp/scikit-learn,xwolf12/scikit-learn,davidgbe/scikit-learn,costypetrisor/scikit-learn,Aasmi/scikit-learn,luo66/scikit-learn,devanshdalal/scikit-learn,f3r/scikit-learn,ZenDevelopmentSystems/scikit-learn,michigraber/scikit-learn,mattilyra/scikit-learn,jmetzen/scikit-learn,dsquareindia/scikit-learn,moutai/scikit-learn,vibhorag/scikit-learn,gclenaghan/scikit-learn,mattilyra/scikit-learn,yask123/scikit-learn,h2educ/scikit-learn,costypetrisor/scikit-learn,gotomypc/scikit-learn,ephes/scikit-learn,jmschrei/scikit-learn,jorik041/scikit-learn,jorik041/scikit-learn,jm-begon/scikit-learn,B3AU/waveTree,rvraghav93/scikit-learn,arjoly/scikit-learn,petosegan/scikit-learn,potash/scikit-learn,shikhardb/scikit-learn,PatrickChrist/scikit-learn,xzh86/scikit-learn,MatthieuBizien/scikit-learn,TomDLT/scikit-learn,thientu/scikit-learn,pv/scikit-learn,heli522/scikit-learn,andaag/scikit-learn,appapantula/scikit-learn,sergeyf/scikit-learn,chrsrds/scikit-learn,sgenoud/scikit-learn,RachitKansal/scikit-learn,betatim/scikit-learn,Barmaley-exe/scikit-learn,JeanKossaifi/scikit-learn,carrillo/scikit-learn,anirudhjayaraman/scikit-learn,ElDeveloper/scikit-learn,bhargav/scikit-learn,frank-tancf/scikit-learn,Windy-Ground/scikit-learn,samuel1208/scikit-learn,jakirkham/scikit-learn,toastedcornflakes/scikit-learn,icdishb/scikit-learn,Lawrence-Liu/scikit-learn,imaculate/scikit-learn,kashif/scikit-learn,smartscheduling/scikit-learn-categorical-tree,mhdella/scikit-learn,eickenberg/scikit-learn,beepee14/scikit-learn,aflaxman/scikit-learn,ldirer/scikit-learn,stylianos-kampakis/scikit-learn,ndingwall/scikit-learn,B3AU/waveTree,Myasuka/scikit-learn,vinayak-mehta/scikit-learn,mugizico/scikit-learn,LiaoPan/scikit-learn,spallavolu/scikit-learn,DonBeo/scikit-learn,thilbern/scikit-learn,ephes/scikit-learn,scikit-learn/scikit-learn,meduz/scikit-learn,tosolveit/scikit-learn,petosegan/scikit-learn,iismd17/scikit-learn,nomadcube/scikit-learn,MechCoder/scikit-learn,pv/scikit-learn,AlexanderFabisch/scikit-learn,etkirsch/scikit-learn,NunoEdgarGub1/scikit-learn,tomlof/
scikit-learn,MohammedWasim/scikit-learn,lesteve/scikit-learn,vortex-ape/scikit-learn,madjelan/scikit-learn,tawsifkhan/scikit-learn,schets/scikit-learn,JosmanPS/scikit-learn,liangz0707/scikit-learn,bikong2/scikit-learn,ElDeveloper/scikit-learn,siutanwong/scikit-learn,nesterione/scikit-learn,victorbergelin/scikit-learn,Barmaley-exe/scikit-learn,lucidfrontier45/scikit-learn,altairpearl/scikit-learn,NunoEdgarGub1/scikit-learn,liangz0707/scikit-learn,jayflo/scikit-learn,scikit-learn/scikit-learn,JPFrancoia/scikit-learn,pianomania/scikit-learn,MohammedWasim/scikit-learn,idlead/scikit-learn,cwu2011/scikit-learn,shangwuhencc/scikit-learn,B3AU/waveTree,wazeerzulfikar/scikit-learn,Garrett-R/scikit-learn,henridwyer/scikit-learn,trungnt13/scikit-learn,3manuek/scikit-learn,abhishekkrthakur/scikit-learn,simon-pepin/scikit-learn,rajat1994/scikit-learn,evgchz/scikit-learn,larsmans/scikit-learn,florian-f/sklearn,idlead/scikit-learn,Fireblend/scikit-learn,ky822/scikit-learn,cainiaocome/scikit-learn,kylerbrown/scikit-learn,xwolf12/scikit-learn,shangwuhencc/scikit-learn,costypetrisor/scikit-learn,btabibian/scikit-learn,Achuth17/scikit-learn,PatrickOReilly/scikit-learn,anntzer/scikit-learn,russel1237/scikit-learn,zorroblue/scikit-learn,wzbozon/scikit-learn,tawsifkhan/scikit-learn,qifeigit/scikit-learn,PrashntS/scikit-learn,alexsavio/scikit-learn,AlexRobson/scikit-learn,Myasuka/scikit-learn,evgchz/scikit-learn,PatrickOReilly/scikit-learn,AnasGhrab/scikit-learn,iismd17/scikit-learn,pkruskal/scikit-learn,lenovor/scikit-learn,tosolveit/scikit-learn,massmutual/scikit-learn,anirudhjayaraman/scikit-learn,anurag313/scikit-learn,siutanwong/scikit-learn,liangz0707/scikit-learn,hrjn/scikit-learn,jpautom/scikit-learn,cainiaocome/scikit-learn,idlead/scikit-learn,samzhang111/scikit-learn,hrjn/scikit-learn,AlexanderFabisch/scikit-learn,deepesch/scikit-learn,Garrett-R/scikit-learn,JeanKossaifi/scikit-learn,kaichogami/scikit-learn,samuel1208/scikit-learn,xiaoxiamii/scikit-learn,lenovor/scikit-learn,giorgiop/scikit-learn,aewhatley/scikit-learn,rohanp/scikit-learn,alexeyum/scikit-learn,bigdataelephants/scikit-learn,jm-begon/scikit-learn,yask123/scikit-learn,kylerbrown/scikit-learn,cl4rke/scikit-learn,hitszxp/scikit-learn,abimannans/scikit-learn,schets/scikit-learn,lucidfrontier45/scikit-learn,Djabbz/scikit-learn,victorbergelin/scikit-learn,belltailjp/scikit-learn,mayblue9/scikit-learn,q1ang/scikit-learn,joshloyal/scikit-learn,fzalkow/scikit-learn,shikhardb/scikit-learn,betatim/scikit-learn,xavierwu/scikit-learn,vivekmishra1991/scikit-learn,yonglehou/scikit-learn,dsullivan7/scikit-learn,h2educ/scikit-learn,herilalaina/scikit-learn,xuewei4d/scikit-learn,mugizico/scikit-learn,cdegroc/scikit-learn,ltiao/scikit-learn,jakobworldpeace/scikit-learn,OshynSong/scikit-learn,yanlend/scikit-learn,fabianp/scikit-learn,nvoron23/scikit-learn,aminert/scikit-learn,nesterione/scikit-learn,jpautom/scikit-learn,ilo10/scikit-learn,anurag313/scikit-learn,RachitKansal/scikit-learn,voxlol/scikit-learn,ClimbsRocks/scikit-learn,justincassidy/scikit-learn,jjx02230808/project0223,vivekmishra1991/scikit-learn,jm-begon/scikit-learn,ogrisel/scikit-learn,thientu/scikit-learn,hugobowne/scikit-learn,sinhrks/scikit-learn,ngoix/OCRF,murali-munna/scikit-learn,pompiduskus/scikit-learn,LohithBlaze/scikit-learn,elkingtonmcb/scikit-learn,roxyboy/scikit-learn,RayMick/scikit-learn,kjung/scikit-learn,jmschrei/scikit-learn,bigdataelephants/scikit-learn,CforED/Machine-Learning,hrjn/scikit-learn,wlamond/scikit-learn,cwu2011/scikit-learn,aewhatley/scikit-learn,aminert/scikit-le
arn,shusenl/scikit-learn,lazywei/scikit-learn,ChanderG/scikit-learn,krez13/scikit-learn,icdishb/scikit-learn,shyamalschandra/scikit-learn,MartinDelzant/scikit-learn,f3r/scikit-learn,scikit-learn/scikit-learn,fengzhyuan/scikit-learn,JeanKossaifi/scikit-learn,yunfeilu/scikit-learn,jakirkham/scikit-learn,ankurankan/scikit-learn,gotomypc/scikit-learn,yyjiang/scikit-learn,icdishb/scikit-learn,davidgbe/scikit-learn,robbymeals/scikit-learn,potash/scikit-learn,rahuldhote/scikit-learn,mfjb/scikit-learn,q1ang/scikit-learn,nesterione/scikit-learn,raghavrv/scikit-learn,yonglehou/scikit-learn,aetilley/scikit-learn,ashhher3/scikit-learn,zorroblue/scikit-learn,smartscheduling/scikit-learn-categorical-tree,joernhees/scikit-learn,robin-lai/scikit-learn,shahankhatch/scikit-learn,glennq/scikit-learn,jmschrei/scikit-learn,maheshakya/scikit-learn,arahuja/scikit-learn,IssamLaradji/scikit-learn,hlin117/scikit-learn,cl4rke/scikit-learn,larsmans/scikit-learn,hainm/scikit-learn,fabioticconi/scikit-learn,mehdidc/scikit-learn,YinongLong/scikit-learn,samzhang111/scikit-learn,billy-inn/scikit-learn,pypot/scikit-learn,LiaoPan/scikit-learn,arabenjamin/scikit-learn,billy-inn/scikit-learn,adamgreenhall/scikit-learn,nikitasingh981/scikit-learn,imaculate/scikit-learn,ilyes14/scikit-learn,vybstat/scikit-learn,alexsavio/scikit-learn,simon-pepin/scikit-learn,AnasGhrab/scikit-learn,terkkila/scikit-learn,espg/scikit-learn,mrshu/scikit-learn,poryfly/scikit-learn,HolgerPeters/scikit-learn,gclenaghan/scikit-learn,fzalkow/scikit-learn,Obus/scikit-learn,zhenv5/scikit-learn,mxjl620/scikit-learn,billy-inn/scikit-learn,wzbozon/scikit-learn,liberatorqjw/scikit-learn,Sentient07/scikit-learn,Adai0808/scikit-learn,TomDLT/scikit-learn,fabioticconi/scikit-learn,marcocaccin/scikit-learn,jakobworldpeace/scikit-learn,djgagne/scikit-learn,cybernet14/scikit-learn,466152112/scikit-learn,equialgo/scikit-learn,CVML/scikit-learn,ishanic/scikit-learn,vermouthmjl/scikit-learn,0asa/scikit-learn,hsiaoyi0504/scikit-learn,MartinSavc/scikit-learn,robbymeals/scikit-learn,MartinSavc/scikit-learn,xavierwu/scikit-learn,mehdidc/scikit-learn,loli/semisupervisedforests,chrisburr/scikit-learn,gotomypc/scikit-learn,vybstat/scikit-learn,dhruv13J/scikit-learn,meduz/scikit-learn,rajat1994/scikit-learn,belltailjp/scikit-learn,glouppe/scikit-learn,Titan-C/scikit-learn,RayMick/scikit-learn,btabibian/scikit-learn,ycaihua/scikit-learn,kagayakidan/scikit-learn,bthirion/scikit-learn,pkruskal/scikit-learn,krez13/scikit-learn,MechCoder/scikit-learn,0asa/scikit-learn,sonnyhu/scikit-learn,justincassidy/scikit-learn,liyu1990/sklearn,voxlol/scikit-learn,pratapvardhan/scikit-learn,mhue/scikit-learn,ky822/scikit-learn,shyamalschandra/scikit-learn,bthirion/scikit-learn,rajat1994/scikit-learn,vshtanko/scikit-learn,michigraber/scikit-learn,equialgo/scikit-learn,MatthieuBizien/scikit-learn,hsiaoyi0504/scikit-learn,loli/semisupervisedforests,CVML/scikit-learn,andaag/scikit-learn,pv/scikit-learn,massmutual/scikit-learn,kmike/scikit-learn,IndraVikas/scikit-learn,terkkila/scikit-learn,bnaul/scikit-learn,waterponey/scikit-learn,rishikksh20/scikit-learn,saiwing-yeung/scikit-learn,aabadie/scikit-learn,mwv/scikit-learn,TomDLT/scikit-learn,huzq/scikit-learn,zorojean/scikit-learn,MartinDelzant/scikit-learn,eg-zhang/scikit-learn,spallavolu/scikit-learn,liyu1990/sklearn,shahankhatch/scikit-learn,ngoix/OCRF,Adai0808/scikit-learn,mayblue9/scikit-learn,jereze/scikit-learn,lin-credible/scikit-learn,manhhomienbienthuy/scikit-learn,terkkila/scikit-learn,jzt5132/scikit-learn,tdhopper/scikit-learn,dsullivan7/s
cikit-learn,mwv/scikit-learn,LiaoPan/scikit-learn,djgagne/scikit-learn,akionakamura/scikit-learn,Vimos/scikit-learn,jorik041/scikit-learn,r-mart/scikit-learn,herilalaina/scikit-learn,hugobowne/scikit-learn,DonBeo/scikit-learn,HolgerPeters/scikit-learn,ClimbsRocks/scikit-learn,jlegendary/scikit-learn,zuku1985/scikit-learn,aetilley/scikit-learn,stylianos-kampakis/scikit-learn,dingocuster/scikit-learn,huobaowangxi/scikit-learn,yanlend/scikit-learn,espg/scikit-learn,huzq/scikit-learn,ky822/scikit-learn,chrsrds/scikit-learn,equialgo/scikit-learn,xyguo/scikit-learn,cybernet14/scikit-learn,cainiaocome/scikit-learn,loli/sklearn-ensembletrees,maheshakya/scikit-learn,CforED/Machine-Learning,hsuantien/scikit-learn,mikebenfield/scikit-learn,fabioticconi/scikit-learn,IndraVikas/scikit-learn,hdmetor/scikit-learn,mhue/scikit-learn,xyguo/scikit-learn,untom/scikit-learn,chrisburr/scikit-learn,466152112/scikit-learn,AlexandreAbraham/scikit-learn,jayflo/scikit-learn,fyffyt/scikit-learn,tmhm/scikit-learn,poryfly/scikit-learn,BiaDarkia/scikit-learn,pkruskal/scikit-learn,jakobworldpeace/scikit-learn,yunfeilu/scikit-learn,PatrickChrist/scikit-learn,justincassidy/scikit-learn,ningchi/scikit-learn,procoder317/scikit-learn,RPGOne/scikit-learn,kjung/scikit-learn,Myasuka/scikit-learn,zaxtax/scikit-learn,vshtanko/scikit-learn,arahuja/scikit-learn,CVML/scikit-learn,abhishekkrthakur/scikit-learn,lenovor/scikit-learn,ldirer/scikit-learn,mjudsp/Tsallis,ElDeveloper/scikit-learn,fengzhyuan/scikit-learn,Clyde-fare/scikit-learn,RachitKansal/scikit-learn,MatthieuBizien/scikit-learn,anirudhjayaraman/scikit-learn,hsuantien/scikit-learn,pypot/scikit-learn,ahoyosid/scikit-learn,nhejazi/scikit-learn,lazywei/scikit-learn,cybernet14/scikit-learn,henrykironde/scikit-learn,ndingwall/scikit-learn,Obus/scikit-learn,jpautom/scikit-learn,heli522/scikit-learn,fabianp/scikit-learn,henridwyer/scikit-learn,henrykironde/scikit-learn,samuel1208/scikit-learn,plissonf/scikit-learn,lucidfrontier45/scikit-learn,0x0all/scikit-learn,DonBeo/scikit-learn,ChanChiChoi/scikit-learn,etkirsch/scikit-learn,robbymeals/scikit-learn,cauchycui/scikit-learn,rahuldhote/scikit-learn,glennq/scikit-learn,ahoyosid/scikit-learn,MatthieuBizien/scikit-learn,jjx02230808/project0223,jblackburne/scikit-learn,ZenDevelopmentSystems/scikit-learn,jaidevd/scikit-learn,dsquareindia/scikit-learn,IshankGulati/scikit-learn,ssaeger/scikit-learn,ivannz/scikit-learn,untom/scikit-learn,ky822/scikit-learn,nrhine1/scikit-learn,nhejazi/scikit-learn,kylerbrown/scikit-learn,espg/scikit-learn,khkaminska/scikit-learn,rexshihaoren/scikit-learn,glouppe/scikit-learn,wlamond/scikit-learn,bnaul/scikit-learn,arjoly/scikit-learn,shenzebang/scikit-learn,florian-f/sklearn,cdegroc/scikit-learn,toastedcornflakes/scikit-learn,beepee14/scikit-learn,treycausey/scikit-learn,tmhm/scikit-learn,mattgiguere/scikit-learn,eickenberg/scikit-learn,Nyker510/scikit-learn,procoder317/scikit-learn,untom/scikit-learn,PrashntS/scikit-learn,AIML/scikit-learn,macks22/scikit-learn,Garrett-R/scikit-learn,etkirsch/scikit-learn,eickenberg/scikit-learn,Achuth17/scikit-learn,sanketloke/scikit-learn,amueller/scikit-learn,wanggang3333/scikit-learn,murali-munna/scikit-learn,ankurankan/scikit-learn,yanlend/scikit-learn,nikitasingh981/scikit-learn,RPGOne/scikit-learn,f3r/scikit-learn,DSLituiev/scikit-learn,OshynSong/scikit-learn,ahoyosid/scikit-learn,ningchi/scikit-learn,jorge2703/scikit-learn,rohanp/scikit-learn,jakobworldpeace/scikit-learn,jzt5132/scikit-learn,raghavrv/scikit-learn,rajat1994/scikit-learn,Vimos/scikit-learn,khkaminska/
scikit-learn,abimannans/scikit-learn,wlamond/scikit-learn,MartinDelzant/scikit-learn,treycausey/scikit-learn,ZENGXH/scikit-learn,0asa/scikit-learn,ChanChiChoi/scikit-learn,vibhorag/scikit-learn,Barmaley-exe/scikit-learn,ogrisel/scikit-learn,bikong2/scikit-learn,chrsrds/scikit-learn,rishikksh20/scikit-learn,nikitasingh981/scikit-learn,murali-munna/scikit-learn,sergeyf/scikit-learn,rahul-c1/scikit-learn,kevin-intel/scikit-learn,bigdataelephants/scikit-learn,tosolveit/scikit-learn,nvoron23/scikit-learn,btabibian/scikit-learn,arabenjamin/scikit-learn,devanshdalal/scikit-learn,abhishekgahlot/scikit-learn,vybstat/scikit-learn,moutai/scikit-learn,hitszxp/scikit-learn,phdowling/scikit-learn,3manuek/scikit-learn,jereze/scikit-learn,pythonvietnam/scikit-learn,mattilyra/scikit-learn,arjoly/scikit-learn,jkarnows/scikit-learn,ominux/scikit-learn,JsNoNo/scikit-learn,zihua/scikit-learn,Jimmy-Morzaria/scikit-learn,schets/scikit-learn,rrohan/scikit-learn,dhruv13J/scikit-learn,saiwing-yeung/scikit-learn,loli/sklearn-ensembletrees,xwolf12/scikit-learn,AlexandreAbraham/scikit-learn,iismd17/scikit-learn,adamgreenhall/scikit-learn,zhenv5/scikit-learn,ephes/scikit-learn,nesterione/scikit-learn,ltiao/scikit-learn,bnaul/scikit-learn,altairpearl/scikit-learn,abhishekgahlot/scikit-learn,0x0all/scikit-learn,manhhomienbienthuy/scikit-learn,Sentient07/scikit-learn,depet/scikit-learn,maheshakya/scikit-learn,larsmans/scikit-learn,nomadcube/scikit-learn,rahul-c1/scikit-learn,rahul-c1/scikit-learn,hlin117/scikit-learn,AlexandreAbraham/scikit-learn,ChanChiChoi/scikit-learn,quheng/scikit-learn,eickenberg/scikit-learn,mayblue9/scikit-learn,quheng/scikit-learn,themrmax/scikit-learn,hugobowne/scikit-learn,lbishal/scikit-learn,Windy-Ground/scikit-learn,loli/sklearn-ensembletrees,h2educ/scikit-learn,abimannans/scikit-learn,depet/scikit-learn,mjgrav2001/scikit-learn,HolgerPeters/scikit-learn,glemaitre/scikit-learn,LiaoPan/scikit-learn,theoryno3/scikit-learn,vortex-ape/scikit-learn,thientu/scikit-learn,rsivapr/scikit-learn,PrashntS/scikit-learn,ningchi/scikit-learn,Garrett-R/scikit-learn,jzt5132/scikit-learn,dingocuster/scikit-learn,olologin/scikit-learn,LohithBlaze/scikit-learn,bhargav/scikit-learn,wzbozon/scikit-learn,xuewei4d/scikit-learn,ndingwall/scikit-learn,fabioticconi/scikit-learn,pythonvietnam/scikit-learn,marcocaccin/scikit-learn,NunoEdgarGub1/scikit-learn,glennq/scikit-learn,florian-f/sklearn,mjgrav2001/scikit-learn,mikebenfield/scikit-learn,mjgrav2001/scikit-learn,Fireblend/scikit-learn,UNR-AERIAL/scikit-learn,sinhrks/scikit-learn,lin-credible/scikit-learn,arahuja/scikit-learn,arabenjamin/scikit-learn,LohithBlaze/scikit-learn,ephes/scikit-learn,liberatorqjw/scikit-learn,sarahgrogan/scikit-learn,jblackburne/scikit-learn,stylianos-kampakis/scikit-learn,jkarnows/scikit-learn,pv/scikit-learn,bthirion/scikit-learn,IssamLaradji/scikit-learn,hdmetor/scikit-learn,mehdidc/scikit-learn,mblondel/scikit-learn,mattilyra/scikit-learn,Windy-Ground/scikit-learn,ngoix/OCRF,NunoEdgarGub1/scikit-learn,pypot/scikit-learn,rsivapr/scikit-learn,lesteve/scikit-learn,yonglehou/scikit-learn,ngoix/OCRF,vybstat/scikit-learn,amueller/scikit-learn,florian-f/sklearn,robin-lai/scikit-learn,nmayorov/scikit-learn,trankmichael/scikit-learn,Akshay0724/scikit-learn,nelson-liu/scikit-learn,lazywei/scikit-learn,wlamond/scikit-learn,akionakamura/scikit-learn,mwv/scikit-learn,petosegan/scikit-learn,hlin117/scikit-learn,shenzebang/scikit-learn,clemkoa/scikit-learn,PatrickChrist/scikit-learn,CforED/Machine-Learning,ycaihua/scikit-learn,RomainBrault/scikit-learn,
JosmanPS/scikit-learn,zuku1985/scikit-learn,mhdella/scikit-learn,sergeyf/scikit-learn,manashmndl/scikit-learn,djgagne/scikit-learn,eickenberg/scikit-learn,joshloyal/scikit-learn,mfjb/scikit-learn,massmutual/scikit-learn,russel1237/scikit-learn,bnaul/scikit-learn,pratapvardhan/scikit-learn,vigilv/scikit-learn,vshtanko/scikit-learn,saiwing-yeung/scikit-learn,pianomania/scikit-learn,hainm/scikit-learn,ashhher3/scikit-learn,frank-tancf/scikit-learn,yunfeilu/scikit-learn,kjung/scikit-learn,NelisVerhoef/scikit-learn,Barmaley-exe/scikit-learn,0x0all/scikit-learn,pnedunuri/scikit-learn,alexsavio/scikit-learn,evgchz/scikit-learn,RomainBrault/scikit-learn,Djabbz/scikit-learn,ChanderG/scikit-learn,liberatorqjw/scikit-learn,akionakamura/scikit-learn,sgenoud/scikit-learn,sumspr/scikit-learn,q1ang/scikit-learn,rexshihaoren/scikit-learn,shikhardb/scikit-learn,YinongLong/scikit-learn,sanketloke/scikit-learn,ishanic/scikit-learn,shikhardb/scikit-learn,sarahgrogan/scikit-learn,xzh86/scikit-learn,tomlof/scikit-learn,marcocaccin/scikit-learn,nelson-liu/scikit-learn,yanlend/scikit-learn,devanshdalal/scikit-learn,equialgo/scikit-learn,chrisburr/scikit-learn,ssaeger/scikit-learn,ClimbsRocks/scikit-learn,tdhopper/scikit-learn,henridwyer/scikit-learn,sumspr/scikit-learn,jlegendary/scikit-learn,frank-tancf/scikit-learn,Nyker510/scikit-learn,iismd17/scikit-learn,xyguo/scikit-learn,quheng/scikit-learn,yyjiang/scikit-learn,Myasuka/scikit-learn,466152112/scikit-learn,arahuja/scikit-learn,tawsifkhan/scikit-learn,MohammedWasim/scikit-learn,spallavolu/scikit-learn,shenzebang/scikit-learn,marcocaccin/scikit-learn,anurag313/scikit-learn,deepesch/scikit-learn,Aasmi/scikit-learn,justincassidy/scikit-learn,HolgerPeters/scikit-learn,olologin/scikit-learn,toastedcornflakes/scikit-learn,cauchycui/scikit-learn,ilyes14/scikit-learn,trungnt13/scikit-learn,cl4rke/scikit-learn,maheshakya/scikit-learn,untom/scikit-learn,xubenben/scikit-learn,jseabold/scikit-learn,aewhatley/scikit-learn,adamgreenhall/scikit-learn,macks22/scikit-learn,shahankhatch/scikit-learn,BiaDarkia/scikit-learn,Lawrence-Liu/scikit-learn,sinhrks/scikit-learn,kmike/scikit-learn,liyu1990/sklearn,pythonvietnam/scikit-learn,luo66/scikit-learn,DSLituiev/scikit-learn,themrmax/scikit-learn,xubenben/scikit-learn,samzhang111/scikit-learn,xiaoxiamii/scikit-learn,Achuth17/scikit-learn,ominux/scikit-learn,pratapvardhan/scikit-learn,luo66/scikit-learn,NelisVerhoef/scikit-learn,sanketloke/scikit-learn,anntzer/scikit-learn,AlexandreAbraham/scikit-learn,maheshakya/scikit-learn,thilbern/scikit-learn,elkingtonmcb/scikit-learn,abhishekgahlot/scikit-learn,mjudsp/Tsallis,zhenv5/scikit-learn,vermouthmjl/scikit-learn,voxlol/scikit-learn,wanggang3333/scikit-learn,andaag/scikit-learn,hsuantien/scikit-learn,glennq/scikit-learn,sonnyhu/scikit-learn,kagayakidan/scikit-learn,kashif/scikit-learn,themrmax/scikit-learn,wanggang3333/scikit-learn,vermouthmjl/scikit-learn,abhishekkrthakur/scikit-learn,nelson-liu/scikit-learn,aabadie/scikit-learn,liyu1990/sklearn,IshankGulati/scikit-learn,evgchz/scikit-learn,anirudhjayaraman/scikit-learn,zorojean/scikit-learn,carrillo/scikit-learn,deepesch/scikit-learn,thilbern/scikit-learn,treycausey/scikit-learn,fyffyt/scikit-learn,jorge2703/scikit-learn,aflaxman/scikit-learn,manashmndl/scikit-learn,xzh86/scikit-learn,rvraghav93/scikit-learn,sarahgrogan/scikit-learn,spallavolu/scikit-learn,ivannz/scikit-learn,fredhusser/scikit-learn,trankmichael/scikit-learn,henrykironde/scikit-learn,AnasGhrab/scikit-learn,jlegendary/scikit-learn,IssamLaradji/scikit-learn,ahoyosid/sci
kit-learn,anurag313/scikit-learn,akionakamura/scikit-learn,poryfly/scikit-learn,nhejazi/scikit-learn,gotomypc/scikit-learn,alexeyum/scikit-learn,mrshu/scikit-learn,ssaeger/scikit-learn,yask123/scikit-learn,pratapvardhan/scikit-learn,phdowling/scikit-learn,mattgiguere/scikit-learn,elkingtonmcb/scikit-learn,ZenDevelopmentSystems/scikit-learn,fzalkow/scikit-learn,larsmans/scikit-learn,hitszxp/scikit-learn,dsquareindia/scikit-learn,henridwyer/scikit-learn,jlegendary/scikit-learn,cdegroc/scikit-learn,fredhusser/scikit-learn,rrohan/scikit-learn,Achuth17/scikit-learn,joernhees/scikit-learn,nrhine1/scikit-learn,florian-f/sklearn,sarahgrogan/scikit-learn,RomainBrault/scikit-learn,mwv/scikit-learn,jblackburne/scikit-learn,JeanKossaifi/scikit-learn,russel1237/scikit-learn,imaculate/scikit-learn,yyjiang/scikit-learn,sergeyf/scikit-learn,jseabold/scikit-learn,procoder317/scikit-learn,potash/scikit-learn,adamgreenhall/scikit-learn,3manuek/scikit-learn,trungnt13/scikit-learn,andaag/scikit-learn,olologin/scikit-learn,Akshay0724/scikit-learn,kashif/scikit-learn,phdowling/scikit-learn,hsuantien/scikit-learn,fredhusser/scikit-learn,hugobowne/scikit-learn,IndraVikas/scikit-learn,B3AU/waveTree,jmschrei/scikit-learn,ngoix/OCRF,anntzer/scikit-learn,JsNoNo/scikit-learn,djgagne/scikit-learn,jaidevd/scikit-learn,glemaitre/scikit-learn,kevin-intel/scikit-learn,yonglehou/scikit-learn,ycaihua/scikit-learn,treycausey/scikit-learn,jaidevd/scikit-learn,fengzhyuan/scikit-learn,themrmax/scikit-learn,Adai0808/scikit-learn,nhejazi/scikit-learn,shyamalschandra/scikit-learn,jorge2703/scikit-learn,petosegan/scikit-learn,harshaneelhg/scikit-learn,jpautom/scikit-learn,joernhees/scikit-learn,procoder317/scikit-learn,clemkoa/scikit-learn,tmhm/scikit-learn,devanshdalal/scikit-learn,kjung/scikit-learn,mlyundin/scikit-learn,wazeerzulfikar/scikit-learn,glemaitre/scikit-learn,CVML/scikit-learn,ldirer/scikit-learn,rahul-c1/scikit-learn,kaichogami/scikit-learn,thientu/scikit-learn,mxjl620/scikit-learn,Aasmi/scikit-learn,bigdataelephants/scikit-learn,mojoboss/scikit-learn,huobaowangxi/scikit-learn,mblondel/scikit-learn,mojoboss/scikit-learn,JPFrancoia/scikit-learn,Vimos/scikit-learn,joshloyal/scikit-learn,jakirkham/scikit-learn,TomDLT/scikit-learn,lenovor/scikit-learn,michigraber/scikit-learn,ashhher3/scikit-learn,BiaDarkia/scikit-learn,larsmans/scikit-learn,Obus/scikit-learn,russel1237/scikit-learn,NelisVerhoef/scikit-learn,elkingtonmcb/scikit-learn,pnedunuri/scikit-learn,cwu2011/scikit-learn,espg/scikit-learn,ZenDevelopmentSystems/scikit-learn,bikong2/scikit-learn,rishikksh20/scikit-learn,ZENGXH/scikit-learn,walterreade/scikit-learn,harshaneelhg/scikit-learn,schets/scikit-learn,appapantula/scikit-learn,raghavrv/scikit-learn,vinayak-mehta/scikit-learn,eg-zhang/scikit-learn,ankurankan/scikit-learn,lbishal/scikit-learn,MechCoder/scikit-learn,vigilv/scikit-learn,macks22/scikit-learn,mrshu/scikit-learn,theoryno3/scikit-learn,billy-inn/scikit-learn,nomadcube/scikit-learn,kagayakidan/scikit-learn,trungnt13/scikit-learn,lin-credible/scikit-learn,rohanp/scikit-learn,lesteve/scikit-learn,voxlol/scikit-learn,Fireblend/scikit-learn,cwu2011/scikit-learn,fzalkow/scikit-learn,fredhusser/scikit-learn,zorojean/scikit-learn,plissonf/scikit-learn,Srisai85/scikit-learn,saiwing-yeung/scikit-learn,jjx02230808/project0223,lin-credible/scikit-learn,sumspr/scikit-learn,mugizico/scikit-learn,joernhees/scikit-learn,manashmndl/scikit-learn,vshtanko/scikit-learn,khkaminska/scikit-learn,tmhm/scikit-learn,Garrett-R/scikit-learn,alexsavio/scikit-learn,zihua/scikit-learn
,sanketloke/scikit-learn,fbagirov/scikit-learn,nmayorov/scikit-learn,pnedunuri/scikit-learn,walterreade/scikit-learn,btabibian/scikit-learn,chrsrds/scikit-learn,rvraghav93/scikit-learn,shahankhatch/scikit-learn,jereze/scikit-learn,dsullivan7/scikit-learn,olologin/scikit-learn,victorbergelin/scikit-learn,AlexRobson/scikit-learn,xzh86/scikit-learn,plissonf/scikit-learn,massmutual/scikit-learn,xuewei4d/scikit-learn,ivannz/scikit-learn,robbymeals/scikit-learn,beepee14/scikit-learn,vermouthmjl/scikit-learn,nikitasingh981/scikit-learn,potash/scikit-learn,aabadie/scikit-learn,Titan-C/scikit-learn,gclenaghan/scikit-learn,h2educ/scikit-learn,zaxtax/scikit-learn,ilyes14/scikit-learn,mjudsp/Tsallis,jereze/scikit-learn,0x0all/scikit-learn,zuku1985/scikit-learn,vigilv/scikit-learn,tdhopper/scikit-learn,harshaneelhg/scikit-learn,sgenoud/scikit-learn,nvoron23/scikit-learn,mjudsp/Tsallis,ycaihua/scikit-learn,pompiduskus/scikit-learn,jseabold/scikit-learn,abhishekkrthakur/scikit-learn,Titan-C/scikit-learn,robin-lai/scikit-learn,r-mart/scikit-learn,ominux/scikit-learn,vigilv/scikit-learn,davidgbe/scikit-learn,ilyes14/scikit-learn,ldirer/scikit-learn,loli/semisupervisedforests,dhruv13J/scikit-learn,JPFrancoia/scikit-learn,dsullivan7/scikit-learn,nomadcube/scikit-learn,DSLituiev/scikit-learn,aflaxman/scikit-learn,fabianp/scikit-learn,glemaitre/scikit-learn,cl4rke/scikit-learn,poryfly/scikit-learn,ltiao/scikit-learn,mehdidc/scikit-learn,aminert/scikit-learn,glouppe/scikit-learn,zuku1985/scikit-learn,depet/scikit-learn,mattgiguere/scikit-learn,alvarofierroclavero/scikit-learn,zorojean/scikit-learn,ningchi/scikit-learn,walterreade/scikit-learn,samuel1208/scikit-learn,rexshihaoren/scikit-learn,q1ang/scikit-learn,madjelan/scikit-learn,macks22/scikit-learn,fyffyt/scikit-learn,jblackburne/scikit-learn,roxyboy/scikit-learn,loli/sklearn-ensembletrees,gclenaghan/scikit-learn,mhdella/scikit-learn,hrjn/scikit-learn,vinayak-mehta/scikit-learn,jjx02230808/project0223,ilo10/scikit-learn,lesteve/scikit-learn,DonBeo/scikit-learn,evgchz/scikit-learn,alexeyum/scikit-learn,Jimmy-Morzaria/scikit-learn,nvoron23/scikit-learn,depet/scikit-learn,jakirkham/scikit-learn,jkarnows/scikit-learn,pianomania/scikit-learn,cauchycui/scikit-learn,IssamLaradji/scikit-learn,treycausey/scikit-learn,zorroblue/scikit-learn,pnedunuri/scikit-learn,liangz0707/scikit-learn,qifeigit/scikit-learn,PatrickOReilly/scikit-learn,xiaoxiamii/scikit-learn,zaxtax/scikit-learn,MartinSavc/scikit-learn,zihua/scikit-learn,ChanderG/scikit-learn,MartinSavc/scikit-learn,jayflo/scikit-learn,ishanic/scikit-learn,phdowling/scikit-learn,vortex-ape/scikit-learn,herilalaina/scikit-learn,shangwuhencc/scikit-learn,ilo10/scikit-learn,qifeigit/scikit-learn,Sentient07/scikit-learn,xwolf12/scikit-learn,moutai/scikit-learn,shusenl/scikit-learn,0asa/scikit-learn,andrewnc/scikit-learn,dhruv13J/scikit-learn,kevin-intel/scikit-learn,jaidevd/scikit-learn,pompiduskus/scikit-learn,mrshu/scikit-learn,rsivapr/scikit-learn,moutai/scikit-learn,chrisburr/scikit-learn,JsNoNo/scikit-learn,Clyde-fare/scikit-learn,mattgiguere/scikit-learn,nrhine1/scikit-learn,DSLituiev/scikit-learn,ltiao/scikit-learn,ChanderG/scikit-learn,simon-pepin/scikit-learn,Akshay0724/scikit-learn,mfjb/scikit-learn,fabianp/scikit-learn,xiaoxiamii/scikit-learn,ogrisel/scikit-learn,simon-pepin/scikit-learn,altairpearl/scikit-learn,sgenoud/scikit-learn,Titan-C/scikit-learn,smartscheduling/scikit-learn-categorical-tree,vinayak-mehta/scikit-learn,pianomania/scikit-learn,trankmichael/scikit-learn,Obus/scikit-learn,ndingwall/scikit-le
arn,OshynSong/scikit-learn,RPGOne/scikit-learn,tomlof/scikit-learn,JsNoNo/scikit-learn,YinongLong/scikit-learn,tawsifkhan/scikit-learn,aflaxman/scikit-learn,idlead/scikit-learn,UNR-AERIAL/scikit-learn,andrewnc/scikit-learn,Lawrence-Liu/scikit-learn,mblondel/scikit-learn,madjelan/scikit-learn,BiaDarkia/scikit-learn,trankmichael/scikit-learn,nelson-liu/scikit-learn,wzbozon/scikit-learn,robin-lai/scikit-learn,Adai0808/scikit-learn,ZENGXH/scikit-learn,bhargav/scikit-learn,AnasGhrab/scikit-learn,Nyker510/scikit-learn,PatrickChrist/scikit-learn,bthirion/scikit-learn,kaichogami/scikit-learn,eg-zhang/scikit-learn,davidgbe/scikit-learn,liberatorqjw/scikit-learn,abimannans/scikit-learn,altairpearl/scikit-learn,krez13/scikit-learn,icdishb/scikit-learn,rishikksh20/scikit-learn,Fireblend/scikit-learn,zihua/scikit-learn,fyffyt/scikit-learn,xubenben/scikit-learn,AlexanderFabisch/scikit-learn,jorik041/scikit-learn,PrashntS/scikit-learn,UNR-AERIAL/scikit-learn,loli/sklearn-ensembletrees,abhishekgahlot/scikit-learn,RayMick/scikit-learn,ClimbsRocks/scikit-learn,samzhang111/scikit-learn,krez13/scikit-learn,wanggang3333/scikit-learn,depet/scikit-learn,r-mart/scikit-learn,arjoly/scikit-learn,giorgiop/scikit-learn,roxyboy/scikit-learn,madjelan/scikit-learn,pompiduskus/scikit-learn,mojoboss/scikit-learn,zaxtax/scikit-learn,mhue/scikit-learn,terkkila/scikit-learn,IshankGulati/scikit-learn,rohanp/scikit-learn,stylianos-kampakis/scikit-learn,andrewnc/scikit-learn,dingocuster/scikit-learn,theoryno3/scikit-learn,Nyker510/scikit-learn,yyjiang/scikit-learn,xavierwu/scikit-learn,hdmetor/scikit-learn,shenzebang/scikit-learn,ycaihua/scikit-learn,glouppe/scikit-learn,jmetzen/scikit-learn,Vimos/scikit-learn,jkarnows/scikit-learn,waterponey/scikit-learn,plissonf/scikit-learn,etkirsch/scikit-learn,alvarofierroclavero/scikit-learn,rrohan/scikit-learn,meduz/scikit-learn,mjgrav2001/scikit-learn,carrillo/scikit-learn,466152112/scikit-learn,lucidfrontier45/scikit-learn,jm-begon/scikit-learn,nmayorov/scikit-learn,xavierwu/scikit-learn,wazeerzulfikar/scikit-learn,mhdella/scikit-learn,NelisVerhoef/scikit-learn,walterreade/scikit-learn,dsquareindia/scikit-learn,ssaeger/scikit-learn,Djabbz/scikit-learn,mikebenfield/scikit-learn,lbishal/scikit-learn,nmayorov/scikit-learn,ngoix/OCRF,cybernet14/scikit-learn,xubenben/scikit-learn,bhargav/scikit-learn,Windy-Ground/scikit-learn,dingocuster/scikit-learn,beepee14/scikit-learn,ChanChiChoi/scikit-learn,mugizico/scikit-learn,Djabbz/scikit-learn,tosolveit/scikit-learn,waterponey/scikit-learn,AIML/scikit-learn,mikebenfield/scikit-learn,rexshihaoren/scikit-learn,jmetzen/scikit-learn,wazeerzulfikar/scikit-learn,lazywei/scikit-learn,sonnyhu/scikit-learn,mblondel/scikit-learn,shyamalschandra/scikit-learn,xuewei4d/scikit-learn,ivannz/scikit-learn,hsiaoyi0504/scikit-learn,aminert/scikit-learn,kmike/scikit-learn,AlexRobson/scikit-learn,vortex-ape/scikit-learn,JosmanPS/scikit-learn,alvarofierroclavero/scikit-learn,pypot/scikit-learn,giorgiop/scikit-learn,harshaneelhg/scikit-learn,RachitKansal/scikit-learn,ankurankan/scikit-learn,scikit-learn/scikit-learn,herilalaina/scikit-learn,ElDeveloper/scikit-learn,betatim/scikit-learn,AlexanderFabisch/scikit-learn,betatim/scikit-learn,CforED/Machine-Learning,0asa/scikit-learn,jayflo/scikit-learn,meduz/scikit-learn,kmike/scikit-learn,rsivapr/scikit-learn,belltailjp/scikit-learn,eg-zhang/scikit-learn,jorge2703/scikit-learn,thilbern/scikit-learn,Clyde-fare/scikit-learn,vibhorag/scikit-learn,mjudsp/Tsallis,fengzhyuan/scikit-learn,hlin117/scikit-learn,RayMick/scikit-learn,S
risai85/scikit-learn,fbagirov/scikit-learn,ogrisel/scikit-learn,MartinDelzant/scikit-learn,tomlof/scikit-learn,Sentient07/scikit-learn,UNR-AERIAL/scikit-learn,clemkoa/scikit-learn,sonnyhu/scikit-learn,bikong2/scikit-learn,murali-munna/scikit-learn,sgenoud/scikit-learn,heli522/scikit-learn,vibhorag/scikit-learn,Jimmy-Morzaria/scikit-learn,amueller/scikit-learn,r-mart/scikit-learn,IndraVikas/scikit-learn,vivekmishra1991/scikit-learn,clemkoa/scikit-learn,cainiaocome/scikit-learn,imaculate/scikit-learn,ZENGXH/scikit-learn,mlyundin/scikit-learn,hitszxp/scikit-learn,MohammedWasim/scikit-learn,andrewnc/scikit-learn,kylerbrown/scikit-learn,arabenjamin/scikit-learn,huobaowangxi/scikit-learn,ankurankan/scikit-learn,RomainBrault/scikit-learn,JPFrancoia/scikit-learn,zhenv5/scikit-learn,manhhomienbienthuy/scikit-learn,Aasmi/scikit-learn,fbagirov/scikit-learn,mxjl620/scikit-learn,pythonvietnam/scikit-learn,0x0all/scikit-learn,deepesch/scikit-learn,mhue/scikit-learn,ilo10/scikit-learn,MechCoder/scikit-learn,kmike/scikit-learn,siutanwong/scikit-learn,aabadie/scikit-learn,lbishal/scikit-learn,pkruskal/scikit-learn,carrillo/scikit-learn,AIML/scikit-learn,rahuldhote/scikit-learn,tdhopper/scikit-learn,rahuldhote/scikit-learn,khkaminska/scikit-learn,hsiaoyi0504/scikit-learn,JosmanPS/scikit-learn,loli/semisupervisedforests,f3r/scikit-learn,mlyundin/scikit-learn,3manuek/scikit-learn,fbagirov/scikit-learn,mxjl620/scikit-learn,OshynSong/scikit-learn,Akshay0724/scikit-learn,amueller/scikit-learn,theoryno3/scikit-learn,jmetzen/scikit-learn,sinhrks/scikit-learn,waterponey/scikit-learn,PatrickOReilly/scikit-learn,vivekmishra1991/scikit-learn,alvarofierroclavero/scikit-learn,toastedcornflakes/scikit-learn,B3AU/waveTree,sumspr/scikit-learn,victorbergelin/scikit-learn,appapantula/scikit-learn,henrykironde/scikit-learn,rvraghav93/scikit-learn,kaichogami/scikit-learn,joshloyal/scikit-learn
|
Add an example with probability distribution estimates using GMM.
This is a work in progress. Also, the .eval() function from GMM
might very likely change its return type in the future.
|
"""
=================================
Gaussian Mixture Model Ellipsoids
=================================
Plot the confidence ellipsoids of a mixture of two gaussians.
"""
import numpy as np
from scikits.learn import gmm
import itertools
import pylab as pl
import matplotlib as mpl
import matplotlib.pyplot as plt
n, m = 300, 2
# generate random sample, two components
np.random.seed(0)
C = np.array([[0., -0.7], [3.5, .7]])
X_train = np.r_[np.dot(np.random.randn(n, 2), C),
np.random.randn(n, 2) + np.array([20, 20])]
clf = gmm.GMM(2, cvtype='full')
clf.fit(X_train)
x = np.linspace(-20.0, 30.0)
y = np.linspace(-20.0, 40.0)
X, Y = np.meshgrid(x, y)
XX = np.c_[X.ravel(), Y.ravel()]
Z = np.log(-clf.eval(XX)[0])
Z = Z.reshape(X.shape)
CS = pl.contour(X, Y, Z)
CB = plt.colorbar(CS, shrink=0.8, extend='both')
pl.scatter(X_train[:, 0], X_train[:, 1], .8)
pl.axis('tight')
pl.show()
|
<commit_before><commit_msg>Add an example with probability distribution estimates using GMM.
This is a work in progress. Also, the .eval() function from GMM
might very likely change its return type in the future.<commit_after>
|
"""
=================================
Gaussian Mixture Model Ellipsoids
=================================
Plot the confidence ellipsoids of a mixture of two gaussians.
"""
import numpy as np
from scikits.learn import gmm
import itertools
import pylab as pl
import matplotlib as mpl
import matplotlib.pyplot as plt
n, m = 300, 2
# generate random sample, two components
np.random.seed(0)
C = np.array([[0., -0.7], [3.5, .7]])
X_train = np.r_[np.dot(np.random.randn(n, 2), C),
np.random.randn(n, 2) + np.array([20, 20])]
clf = gmm.GMM(2, cvtype='full')
clf.fit(X_train)
x = np.linspace(-20.0, 30.0)
y = np.linspace(-20.0, 40.0)
X, Y = np.meshgrid(x, y)
XX = np.c_[X.ravel(), Y.ravel()]
Z = np.log(-clf.eval(XX)[0])
Z = Z.reshape(X.shape)
CS = pl.contour(X, Y, Z)
CB = plt.colorbar(CS, shrink=0.8, extend='both')
pl.scatter(X_train[:, 0], X_train[:, 1], .8)
pl.axis('tight')
pl.show()
|
Add an example with probability distribution estimates using GMM.
This is a work in progress. Also, the .eval() function from GMM
might very likely change its return type in the future."""
=================================
Gaussian Mixture Model Ellipsoids
=================================
Plot the confidence ellipsoids of a mixture of two gaussians.
"""
import numpy as np
from scikits.learn import gmm
import itertools
import pylab as pl
import matplotlib as mpl
import matplotlib.pyplot as plt
n, m = 300, 2
# generate random sample, two components
np.random.seed(0)
C = np.array([[0., -0.7], [3.5, .7]])
X_train = np.r_[np.dot(np.random.randn(n, 2), C),
np.random.randn(n, 2) + np.array([20, 20])]
clf = gmm.GMM(2, cvtype='full')
clf.fit(X_train)
x = np.linspace(-20.0, 30.0)
y = np.linspace(-20.0, 40.0)
X, Y = np.meshgrid(x, y)
XX = np.c_[X.ravel(), Y.ravel()]
Z = np.log(-clf.eval(XX)[0])
Z = Z.reshape(X.shape)
CS = pl.contour(X, Y, Z)
CB = plt.colorbar(CS, shrink=0.8, extend='both')
pl.scatter(X_train[:, 0], X_train[:, 1], .8)
pl.axis('tight')
pl.show()
|
<commit_before><commit_msg>Add an example with probability distribution estimates using GMM.
This is a work in progress. Also, the .eval() function from GMM
might very likely change its return type in the future.<commit_after>"""
=================================
Gaussian Mixture Model Ellipsoids
=================================
Plot the confidence ellipsoids of a mixture of two gaussians.
"""
import numpy as np
from scikits.learn import gmm
import itertools
import pylab as pl
import matplotlib as mpl
import matplotlib.pyplot as plt
n, m = 300, 2
# generate random sample, two components
np.random.seed(0)
C = np.array([[0., -0.7], [3.5, .7]])
X_train = np.r_[np.dot(np.random.randn(n, 2), C),
np.random.randn(n, 2) + np.array([20, 20])]
clf = gmm.GMM(2, cvtype='full')
clf.fit(X_train)
x = np.linspace(-20.0, 30.0)
y = np.linspace(-20.0, 40.0)
X, Y = np.meshgrid(x, y)
XX = np.c_[X.ravel(), Y.ravel()]
Z = np.log(-clf.eval(XX)[0])
Z = Z.reshape(X.shape)
CS = pl.contour(X, Y, Z)
CB = plt.colorbar(CS, shrink=0.8, extend='both')
pl.scatter(X_train[:, 0], X_train[:, 1], .8)
pl.axis('tight')
pl.show()
|
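Since the commit message above warns that GMM.eval() was likely to change, here is a minimal sketch of the same density-contour example against the modern scikit-learn API, where eval() has been replaced by GaussianMixture.score_samples(); the data generation and grid are taken from the example, while the contour levels are an assumption.
import numpy as np
import matplotlib.pyplot as plt
from sklearn.mixture import GaussianMixture  # modern replacement for scikits.learn gmm

np.random.seed(0)
C = np.array([[0., -0.7], [3.5, .7]])
X_train = np.r_[np.dot(np.random.randn(300, 2), C),
                np.random.randn(300, 2) + np.array([20, 20])]

clf = GaussianMixture(n_components=2, covariance_type='full').fit(X_train)

x = np.linspace(-20.0, 30.0)
y = np.linspace(-20.0, 40.0)
X, Y = np.meshgrid(x, y)
XX = np.c_[X.ravel(), Y.ravel()]
# score_samples() returns per-sample log-likelihoods; negate for a cost surface.
Z = -clf.score_samples(XX).reshape(X.shape)

CS = plt.contour(X, Y, Z, levels=np.logspace(0, 2, 10))  # levels are an assumption
plt.colorbar(CS, shrink=0.8, extend='both')
plt.scatter(X_train[:, 0], X_train[:, 1], .8)
plt.axis('tight')
plt.show()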
|
a3a8ba38c05741418c1a29e53a2079482b454453
|
check_numbers.py
|
check_numbers.py
|
# -*- coding: utf-8 -*-
import feedparser
url = "https://edit.yournextmp.com/results/all.atom"
feed = feedparser.parse(url)
entries = {x['post_id']: x for x in feed.entries}.values()
results = {}
for x in entries:
if x['winner_party_name'] not in results:
results[x['winner_party_name']] = 0
results[x['winner_party_name']] += 1
for x, y in results.items():
print x, y
print len(entries)
|
Add handy number checker script
|
Add handy number checker script
|
Python
|
mit
|
andylolz/ge2015-results-bot
|
Add handy number checker script
|
# -*- coding: utf-8 -*-
import feedparser
url = "https://edit.yournextmp.com/results/all.atom"
feed = feedparser.parse(url)
entries = {x['post_id']: x for x in feed.entries}.values()
results = {}
for x in entries:
if x['winner_party_name'] not in results:
results[x['winner_party_name']] = 0
results[x['winner_party_name']] += 1
for x, y in results.items():
print x, y
print len(entries)
|
<commit_before><commit_msg>Add handy number checker script<commit_after>
|
# -*- coding: utf-8 -*-
import feedparser
url = "https://edit.yournextmp.com/results/all.atom"
feed = feedparser.parse(url)
entries = {x['post_id']: x for x in feed.entries}.values()
results = {}
for x in entries:
if x['winner_party_name'] not in results:
results[x['winner_party_name']] = 0
results[x['winner_party_name']] += 1
for x, y in results.items():
print x, y
print len(entries)
|
Add handy number checker script# -*- coding: utf-8 -*-
import feedparser
url = "https://edit.yournextmp.com/results/all.atom"
feed = feedparser.parse(url)
entries = {x['post_id']: x for x in feed.entries}.values()
results = {}
for x in entries:
if x['winner_party_name'] not in results:
results[x['winner_party_name']] = 0
results[x['winner_party_name']] += 1
for x, y in results.items():
print x, y
print len(entries)
|
<commit_before><commit_msg>Add handy number checker script<commit_after># -*- coding: utf-8 -*-
import feedparser
url = "https://edit.yournextmp.com/results/all.atom"
feed = feedparser.parse(url)
entries = {x['post_id']: x for x in feed.entries}.values()
results = {}
for x in entries:
if x['winner_party_name'] not in results:
results[x['winner_party_name']] = 0
results[x['winner_party_name']] += 1
for x, y in results.items():
print x, y
print len(entries)
|
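The tally above is Python 2; a minimal Python 3 sketch of the same count using collections.Counter (same feed URL, which may no longer be live):
from collections import Counter

import feedparser

feed = feedparser.parse("https://edit.yournextmp.com/results/all.atom")
# Deduplicate entries by post_id, as the script above does.
entries = {e['post_id']: e for e in feed.entries}.values()
results = Counter(e['winner_party_name'] for e in entries)
for party, wins in results.most_common():
    print(party, wins)
print(len(entries))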
|
a8b92ad8318f86877986de8bfc9911e8363d2d2e
|
lib/FriendlyName.py
|
lib/FriendlyName.py
|
try:
import logging
except ImportError as err:
print("Error import module: " + str(err))
exit(128)
def FriendlyName(input_format):
# This function returns with friendly name of selected input format
if input_format == 'las':
return 'LAS PointCloud'
elif input_format == 'laz':
return 'LAZ PointCloud'
elif input_format == 'txt':
return 'PointText'
elif input_format == 'lastxt':
return 'PointText'
elif input_format == 'iml':
return 'TerraPhoto Image List'
elif input_format == 'csv':
return 'Riegl Camera CSV'
|
Use function for input_format friendly names
|
Use function for input_format friendly names
Refactoring TransformerWorkflow to use newly created FriendlyName that returns human-readable names of specific input formats.
|
Python
|
mpl-2.0
|
KAMI911/lactransformer
|
Use function for input_format friendly names
Refactoring TransformerWorkflow to use newly created FriendlyName that returns human-readable names of specific input formats.
|
try:
import logging
except ImportError as err:
print("Error import module: " + str(err))
exit(128)
def FriendlyName(input_format):
# This function returns with friendly name of selected input format
if input_format == 'las':
return 'LAS PointCloud'
elif input_format == 'laz':
return 'LAZ PointCloud'
elif input_format == 'txt':
return 'PointText'
elif input_format == 'lastxt':
return 'PointText'
elif input_format == 'iml':
return 'TerraPhoto Image List'
elif input_format == 'csv':
return 'Riegl Camera CSV'
|
<commit_before><commit_msg>Use function for input_format friendly names
Refactoring TransformerWorkflow to use newly created FriendlyName that returns human-readable names of specific input formats.<commit_after>
|
try:
import logging
except ImportError as err:
print("Error import module: " + str(err))
exit(128)
def FriendlyName(input_format):
# This function returns with friendly name of selected input format
if input_format == 'las':
return 'LAS PointCloud'
elif input_format == 'laz':
return 'LAZ PointCloud'
elif input_format == 'txt':
return 'PointText'
elif input_format == 'lastxt':
return 'PointText'
elif input_format == 'iml':
return 'TerraPhoto Image List'
elif input_format == 'csv':
return 'Riegl Camera CSV'
|
Use function for input_format friendly names
Refactoring TransformerWorkflow to use newly created FriendlyName that returns human-readable names of specific input formats.try:
import logging
except ImportError as err:
print("Error import module: " + str(err))
exit(128)
def FriendlyName(input_format):
# This function returns with friendly name of selected input format
if input_format == 'las':
return 'LAS PointCloud'
elif input_format == 'laz':
return 'LAZ PointCloud'
elif input_format == 'txt':
return 'PointText'
elif input_format == 'lastxt':
return 'PointText'
elif input_format == 'iml':
return 'TerraPhoto Image List'
elif input_format == 'csv':
return 'Riegl Camera CSV'
|
<commit_before><commit_msg>Use function for input_format friendly names
Refactoring TransformerWorkflow to use newly created FriendlyName that returns human-readable names of specific input formats.<commit_after>try:
import logging
except ImportError as err:
print("Error import module: " + str(err))
exit(128)
def FriendlyName(input_format):
# This function returns with friendly name of selected input format
if input_format == 'las':
return 'LAS PointCloud'
elif input_format == 'laz':
return 'LAZ PointCloud'
elif input_format == 'txt':
return 'PointText'
elif input_format == 'lastxt':
return 'PointText'
elif input_format == 'iml':
return 'TerraPhoto Image List'
elif input_format == 'csv':
return 'Riegl Camera CSV'
|
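The if/elif chain above maps cleanly onto a dictionary lookup; a sketch, where the fallback for unknown formats is an assumption (the original implicitly returns None):
_FRIENDLY_NAMES = {
    'las': 'LAS PointCloud',
    'laz': 'LAZ PointCloud',
    'txt': 'PointText',
    'lastxt': 'PointText',
    'iml': 'TerraPhoto Image List',
    'csv': 'Riegl Camera CSV',
}

def FriendlyName(input_format):
    # Unknown formats fall back to the raw format string (an assumption).
    return _FRIENDLY_NAMES.get(input_format, input_format)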
|
b1bb4154a69a6ae4bb31cbf27f0871069291e1d6
|
nist_beacon_constants.py
|
nist_beacon_constants.py
|
NIST_KEY_FREQUENCY = 'frequency'
NIST_KEY_OUTPUT_VALUE = 'outputValue'
NIST_KEY_PREVIOUS_OUTPUT_VALUE = 'previousOutputValue'
NIST_KEY_SEED_VALUE = 'seedValue'
NIST_KEY_SIGNATURE_VALUE = 'signatureValue'
NIST_KEY_STATUS_CODE = 'statusCode'
NIST_KEY_TIMESTAMP = 'timeStamp'
NIST_KEY_VERSION = 'version'
|
Prepare constants into separate location
|
Prepare constants into separate location
|
Python
|
apache-2.0
|
urda/nistbeacon
|
Prepare constants into separate location
|
NIST_KEY_FREQUENCY = 'frequency'
NIST_KEY_OUTPUT_VALUE = 'outputValue'
NIST_KEY_PREVIOUS_OUTPUT_VALUE = 'previousOutputValue'
NIST_KEY_SEED_VALUE = 'seedValue'
NIST_KEY_SIGNATURE_VALUE = 'signatureValue'
NIST_KEY_STATUS_CODE = 'statusCode'
NIST_KEY_TIMESTAMP = 'timeStamp'
NIST_KEY_VERSION = 'version'
|
<commit_before><commit_msg>Prepare constants into separate location<commit_after>
|
NIST_KEY_FREQUENCY = 'frequency'
NIST_KEY_OUTPUT_VALUE = 'outputValue'
NIST_KEY_PREVIOUS_OUTPUT_VALUE = 'previousOutputValue'
NIST_KEY_SEED_VALUE = 'seedValue'
NIST_KEY_SIGNATURE_VALUE = 'signatureValue'
NIST_KEY_STATUS_CODE = 'statusCode'
NIST_KEY_TIMESTAMP = 'timeStamp'
NIST_KEY_VERSION = 'version'
|
Prepare constants into separate locationNIST_KEY_FREQUENCY = 'frequency'
NIST_KEY_OUTPUT_VALUE = 'outputValue'
NIST_KEY_PREVIOUS_OUTPUT_VALUE = 'previousOutputValue'
NIST_KEY_SEED_VALUE = 'seedValue'
NIST_KEY_SIGNATURE_VALUE = 'signatureValue'
NIST_KEY_STATUS_CODE = 'statusCode'
NIST_KEY_TIMESTAMP = 'timeStamp'
NIST_KEY_VERSION = 'version'
|
<commit_before><commit_msg>Prepare constants into separate location<commit_after>NIST_KEY_FREQUENCY = 'frequency'
NIST_KEY_OUTPUT_VALUE = 'outputValue'
NIST_KEY_PREVIOUS_OUTPUT_VALUE = 'previousOutputValue'
NIST_KEY_SEED_VALUE = 'seedValue'
NIST_KEY_SIGNATURE_VALUE = 'signatureValue'
NIST_KEY_STATUS_CODE = 'statusCode'
NIST_KEY_TIMESTAMP = 'timeStamp'
NIST_KEY_VERSION = 'version'
|
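A hypothetical sketch of how these key constants might be used when reading fields out of a parsed beacon record; the record dict and its values below are invented for illustration only.
# `record` stands in for a dict parsed from a beacon response (invented values).
record = {
    NIST_KEY_FREQUENCY: 60,
    NIST_KEY_OUTPUT_VALUE: 'abc123',
    NIST_KEY_TIMESTAMP: 1447873020,
    NIST_KEY_VERSION: 'Version 1.0',
}

frequency = record[NIST_KEY_FREQUENCY]
timestamp = record[NIST_KEY_TIMESTAMP]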
|
dc2def7ab47c93ed9c92c5535609ebbb375dff56
|
src/draw_json_graph.py
|
src/draw_json_graph.py
|
import json
import networkx as nx
from networkx.readwrite import json_graph
with open("/query_results.json") as f:
json_data = f.read()
x = json.loads(json_data)
doc_graphs = list()
for corpus_name, data in x.iteritems():
for query, results in data["queries"].iteritems():
new_graph = nx.Graph()
new_graph.add_node(query)
for result in results.itervalues():
new_graph.add_node(result["file"])
if "similarity" in results:
sim = results["similarity"]
else:
print "no sim"
sim = 1
print sim
new_graph.add_edge(query, result["file"], weight=sim)
doc_graphs.append(new_graph)
for graph in doc_graphs:
for n in graph:
graph.node[n]['name'] = n
similarity_graph_json = json_graph.node_link_data(graph)
# Todo: Save json file
|
Add skeleton script to draw similarities between objects
|
Add skeleton script to draw similarities between objects
|
Python
|
mit
|
PinPinIre/Final-Year-Project,PinPinIre/Final-Year-Project,PinPinIre/Final-Year-Project
|
Add skeleton script to draw similarities between objects
|
import json
import networkx as nx
from networkx.readwrite import json_graph
with open("/query_results.json") as f:
json_data = f.read()
x = json.loads(json_data)
doc_graphs = list()
for corpus_name, data in x.iteritems():
for query, results in data["queries"].iteritems():
new_graph = nx.Graph()
new_graph.add_node(query)
for result in results.itervalues():
new_graph.add_node(result["file"])
if "similarity" in results:
sim = results["similarity"]
else:
print "no sim"
sim = 1
print sim
new_graph.add_edge(query, result["file"], weight=sim)
doc_graphs.append(new_graph)
for graph in doc_graphs:
for n in graph:
graph.node[n]['name'] = n
similarity_graph_json = json_graph.node_link_data(graph)
# Todo: Save json file
|
<commit_before><commit_msg>Add skeleton script to draw similarities between objects<commit_after>
|
import json
import networkx as nx
from networkx.readwrite import json_graph
with open("/query_results.json") as f:
json_data = f.read()
x = json.loads(json_data)
doc_graphs = list()
for corpus_name, data in x.iteritems():
for query, results in data["queries"].iteritems():
new_graph = nx.Graph()
new_graph.add_node(query)
for result in results.itervalues():
new_graph.add_node(result["file"])
if "similarity" in results:
sim = results["similarity"]
else:
print "no sim"
sim = 1
print sim
new_graph.add_edge(query, result["file"], weight=sim)
doc_graphs.append(new_graph)
for graph in doc_graphs:
for n in graph:
graph.node[n]['name'] = n
similarity_graph_json = json_graph.node_link_data(graph)
# Todo: Save json file
|
Add skeleton script to draw similarities between objectsimport json
import networkx as nx
from networkx.readwrite import json_graph
with open("/query_results.json") as f:
json_data = f.read()
x = json.loads(json_data)
doc_graphs = list()
for corpus_name, data in x.iteritems():
for query, results in data["queries"].iteritems():
new_graph = nx.Graph()
new_graph.add_node(query)
for result in results.itervalues():
new_graph.add_node(result["file"])
if "similarity" in results:
sim = results["similarity"]
else:
print "no sim"
sim = 1
print sim
new_graph.add_edge(query, result["file"], weight=sim)
doc_graphs.append(new_graph)
for graph in doc_graphs:
for n in graph:
graph.node[n]['name'] = n
similarity_graph_json = json_graph.node_link_data(graph)
# Todo: Save json file
|
<commit_before><commit_msg>Add skeleton script to draw similarities between objects<commit_after>import json
import networkx as nx
from networkx.readwrite import json_graph
with open("/query_results.json") as f:
json_data = f.read()
x = json.loads(json_data)
doc_graphs = list()
for corpus_name, data in x.iteritems():
for query, results in data["queries"].iteritems():
new_graph = nx.Graph()
new_graph.add_node(query)
for result in results.itervalues():
new_graph.add_node(result["file"])
if "similarity" in results:
sim = results["similarity"]
else:
print "no sim"
sim = 1
print sim
new_graph.add_edge(query, result["file"], weight=sim)
doc_graphs.append(new_graph)
for graph in doc_graphs:
for n in graph:
graph.node[n]['name'] = n
similarity_graph_json = json_graph.node_link_data(graph)
# Todo: Save json file
|
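A sketch of the save step flagged by the trailing Todo comment, writing each node-link graph out with json.dump; the output filename pattern is an assumption.
import json
from networkx.readwrite import json_graph

for i, graph in enumerate(doc_graphs):
    data = json_graph.node_link_data(graph)
    # Filename pattern is hypothetical; one file per query graph.
    with open("similarity_graph_%d.json" % i, "w") as out:
        json.dump(data, out, indent=2)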
|
4462b32aaa88628bc7c9f6746829c627bf79ddd2
|
django_project/realtime/tasks/test/test_realtime_tasks.py
|
django_project/realtime/tasks/test/test_realtime_tasks.py
|
# coding=utf-8
"""Docstring here."""
import unittest
from django import test
from realtime.tasks.realtime.flood import process_flood
from realtime.tasks.flood import create_flood_report
from realtime.tasks.realtime.celery_app import app as realtime_app
@unittest.skipUnless(
realtime_app.control.ping(), 'Realtime Worker needs to be run')
class TestRealtimeCeleryTask(test.SimpleTestCase):
"""Unit test for Realtime Celery tasks."""
def test_process_flood(self):
"""Test process flood."""
async_result = process_flood.delay()
result = async_result.get()
self.assertTrue(result['success'])
def test_create_flood_report(self):
"""Test Create Flood report task"""
create_flood_report()
|
Add unit test for process flood.
|
Add unit test for process flood.
|
Python
|
bsd-2-clause
|
AIFDR/inasafe-django,AIFDR/inasafe-django,AIFDR/inasafe-django,AIFDR/inasafe-django
|
Add unit test for process flood.
|
# coding=utf-8
"""Docstring here."""
import unittest
from django import test
from realtime.tasks.realtime.flood import process_flood
from realtime.tasks.flood import create_flood_report
from realtime.tasks.realtime.celery_app import app as realtime_app
@unittest.skipUnless(
realtime_app.control.ping(), 'Realtime Worker needs to be run')
class TestRealtimeCeleryTask(test.SimpleTestCase):
"""Unit test for Realtime Celery tasks."""
def test_process_flood(self):
"""Test process flood."""
async_result = process_flood.delay()
result = async_result.get()
self.assertTrue(result['success'])
def test_create_flood_report(self):
"""Test Create Flood report task"""
create_flood_report()
|
<commit_before><commit_msg>Add unit test for process flood.<commit_after>
|
# coding=utf-8
"""Docstring here."""
import unittest
from django import test
from realtime.tasks.realtime.flood import process_flood
from realtime.tasks.flood import create_flood_report
from realtime.tasks.realtime.celery_app import app as realtime_app
@unittest.skipUnless(
realtime_app.control.ping(), 'Realtime Worker needs to be run')
class TestRealtimeCeleryTask(test.SimpleTestCase):
"""Unit test for Realtime Celery tasks."""
def test_process_flood(self):
"""Test process flood."""
async_result = process_flood.delay()
result = async_result.get()
self.assertTrue(result['success'])
def test_create_flood_report(self):
"""Test Create Flood report task"""
create_flood_report()
|
Add unit test for process flood.# coding=utf-8
"""Docstring here."""
import unittest
from django import test
from realtime.tasks.realtime.flood import process_flood
from realtime.tasks.flood import create_flood_report
from realtime.tasks.realtime.celery_app import app as realtime_app
@unittest.skipUnless(
realtime_app.control.ping(), 'Realtime Worker needs to be run')
class TestRealtimeCeleryTask(test.SimpleTestCase):
"""Unit test for Realtime Celery tasks."""
def test_process_flood(self):
"""Test process flood."""
async_result = process_flood.delay()
result = async_result.get()
self.assertTrue(result['success'])
def test_create_flood_report(self):
"""Test Create Flood report task"""
create_flood_report()
|
<commit_before><commit_msg>Add unit test for process flood.<commit_after># coding=utf-8
"""Docstring here."""
import unittest
from django import test
from realtime.tasks.realtime.flood import process_flood
from realtime.tasks.flood import create_flood_report
from realtime.tasks.realtime.celery_app import app as realtime_app
@unittest.skipUnless(
realtime_app.control.ping(), 'Realtime Worker needs to be run')
class TestRealtimeCeleryTask(test.SimpleTestCase):
"""Unit test for Realtime Celery tasks."""
def test_process_flood(self):
"""Test process flood."""
async_result = process_flood.delay()
result = async_result.get()
self.assertTrue(result['success'])
def test_create_flood_report(self):
"""Test Create Flood report task"""
create_flood_report()
|
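One hedged refinement of the flood test above: passing a timeout to AsyncResult.get() so the test fails fast instead of hanging if the realtime worker stalls; the ten-minute limit is an arbitrary assumption. A drop-in variant of the method:
    def test_process_flood(self):
        """Test process flood, failing fast if the worker stalls."""
        async_result = process_flood.delay()
        # Celery raises TimeoutError if no result arrives within the limit.
        result = async_result.get(timeout=600)
        self.assertTrue(result['success'])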
|
74ceceb6ccdb3b205a72aa6ca75b833c66eb659c
|
HearthStone2/copy_data.py
|
HearthStone2/copy_data.py
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
"""Copy data from the given zip file to the project."""
import argparse
import fnmatch
import os
import time
import zipfile
__author__ = 'fyabc'
DataDir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'MyHearthStone')
DataFilePattern = '*/resources/*'
def main(args=None):
parser = argparse.ArgumentParser(description='Copy data from data zip file into this project.')
parser.add_argument('file', help='path to data zip file')
args = parser.parse_args(args)
time_start = time.time()
n_files = 0
with zipfile.ZipFile(args.file) as zip_file:
rc_names = fnmatch.filter((name for name in zip_file.namelist() if not name.endswith('/')), DataFilePattern)
for name in rc_names:
print('Copying file {} to {} ... '.format(name, os.path.join(DataDir, name)), end='')
zip_file.extract(name, path=DataDir)
n_files += 1
print('done')
print('Copy {} files in {:.6f}s.'.format(n_files, time.time() - time_start))
if __name__ == '__main__':
main()
|
Add a script to copy data files conveniently.
|
Add a script to copy data files conveniently.
|
Python
|
mit
|
fyabc/MiniGames
|
Add a script to copy data files conveniently.
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
"""Copy data from the given zip file to the project."""
import argparse
import fnmatch
import os
import time
import zipfile
__author__ = 'fyabc'
DataDir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'MyHearthStone')
DataFilePattern = '*/resources/*'
def main(args=None):
parser = argparse.ArgumentParser(description='Copy data from data zip file into this project.')
parser.add_argument('file', help='path to data zip file')
args = parser.parse_args(args)
time_start = time.time()
n_files = 0
with zipfile.ZipFile(args.file) as zip_file:
rc_names = fnmatch.filter((name for name in zip_file.namelist() if not name.endswith('/')), DataFilePattern)
for name in rc_names:
print('Copying file {} to {} ... '.format(name, os.path.join(DataDir, name)), end='')
zip_file.extract(name, path=DataDir)
n_files += 1
print('done')
print('Copy {} files in {:.6f}s.'.format(n_files, time.time() - time_start))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a script to copy data files conveniently.<commit_after>
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
"""Copy data from the given zip file to the project."""
import argparse
import fnmatch
import os
import time
import zipfile
__author__ = 'fyabc'
DataDir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'MyHearthStone')
DataFilePattern = '*/resources/*'
def main(args=None):
parser = argparse.ArgumentParser(description='Copy data from data zip file into this project.')
parser.add_argument('file', help='path to data zip file')
args = parser.parse_args(args)
time_start = time.time()
n_files = 0
with zipfile.ZipFile(args.file) as zip_file:
rc_names = fnmatch.filter((name for name in zip_file.namelist() if not name.endswith('/')), DataFilePattern)
for name in rc_names:
print('Copying file {} to {} ... '.format(name, os.path.join(DataDir, name)), end='')
zip_file.extract(name, path=DataDir)
n_files += 1
print('done')
print('Copy {} files in {:.6f}s.'.format(n_files, time.time() - time_start))
if __name__ == '__main__':
main()
|
Add a script to copy data files conveniently.#! /usr/bin/python
# -*- coding: utf-8 -*-
"""Copy data from the given zip file to the project."""
import argparse
import fnmatch
import os
import time
import zipfile
__author__ = 'fyabc'
DataDir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'MyHearthStone')
DataFilePattern = '*/resources/*'
def main(args=None):
parser = argparse.ArgumentParser(description='Copy data from data zip file into this project.')
parser.add_argument('file', help='path to data zip file')
args = parser.parse_args(args)
time_start = time.time()
n_files = 0
with zipfile.ZipFile(args.file) as zip_file:
rc_names = fnmatch.filter((name for name in zip_file.namelist() if not name.endswith('/')), DataFilePattern)
for name in rc_names:
print('Copying file {} to {} ... '.format(name, os.path.join(DataDir, name)), end='')
zip_file.extract(name, path=DataDir)
n_files += 1
print('done')
print('Copy {} files in {:.6f}s.'.format(n_files, time.time() - time_start))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a script to copy data files conveniently.<commit_after>#! /usr/bin/python
# -*- coding: utf-8 -*-
"""Copy data from the given zip file to the project."""
import argparse
import fnmatch
import os
import time
import zipfile
__author__ = 'fyabc'
DataDir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'MyHearthStone')
DataFilePattern = '*/resources/*'
def main(args=None):
parser = argparse.ArgumentParser(description='Copy data from data zip file into this project.')
parser.add_argument('file', help='path to data zip file')
args = parser.parse_args(args)
time_start = time.time()
n_files = 0
with zipfile.ZipFile(args.file) as zip_file:
rc_names = fnmatch.filter((name for name in zip_file.namelist() if not name.endswith('/')), DataFilePattern)
for name in rc_names:
print('Copying file {} to {} ... '.format(name, os.path.join(DataDir, name)), end='')
zip_file.extract(name, path=DataDir)
n_files += 1
print('done')
print('Copy {} files in {:.6f}s.'.format(n_files, time.time() - time_start))
if __name__ == '__main__':
main()
|
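Example usage of the script above, from a shell and programmatically via main()'s argv parameter; the zip path is hypothetical.
#   python copy_data.py /path/to/data.zip
# or, from Python (main() accepts an explicit argv list):
import copy_data
copy_data.main(['/path/to/data.zip'])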
|
7775d27af722031fc696693415d9f38d7f5409e2
|
drivers/test_logger.py
|
drivers/test_logger.py
|
from __future__ import print_function
import time
from might_driver import MightyWattDriver
from buck_driver import FCCBuckDriver
discharger = FCCMPPTDriver()
discharger.open_serial('SNR=8543035353135160A201')
print(discharger.dev_id)
load = MightyWattDriver()
load.open_serial('SNR=854303535313513041E2')
print(load.dev_id)
# set load to something
load.set_resistance(20) # 20 ohms, at 10 volts would be 5 watts
# set buck to give 5 watts of power
discharger.set_power(5)
# every 250 ms, log the data
log_file = open("log.txt", "a+")
print("\nStarting:\n", file=log_file)
while True:
try:
time.sleep(0.250)
load_stat = load.read_status_string()
discharger_stat = discharger.read_status_string()
print(load_stat, file=log_file)
print(discharger_stat, file=log_file)
print(load_stat)
print(discharger_stat)
except KeyboardInterrupt:
print("\nKeyboard exit", file=log_file)
break
load.close()
discharger.close()
log_file.close()
|
Add a test Python script
|
Add a test Python script
|
Python
|
bsd-2-clause
|
fnoorian/Free-buck-boost,fnoorian/Free-buck-boost,fnoorian/Free-buck-boost
|
Add a test Python script
|
from __future__ import print_function
import time
from might_driver import MightyWattDriver
from buck_driver import FCCBuckDriver
discharger = FCCMPPTDriver()
discharger.open_serial('SNR=8543035353135160A201')
print(discharger.dev_id)
load = MightyWattDriver()
load.open_serial('SNR=854303535313513041E2')
print(load.dev_id)
# set load to something
load.set_resistance(20) # 20 ohms, at 10 volts would be 5 watts
# set buck to give 5 watts of power
discharger.set_power(5)
# every 250 ms, log the data
log_file = open("log.txt", "a+")
print("\nStarting:\n", file=log_file)
while True:
try:
time.sleep(0.250)
load_stat = load.read_status_string()
discharger_stat = discharger.read_status_string()
print(load_stat, file=log_file)
print(discharger_stat, file=log_file)
print(load_stat)
print(discharger_stat)
except KeyboardInterrupt:
print("\nKeyboard exit", file=log_file)
break
load.close()
discharger.close()
log_file.close()
|
<commit_before><commit_msg>Add a test Python script<commit_after>
|
from __future__ import print_function
import time
from might_driver import MightyWattDriver
from buck_driver import FCCBuckDriver
discharger = FCCMPPTDriver()
discharger.open_serial('SNR=8543035353135160A201')
print(discharger.dev_id)
load = MightyWattDriver()
load.open_serial('SNR=854303535313513041E2')
print(load.dev_id)
# set load to something
load.set_resistance(20) # 20 ohms, at 10 volts would be 5 watts
# set buck to give 5 watts of power
discharger.set_power(5)
# every 250 ms, log the data
log_file = open("log.txt", "a+")
print("\nStarting:\n", file=log_file)
while True:
try:
time.sleep(0.250)
load_stat = load.read_status_string()
discharger_stat = discharger.read_status_string()
print(load_stat, file=log_file)
print(discharger_stat, file=log_file)
print(load_stat)
print(discharger_stat)
except KeyboardInterrupt:
print("\nKeyboard exit", file=log_file)
break
load.close()
discharger.close()
log_file.close()
|
Add a test Python scriptfrom __future__ import print_function
import time
from might_driver import MightyWattDriver
from buck_driver import FCCBuckDriver
discharger = FCCMPPTDriver()
discharger.open_serial('SNR=8543035353135160A201')
print(discharger.dev_id)
load = MightyWattDriver()
load.open_serial('SNR=854303535313513041E2')
print(load.dev_id)
# set load to something
load.set_resistance(20) # 20 ohms, at 10 volts would be 5 watts
# set buck to give 5 watts of power
discharger.set_power(5)
# every 250 ms, log the data
log_file = open("log.txt", "a+")
print("\nStarting:\n", file=log_file)
while True:
try:
time.sleep(0.250)
load_stat = load.read_status_string()
discharger_stat = discharger.read_status_string()
print(load_stat, file=log_file)
print(discharger_stat, file=log_file)
print(load_stat)
print(discharger_stat)
except KeyboardInterrupt:
print("\nKeyboard exit", file=log_file)
break
load.close()
discharger.close()
log_file.close()
|
<commit_before><commit_msg>Add a test Python script<commit_after>from __future__ import print_function
import time
from might_driver import MightyWattDriver
from buck_driver import FCCBuckDriver
discharger = FCCMPPTDriver()
discharger.open_serial('SNR=8543035353135160A201')
print(discharger.dev_id)
load = MightyWattDriver()
load.open_serial('SNR=854303535313513041E2')
print(load.dev_id)
# set load to something
load.set_resistance(20) # 20 ohms, at 10 volts would be 5 watts
# set buck to give 5 watts of power
discharger.set_power(5)
# every 250 ms, log the data
log_file = open("log.txt", "a+")
print("\nStarting:\n", file=log_file)
while True:
try:
time.sleep(0.250)
load_stat = load.read_status_string()
discharger_stat = discharger.read_status_string()
print(load_stat, file=log_file)
print(discharger_stat, file=log_file)
print(load_stat)
print(discharger_stat)
except KeyboardInterrupt:
print("\nKeyboard exit", file=log_file)
break
load.close()
discharger.close()
log_file.close()
|
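Note the script imports FCCBuckDriver but instantiates FCCMPPTDriver, which looks like a leftover from an earlier revision; without the repository's driver modules it is unclear which name is intended, so both are kept as-is. As a hedged sketch, the logging loop can also be written with a with-block so the file is closed even on unexpected errors, reusing the load and discharger objects set up above (Python 3 assumed):
import time

with open("log.txt", "a+") as log_file:
    print("\nStarting:\n", file=log_file)
    try:
        while True:
            time.sleep(0.250)
            for line in (load.read_status_string(),
                         discharger.read_status_string()):
                print(line, file=log_file)  # log to file
                print(line)                 # echo to console
    except KeyboardInterrupt:
        print("\nKeyboard exit", file=log_file)

load.close()
discharger.close()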
|
da50bb53f05e487a00b3db2feb97ce37cf449afd
|
tests/squid.py
|
tests/squid.py
|
"""
Squid Proxy Detector
********************
"""
import os
import httplib
import urllib2
from urlparse import urlparse
from plugoo import gen_headers
from plugoo.assets import Asset
from plugoo.tests import Test
__plugoo__ = "SquidProxy"
__desc__ = "This Test aims at detecting the squid transparent proxy"
class SquidAsset(Asset):
"""
This is the asset that should be used by the Test. It will
contain all the code responsible for parsing the asset file
and should be passed on instantiation to the test.
"""
def __init__(self, file=None):
self = Asset.__init__(self, file)
class Squid(Test):
"""
Squid Proxy testing class.
"""
def _http_request(self, method, url,
path=None, headers=None):
"""
Perform an HTTP Request.
XXX move this function to the core OONI
code.
"""
url = urlparse(url)
host = url.netloc
conn = httplib.HTTPConnection(host, 80)
conn.connect()
if path is None:
            path = url.path
        conn.putrequest(method, path)
        for h in gen_headers():
            conn.putheader(h[0], h[1])
conn.endheaders()
send_browser_headers(self, None, conn)
response = conn.getresponse()
headers = dict(response.getheaders())
self.headers = headers
self.data = response.read()
return True
def invalid_request(self):
"""
        This will trigger Squid's "Invalid Request" error.
"""
pass
def cache_object(self):
"""
This attempts to do a GET cache_object://localhost/info on
        any destination and checks to see if the response is
        that of Squid.
"""
pass
def experiment(self, *a, **kw):
"""
Fill this up with the tasks that should be performed
on the "dirty" network and should be compared with the
control.
"""
def control(self):
"""
Fill this up with the control related code.
"""
return True
def run(ooni):
"""
This is the function that will be called by OONI
and it is responsible for instantiating and passing
the arguments to the Test class.
"""
config = ooni.config
    # This is the assets array to be passed to the run function of
    # the test
    assets = [SquidAsset(os.path.join(config.main.assetdir, \
"someasset.txt"))]
# Instantiate the Test
    thetest = Squid(ooni)
ooni.logger.info("starting SquidProxyTest...")
# Run the test with argument assets
thetest.run(assets)
ooni.logger.info("finished.")
|
Add some scaffolding for Squid Proxy detection test
|
Add some scaffolding for Squid Proxy detection test
|
Python
|
bsd-2-clause
|
juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe,hackerberry/ooni-probe,hackerberry/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe
|
Add some scaffolding for Squid Proxy detection test
|
"""
Squid Proxy Detector
********************
"""
import os
import httplib
import urllib2
from urlparse import urlparse
from plugoo import gen_headers
from plugoo.assets import Asset
from plugoo.tests import Test
__plugoo__ = "SquidProxy"
__desc__ = "This Test aims at detecting the squid transparent proxy"
class SquidAsset(Asset):
"""
This is the asset that should be used by the Test. It will
contain all the code responsible for parsing the asset file
and should be passed on instantiation to the test.
"""
def __init__(self, file=None):
Asset.__init__(self, file)
class Squid(Test):
"""
Squid Proxy testing class.
"""
def _http_request(self, method, url,
path=None, headers=None):
"""
Perform an HTTP Request.
XXX move this function to the core OONI
code.
"""
url = urlparse(url)
host = url.netloc
conn = httplib.HTTPConnection(host, 80)
conn.connect()
if path is None:
path = url.path
conn.putrequest(method, path)
for h in gen_headers():
conn.putheader(h[0], h[1])
conn.endheaders()
send_browser_headers(self, None, conn)
response = conn.getresponse()
headers = dict(response.getheaders())
self.headers = headers
self.data = response.read()
return True
def invalid_request(self):
"""
This will trigger squid's "Invalid Request" error.
"""
pass
def cache_object(self):
"""
This attempts to do a GET cache_object://localhost/info on
any destination and checks to see if the response is that
of Squid.
"""
pass
def experiment(self, *a, **kw):
"""
Fill this up with the tasks that should be performed
on the "dirty" network and should be compared with the
control.
"""
def control(self):
"""
Fill this up with the control related code.
"""
return True
def run(ooni):
"""
This is the function that will be called by OONI
and it is responsible for instantiating and passing
the arguments to the Test class.
"""
config = ooni.config
# This is the assets array to be passed to the run function of
# the test
assets = [SquidAsset(os.path.join(config.main.assetdir, \
"someasset.txt"))]
# Instantiate the Test
thetest = Squid(ooni)
ooni.logger.info("starting SquidProxyTest...")
# Run the test with argument assets
thetest.run(assets)
ooni.logger.info("finished.")
|
<commit_before><commit_msg>Add some scaffolding for Squid Proxy detection test<commit_after>
|
"""
Squid Proxy Detector
********************
"""
import os
import httplib
import urllib2
from urlparse import urlparse
from plugoo import gen_headers
from plugoo.assets import Asset
from plugoo.tests import Test
__plugoo__ = "SquidProxy"
__desc__ = "This Test aims at detecting the squid transparent proxy"
class SquidAsset(Asset):
"""
This is the asset that should be used by the Test. It will
contain all the code responsible for parsing the asset file
and should be passed on instantiation to the test.
"""
def __init__(self, file=None):
Asset.__init__(self, file)
class Squid(Test):
"""
Squid Proxy testing class.
"""
def _http_request(self, method, url,
path=None, headers=None):
"""
Perform an HTTP Request.
XXX move this function to the core OONI
code.
"""
url = urlparse(url)
host = url.netloc
conn = httplib.HTTPConnection(host, 80)
conn.connect()
if path is None:
path = url.path
conn.putrequest(method, path)
for h in gen_headers():
conn.putheader(h[0], h[1])
conn.endheaders()
send_browser_headers(self, None, conn)
response = conn.getresponse()
headers = dict(response.getheaders())
self.headers = headers
self.data = response.read()
return True
def invalid_request(self):
"""
This will trigger squid's "Invalid Request" error.
"""
pass
def cache_object(self):
"""
This attempts to do a GET cache_object://localhost/info on
any destination and checks to see if the response is that
of Squid.
"""
pass
def experiment(self, *a, **kw):
"""
Fill this up with the tasks that should be performed
on the "dirty" network and should be compared with the
control.
"""
def control(self):
"""
Fill this up with the control related code.
"""
return True
def run(ooni):
"""
This is the function that will be called by OONI
and it is responsible for instantiating and passing
the arguments to the Test class.
"""
config = ooni.config
# This is the assets array to be passed to the run function of
# the test
assets = [SquidAsset(os.path.join(config.main.assetdir, \
"someasset.txt"))]
# Instantiate the Test
thetest = Squid(ooni)
ooni.logger.info("starting SquidProxyTest...")
# Run the test with argument assets
thetest.run(assets)
ooni.logger.info("finished.")
|
Add some scaffolding for Squid Proxy detection test"""
Squid Proxy Detector
********************
"""
import os
import httplib
import urllib2
from urlparse import urlparse
from plugoo import gen_headers
from plugoo.assets import Asset
from plugoo.tests import Test
__plugoo__ = "SquidProxy"
__desc__ = "This Test aims at detecting the squid transparent proxy"
class SquidAsset(Asset):
"""
This is the asset that should be used by the Test. It will
contain all the code responsible for parsing the asset file
and should be passed on instantiation to the test.
"""
def __init__(self, file=None):
Asset.__init__(self, file)
class Squid(Test):
"""
Squid Proxy testing class.
"""
def _http_request(self, method, url,
path=None, headers=None):
"""
Perform an HTTP Request.
XXX move this function to the core OONI
code.
"""
url = urlparse(url)
host = url.netloc
conn = httplib.HTTPConnection(host, 80)
conn.connect()
if path is None:
path = url.path
conn.putrequest(method, path)
for h in gen_headers():
conn.putheader(h[0], h[1])
conn.endheaders()
send_browser_headers(self, None, conn)
response = conn.getresponse()
headers = dict(response.getheaders())
self.headers = headers
self.data = response.read()
return True
def invalid_request(self):
"""
This will trigger squid's "Invalid Request" error.
"""
pass
def cache_object(self):
"""
This attempts to do a GET cache_object://localhost/info on
any destination and checks to see if the response is that
of Squid.
"""
pass
def experiment(self, *a, **kw):
"""
Fill this up with the tasks that should be performed
on the "dirty" network and should be compared with the
control.
"""
def control(self):
"""
Fill this up with the control related code.
"""
return True
def run(ooni):
"""
This is the function that will be called by OONI
and it is responsible for instantiating and passing
the arguments to the Test class.
"""
config = ooni.config
# This is the assets array to be passed to the run function of
# the test
assets = [SquidAsset(os.path.join(config.main.assetdir, \
"someasset.txt"))]
# Instantiate the Test
thetest = Squid(ooni)
ooni.logger.info("starting SquidProxyTest...")
# Run the test with argument assets
thetest.run(assets)
ooni.logger.info("finished.")
|
<commit_before><commit_msg>Add some scaffolding for Squid Proxy detection test<commit_after>"""
Squid Proxy Detector
********************
"""
import os
import httplib
import urllib2
from urlparse import urlparse
from plugoo import gen_headers
from plugoo.assets import Asset
from plugoo.tests import Test
__plugoo__ = "SquidProxy"
__desc__ = "This Test aims at detecting the squid transparent proxy"
class SquidAsset(Asset):
"""
This is the asset that should be used by the Test. It will
contain all the code responsible for parsing the asset file
and should be passed on instantiation to the test.
"""
def __init__(self, file=None):
Asset.__init__(self, file)
class Squid(Test):
"""
Squid Proxy testing class.
"""
def _http_request(self, method, url,
path=None, headers=None):
"""
Perform an HTTP Request.
XXX move this function to the core OONI
code.
"""
url = urlparse(url)
host = url.netloc
conn = httplib.HTTPConnection(host, 80)
conn.connect()
if path is None:
path = url.path
conn.putrequest(method, path)
for h in gen_headers():
conn.putheader(h[0], h[1])
conn.endheaders()
send_browser_headers(self, None, conn)
response = conn.getresponse()
headers = dict(response.getheaders())
self.headers = headers
self.data = response.read()
return True
def invalid_request(self):
"""
This will trigger squid's "Invalid Request" error.
"""
pass
def cache_object(self):
"""
This attempts to do a GET cache_object://localhost/info on
any destination and checks to see if the response is that
of Squid.
"""
pass
def experiment(self, *a, **kw):
"""
Fill this up with the tasks that should be performed
on the "dirty" network and should be compared with the
control.
"""
def control(self):
"""
Fill this up with the control related code.
"""
return True
def run(ooni):
"""
This is the function that will be called by OONI
and it is responsible for instantiating and passing
the arguments to the Test class.
"""
config = ooni.config
# This is the assets array to be passed to the run function of
# the test
assets = [SquidAsset(os.path.join(config.main.assetdir, \
"someasset.txt"))]
# Instantiate the Test
thetest = Squid(ooni)
ooni.logger.info("starting SquidProxyTest...")
# Run the test with argument assets
thetest.run(assets)
ooni.logger.info("finished.")
|
|
6e6f4b2ae0f085a649c1c4c3ebfe9c4aa6be37b1
|
libnamebench/config_test.py
|
libnamebench/config_test.py
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the config module."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import unittest
import config
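# Server lines follow this format (inferred from the test cases below):
#   ip=Name (instance) # hostname,lat,lon (City/Region/CountryCode)
# where the instance and location parts are optional.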
class ConfigTest(unittest.TestCase):
def testParseFullLine(self):
line = '129.250.35.251=NTT (2) # y.ns.gin.ntt.net,39.569,-104.8582 (Englewood/CO/US)'
expected = {'name': 'NTT (2)', 'service': 'NTT', 'ip': '129.250.35.251',
'lon': '-104.8582', 'instance': '2', 'country_code': 'US',
'lat': '39.569'}
self.assertEquals(config._ParseServerLine(line), expected)
def testOpenDNSLine(self):
line = '208.67.220.220=OpenDNS # resolver2.opendns.com'
expected = {'name': 'OpenDNS', 'service': 'OpenDNS', 'ip': '208.67.220.220',
'lon': None, 'instance': None, 'country_code': None,
'lat': None}
self.assertEquals(config._ParseServerLine(line), expected)
def testLineWithNoRegion(self):
line = '4.2.2.2=Level/GTEI-2 (3) # vnsc-bak.sys.gtei.net,38.0,-97.0 (US) '
expected = {'name': 'Level/GTEI-2 (3)', 'service': 'Level/GTEI-2',
'ip': '4.2.2.2', 'lon': '-97.0', 'instance': '3',
'country_code': 'US', 'lat': '38.0'}
self.assertEquals(config._ParseServerLine(line), expected)
if __name__ == '__main__':
unittest.main()
|
Add some tests for dns config parsing
|
Add some tests for dns config parsing
|
Python
|
apache-2.0
|
catap/namebench,jimmsta/namebench-1
|
Add some tests for dns config parsing
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the config module."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import unittest
import config
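# Server lines follow this format (inferred from the test cases below):
#   ip=Name (instance) # hostname,lat,lon (City/Region/CountryCode)
# where the instance and location parts are optional.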
class ConfigTest(unittest.TestCase):
def testParseFullLine(self):
line = '129.250.35.251=NTT (2) # y.ns.gin.ntt.net,39.569,-104.8582 (Englewood/CO/US)'
expected = {'name': 'NTT (2)', 'service': 'NTT', 'ip': '129.250.35.251',
'lon': '-104.8582', 'instance': '2', 'country_code': 'US',
'lat': '39.569'}
self.assertEquals(config._ParseServerLine(line), expected)
def testOpenDNSLine(self):
line = '208.67.220.220=OpenDNS # resolver2.opendns.com'
expected = {'name': 'OpenDNS', 'service': 'OpenDNS', 'ip': '208.67.220.220',
'lon': None, 'instance': None, 'country_code': None,
'lat': None}
self.assertEquals(config._ParseServerLine(line), expected)
def testLineWithNoRegion(self):
line = '4.2.2.2=Level/GTEI-2 (3) # vnsc-bak.sys.gtei.net,38.0,-97.0 (US) '
expected = {'name': 'Level/GTEI-2 (3)', 'service': 'Level/GTEI-2',
'ip': '4.2.2.2', 'lon': '-97.0', 'instance': '3',
'country_code': 'US', 'lat': '38.0'}
self.assertEquals(config._ParseServerLine(line), expected)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add some tests for dns config parsing<commit_after>
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the config module."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import unittest
import config
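# Server lines follow this format (inferred from the test cases below):
#   ip=Name (instance) # hostname,lat,lon (City/Region/CountryCode)
# where the instance and location parts are optional.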
class ConfigTest(unittest.TestCase):
def testParseFullLine(self):
line = '129.250.35.251=NTT (2) # y.ns.gin.ntt.net,39.569,-104.8582 (Englewood/CO/US)'
expected = {'name': 'NTT (2)', 'service': 'NTT', 'ip': '129.250.35.251',
'lon': '-104.8582', 'instance': '2', 'country_code': 'US',
'lat': '39.569'}
self.assertEquals(config._ParseServerLine(line), expected)
def testOpenDNSLine(self):
line = '208.67.220.220=OpenDNS # resolver2.opendns.com'
expected = {'name': 'OpenDNS', 'service': 'OpenDNS', 'ip': '208.67.220.220',
'lon': None, 'instance': None, 'country_code': None,
'lat': None}
self.assertEquals(config._ParseServerLine(line), expected)
def testLineWithNoRegion(self):
line = '4.2.2.2=Level/GTEI-2 (3) # vnsc-bak.sys.gtei.net,38.0,-97.0 (US) '
expected = {'name': 'Level/GTEI-2 (3)', 'service': 'Level/GTEI-2',
'ip': '4.2.2.2', 'lon': '-97.0', 'instance': '3',
'country_code': 'US', 'lat': '38.0'}
self.assertEquals(config._ParseServerLine(line), expected)
if __name__ == '__main__':
unittest.main()
|
Add some tests for dns config parsing#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the config module."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import unittest
import config
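# Server lines follow this format (inferred from the test cases below):
#   ip=Name (instance) # hostname,lat,lon (City/Region/CountryCode)
# where the instance and location parts are optional.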
class ConfigTest(unittest.TestCase):
def testParseFullLine(self):
line = '129.250.35.251=NTT (2) # y.ns.gin.ntt.net,39.569,-104.8582 (Englewood/CO/US)'
expected = {'name': 'NTT (2)', 'service': 'NTT', 'ip': '129.250.35.251',
'lon': '-104.8582', 'instance': '2', 'country_code': 'US',
'lat': '39.569'}
self.assertEquals(config._ParseServerLine(line), expected)
def testOpenDNSLine(self):
line = '208.67.220.220=OpenDNS # resolver2.opendns.com'
expected = {'name': 'OpenDNS', 'service': 'OpenDNS', 'ip': '208.67.220.220',
'lon': None, 'instance': None, 'country_code': None,
'lat': None}
self.assertEquals(config._ParseServerLine(line), expected)
def testLineWithNoRegion(self):
line = '4.2.2.2=Level/GTEI-2 (3) # vnsc-bak.sys.gtei.net,38.0,-97.0 (US) '
expected = {'name': 'Level/GTEI-2 (3)', 'service': 'Level/GTEI-2',
'ip': '4.2.2.2', 'lon': '-97.0', 'instance': '3',
'country_code': 'US', 'lat': '38.0'}
self.assertEquals(config._ParseServerLine(line), expected)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add some tests for dns config parsing<commit_after>#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the config module."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import unittest
import config
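# Server lines follow this format (inferred from the test cases below):
#   ip=Name (instance) # hostname,lat,lon (City/Region/CountryCode)
# where the instance and location parts are optional.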
class ConfigTest(unittest.TestCase):
def testParseFullLine(self):
line = '129.250.35.251=NTT (2) # y.ns.gin.ntt.net,39.569,-104.8582 (Englewood/CO/US)'
expected = {'name': 'NTT (2)', 'service': 'NTT', 'ip': '129.250.35.251',
'lon': '-104.8582', 'instance': '2', 'country_code': 'US',
'lat': '39.569'}
self.assertEquals(config._ParseServerLine(line), expected)
def testOpenDNSLine(self):
line = '208.67.220.220=OpenDNS # resolver2.opendns.com'
expected = {'name': 'OpenDNS', 'service': 'OpenDNS', 'ip': '208.67.220.220',
'lon': None, 'instance': None, 'country_code': None,
'lat': None}
self.assertEquals(config._ParseServerLine(line), expected)
def testLineWithNoRegion(self):
line = '4.2.2.2=Level/GTEI-2 (3) # vnsc-bak.sys.gtei.net,38.0,-97.0 (US) '
expected = {'name': 'Level/GTEI-2 (3)', 'service': 'Level/GTEI-2',
'ip': '4.2.2.2', 'lon': '-97.0', 'instance': '3',
'country_code': 'US', 'lat': '38.0'}
self.assertEquals(config._ParseServerLine(line), expected)
if __name__ == '__main__':
unittest.main()
|
|
7284fac63bd5100700319ba655f5a2c335193a1b
|
ooni/tests/test_errors.py
|
ooni/tests/test_errors.py
|
from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after its parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
|
Test that specific Failures are caught before parent Failures
|
Test that specific Failures are caught before parent Failures
|
Python
|
bsd-2-clause
|
0xPoly/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe
|
Test that specific Failures are caught before parent Failures
|
from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after its parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
|
<commit_before><commit_msg>Test that specific Failures are caught before parent Failures<commit_after>
|
from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after its parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
|
Test that specific Failures are caught before parent Failuresfrom twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after its parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
|
<commit_before><commit_msg>Test that specific Failures are caught before parent Failures<commit_after>from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after its parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
|
|
f0e4228d648617e374fb4848f980d72684e913c7
|
openpnm/solvers/_scipy.py
|
openpnm/solvers/_scipy.py
|
import numpy as np
from scipy.integrate import solve_ivp
from scipy.interpolate import interp1d
from scipy.sparse import csr_matrix, csc_matrix
from scipy.sparse.linalg import spsolve
from openpnm.solvers import DirectSolver, Integrator
class ScipySpsolve(DirectSolver):
def solve(self, A, b, **kwargs):
if not isinstance(A, (csr_matrix, csc_matrix)):
A = A.tocsr()
return (spsolve(A, b), 0)
class ScipyRK45(Integrator):
def __init__(self, atol=1e-6, rtol=1e-6, verbose=False, linsolver=None):
self.atol = atol
self.rtol = rtol
self.verbose = verbose
self.linsolver = linsolver
def solve(self, rhs, x0, tspan, saveat, **kwargs):
options = {
"atol": self.atol,
"rtol": self.rtol,
"t_eval": saveat,
# NOTE: "verbose" is not a solve_ivp/RK45 option; forwarding it only
# triggers an "extraneous arguments" warning, so it is omitted here.
}
sol = solve_ivp(rhs, tspan, x0, method="RK45", **options)
if sol.success:
return TransientSolution(sol.t, sol.y)
raise Exception(sol.message)
class Solution(np.ndarray): ...
class TransientSolution(Solution):
def __new__(cls, t, x):
obj = np.asarray(x).view(cls)
obj.t = np.asarray(t).view(cls)
return obj
def _create_interpolant(self):
self._interpolant = interp1d(self.t, self, bounds_error=True)
def interpolate(self, t):
if not hasattr(self, "_interpolant"):
self._create_interpolant()
return self._interpolant(t)
__call__ = interpolate
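# Usage sketch (hypothetical time value): a TransientSolution is callable,
# e.g. sol(0.5) returns the state linearly interpolated at t = 0.5.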
|
Add a basic integrator + solution class
|
Add a basic integrator + solution class
|
Python
|
mit
|
PMEAL/OpenPNM
|
Add a basic integrator + solution class
|
import numpy as np
from scipy.integrate import solve_ivp
from scipy.interpolate import interp1d
from scipy.sparse import csr_matrix, csc_matrix
from scipy.sparse.linalg import spsolve
from openpnm.solvers import DirectSolver, Integrator
class ScipySpsolve(DirectSolver):
def solve(self, A, b, **kwargs):
if not isinstance(A, (csr_matrix, csc_matrix)):
A = A.tocsr()
return (spsolve(A, b), 0)
class ScipyRK45(Integrator):
def __init__(self, atol=1e-6, rtol=1e-6, verbose=False, linsolver=None):
self.atol = atol
self.rtol = rtol
self.verbose = verbose
self.linsolver = linsolver
def solve(self, rhs, x0, tspan, saveat, **kwargs):
options = {
"atol": self.atol,
"rtol": self.rtol,
"t_eval": saveat,
# NOTE: "verbose" is not a solve_ivp/RK45 option; forwarding it only
# triggers an "extraneous arguments" warning, so it is omitted here.
}
sol = solve_ivp(rhs, tspan, x0, method="RK45", **options)
if sol.success:
return TransientSolution(sol.t, sol.y)
raise Exception(sol.message)
class Solution(np.ndarray): ...
class TransientSolution(Solution):
def __new__(cls, t, x):
obj = np.asarray(x).view(cls)
obj.t = np.asarray(t).view(cls)
return obj
def _create_interpolant(self):
self._interpolant = interp1d(self.t, self, bounds_error=True)
def interpolate(self, t):
if not hasattr(self, "_interpolant"):
self._create_interpolant()
return self._interpolant(t)
__call__ = interpolate
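# Usage sketch (hypothetical time value): a TransientSolution is callable,
# e.g. sol(0.5) returns the state linearly interpolated at t = 0.5.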
|
<commit_before><commit_msg>Add a basic integrator + solution class<commit_after>
|
import numpy as np
from scipy.integrate import solve_ivp
from scipy.interpolate import interp1d
from scipy.sparse import csr_matrix, csc_matrix
from scipy.sparse.linalg import spsolve
from openpnm.solvers import DirectSolver, Integrator
class ScipySpsolve(DirectSolver):
def solve(self, A, b, **kwargs):
if not isinstance(A, (csr_matrix, csc_matrix)):
A = A.tocsr()
return (spsolve(A, b), 0)
class ScipyRK45(Integrator):
def __init__(self, atol=1e-6, rtol=1e-6, verbose=False, linsolver=None):
self.atol = atol
self.rtol = rtol
self.verbose = verbose
self.linsolver = linsolver
def solve(self, rhs, x0, tspan, saveat, **kwargs):
options = {
"atol": self.atol,
"rtol": self.rtol,
"t_eval": saveat,
# NOTE: "verbose" is not a solve_ivp/RK45 option; forwarding it only
# triggers an "extraneous arguments" warning, so it is omitted here.
}
sol = solve_ivp(rhs, tspan, x0, method="RK45", **options)
if sol.success:
return TransientSolution(sol.t, sol.y)
raise Exception(sol.message)
class Solution(np.ndarray): ...
class TransientSolution(Solution):
def __new__(cls, t, x):
obj = np.asarray(x).view(cls)
obj.t = np.asarray(t).view(cls)
return obj
def _create_interpolant(self):
self._interpolant = interp1d(self.t, self, bounds_error=True)
def interpolate(self, t):
if not hasattr(self, "_interpolant"):
self._create_interpolant()
return self._interpolant(t)
__call__ = interpolate
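# Usage sketch (hypothetical time value): a TransientSolution is callable,
# e.g. sol(0.5) returns the state linearly interpolated at t = 0.5.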
|
Add a basic integrator + solution classimport numpy as np
from scipy.integrate import solve_ivp
from scipy.interpolate import interp1d
from scipy.sparse import csr_matrix, csc_matrix
from scipy.sparse.linalg import spsolve
from openpnm.solvers import DirectSolver, Integrator
class ScipySpsolve(DirectSolver):
def solve(self, A, b, **kwargs):
if not isinstance(A, (csr_matrix, csc_matrix)):
A = A.tocsr()
return (spsolve(A, b), 0)
class ScipyRK45(Integrator):
def __init__(self, atol=1e-6, rtol=1e-6, verbose=False, linsolver=None):
self.atol = atol
self.rtol = rtol
self.verbose = verbose
self.linsolver = linsolver
def solve(self, rhs, x0, tspan, saveat, **kwargs):
options = {
"atol": self.atol,
"rtol": self.rtol,
"t_eval": saveat,
# NOTE: "verbose" is not a solve_ivp/RK45 option; forwarding it only
# triggers an "extraneous arguments" warning, so it is omitted here.
}
sol = solve_ivp(rhs, tspan, x0, method="RK45", **options)
if sol.success:
return TransientSolution(sol.t, sol.y)
raise Exception(sol.message)
class Solution(np.ndarray): ...
class TransientSolution(Solution):
def __new__(cls, t, x):
obj = np.asarray(x).view(cls)
obj.t = np.asarray(t).view(cls)
return obj
def _create_interpolant(self):
self._interpolant = interp1d(self.t, self, bounds_error=True)
def interpolate(self, t):
if not hasattr(self, "_interpolant"):
self._create_interpolant()
return self._interpolant(t)
__call__ = interpolate
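# Usage sketch (hypothetical time value): a TransientSolution is callable,
# e.g. sol(0.5) returns the state linearly interpolated at t = 0.5.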
|
<commit_before><commit_msg>Add a basic integrator + solution class<commit_after>import numpy as np
from scipy.integrate import solve_ivp
from scipy.interpolate import interp1d
from scipy.sparse import csr_matrix, csc_matrix
from scipy.sparse.linalg import spsolve
from openpnm.solvers import DirectSolver, Integrator
class ScipySpsolve(DirectSolver):
def solve(self, A, b, **kwargs):
if not isinstance(A, (csr_matrix, csc_matrix)):
A = A.tocsr()
return (spsolve(A, b), 0)
class ScipyRK45(Integrator):
def __init__(self, atol=1e-6, rtol=1e-6, verbose=False, linsolver=None):
self.atol = atol
self.rtol = rtol
self.verbose = verbose
self.linsolver = linsolver
def solve(self, rhs, x0, tspan, saveat, **kwargs):
options = {
"atol": self.atol,
"rtol": self.rtol,
"t_eval": saveat,
# NOTE: "verbose" is not a solve_ivp/RK45 option; forwarding it only
# triggers an "extraneous arguments" warning, so it is omitted here.
}
sol = solve_ivp(rhs, tspan, x0, method="RK45", **options)
if sol.success:
return TransientSolution(sol.t, sol.y)
raise Exception(sol.message)
class Solution(np.ndarray): ...
class TransientSolution(Solution):
def __new__(cls, t, x):
obj = np.asarray(x).view(cls)
obj.t = np.asarray(t).view(cls)
return obj
def _create_interpolant(self):
self._interpolant = interp1d(self.t, self, bounds_error=True)
def interpolate(self, t):
if not hasattr(self, "_interpolant"):
self._create_interpolant()
return self._interpolant(t)
__call__ = interpolate
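# Usage sketch (hypothetical time value): a TransientSolution is callable,
# e.g. sol(0.5) returns the state linearly interpolated at t = 0.5.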
|
|
34080178f92de8e74138d2e8c361b877c55b6150
|
redshirt/analyze.py
|
redshirt/analyze.py
|
import numpy as np
from scipy import ndimage as ndi
from skimage.filters import threshold_otsu
def _extract_roi(image, axis=-1):
max_frame = np.max(image, axis=axis)
initial_mask = max_frame > threshold_otsu(max_frame)
regions = ndi.label(initial_mask)[0]
region_sizes = np.bincount(np.ravel(regions))
# region_sizes[0] counts background pixels; +1 maps the argmax over the
# remaining counts back to the corresponding region label
return regions == np.argmax(region_sizes[1:]) + 1
def extract_trace(image, axis=-1):
"""Get a total intensity trace over time out of an image.
Parameters
----------
image : array
The input image.
axis : int, optional
The axis identifying frames.
Returns
-------
trace : array of float
The trace of the image data over time.
roi : array of bool
The mask used to obtain the trace.
"""
roi = _extract_roi(image, axis)
trace = np.sum(image[roi].astype(float), axis=0)
return trace, roi
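# Usage sketch (hypothetical data): for a recording shaped (rows, cols, time),
#     trace, roi = extract_trace(image, axis=-1)
# gives a 1-D intensity trace plus the boolean mask it was summed over.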
|
Add early analysis script for traces
|
Add early analysis script for traces
|
Python
|
mit
|
jni/python-redshirt
|
Add early analysis script for traces
|
import numpy as np
from scipy import ndimage as ndi
from skimage.filters import threshold_otsu
def _extract_roi(image, axis=-1):
max_frame = np.max(image, axis=axis)
initial_mask = max_frame > threshold_otsu(max_frame)
regions = ndi.label(initial_mask)[0]
region_sizes = np.bincount(np.ravel(regions))
# region_sizes[0] counts background pixels; +1 maps the argmax over the
# remaining counts back to the corresponding region label
return regions == np.argmax(region_sizes[1:]) + 1
def extract_trace(image, axis=-1):
"""Get a total intensity trace over time out of an image.
Parameters
----------
image : array
The input image.
axis : int, optional
The axis identifying frames.
Returns
-------
trace : array of float
The trace of the image data over time.
roi : array of bool
The mask used to obtain the trace.
"""
roi = _extract_roi(image, axis)
trace = np.sum(image[roi].astype(float), axis=0)
return trace, roi
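# Usage sketch (hypothetical data): for a recording shaped (rows, cols, time),
#     trace, roi = extract_trace(image, axis=-1)
# gives a 1-D intensity trace plus the boolean mask it was summed over.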
|
<commit_before><commit_msg>Add early analysis script for traces<commit_after>
|
import numpy as np
from scipy import ndimage as ndi
from skimage.filters import threshold_otsu
def _extract_roi(image, axis=-1):
max_frame = np.max(image, axis=axis)
initial_mask = max_frame > threshold_otsu(max_frame)
regions = ndi.label(initial_mask)[0]
region_sizes = np.bincount(np.ravel(regions))
# region_sizes[0] counts background pixels; +1 maps the argmax over the
# remaining counts back to the corresponding region label
return regions == np.argmax(region_sizes[1:]) + 1
def extract_trace(image, axis=-1):
"""Get a total intensity trace over time out of an image.
Parameters
----------
image : array
The input image.
axis : int, optional
The axis identifying frames.
Returns
-------
trace : array of float
The trace of the image data over time.
roi : array of bool
The mask used to obtain the trace.
"""
roi = _extract_roi(image, axis)
trace = np.sum(image[roi].astype(float), axis=0)
return trace, roi
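# Usage sketch (hypothetical data): for a recording shaped (rows, cols, time),
#     trace, roi = extract_trace(image, axis=-1)
# gives a 1-D intensity trace plus the boolean mask it was summed over.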
|
Add early analysis script for tracesimport numpy as np
from scipy import ndimage as ndi
from skimage.filters import threshold_otsu
def _extract_roi(image, axis=-1):
max_frame = np.max(image, axis=axis)
initial_mask = max_frame > threshold_otsu(max_frame)
regions = ndi.label(initial_mask)[0]
region_sizes = np.bincount(np.ravel(regions))
# region_sizes[0] counts background pixels; +1 maps the argmax over the
# remaining counts back to the corresponding region label
return regions == np.argmax(region_sizes[1:]) + 1
def extract_trace(image, axis=-1):
"""Get a total intensity trace over time out of an image.
Parameters
----------
image : array
The input image.
axis : int, optional
The axis identifying frames.
Returns
-------
trace : array of float
The trace of the image data over time.
roi : array of bool
The mask used to obtain the trace.
"""
roi = _extract_roi(image, axis)
trace = np.sum(image[roi].astype(float), axis=0)
return trace, roi
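# Usage sketch (hypothetical data): for a recording shaped (rows, cols, time),
#     trace, roi = extract_trace(image, axis=-1)
# gives a 1-D intensity trace plus the boolean mask it was summed over.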
|
<commit_before><commit_msg>Add early analysis script for traces<commit_after>import numpy as np
from scipy import ndimage as ndi
from skimage.filters import threshold_otsu
def _extract_roi(image, axis=-1):
max_frame = np.max(image, axis=axis)
initial_mask = max_frame > threshold_otsu(max_frame)
regions = ndi.label(initial_mask)[0]
region_sizes = np.bincount(np.ravel(regions))
# region_sizes[0] counts background pixels; +1 maps the argmax over the
# remaining counts back to the corresponding region label
return regions == np.argmax(region_sizes[1:]) + 1
def extract_trace(image, axis=-1):
"""Get a total intensity trace over time out of an image.
Parameters
----------
image : array
The input image.
axis : int, optional
The axis identifying frames.
Returns
-------
trace : array of float
The trace of the image data over time.
roi : array of bool
The mask used to obtain the trace.
"""
roi = _extract_roi(image, axis)
trace = np.sum(image[roi].astype(float), axis=0)
return trace, roi
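# Usage sketch (hypothetical data): for a recording shaped (rows, cols, time),
#     trace, roi = extract_trace(image, axis=-1)
# gives a 1-D intensity trace plus the boolean mask it was summed over.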
|
|
0ed11ea6b92741a0ed232a93f1876204e14b0c53
|
datapipe/classifiers/__init__.py
|
datapipe/classifiers/__init__.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# This script is provided under the terms and conditions of the MIT license:
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
__all__ = []
|
Add an empty package (classifiers).
|
Add an empty package (classifiers).
|
Python
|
mit
|
jdhp-sap/data-pipeline-standalone-scripts,jdhp-sap/sap-cta-data-pipeline,jdhp-sap/sap-cta-data-pipeline,jdhp-sap/data-pipeline-standalone-scripts
|
Add an empty package (classifiers).
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# This script is provided under the terms and conditions of the MIT license:
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
__all__ = []
|
<commit_before><commit_msg>Add an empty package (classifiers).<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# This script is provided under the terms and conditions of the MIT license:
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
__all__ = []
|
Add an empty package (classifiers).#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# This script is provided under the terms and conditions of the MIT license:
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
__all__ = []
|
<commit_before><commit_msg>Add an empty package (classifiers).<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# This script is provided under the terms and conditions of the MIT license:
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
__all__ = []
|
|
4307ae93af2ee440570d2f642e62aa0a3baeecde
|
cumulusci/robotframework/locators_47.py
|
cumulusci/robotframework/locators_47.py
|
from cumulusci.robotframework import locators_46
lex_locators = locators_46.lex_locators.copy()
# At the moment, there are no changes to the locators.
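# A future Winter '20 override would be added like this (hypothetical key):
# lex_locators["modal"]["button"] = "//button[@title='{}']"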
|
Add locator file for winter '20. Without this file, Salesforce.py will throw an error when it's imported while testing Winter '20.
|
Add locator file for winter '20.
Without this file, Salesforce.py will throw an error when it's imported
while testing Winter '20.
|
Python
|
bsd-3-clause
|
SalesforceFoundation/CumulusCI,SalesforceFoundation/CumulusCI
|
Add locator file for winter '20.
Without this file, Salesforce.py will throw an error when it's imported
while testing Winter '20.
|
from cumulusci.robotframework import locators_46
lex_locators = locators_46.lex_locators.copy()
# At the moment, there are no changes to the locators.
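# A future Winter '20 override would be added like this (hypothetical key):
# lex_locators["modal"]["button"] = "//button[@title='{}']"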
|
<commit_before><commit_msg>Add locator file for winter '20.
Without this file, Salesforce.py will throw an error when it's imported
while testing Winter '20.<commit_after>
|
from cumulusci.robotframework import locators_46
lex_locators = locators_46.lex_locators.copy()
# At the moment, there are no changes to the locators.
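# A future Winter '20 override would be added like this (hypothetical key):
# lex_locators["modal"]["button"] = "//button[@title='{}']"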
|
Add locator file for winter '20.
Without this file, Salesforce.py will throw an error when it's imported
while testing Winter '20.from cumulusci.robotframework import locators_46
lex_locators = locators_46.lex_locators.copy()
# At the moment, there are no changes to the locators.
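# A future Winter '20 override would be added like this (hypothetical key):
# lex_locators["modal"]["button"] = "//button[@title='{}']"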
|
<commit_before><commit_msg>Add locator file for winter '20.
Without this file, Salesforce.py will throw an error when it's imported
while testing Winter '20.<commit_after>from cumulusci.robotframework import locators_46
lex_locators = locators_46.lex_locators.copy()
# At the moment, there are no changes to the locators.
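# A future Winter '20 override would be added like this (hypothetical key):
# lex_locators["modal"]["button"] = "//button[@title='{}']"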
|
|
a83048076db03923f0b3eff1e9d77f3b4b259c34
|
tests/test_commands.py
|
tests/test_commands.py
|
"""Tests for the management commands"""
from __future__ import absolute_import
from django.conf import settings
from django.core import management
from importlib import import_module
from django_cas_ng.models import SessionTicket, ProxyGrantingTicket
import pytest
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
@pytest.mark.django_db
def test_command_clean_session(django_user_model):
# Use the configured session store to generate a fake session
session = SessionStore()
session['fake_session'] = 'fake-session'
session.save()
assert SessionStore(session_key=session.session_key) is not None
# Create a fake session ticket and make sure it exists in the db
session_ticket = SessionTicket.objects.create(
session_key=session.session_key,
ticket='fake-ticket'
)
assert session_ticket is not None
assert SessionTicket.objects.filter(session_key=session.session_key,
ticket='fake-ticket').exists() is True
# Create a fake user for the proxy granting ticket
user = django_user_model.objects.create(username='test-user', email='test@example.com')
assert user is not None
assert django_user_model.objects.filter(username='test-user').exists() is True
# Create a fake pgt
pgt = ProxyGrantingTicket.objects.create(session_key=session.session_key,
user=user, pgtiou='fake-ticket-iou',
pgt='fake-ticket')
assert pgt is not None
assert ProxyGrantingTicket.objects.filter(session_key=session.session_key,
user=user, pgtiou='fake-ticket-iou',
pgt='fake-ticket').exists() is True
# Call the clean sessions command and make sure things are cleaned up
management.call_command('django_cas_ng_clean_sessions')
assert SessionTicket.objects.filter(session_key=session.session_key,
ticket='fake-ticket').exists() is False
assert ProxyGrantingTicket.objects.filter(session_key=session.session_key,
user=user, pgtiou='fake-ticket-iou',
pgt='fake-ticket').exists() is False
|
Add the management commands unit tests.
|
Add the management commands unit tests.
|
Python
|
mit
|
mingchen/django-cas-ng,nitmir/django-cas-ng,pbaehr/django-cas-ng,bgroff/django-cas-ng
|
Add the management commands unit tests.
|
"""Tests for the management commands"""
from __future__ import absolute_import
from django.conf import settings
from django.core import management
from importlib import import_module
from django_cas_ng.models import SessionTicket, ProxyGrantingTicket
import pytest
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
@pytest.mark.django_db
def test_command_clean_session(django_user_model):
# Use the configured session store to generate a fake session
session = SessionStore()
session['fake_session'] = 'fake-session'
session.save()
assert SessionStore(session_key=session.session_key) is not None
# Create a fake session ticket and make sure it exists in the db
session_ticket = SessionTicket.objects.create(
session_key=session.session_key,
ticket='fake-ticket'
)
assert session_ticket is not None
assert SessionTicket.objects.filter(session_key=session.session_key,
ticket='fake-ticket').exists() is True
# Create a fake user for the proxy granting ticket
user = django_user_model.objects.create(username='test-user', email='test@example.com')
assert user is not None
assert django_user_model.objects.filter(username='test-user').exists() is True
# Create a fake pgt
pgt = ProxyGrantingTicket.objects.create(session_key=session.session_key,
user=user, pgtiou='fake-ticket-iou',
pgt='fake-ticket')
assert pgt is not None
assert ProxyGrantingTicket.objects.filter(session_key=session.session_key,
user=user, pgtiou='fake-ticket-iou',
pgt='fake-ticket').exists() is True
# Call the clean sessions command and make sure things are cleaned up
management.call_command('django_cas_ng_clean_sessions')
assert SessionTicket.objects.filter(session_key=session.session_key,
ticket='fake-ticket').exists() is False
assert ProxyGrantingTicket.objects.filter(session_key=session.session_key,
user=user, pgtiou='fake-ticket-iou',
pgt='fake-ticket').exists() is False
|
<commit_before><commit_msg>Add the management commands unit tests.<commit_after>
|
"""Tests for the management commands"""
from __future__ import absolute_import
from django.conf import settings
from django.core import management
from importlib import import_module
from django_cas_ng.models import SessionTicket, ProxyGrantingTicket
import pytest
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
@pytest.mark.django_db
def test_command_clean_session(django_user_model):
# Use the configured session store to generate a fake session
session = SessionStore()
session['fake_session'] = 'fake-session'
session.save()
assert SessionStore(session_key=session.session_key) is not None
# Create a fake session ticket and make sure it exists in the db
session_ticket = SessionTicket.objects.create(
session_key=session.session_key,
ticket='fake-ticket'
)
assert session_ticket is not None
assert SessionTicket.objects.filter(session_key=session.session_key,
ticket='fake-ticket').exists() is True
# Create a fake user for the proxy granting ticket
user = django_user_model.objects.create(username='test-user', email='test@example.com')
assert user is not None
assert django_user_model.objects.filter(username='test-user').exists() is True
# Create a fake pgt
pgt = ProxyGrantingTicket.objects.create(session_key=session.session_key,
user=user, pgtiou='fake-ticket-iou',
pgt='fake-ticket')
assert pgt is not None
assert ProxyGrantingTicket.objects.filter(session_key=session.session_key,
user=user, pgtiou='fake-ticket-iou',
pgt='fake-ticket').exists() is True
# Call the clean sessions command and make sure things are cleaned up
management.call_command('django_cas_ng_clean_sessions')
assert SessionTicket.objects.filter(session_key=session.session_key,
ticket='fake-ticket').exists() is False
assert ProxyGrantingTicket.objects.filter(session_key=session.session_key,
user=user, pgtiou='fake-ticket-iou',
pgt='fake-ticket').exists() is False
|
Add the management commands unit tests."""Tests for the management commands"""
from __future__ import absolute_import
from django.conf import settings
from django.core import management
from importlib import import_module
from django_cas_ng.models import SessionTicket, ProxyGrantingTicket
import pytest
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
@pytest.mark.django_db
def test_command_clean_session(django_user_model):
# Use the configured session store to generate a fake session
session = SessionStore()
session['fake_session'] = 'fake-session'
session.save()
assert SessionStore(session_key=session.session_key) is not None
# Create a fake session ticket and make sure it exists in the db
session_ticket = SessionTicket.objects.create(
session_key=session.session_key,
ticket='fake-ticket'
)
assert session_ticket is not None
assert SessionTicket.objects.filter(session_key=session.session_key,
ticket='fake-ticket').exists() is True
# Create a fake user for the proxy granting ticket
user = django_user_model.objects.create(username='test-user', email='test@example.com')
assert user is not None
assert django_user_model.objects.filter(username='test-user').exists() is True
# Create a fake pgt
pgt = ProxyGrantingTicket.objects.create(session_key=session.session_key,
user=user, pgtiou='fake-ticket-iou',
pgt='fake-ticket')
assert pgt is not None
assert ProxyGrantingTicket.objects.filter(session_key=session.session_key,
user=user, pgtiou='fake-ticket-iou',
pgt='fake-ticket').exists() is True
# Call the clean sessions command and make sure things are cleaned up
management.call_command('django_cas_ng_clean_sessions')
assert SessionTicket.objects.filter(session_key=session.session_key,
ticket='fake-ticket').exists() is False
assert ProxyGrantingTicket.objects.filter(session_key=session.session_key,
user=user, pgtiou='fake-ticket-iou',
pgt='fake-ticket').exists() is False
|
<commit_before><commit_msg>Add the management commands unit tests.<commit_after>"""Tests for the management commands"""
from __future__ import absolute_import
from django.conf import settings
from django.core import management
from importlib import import_module
from django_cas_ng.models import SessionTicket, ProxyGrantingTicket
import pytest
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
@pytest.mark.django_db
def test_command_clean_session(django_user_model):
# Use the configured session store to generate a fake session
session = SessionStore()
session['fake_session'] = 'fake-session'
session.save()
assert SessionStore(session_key=session.session_key) is not None
# Create a fake session ticket and make sure it exists in the db
session_ticket = SessionTicket.objects.create(
session_key=session.session_key,
ticket='fake-ticket'
)
assert session_ticket is not None
assert SessionTicket.objects.filter(session_key=session.session_key,
ticket='fake-ticket').exists() is True
# Create a fake user for the proxy granting ticket
user = django_user_model.objects.create(username='test-user', email='test@example.com')
assert user is not None
assert django_user_model.objects.filter(username='test-user').exists() is True
# Create a fake pgt
pgt = ProxyGrantingTicket.objects.create(session_key=session.session_key,
user=user, pgtiou='fake-ticket-iou',
pgt='fake-ticket')
assert pgt is not None
assert ProxyGrantingTicket.objects.filter(session_key=session.session_key,
user=user, pgtiou='fake-ticket-iou',
pgt='fake-ticket').exists() is True
# Call the clean sessions command and make sure things are cleaned up
management.call_command('django_cas_ng_clean_sessions')
assert SessionTicket.objects.filter(session_key=session.session_key,
ticket='fake-ticket').exists() is False
assert ProxyGrantingTicket.objects.filter(session_key=session.session_key,
user=user, pgtiou='fake-ticket-iou',
pgt='fake-ticket').exists() is False
|
|
9e900eb16e92027cfe990a07c5703a6adbb41a09
|
drivers/python/wappalyzer.py
|
drivers/python/wappalyzer.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import PyV8
import urllib
from urlparse import urlparse
try:
import json
except ImportError:
import simplejson as json
class Wappalyzer(object):
def __init__(self, url):
self.file_dir = os.path.dirname(__file__)
f = open(os.path.join(self.file_dir, '../../share/apps.json'))
data = json.loads(f.read())
f.close()
self.categories = data['categories']
self.apps = data['apps']
self.url = url
def analyze(self):
ctxt = PyV8.JSContext()
ctxt.enter()
f1 = open(os.path.join(self.file_dir, '../php/js/wappalyzer.js'))
f2 = open(os.path.join(self.file_dir, '../php/js/driver.js'))
ctxt.eval(f1.read())
ctxt.eval(f2.read())
f1.close()
f2.close()
host = urlparse(self.url).hostname
html = urllib.urlopen(self.url).read()
data = {'host': host, 'url': self.url, 'html': html, 'headers': {}}
apps = json.dumps(self.apps)
categories = json.dumps(self.categories)
return ctxt.eval("w.apps = %s; w.categories = %s; w.driver.data = %s; w.driver.init();" % (apps, categories, json.dumps(data)))
if __name__ == '__main__':
try:
w = Wappalyzer(sys.argv[1])
print w.analyze()
except IndexError:
print ('Usage: python %s <url>' % sys.argv[0])
|
Add python driver (depend on PyV8)
|
Add python driver (depend on PyV8)
|
Python
|
mit
|
WPO-Foundation/Wappalyzer,WPO-Foundation/Wappalyzer,WPO-Foundation/Wappalyzer,AliasIO/wappalyzer,AliasIO/wappalyzer
|
Add python driver (depend on PyV8)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import PyV8
import urllib
from urlparse import urlparse
try:
import json
except ImportError:
import simplejson as json
class Wappalyzer(object):
def __init__(self, url):
self.file_dir = os.path.dirname(__file__)
f = open(os.path.join(self.file_dir, '../../share/apps.json'))
data = json.loads(f.read())
f.close()
self.categories = data['categories']
self.apps = data['apps']
self.url = url
def analyze(self):
ctxt = PyV8.JSContext()
ctxt.enter()
f1 = open(os.path.join(self.file_dir, '../php/js/wappalyzer.js'))
f2 = open(os.path.join(self.file_dir, '../php/js/driver.js'))
ctxt.eval(f1.read())
ctxt.eval(f2.read())
f1.close()
f2.close()
host = urlparse(self.url).hostname
html = urllib.urlopen(self.url).read()
data = {'host': host, 'url': self.url, 'html': html, 'headers': {}}
apps = json.dumps(self.apps)
categories = json.dumps(self.categories)
return ctxt.eval("w.apps = %s; w.categories = %s; w.driver.data = %s; w.driver.init();" % (apps, categories, json.dumps(data)))
if __name__ == '__main__':
try:
w = Wappalyzer(sys.argv[1])
print w.analyze()
except IndexError:
print ('Usage: python %s <url>' % sys.argv[0])
|
<commit_before><commit_msg>Add python driver (depend on PyV8)<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import PyV8
import urllib
from urlparse import urlparse
try:
import json
except ImportError:
import simplejson as json
class Wappalyzer(object):
def __init__(self, url):
self.file_dir = os.path.dirname(__file__)
f = open(os.path.join(self.file_dir, '../../share/apps.json'))
data = json.loads(f.read())
f.close()
self.categories = data['categories']
self.apps = data['apps']
self.url = url
def analyze(self):
ctxt = PyV8.JSContext()
ctxt.enter()
f1 = open(os.path.join(self.file_dir, '../php/js/wappalyzer.js'))
f2 = open(os.path.join(self.file_dir, '../php/js/driver.js'))
ctxt.eval(f1.read())
ctxt.eval(f2.read())
f1.close()
f2.close()
host = urlparse(self.url).hostname
html = urllib.urlopen(self.url).read()
data = {'host': host, 'url': self.url, 'html': html, 'headers': {}}
apps = json.dumps(self.apps)
categories = json.dumps(self.categories)
return ctxt.eval("w.apps = %s; w.categories = %s; w.driver.data = %s; w.driver.init();" % (apps, categories, json.dumps(data)))
if __name__ == '__main__':
try:
w = Wappalyzer(sys.argv[1])
print w.analyze()
except IndexError:
print ('Usage: python %s <url>' % sys.argv[0])
|
Add python driver (depend on PyV8)#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import PyV8
import urllib
from urlparse import urlparse
try:
import json
except ImportError:
import simplejson as json
class Wappalyzer(object):
def __init__(self, url):
self.file_dir = os.path.dirname(__file__)
f = open(os.path.join(self.file_dir, '../../share/apps.json'))
data = json.loads(f.read())
f.close()
self.categories = data['categories']
self.apps = data['apps']
self.url = url
def analyze(self):
ctxt = PyV8.JSContext()
ctxt.enter()
f1 = open(os.path.join(self.file_dir, '../php/js/wappalyzer.js'))
f2 = open(os.path.join(self.file_dir, '../php/js/driver.js'))
ctxt.eval(f1.read())
ctxt.eval(f2.read())
f1.close()
f2.close()
host = urlparse(self.url).hostname
html = urllib.urlopen(self.url).read()
data = {'host': host, 'url': self.url, 'html': html, 'headers': {}}
apps = json.dumps(self.apps)
categories = json.dumps(self.categories)
return ctxt.eval("w.apps = %s; w.categories = %s; w.driver.data = %s; w.driver.init();" % (apps, categories, json.dumps(data)))
if __name__ == '__main__':
try:
w = Wappalyzer(sys.argv[1])
print w.analyze()
except IndexError:
print ('Usage: python %s <url>' % sys.argv[0])
|
<commit_before><commit_msg>Add python driver (depends on PyV8)<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import PyV8
import urllib
from urlparse import urlparse
try:
import json
except ImportError:
import simplejson as json
class Wappalyzer(object):
def __init__(self, url):
self.file_dir = os.path.dirname(__file__)
f = open(os.path.join(self.file_dir, '../../share/apps.json'))
data = json.loads(f.read())
f.close()
self.categories = data['categories']
self.apps = data['apps']
self.url = url
def analyze(self):
ctxt = PyV8.JSContext()
ctxt.enter()
f1 = open(os.path.join(self.file_dir, '../php/js/wappalyzer.js'))
f2 = open(os.path.join(self.file_dir, '../php/js/driver.js'))
ctxt.eval(f1.read())
ctxt.eval(f2.read())
f1.close()
f2.close()
host = urlparse(self.url).hostname
html = urllib.urlopen(self.url).read()
data = {'host': host, 'url': self.url, 'html': html, 'headers': {}}
apps = json.dumps(self.apps)
categories = json.dumps(self.categories)
return ctxt.eval("w.apps = %s; w.categories = %s; w.driver.data = %s; w.driver.init();" % (apps, categories, json.dumps(data)))
if __name__ == '__main__':
try:
w = Wappalyzer(sys.argv[1])
print w.analyze()
except IndexError:
print ('Usage: python %s <url>' % sys.argv[0])
|
|
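Note: the driver above always hands Wappalyzer an empty headers dict, so header-based fingerprints never fire. A minimal sketch of collecting response headers (Python 2 to match the driver; a hypothetical extension, not part of the commit):

# Sketch only: fetch a page and its headers so Wappalyzer's
# header rules could match as well.
import urllib

def fetch_with_headers(url):
    response = urllib.urlopen(url)
    html = response.read()
    # response.info() is a mimetools.Message; items() gives (name, value) pairs
    headers = dict(response.info().items())
    return html, headers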
4a360a4d678d561bb74e37f857cfd09d35747db7
|
core/migrations/0002_profile_image.py
|
core/migrations/0002_profile_image.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0001_profile'),
]
operations = [
migrations.AddField(
model_name='profile',
name='image',
field=models.ImageField(null=True, upload_to=b'', blank=True),
preserve_default=True,
),
]
|
Add the migration for the image field
|
Add the migration for the image field
|
Python
|
mit
|
mauricioabreu/speakerfight,luanfonceca/speakerfight,luanfonceca/speakerfight,luanfonceca/speakerfight,mauricioabreu/speakerfight,mauricioabreu/speakerfight
|
Add the migration for the image field
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0001_profile'),
]
operations = [
migrations.AddField(
model_name='profile',
name='image',
field=models.ImageField(null=True, upload_to=b'', blank=True),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add the migration for the image field<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0001_profile'),
]
operations = [
migrations.AddField(
model_name='profile',
name='image',
field=models.ImageField(null=True, upload_to=b'', blank=True),
preserve_default=True,
),
]
|
Add the migration for the image field# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0001_profile'),
]
operations = [
migrations.AddField(
model_name='profile',
name='image',
field=models.ImageField(null=True, upload_to=b'', blank=True),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add the migration for the image field<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0001_profile'),
]
operations = [
migrations.AddField(
model_name='profile',
name='image',
field=models.ImageField(null=True, upload_to=b'', blank=True),
preserve_default=True,
),
]
|
|
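For reference, a hedged sketch of the model declaration this migration presumably mirrors (the commit only shows the migration; field options are inferred from the AddField above):

# Hypothetical Profile model matching the migration above.
from django.db import models

class Profile(models.Model):
    image = models.ImageField(upload_to='', null=True, blank=True)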
2901763a10aa318112efecc2b5a56d83391f4de5
|
pipelines/genome_scanner.py
|
pipelines/genome_scanner.py
|
"""
Copyright [2009-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
Pipeline to search each genome individually. This pipeline also parallelises the step
of splitting and indexing the genome files in smaller chunks
"""
# ---------------------------------IMPORTS-------------------------------------
import os
import luigi
import subprocess
from config import gen_config as gc
from utils import genome_search_utils as gsu
# add parent directory to path
if __name__ == '__main__' and __package__ is None:
os.sys.path.append(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# ----------------------------------TASKS--------------------------------------
class ScanGenome(luigi.Task):
"""
    Scan a single genome's download directory for ncRNAs with the chosen Infernal tool.
"""
updir = luigi.Parameter()
upid = luigi.Parameter()
tool = luigi.Parameter()
lsf = luigi.Parameter()
def run(self):
"""
        Launch the scan for this genome (currently only when the lsf flag is set).
"""
if self.lsf is True:
gsu.single_genome_scan_from_download_directory(self.updir,
self.upid, tool=self.tool)
# -----------------------------------------------------------------------------
class GenomeSearchEngine(luigi.Task):
"""
Launches genome search directly from the download directory which should
have the following structure project_dir/xxx/UPYYYYYYxxx
"""
project_dir = luigi.Parameter(description="Genome download project directory")
genome_list = luigi.Parameter(default=None,
description="A list of upids to search for ncRNAs")
tool = luigi.Parameter(default=None,
description="Infernal search tool (cmsearch/cmscan)")
lsf = luigi.BoolParameter(default=False,
description="Run pipeline on lsf, otherwise run locally")
def run(self):
"""
        Load the upid list and yield one ScanGenome task per genome.
"""
# load upids
upid_fp = open(self.genome_list, 'r')
upids = [x.strip() for x in upid_fp]
upid_fp.close()
for upid in upids:
# get updir location
subdir = os.path.join(self.project_dir, upid[-3:])
updir = os.path.join(subdir, upid)
yield ScanGenome(updir, upid, self.tool.lower(), self.lsf)
# -----------------------------------------------------------------------------
if __name__ == '__main__':
# defining pipeline's main task
luigi.run()
|
Add simple scanner pipeline to help with job submission
|
Add simple scanner pipeline to help with job submission
|
Python
|
apache-2.0
|
Rfam/rfam-production,Rfam/rfam-production,Rfam/rfam-production
|
Add simple scanner pipeline to help with job submission
|
"""
Copyright [2009-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
Pipeline to search each genome individually. This pipeline also parallelises the step
of splitting and indexing the genome files in smaller chunks
"""
# ---------------------------------IMPORTS-------------------------------------
import os
import luigi
import subprocess
from config import gen_config as gc
from utils import genome_search_utils as gsu
# add parent directory to path
if __name__ == '__main__' and __package__ is None:
os.sys.path.append(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# ----------------------------------TASKS--------------------------------------
class ScanGenome(luigi.Task):
"""
    Scan a single genome's download directory for ncRNAs with the chosen Infernal tool.
"""
updir = luigi.Parameter()
upid = luigi.Parameter()
tool = luigi.Parameter()
lsf = luigi.Parameter()
def run(self):
"""
        Launch the scan for this genome (currently only when the lsf flag is set).
"""
if self.lsf is True:
gsu.single_genome_scan_from_download_directory(self.updir,
self.upid, tool=self.tool)
# -----------------------------------------------------------------------------
class GenomeSearchEngine(luigi.Task):
"""
Launches genome search directly from the download directory which should
have the following structure project_dir/xxx/UPYYYYYYxxx
"""
project_dir = luigi.Parameter(description="Genome download project directory")
genome_list = luigi.Parameter(default=None,
description="A list of upids to search for ncRNAs")
tool = luigi.Parameter(default=None,
description="Infernal search tool (cmsearch/cmscan)")
lsf = luigi.BoolParameter(default=False,
description="Run pipeline on lsf, otherwise run locally")
def run(self):
"""
        Load the upid list and yield one ScanGenome task per genome.
"""
# load upids
upid_fp = open(self.genome_list, 'r')
upids = [x.strip() for x in upid_fp]
upid_fp.close()
for upid in upids:
# get updir location
subdir = os.path.join(self.project_dir, upid[-3:])
updir = os.path.join(subdir, upid)
yield ScanGenome(updir, upid, self.tool.lower(), self.lsf)
# -----------------------------------------------------------------------------
if __name__ == '__main__':
# defining pipeline's main task
luigi.run()
|
<commit_before><commit_msg>Add simple scanner pipeline to help with job submission<commit_after>
|
"""
Copyright [2009-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
Pipeline to search each genome individually. This pipeline also parallelises the step
of splitting and indexing the genome files in smaller chunks
"""
# ---------------------------------IMPORTS-------------------------------------
import os
import luigi
import subprocess
from config import gen_config as gc
from utils import genome_search_utils as gsu
# add parent directory to path
if __name__ == '__main__' and __package__ is None:
os.sys.path.append(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# ----------------------------------TASKS--------------------------------------
class ScanGenome(luigi.Task):
"""
    Scan a single genome's download directory for ncRNAs with the chosen Infernal tool.
"""
updir = luigi.Parameter()
upid = luigi.Parameter()
tool = luigi.Parameter()
lsf = luigi.Parameter()
def run(self):
"""
        Launch the scan for this genome (currently only when the lsf flag is set).
"""
if self.lsf is True:
gsu.single_genome_scan_from_download_directory(self.updir,
self.upid, tool=self.tool)
# -----------------------------------------------------------------------------
class GenomeSearchEngine(luigi.Task):
"""
Launches genome search directly from the download directory which should
have the following structure project_dir/xxx/UPYYYYYYxxx
"""
project_dir = luigi.Parameter(description="Genome download project directory")
genome_list = luigi.Parameter(default=None,
description="A list of upids to search for ncRNAs")
tool = luigi.Parameter(default=None,
description="Infernal search tool (cmsearch/cmscan)")
lsf = luigi.BoolParameter(default=False,
description="Run pipeline on lsf, otherwise run locally")
def run(self):
"""
        Load the upid list and yield one ScanGenome task per genome.
"""
# load upids
upid_fp = open(self.genome_list, 'r')
upids = [x.strip() for x in upid_fp]
upid_fp.close()
for upid in upids:
# get updir location
subdir = os.path.join(self.project_dir, upid[-3:])
updir = os.path.join(subdir, upid)
yield ScanGenome(updir, upid, self.tool.lower(), self.lsf)
# -----------------------------------------------------------------------------
if __name__ == '__main__':
# defining pipeline's main task
luigi.run()
|
Add simple scanner pipeline to help with job submission"""
Copyright [2009-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
Pipeline to search each genome individually. This pipeline also parallelises the step
of splitting and indexing the genome files in smaller chunks
"""
# ---------------------------------IMPORTS-------------------------------------
import os
import luigi
import subprocess
from config import gen_config as gc
from utils import genome_search_utils as gsu
# add parent directory to path
if __name__ == '__main__' and __package__ is None:
os.sys.path.append(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# ----------------------------------TASKS--------------------------------------
class ScanGenome(luigi.Task):
"""
    Scan a single genome's download directory for ncRNAs with the chosen Infernal tool.
"""
updir = luigi.Parameter()
upid = luigi.Parameter()
tool = luigi.Parameter()
lsf = luigi.Parameter()
def run(self):
"""
        Launch the scan for this genome (currently only when the lsf flag is set).
"""
if self.lsf is True:
gsu.single_genome_scan_from_download_directory(self.updir,
self.upid, tool=self.tool)
# -----------------------------------------------------------------------------
class GenomeSearchEngine(luigi.Task):
"""
Launches genome search directly from the download directory which should
have the following structure project_dir/xxx/UPYYYYYYxxx
"""
project_dir = luigi.Parameter(description="Genome download project directory")
genome_list = luigi.Parameter(default=None,
description="A list of upids to search for ncRNAs")
tool = luigi.Parameter(default=None,
description="Infernal search tool (cmsearch/cmscan)")
lsf = luigi.BoolParameter(default=False,
description="Run pipeline on lsf, otherwise run locally")
def run(self):
"""
        Load the upid list and yield one ScanGenome task per genome.
"""
# load upids
upid_fp = open(self.genome_list, 'r')
upids = [x.strip() for x in upid_fp]
upid_fp.close()
for upid in upids:
# get updir location
subdir = os.path.join(self.project_dir, upid[-3:])
updir = os.path.join(subdir, upid)
yield ScanGenome(updir, upid, self.tool.lower(), self.lsf)
# -----------------------------------------------------------------------------
if __name__ == '__main__':
# defining pipeline's main task
luigi.run()
|
<commit_before><commit_msg>Add simple scanner pipeline to help with job submission<commit_after>"""
Copyright [2009-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
Pipeline to search each genome individually. This pipeline also parallelises the step
of splitting and indexing the genome files in smaller chunks
"""
# ---------------------------------IMPORTS-------------------------------------
import os
import luigi
import subprocess
from config import gen_config as gc
from utils import genome_search_utils as gsu
# add parent directory to path
if __name__ == '__main__' and __package__ is None:
os.sys.path.append(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# ----------------------------------TASKS--------------------------------------
class ScanGenome(luigi.Task):
"""
    Scan a single genome's download directory for ncRNAs with the chosen Infernal tool.
"""
updir = luigi.Parameter()
upid = luigi.Parameter()
tool = luigi.Parameter()
lsf = luigi.Parameter()
def run(self):
"""
        Launch the scan for this genome (currently only when the lsf flag is set).
"""
if self.lsf is True:
gsu.single_genome_scan_from_download_directory(self.updir,
self.upid, tool=self.tool)
# -----------------------------------------------------------------------------
class GenomeSearchEngine(luigi.Task):
"""
Launches genome search directly from the download directory which should
have the following structure project_dir/xxx/UPYYYYYYxxx
"""
project_dir = luigi.Parameter(description="Genome download project directory")
genome_list = luigi.Parameter(default=None,
description="A list of upids to search for ncRNAs")
tool = luigi.Parameter(default=None,
description="Infernal search tool (cmsearch/cmscan)")
lsf = luigi.BoolParameter(default=False,
description="Run pipeline on lsf, otherwise run locally")
def run(self):
"""
        Load the upid list and yield one ScanGenome task per genome.
"""
# load upids
upid_fp = open(self.genome_list, 'r')
upids = [x.strip() for x in upid_fp]
upid_fp.close()
for upid in upids:
# get updir location
subdir = os.path.join(self.project_dir, upid[-3:])
updir = os.path.join(subdir, upid)
yield ScanGenome(updir, upid, self.tool.lower(), self.lsf)
# -----------------------------------------------------------------------------
if __name__ == '__main__':
# defining pipeline's main task
luigi.run()
|
|
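A hedged sketch of launching the pipeline above programmatically rather than through luigi.run() (paths and parameter values are placeholders; GenomeSearchEngine is assumed importable from the module above):

# Sketch only: run the search with luigi's local scheduler.
import luigi

luigi.build(
    [GenomeSearchEngine(project_dir='/path/to/genomes',
                        genome_list='/path/to/upids.txt',
                        tool='cmsearch',
                        lsf=False)],
    local_scheduler=True)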
0a3431a597cb0ff000431dcc79ceeeec3048a3c1
|
migrations/versions/953c5e0cebad_fix_the_type_of_the_updated_at_column_.py
|
migrations/versions/953c5e0cebad_fix_the_type_of_the_updated_at_column_.py
|
"""fix the type of the updated_at column for shelters
Revision ID: 953c5e0cebad
Revises: 9bfedc780ac5
Create Date: 2016-06-17 09:00:35.875583
"""
# revision identifiers, used by Alembic.
revision = '953c5e0cebad'
down_revision = '9bfedc780ac5'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from datetime import datetime
def upgrade():
op.drop_column('shelter', 'updated_at')
op.add_column('shelter', sa.Column('updated_at', sa.DateTime(),
default=datetime.now))
def downgrade():
op.drop_column('shelter', 'updated_at')
op.add_column('shelter', sa.Column('updated_at', sa.Boolean(),
default=datetime.now))
|
Add alembic script to fix the type of the column updated_at of shelters
|
Add alembic script to fix the type of the column updated_at of shelters
|
Python
|
mit
|
cedricbonhomme/shelter-database,cedricbonhomme/shelter-database,cedricbonhomme/shelter-database,cedricbonhomme/shelter-database
|
Add alembic script to fix the type of the column updated_at of shelters
|
"""fix the type of the updated_at column for shelters
Revision ID: 953c5e0cebad
Revises: 9bfedc780ac5
Create Date: 2016-06-17 09:00:35.875583
"""
# revision identifiers, used by Alembic.
revision = '953c5e0cebad'
down_revision = '9bfedc780ac5'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from datetime import datetime
def upgrade():
op.drop_column('shelter', 'updated_at')
op.add_column('shelter', sa.Column('updated_at', sa.DateTime(),
default=datetime.now))
def downgrade():
op.drop_column('shelter', 'updated_at')
op.add_column('shelter', sa.Column('updated_at', sa.Boolean(),
default=datetime.now))
|
<commit_before><commit_msg>Add alembic script to fix the type of the column updated_at of shelters<commit_after>
|
"""fix the type of the updated_at column for shelters
Revision ID: 953c5e0cebad
Revises: 9bfedc780ac5
Create Date: 2016-06-17 09:00:35.875583
"""
# revision identifiers, used by Alembic.
revision = '953c5e0cebad'
down_revision = '9bfedc780ac5'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from datetime import datetime
def upgrade():
op.drop_column('shelter', 'updated_at')
op.add_column('shelter', sa.Column('updated_at', sa.DateTime(),
default=datetime.now))
def downgrade():
op.drop_column('shelter', 'updated_at')
op.add_column('shelter', sa.Column('updated_at', sa.Boolean(),
default=datetime.now))
|
Add alembic script to fix the type of the column updated_at of shelters"""fix the type of the updated_at column for shelters
Revision ID: 953c5e0cebad
Revises: 9bfedc780ac5
Create Date: 2016-06-17 09:00:35.875583
"""
# revision identifiers, used by Alembic.
revision = '953c5e0cebad'
down_revision = '9bfedc780ac5'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from datetime import datetime
def upgrade():
op.drop_column('shelter', 'updated_at')
op.add_column('shelter', sa.Column('updated_at', sa.DateTime(),
default=datetime.now))
def downgrade():
op.drop_column('shelter', 'updated_at')
op.add_column('shelter', sa.Column('updated_at', sa.Boolean(),
default=datetime.now))
|
<commit_before><commit_msg>Add alembic script to fix the type of the column updated_at of shelters<commit_after>"""fix the type of the updated_at column for shelters
Revision ID: 953c5e0cebad
Revises: 9bfedc780ac5
Create Date: 2016-06-17 09:00:35.875583
"""
# revision identifiers, used by Alembic.
revision = '953c5e0cebad'
down_revision = '9bfedc780ac5'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from datetime import datetime
def upgrade():
op.drop_column('shelter', 'updated_at')
op.add_column('shelter', sa.Column('updated_at', sa.DateTime(),
default=datetime.now))
def downgrade():
op.drop_column('shelter', 'updated_at')
op.add_column('shelter', sa.Column('updated_at', sa.Boolean(),
default=datetime.now))
|
|
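Note: a Python-side default=datetime.now is only applied to rows inserted through the ORM; rows that already exist end up NULL after the ALTER. A sketch of the server-side alternative (a design option, not what the commit does):

# Sketch: let the database backfill the new column for existing rows.
import sqlalchemy as sa
from alembic import op

def upgrade():
    op.drop_column('shelter', 'updated_at')
    op.add_column('shelter', sa.Column('updated_at', sa.DateTime(),
                                       server_default=sa.func.now()))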
4a18d55f6d4785bc5e3f38b95ade854d5412b0df
|
fib-seq-recur.py
|
fib-seq-recur.py
|
# Implement fibonacci sequence function using recursion
def get_fib(position):
if position < 2: # base case
return position
else:
return get_fib(position-1) + get_fib(position-2) # add two previous numbers
|
Add function for python implementation of fibonacci sequence
|
Add function for python implementation of fibonacci sequence
|
Python
|
mit
|
derekmpham/interview-prep,derekmpham/interview-prep
|
Add function for python implementation of fibonacci sequence
|
# Implement fibonacci sequence function using recursion
def get_fib(position):
if position < 2: # base case
return position
else:
return get_fib(position-1) + get_fib(position-2) # add two previous numbers
|
<commit_before><commit_msg>Add function for python implementation of fibonacci sequence<commit_after>
|
# Implement fibonacci sequence function using recursion
def get_fib(position):
if position < 2: # base case
return position
else:
return get_fib(position-1) + get_fib(position-2) # add two previous numbers
|
Add function for python implementation of fibonacci sequence# Implement fibonacci sequence function using recursion
def get_fib(position):
if position < 2: # base case
return position
else:
return get_fib(position-1) + get_fib(position-2) # add two previous numbers
|
<commit_before><commit_msg>Add function for python implementation of fibonacci sequence<commit_after># Implement fibonacci sequence function using recursion
def get_fib(position):
if position < 2: # base case
return position
else:
return get_fib(position-1) + get_fib(position-2) # add two previous numbers
|
|
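The recursive get_fib above recomputes the same subproblems and runs in exponential time. A sketch of an O(n) iterative variant (function name ours, not from the commit):

# Iterative Fibonacci: linear time, constant space.
def get_fib_iterative(position):
    a, b = 0, 1
    for _ in range(position):
        a, b = b, a + b
    return a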
94855b850f9ad71375ce0792a7b94f0d0662b9c0
|
examples/with-descartes.py
|
examples/with-descartes.py
|
import logging
import sys
from matplotlib import pyplot
from descartes import PolygonPatch
from fiona import collection
BLUE = '#6699cc'
fig = pyplot.figure(1, figsize=(6, 6), dpi=90)
ax = fig.add_subplot(111)
input = collection("docs/data/test_uk.shp", "r")
for f in input:
patch = PolygonPatch(f['geometry'], fc=BLUE, ec=BLUE, alpha=0.5, zorder=2)
ax.add_patch(patch)
# Should be able to get extents from the collection in a future version
# of Fiona.
ax.set_xlim(-9.25, 2.75)
ax.set_ylim(49.5, 61.5)
fig.savefig('test_uk.png')
|
Add an example of interop with descartes and matplotlib
|
Add an example of interop with descartes and matplotlib
|
Python
|
bsd-3-clause
|
Toblerity/Fiona,rbuffat/Fiona,sgillies/Fiona,Toblerity/Fiona,johanvdw/Fiona,perrygeo/Fiona,rbuffat/Fiona,perrygeo/Fiona
|
Add an example of interop with descartes and matplotlib
|
import logging
import sys
from matplotlib import pyplot
from descartes import PolygonPatch
from fiona import collection
BLUE = '#6699cc'
fig = pyplot.figure(1, figsize=(6, 6), dpi=90)
ax = fig.add_subplot(111)
input = collection("docs/data/test_uk.shp", "r")
for f in input:
patch = PolygonPatch(f['geometry'], fc=BLUE, ec=BLUE, alpha=0.5, zorder=2)
ax.add_patch(patch)
# Should be able to get extents from the collection in a future version
# of Fiona.
ax.set_xlim(-9.25, 2.75)
ax.set_ylim(49.5, 61.5)
fig.savefig('test_uk.png')
|
<commit_before><commit_msg>Add an example of interop with descartes and matplotlib<commit_after>
|
import logging
import sys
from matplotlib import pyplot
from descartes import PolygonPatch
from fiona import collection
BLUE = '#6699cc'
fig = pyplot.figure(1, figsize=(6, 6), dpi=90)
ax = fig.add_subplot(111)
input = collection("docs/data/test_uk.shp", "r")
for f in input:
patch = PolygonPatch(f['geometry'], fc=BLUE, ec=BLUE, alpha=0.5, zorder=2)
ax.add_patch(patch)
# Should be able to get extents from the collection in a future version
# of Fiona.
ax.set_xlim(-9.25, 2.75)
ax.set_ylim(49.5, 61.5)
fig.savefig('test_uk.png')
|
Add an example of interop with descartes and matplotlib
import logging
import sys
from matplotlib import pyplot
from descartes import PolygonPatch
from fiona import collection
BLUE = '#6699cc'
fig = pyplot.figure(1, figsize=(6, 6), dpi=90)
ax = fig.add_subplot(111)
input = collection("docs/data/test_uk.shp", "r")
for f in input:
patch = PolygonPatch(f['geometry'], fc=BLUE, ec=BLUE, alpha=0.5, zorder=2)
ax.add_patch(patch)
# Should be able to get extents from the collection in a future version
# of Fiona.
ax.set_xlim(-9.25, 2.75)
ax.set_ylim(49.5, 61.5)
fig.savefig('test_uk.png')
|
<commit_before><commit_msg>Add an example of interop with descartes and matplotlib<commit_after>
import logging
import sys
from matplotlib import pyplot
from descartes import PolygonPatch
from fiona import collection
BLUE = '#6699cc'
fig = pyplot.figure(1, figsize=(6, 6), dpi=90)
ax = fig.add_subplot(111)
input = collection("docs/data/test_uk.shp", "r")
for f in input:
patch = PolygonPatch(f['geometry'], fc=BLUE, ec=BLUE, alpha=0.5, zorder=2)
ax.add_patch(patch)
# Should be able to get extents from the collection in a future version
# of Fiona.
ax.set_xlim(-9.25, 2.75)
ax.set_ylim(49.5, 61.5)
fig.savefig('test_uk.png')
|
|
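The example's inline comment anticipates reading extents from the collection; later Fiona releases do expose this as Collection.bounds. A sketch assuming such a version:

# Sketch (assumes a Fiona version providing Collection.bounds).
from fiona import collection

c = collection("docs/data/test_uk.shp", "r")
minx, miny, maxx, maxy = c.bounds  # extents of all features
c.close()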
b670f82fa6cd9781b325a7906238e4beee0975ab
|
examples/cortesi-config.py
|
examples/cortesi-config.py
|
import libqtile
keys = [
libqtile.Key(
["mod1"], "k",
libqtile.command.Call("max_next").when(layout="max"),
libqtile.command.Call("stack_down").when(layout="stack"),
),
libqtile.Key(
["mod1"], "j",
libqtile.command.Call("max_previous").when(layout="max"),
libqtile.command.Call("stack_up").when(layout="stack"),
),
libqtile.Key(
["mod1"], "space",
libqtile.command.Call("stack_next").when(layout="stack")
),
libqtile.Key(
["mod1", "shift"], "space",
libqtile.command.Call("stack_rotate").when(layout="stack")
),
libqtile.Key(["mod1"], "n", libqtile.command.Call("spawn", "firefox")),
libqtile.Key(["mod1"], "h", libqtile.command.Call("to_screen", 0)),
libqtile.Key(["mod1"], "l", libqtile.command.Call("to_screen", 1)),
libqtile.Key(["mod1"], "Return", libqtile.command.Call("spawn", "~/bin/x")),
libqtile.Key(["mod1"], "Tab", libqtile.command.Call("nextlayout")),
libqtile.Key(["mod1"], "w", libqtile.command.Call("kill")),
libqtile.Key(
["mod1", "shift"], "k",
libqtile.command.Call("spawn", "amixer -qc 0 set PCM 2dB+")
),
libqtile.Key(
["mod1", "shift"], "j",
libqtile.command.Call("spawn", "amixer -qc 0 set PCM 2dB-")
),
libqtile.Key(
["mod1", "shift"], "n",
libqtile.command.Call("spawn", "amarok -t")
),
libqtile.Key(
["mod1", "shift"], "l",
libqtile.command.Call("spawn", "amarok -f")
),
libqtile.Key(
["mod1", "shift"], "h",
libqtile.command.Call("spawn", "amarok -r")
),
]
groups = ["a", "s", "d", "f", "u", "i", "o", "p"]
for i in groups:
keys.append(
libqtile.Key(["mod1"], i, libqtile.command.Call("pullgroup", i))
)
layouts = [
libqtile.layout.Max(),
libqtile.layout.Stack()
]
commands = []
screens = [
libqtile.Screen(
bottom = libqtile.bar.Bar(
[
libqtile.bar.GroupBox(),
libqtile.bar.WindowName()
],
30,
),
),
libqtile.Screen(
bottom = libqtile.bar.Bar(
[
libqtile.bar.GroupBox(),
libqtile.bar.WindowName()
],
30,
),
)
]
|
Add my configuration file as an example.
|
Add my configuration file as an example.
|
Python
|
mit
|
kseistrup/qtile,kynikos/qtile,nxnfufunezn/qtile,kseistrup/qtile,jdowner/qtile,kynikos/qtile,jdowner/qtile,himaaaatti/qtile,StephenBarnes/qtile,qtile/qtile,soulchainer/qtile,tych0/qtile,cortesi/qtile,bavardage/qtile,dequis/qtile,xplv/qtile,aniruddhkanojia/qtile,farebord/qtile,kopchik/qtile,kopchik/qtile,xplv/qtile,andrewyoung1991/qtile,de-vri-es/qtile,cortesi/qtile,flacjacket/qtile,nxnfufunezn/qtile,zordsdavini/qtile,w1ndy/qtile,aniruddhkanojia/qtile,zordsdavini/qtile,andrewyoung1991/qtile,apinsard/qtile,soulchainer/qtile,tych0/qtile,flacjacket/qtile,kiniou/qtile,StephenBarnes/qtile,dequis/qtile,frostidaho/qtile,ramnes/qtile,encukou/qtile,encukou/qtile,qtile/qtile,w1ndy/qtile,apinsard/qtile,ramnes/qtile,himaaaatti/qtile,de-vri-es/qtile,EndPointCorp/qtile,rxcomm/qtile,kiniou/qtile,rxcomm/qtile,frostidaho/qtile,EndPointCorp/qtile,farebord/qtile
|
Add my configuration file as an example.
|
import libqtile
keys = [
libqtile.Key(
["mod1"], "k",
libqtile.command.Call("max_next").when(layout="max"),
libqtile.command.Call("stack_down").when(layout="stack"),
),
libqtile.Key(
["mod1"], "j",
libqtile.command.Call("max_previous").when(layout="max"),
libqtile.command.Call("stack_up").when(layout="stack"),
),
libqtile.Key(
["mod1"], "space",
libqtile.command.Call("stack_next").when(layout="stack")
),
libqtile.Key(
["mod1", "shift"], "space",
libqtile.command.Call("stack_rotate").when(layout="stack")
),
libqtile.Key(["mod1"], "n", libqtile.command.Call("spawn", "firefox")),
libqtile.Key(["mod1"], "h", libqtile.command.Call("to_screen", 0)),
libqtile.Key(["mod1"], "l", libqtile.command.Call("to_screen", 1)),
libqtile.Key(["mod1"], "Return", libqtile.command.Call("spawn", "~/bin/x")),
libqtile.Key(["mod1"], "Tab", libqtile.command.Call("nextlayout")),
libqtile.Key(["mod1"], "w", libqtile.command.Call("kill")),
libqtile.Key(
["mod1", "shift"], "k",
libqtile.command.Call("spawn", "amixer -qc 0 set PCM 2dB+")
),
libqtile.Key(
["mod1", "shift"], "j",
libqtile.command.Call("spawn", "amixer -qc 0 set PCM 2dB-")
),
libqtile.Key(
["mod1", "shift"], "n",
libqtile.command.Call("spawn", "amarok -t")
),
libqtile.Key(
["mod1", "shift"], "l",
libqtile.command.Call("spawn", "amarok -f")
),
libqtile.Key(
["mod1", "shift"], "h",
libqtile.command.Call("spawn", "amarok -r")
),
]
groups = ["a", "s", "d", "f", "u", "i", "o", "p"]
for i in groups:
keys.append(
libqtile.Key(["mod1"], i, libqtile.command.Call("pullgroup", i))
)
layouts = [
libqtile.layout.Max(),
libqtile.layout.Stack()
]
commands = []
screens = [
libqtile.Screen(
bottom = libqtile.bar.Bar(
[
libqtile.bar.GroupBox(),
libqtile.bar.WindowName()
],
30,
),
),
libqtile.Screen(
bottom = libqtile.bar.Bar(
[
libqtile.bar.GroupBox(),
libqtile.bar.WindowName()
],
30,
),
)
]
|
<commit_before><commit_msg>Add my configuration file as an example.<commit_after>
|
import libqtile
keys = [
libqtile.Key(
["mod1"], "k",
libqtile.command.Call("max_next").when(layout="max"),
libqtile.command.Call("stack_down").when(layout="stack"),
),
libqtile.Key(
["mod1"], "j",
libqtile.command.Call("max_previous").when(layout="max"),
libqtile.command.Call("stack_up").when(layout="stack"),
),
libqtile.Key(
["mod1"], "space",
libqtile.command.Call("stack_next").when(layout="stack")
),
libqtile.Key(
["mod1", "shift"], "space",
libqtile.command.Call("stack_rotate").when(layout="stack")
),
libqtile.Key(["mod1"], "n", libqtile.command.Call("spawn", "firefox")),
libqtile.Key(["mod1"], "h", libqtile.command.Call("to_screen", 0)),
libqtile.Key(["mod1"], "l", libqtile.command.Call("to_screen", 1)),
libqtile.Key(["mod1"], "Return", libqtile.command.Call("spawn", "~/bin/x")),
libqtile.Key(["mod1"], "Tab", libqtile.command.Call("nextlayout")),
libqtile.Key(["mod1"], "w", libqtile.command.Call("kill")),
libqtile.Key(
["mod1", "shift"], "k",
libqtile.command.Call("spawn", "amixer -qc 0 set PCM 2dB+")
),
libqtile.Key(
["mod1", "shift"], "j",
libqtile.command.Call("spawn", "amixer -qc 0 set PCM 2dB-")
),
libqtile.Key(
["mod1", "shift"], "n",
libqtile.command.Call("spawn", "amarok -t")
),
libqtile.Key(
["mod1", "shift"], "l",
libqtile.command.Call("spawn", "amarok -f")
),
libqtile.Key(
["mod1", "shift"], "h",
libqtile.command.Call("spawn", "amarok -r")
),
]
groups = ["a", "s", "d", "f", "u", "i", "o", "p"]
for i in groups:
keys.append(
libqtile.Key(["mod1"], i, libqtile.command.Call("pullgroup", i))
)
layouts = [
libqtile.layout.Max(),
libqtile.layout.Stack()
]
commands = []
screens = [
libqtile.Screen(
bottom = libqtile.bar.Bar(
[
libqtile.bar.GroupBox(),
libqtile.bar.WindowName()
],
30,
),
),
libqtile.Screen(
bottom = libqtile.bar.Bar(
[
libqtile.bar.GroupBox(),
libqtile.bar.WindowName()
],
30,
),
)
]
|
Add my configuration file as an example.import libqtile
keys = [
libqtile.Key(
["mod1"], "k",
libqtile.command.Call("max_next").when(layout="max"),
libqtile.command.Call("stack_down").when(layout="stack"),
),
libqtile.Key(
["mod1"], "j",
libqtile.command.Call("max_previous").when(layout="max"),
libqtile.command.Call("stack_up").when(layout="stack"),
),
libqtile.Key(
["mod1"], "space",
libqtile.command.Call("stack_next").when(layout="stack")
),
libqtile.Key(
["mod1", "shift"], "space",
libqtile.command.Call("stack_rotate").when(layout="stack")
),
libqtile.Key(["mod1"], "n", libqtile.command.Call("spawn", "firefox")),
libqtile.Key(["mod1"], "h", libqtile.command.Call("to_screen", 0)),
libqtile.Key(["mod1"], "l", libqtile.command.Call("to_screen", 1)),
libqtile.Key(["mod1"], "Return", libqtile.command.Call("spawn", "~/bin/x")),
libqtile.Key(["mod1"], "Tab", libqtile.command.Call("nextlayout")),
libqtile.Key(["mod1"], "w", libqtile.command.Call("kill")),
libqtile.Key(
["mod1", "shift"], "k",
libqtile.command.Call("spawn", "amixer -qc 0 set PCM 2dB+")
),
libqtile.Key(
["mod1", "shift"], "j",
libqtile.command.Call("spawn", "amixer -qc 0 set PCM 2dB-")
),
libqtile.Key(
["mod1", "shift"], "n",
libqtile.command.Call("spawn", "amarok -t")
),
libqtile.Key(
["mod1", "shift"], "l",
libqtile.command.Call("spawn", "amarok -f")
),
libqtile.Key(
["mod1", "shift"], "h",
libqtile.command.Call("spawn", "amarok -r")
),
]
groups = ["a", "s", "d", "f", "u", "i", "o", "p"]
for i in groups:
keys.append(
libqtile.Key(["mod1"], i, libqtile.command.Call("pullgroup", i))
)
layouts = [
libqtile.layout.Max(),
libqtile.layout.Stack()
]
commands = []
screens = [
libqtile.Screen(
bottom = libqtile.bar.Bar(
[
libqtile.bar.GroupBox(),
libqtile.bar.WindowName()
],
30,
),
),
libqtile.Screen(
bottom = libqtile.bar.Bar(
[
libqtile.bar.GroupBox(),
libqtile.bar.WindowName()
],
30,
),
)
]
|
<commit_before><commit_msg>Add my configuration file as an example.<commit_after>import libqtile
keys = [
libqtile.Key(
["mod1"], "k",
libqtile.command.Call("max_next").when(layout="max"),
libqtile.command.Call("stack_down").when(layout="stack"),
),
libqtile.Key(
["mod1"], "j",
libqtile.command.Call("max_previous").when(layout="max"),
libqtile.command.Call("stack_up").when(layout="stack"),
),
libqtile.Key(
["mod1"], "space",
libqtile.command.Call("stack_next").when(layout="stack")
),
libqtile.Key(
["mod1", "shift"], "space",
libqtile.command.Call("stack_rotate").when(layout="stack")
),
libqtile.Key(["mod1"], "n", libqtile.command.Call("spawn", "firefox")),
libqtile.Key(["mod1"], "h", libqtile.command.Call("to_screen", 0)),
libqtile.Key(["mod1"], "l", libqtile.command.Call("to_screen", 1)),
libqtile.Key(["mod1"], "Return", libqtile.command.Call("spawn", "~/bin/x")),
libqtile.Key(["mod1"], "Tab", libqtile.command.Call("nextlayout")),
libqtile.Key(["mod1"], "w", libqtile.command.Call("kill")),
libqtile.Key(
["mod1", "shift"], "k",
libqtile.command.Call("spawn", "amixer -qc 0 set PCM 2dB+")
),
libqtile.Key(
["mod1", "shift"], "j",
libqtile.command.Call("spawn", "amixer -qc 0 set PCM 2dB-")
),
libqtile.Key(
["mod1", "shift"], "n",
libqtile.command.Call("spawn", "amarok -t")
),
libqtile.Key(
["mod1", "shift"], "l",
libqtile.command.Call("spawn", "amarok -f")
),
libqtile.Key(
["mod1", "shift"], "h",
libqtile.command.Call("spawn", "amarok -r")
),
]
groups = ["a", "s", "d", "f", "u", "i", "o", "p"]
for i in groups:
keys.append(
libqtile.Key(["mod1"], i, libqtile.command.Call("pullgroup", i))
)
layouts = [
libqtile.layout.Max(),
libqtile.layout.Stack()
]
commands = []
screens = [
libqtile.Screen(
bottom = libqtile.bar.Bar(
[
libqtile.bar.GroupBox(),
libqtile.bar.WindowName()
],
30,
),
),
libqtile.Screen(
bottom = libqtile.bar.Bar(
[
libqtile.bar.GroupBox(),
libqtile.bar.WindowName()
],
30,
),
)
]
|
|
49fbd4c43465888d706d336c78f187c3849539e4
|
hiora_cartpole/fourier_fa.py
|
hiora_cartpole/fourier_fa.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import itertools
import numpy as np
def make_feature_vec(state_ranges, order):
"""
Arguments:
state_ranges – (2, n_dims) minima and maxima of possible state values
n_acts – int, number of actions that can happen
order – int, order of the Fourier basis
Credits:
- http://psthomas.com/papers/Konidaris2011a.pdf
- https://github.com/amarack/python-rl/blob/master/pyrl/basis/fourier.py
"""
    n_dims = state_ranges.shape[1]
    n_entries = (order + 1)**n_dims
# All entries from cartesian product {0, …, order+1}^n_dims.
c_matrix = np.array(
list( itertools.product(range(order+1), repeat=n_dims) ),
dtype=np.int32)
def feature_vec_dot_inner(state, action, weights):
"""
Arguments:
action - int in {0, …, number of possible actions}
"""
# Note: With the default C/row-major format it should be faster to put
# the c₁, c₂, … in the rows of the matrix.
# Bring all state input into the range [0, 1], the input range of the
# Fourier basis functions.
        normalized_state = (state - state_ranges[0]) \
                               / np.diff(state_ranges, axis=0)
# Dot products of the feature vector with every c. → shape (n_entries,)
dot_prods = np.dot(c_matrix, normalized_state.transpose())[:,0]
# Apply Fourier basis functions.
feature_v = np.cos(np.pi * dot_prods)
# Sum up results, weighted, to give Fourier val.
        return np.dot(weights[action * n_entries:(action + 1) * n_entries],\
feature_v)
return feature_vec_dot_inner
|
Add untested Fourier linear function approximator
|
Add untested Fourier linear function approximator
|
Python
|
mit
|
rmoehn/cartpole
|
Add untested Fourier linear function approximator
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import itertools
import numpy as np
def make_feature_vec(state_ranges, order):
"""
Arguments:
state_ranges – (2, n_dims) minima and maxima of possible state values
n_acts – int, number of actions that can happen
order – int, order of the Fourier basis
Credits:
- http://psthomas.com/papers/Konidaris2011a.pdf
- https://github.com/amarack/python-rl/blob/master/pyrl/basis/fourier.py
"""
    n_dims = state_ranges.shape[1]
    n_entries = (order + 1)**n_dims
# All entries from cartesian product {0, …, order+1}^n_dims.
c_matrix = np.array(
list( itertools.product(range(order+1), repeat=n_dims) ),
dtype=np.int32)
def feature_vec_dot_inner(state, action, weights):
"""
Arguments:
action - int in {0, …, number of possible actions}
"""
# Note: With the default C/row-major format it should be faster to put
# the c₁, c₂, … in the rows of the matrix.
# Bring all state input into the range [0, 1], the input range of the
# Fourier basis functions.
        normalized_state = (state - state_ranges[0]) \
                               / np.diff(state_ranges, axis=0)
# Dot products of the feature vector with every c. → shape (n_entries,)
dot_prods = np.dot(c_matrix, normalized_state.transpose())[:,0]
# Apply Fourier basis functions.
feature_v = np.cos(np.pi * dot_prods)
# Sum up results, weighted, to give Fourier val.
        return np.dot(weights[action * n_entries:(action + 1) * n_entries],\
feature_v)
return feature_vec_dot_inner
|
<commit_before><commit_msg>Add untested Fourier linear function approximator<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import itertools
import numpy as np
def make_feature_vec(state_ranges, order):
"""
Arguments:
state_ranges – (2, n_dims) minima and maxima of possible state values
n_acts – int, number of actions that can happen
order – int, order of the Fourier basis
Credits:
- http://psthomas.com/papers/Konidaris2011a.pdf
- https://github.com/amarack/python-rl/blob/master/pyrl/basis/fourier.py
"""
    n_dims = state_ranges.shape[1]
    n_entries = (order + 1)**n_dims
# All entries from cartesian product {0, …, order+1}^n_dims.
c_matrix = np.array(
list( itertools.product(range(order+1), repeat=n_dims) ),
dtype=np.int32)
def feature_vec_dot_inner(state, action, weights):
"""
Arguments:
action - int in {0, …, number of possible actions}
"""
# Note: With the default C/row-major format it should be faster to put
# the c₁, c₂, … in the rows of the matrix.
# Bring all state input into the range [0, 1], the input range of the
# Fourier basis functions.
        normalized_state = (state - state_ranges[0]) \
                               / np.diff(state_ranges, axis=0)
# Dot products of the feature vector with every c. → shape (n_entries,)
dot_prods = np.dot(c_matrix, normalized_state.transpose())[:,0]
# Apply Fourier basis functions.
feature_v = np.cos(np.pi * dot_prods)
# Sum up results, weighted, to give Fourier val.
        return np.dot(weights[action * n_entries:(action + 1) * n_entries],\
feature_v)
return feature_vec_dot_inner
|
Add untested Fourier linear function approximator# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import itertools
import numpy as np
def make_feature_vec(state_ranges, order):
"""
Arguments:
state_ranges – (2, n_dims) minima and maxima of possible state values
n_acts – int, number of actions that can happen
order – int, order of the Fourier basis
Credits:
- http://psthomas.com/papers/Konidaris2011a.pdf
- https://github.com/amarack/python-rl/blob/master/pyrl/basis/fourier.py
"""
    n_dims = state_ranges.shape[1]
    n_entries = (order + 1)**n_dims
# All entries from cartesian product {0, …, order+1}^n_dims.
c_matrix = np.array(
list( itertools.product(range(order+1), repeat=n_dims) ),
dtype=np.int32)
def feature_vec_dot_inner(state, action, weights):
"""
Arguments:
action - int in {0, …, number of possible actions}
"""
# Note: With the default C/row-major format it should be faster to put
# the c₁, c₂, … in the rows of the matrix.
# Bring all state input into the range [0, 1], the input range of the
# Fourier basis functions.
        normalized_state = (state - state_ranges[0]) \
                               / np.diff(state_ranges, axis=0)
# Dot products of the feature vector with every c. → shape (n_entries,)
dot_prods = np.dot(c_matrix, normalized_state.transpose())[:,0]
# Apply Fourier basis functions.
feature_v = np.cos(np.pi * dot_prods)
# Sum up results, weighted, to give Fourier val.
        return np.dot(weights[action * n_entries:(action + 1) * n_entries],\
feature_v)
return feature_vec_dot_inner
|
<commit_before><commit_msg>Add untested Fourier linear function approximator<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import itertools
import numpy as np
def make_feature_vec(state_ranges, order):
"""
Arguments:
state_ranges – (2, n_dims) minima and maxima of possible state values
n_acts – int, number of actions that can happen
order – int, order of the Fourier basis
Credits:
- http://psthomas.com/papers/Konidaris2011a.pdf
- https://github.com/amarack/python-rl/blob/master/pyrl/basis/fourier.py
"""
    n_dims = state_ranges.shape[1]
    n_entries = (order + 1)**n_dims
# All entries from cartesian product {0, …, order+1}^n_dims.
c_matrix = np.array(
list( itertools.product(range(order+1), repeat=n_dims) ),
dtype=np.int32)
def feature_vec_dot_inner(state, action, weights):
"""
Arguments:
action - int in {0, …, number of possible actions}
"""
# Note: With the default C/row-major format it should be faster to put
# the c₁, c₂, … in the rows of the matrix.
# Bring all state input into the range [0, 1], the input range of the
# Fourier basis functions.
        normalized_state = (state - state_ranges[0]) \
                               / np.diff(state_ranges, axis=0)
# Dot products of the feature vector with every c. → shape (n_entries,)
dot_prods = np.dot(c_matrix, normalized_state.transpose())[:,0]
# Apply Fourier basis functions.
feature_v = np.cos(np.pi * dot_prods)
# Sum up results, weighted, to give Fourier val.
        return np.dot(weights[action * n_entries:(action + 1) * n_entries],\
feature_v)
return feature_vec_dot_inner
|
|
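A hedged usage sketch for make_feature_vec above, on a toy 1-D state space with two actions (per-action weight blocks have (order + 1)**n_dims entries, matching the code; make_feature_vec is assumed importable from the module above):

# Sketch: evaluate the approximate value of (state, action).
import numpy as np

state_ranges = np.array([[-1.0], [1.0]])   # (2, n_dims), here n_dims = 1
order = 3
feature_vec = make_feature_vec(state_ranges, order)
n_entries = (order + 1) ** state_ranges.shape[1]
weights = np.zeros(2 * n_entries)          # two actions
value = feature_vec(np.array([0.5]), 0, weights)  # value of action 0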
5fc353fed4c839a050ef986a39fbc63d5464f492
|
scripts/tmp/rewrite.py
|
scripts/tmp/rewrite.py
|
import json
import os
import re
def is_enum(pattern):
return pattern[0] == '^' and pattern[-1] == '$' and '[' not in pattern
def unpack_enum(pattern):
if '(' not in pattern:
return [pattern[1:-1]]
add_empty = False
if pattern[-2] == '?':
pattern = pattern[:-1]
add_empty = True
parts = pattern[2:-2].split('|')
# Add empty strings where allowed
if '' in parts:
add_empty = True
    parts = list(filter(None, parts))
if add_empty:
parts = [''] + parts
return parts
def match_pattern(pattern):
return r'"type": "string",\s+"pattern": "{}"'.format(re.escape(pattern))
def fix_field(raw_data, value):
"""Detect enums and replace them in the raw JSON to avoid reordering"""
if 'pattern' in value and is_enum(value['pattern']):
enum_json = json.dumps(unpack_enum(value['pattern']))
raw_data = re.sub(match_pattern(value['pattern']),
'"enum": {}'.format(enum_json),
raw_data,
flags=re.DOTALL)
return raw_data
def fix_properties(raw_data, properties):
for key, value in properties.items():
raw_data = fix_field(raw_data, value)
if 'oneOf' in value:
for item in value['oneOf']:
if 'items' in item:
raw_data = fix_field(raw_data, item['items'])
elif 'items' in value:
raw_data = fix_field(raw_data, value['items'])
elif 'properties' in value:
raw_data = fix_properties(raw_data, value['properties'])
return raw_data
for filename in os.listdir('json_schemas'):
if not filename.endswith('.json'):
continue
path = os.path.join('json_schemas', filename)
with open(path) as f:
raw_data = f.read()
data = json.loads(raw_data)
print(path)
raw_data = fix_properties(raw_data, data['properties'])
with open(path, 'w') as f:
f.write(raw_data)
|
Add script to convert regex to enum in schemas
|
Add script to convert regex to enum in schemas
This script is just to record how regular expression patterns describing
choices were converted into enums in the json schemas.
|
Python
|
mit
|
RichardKnop/digitalmarketplace-api,mtekel/digitalmarketplace-api,mtekel/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,mtekel/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,alphagov/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,mtekel/digitalmarketplace-api
|
Add script to convert regex to enum in schemas
This script is just to record how regular expression patterns describing
choices were converted into enums in the json schemas.
|
import json
import os
import re
def is_enum(pattern):
return pattern[0] == '^' and pattern[-1] == '$' and '[' not in pattern
def unpack_enum(pattern):
if '(' not in pattern:
return [pattern[1:-1]]
add_empty = False
if pattern[-2] == '?':
pattern = pattern[:-1]
add_empty = True
parts = pattern[2:-2].split('|')
# Add empty strings where allowed
if '' in parts:
add_empty = True
    parts = list(filter(None, parts))
if add_empty:
parts = [''] + parts
return parts
def match_pattern(pattern):
return r'"type": "string",\s+"pattern": "{}"'.format(re.escape(pattern))
def fix_field(raw_data, value):
"""Detect enums and replace them in the raw JSON to avoid reordering"""
if 'pattern' in value and is_enum(value['pattern']):
enum_json = json.dumps(unpack_enum(value['pattern']))
raw_data = re.sub(match_pattern(value['pattern']),
'"enum": {}'.format(enum_json),
raw_data,
flags=re.DOTALL)
return raw_data
def fix_properties(raw_data, properties):
for key, value in properties.items():
raw_data = fix_field(raw_data, value)
if 'oneOf' in value:
for item in value['oneOf']:
if 'items' in item:
raw_data = fix_field(raw_data, item['items'])
elif 'items' in value:
raw_data = fix_field(raw_data, value['items'])
elif 'properties' in value:
raw_data = fix_properties(raw_data, value['properties'])
return raw_data
for filename in os.listdir('json_schemas'):
if not filename.endswith('.json'):
continue
path = os.path.join('json_schemas', filename)
with open(path) as f:
raw_data = f.read()
data = json.loads(raw_data)
print(path)
raw_data = fix_properties(raw_data, data['properties'])
with open(path, 'w') as f:
f.write(raw_data)
|
<commit_before><commit_msg>Add script to convert regex to enum in schemas
This script is just to record how regular expression patterns describing
choices were converted into enums in the json schemas.<commit_after>
|
import json
import os
import re
def is_enum(pattern):
return pattern[0] == '^' and pattern[-1] == '$' and '[' not in pattern
def unpack_enum(pattern):
if '(' not in pattern:
return [pattern[1:-1]]
add_empty = False
if pattern[-2] == '?':
pattern = pattern[:-1]
add_empty = True
parts = pattern[2:-2].split('|')
# Add empty strings where allowed
if '' in parts:
add_empty = True
    parts = list(filter(None, parts))
if add_empty:
parts = [''] + parts
return parts
def match_pattern(pattern):
return r'"type": "string",\s+"pattern": "{}"'.format(re.escape(pattern))
def fix_field(raw_data, value):
"""Detect enums and replace them in the raw JSON to avoid reordering"""
if 'pattern' in value and is_enum(value['pattern']):
enum_json = json.dumps(unpack_enum(value['pattern']))
raw_data = re.sub(match_pattern(value['pattern']),
'"enum": {}'.format(enum_json),
raw_data,
flags=re.DOTALL)
return raw_data
def fix_properties(raw_data, properties):
for key, value in properties.items():
raw_data = fix_field(raw_data, value)
if 'oneOf' in value:
for item in value['oneOf']:
if 'items' in item:
raw_data = fix_field(raw_data, item['items'])
elif 'items' in value:
raw_data = fix_field(raw_data, value['items'])
elif 'properties' in value:
raw_data = fix_properties(raw_data, value['properties'])
return raw_data
for filename in os.listdir('json_schemas'):
if not filename.endswith('.json'):
continue
path = os.path.join('json_schemas', filename)
with open(path) as f:
raw_data = f.read()
data = json.loads(raw_data)
print(path)
raw_data = fix_properties(raw_data, data['properties'])
with open(path, 'w') as f:
f.write(raw_data)
|
Add script to convert regex to enum in schemas
This script is just to record how regular expression patterns describing
choices were converted into enums in the json schemas.import json
import os
import re
def is_enum(pattern):
return pattern[0] == '^' and pattern[-1] == '$' and '[' not in pattern
def unpack_enum(pattern):
if '(' not in pattern:
return [pattern[1:-1]]
add_empty = False
if pattern[-2] == '?':
pattern = pattern[:-1]
add_empty = True
parts = pattern[2:-2].split('|')
# Add empty strings where allowed
if '' in parts:
add_empty = True
    parts = list(filter(None, parts))
if add_empty:
parts = [''] + parts
return parts
def match_pattern(pattern):
return r'"type": "string",\s+"pattern": "{}"'.format(re.escape(pattern))
def fix_field(raw_data, value):
"""Detect enums and replace them in the raw JSON to avoid reordering"""
if 'pattern' in value and is_enum(value['pattern']):
enum_json = json.dumps(unpack_enum(value['pattern']))
raw_data = re.sub(match_pattern(value['pattern']),
'"enum": {}'.format(enum_json),
raw_data,
flags=re.DOTALL)
return raw_data
def fix_properties(raw_data, properties):
for key, value in properties.items():
raw_data = fix_field(raw_data, value)
if 'oneOf' in value:
for item in value['oneOf']:
if 'items' in item:
raw_data = fix_field(raw_data, item['items'])
elif 'items' in value:
raw_data = fix_field(raw_data, value['items'])
elif 'properties' in value:
raw_data = fix_properties(raw_data, value['properties'])
return raw_data
for filename in os.listdir('json_schemas'):
if not filename.endswith('.json'):
continue
path = os.path.join('json_schemas', filename)
with open(path) as f:
raw_data = f.read()
data = json.loads(raw_data)
print(path)
raw_data = fix_properties(raw_data, data['properties'])
with open(path, 'w') as f:
f.write(raw_data)
|
<commit_before><commit_msg>Add script to convert regex to enum in schemas
This script is just to record how regular expression patterns describing
choices were converted into enums in the json schemas.<commit_after>import json
import os
import re
def is_enum(pattern):
return pattern[0] == '^' and pattern[-1] == '$' and '[' not in pattern
def unpack_enum(pattern):
if '(' not in pattern:
return [pattern[1:-1]]
add_empty = False
if pattern[-2] == '?':
pattern = pattern[:-1]
add_empty = True
parts = pattern[2:-2].split('|')
# Add empty strings where allowed
if '' in parts:
add_empty = True
    parts = list(filter(None, parts))
if add_empty:
parts = [''] + parts
return parts
def match_pattern(pattern):
return r'"type": "string",\s+"pattern": "{}"'.format(re.escape(pattern))
def fix_field(raw_data, value):
"""Detect enums and replace them in the raw JSON to avoid reordering"""
if 'pattern' in value and is_enum(value['pattern']):
enum_json = json.dumps(unpack_enum(value['pattern']))
raw_data = re.sub(match_pattern(value['pattern']),
'"enum": {}'.format(enum_json),
raw_data,
flags=re.DOTALL)
return raw_data
def fix_properties(raw_data, properties):
for key, value in properties.items():
raw_data = fix_field(raw_data, value)
if 'oneOf' in value:
for item in value['oneOf']:
if 'items' in item:
raw_data = fix_field(raw_data, item['items'])
elif 'items' in value:
raw_data = fix_field(raw_data, value['items'])
elif 'properties' in value:
raw_data = fix_properties(raw_data, value['properties'])
return raw_data
for filename in os.listdir('json_schemas'):
if not filename.endswith('.json'):
continue
path = os.path.join('json_schemas', filename)
with open(path) as f:
raw_data = f.read()
data = json.loads(raw_data)
print(path)
raw_data = fix_properties(raw_data, data['properties'])
with open(path, 'w') as f:
f.write(raw_data)
|
|
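For reference, the pattern shapes unpack_enum above is written to handle (asserts traced against the script's logic, not taken from the commit):

# Illustrative behaviour of unpack_enum from the script above.
assert unpack_enum('^yes$') == ['yes']
assert unpack_enum('^(big|small)$') == ['big', 'small']
assert unpack_enum('^(big|small)?$') == ['', 'big', 'small']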
1beb325fb2bf61689db6fc20ccb829285165227d
|
helper/git_clean_keep_ide_settings.py
|
helper/git_clean_keep_ide_settings.py
|
import subprocess
# Keep the following directories
# - .vscode (Visual Studio code project settings)
# - .vs (Visual Studio project files)
# - .idea (PyCharm project files)
subprocess.run(["git", "clean", "-dfx", "-e", ".vscode", "-e", ".idea", "-e", ".vs"], check=True)
|
Add helper for git clean that keeps the IDE folders
|
Add helper for git clean that keeps the IDE folders
|
Python
|
apache-2.0
|
MSeifert04/iteration_utilities,MSeifert04/iteration_utilities,MSeifert04/iteration_utilities,MSeifert04/iteration_utilities
|
Add helper for git clean that keeps the IDE folders
|
import subprocess
# Keep the following directories
# - .vscode (Visual Studio code project settings)
# - .vs (Visual Studio project files)
# - .idea (PyCharm project files)
subprocess.run(["git", "clean", "-dfx", "-e", ".vscode", "-e", ".idea", "-e", ".vs"], check=True)
|
<commit_before><commit_msg>Add helper for git clean that keeps the IDE folders<commit_after>
|
import subprocess
# Keep the following directories
# - .vscode (Visual Studio code project settings)
# - .vs (Visual Studio project files)
# - .idea (PyCharm project files)
subprocess.run(["git", "clean", "-dfx", "-e", ".vscode", "-e", ".idea", "-e", ".vs"], check=True)
|
Add helper for git clean that keeps the IDE foldersimport subprocess
# Keep the following directories
# - .vscode (Visual Studio code project settings)
# - .vs (Visual Studio project files)
# - .idea (PyCharm project files)
subprocess.run(["git", "clean", "-dfx", "-e", ".vscode", "-e", ".idea", "-e", ".vs"], check=True)
|
<commit_before><commit_msg>Add helper for git clean that keeps the IDE folders<commit_after>import subprocess
# Keep the following directories
# - .vscode (Visual Studio code project settings)
# - .vs (Visual Studio project files)
# - .idea (PyCharm project files)
subprocess.run(["git", "clean", "-dfx", "-e", ".vscode", "-e", ".idea", "-e", ".vs"], check=True)
|
|
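A cautious companion to the helper above: git clean supports a dry run, so a preview pass can be made before deleting anything (flag choice ours):

# Sketch: -n (dry run) lists what would be removed, deleting nothing.
import subprocess

KEEP = ["-e", ".vscode", "-e", ".idea", "-e", ".vs"]
subprocess.run(["git", "clean", "-dnx"] + KEEP, check=True)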
73111cf0f1ab101ad0c75ab23de6e121a8eb656f
|
scripts/profile_generate.py
|
scripts/profile_generate.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from logya.generate import Generate
import cProfile, pstats, io
from pstats import SortKey
pr = cProfile.Profile()
pr.enable()
Generate(verbose=True)
pr.disable()
s = io.StringIO()
ps = pstats.Stats(pr, stream=s).sort_stats(SortKey.CUMULATIVE)
ps.print_stats()
print(s.getvalue())
|
Add script for profiling the generate process.
|
Add script for profiling the generate process.
|
Python
|
mit
|
elaOnMars/logya,elaOnMars/logya,yaph/logya,yaph/logya,elaOnMars/logya
|
Add script for profiling the generate process.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from logya.generate import Generate
import cProfile, pstats, io
from pstats import SortKey
pr = cProfile.Profile()
pr.enable()
Generate(verbose=True)
pr.disable()
s = io.StringIO()
ps = pstats.Stats(pr, stream=s).sort_stats(SortKey.CUMULATIVE)
ps.print_stats()
print(s.getvalue())
|
<commit_before><commit_msg>Add script for profiling the generate process.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from logya.generate import Generate
import cProfile, pstats, io
from pstats import SortKey
pr = cProfile.Profile()
pr.enable()
Generate(verbose=True)
pr.disable()
s = io.StringIO()
ps = pstats.Stats(pr, stream=s).sort_stats(SortKey.CUMULATIVE)
ps.print_stats()
print(s.getvalue())
|
Add script for profiling the generate process.#!/usr/bin/env python
# -*- coding: utf-8 -*-
from logya.generate import Generate
import cProfile, pstats, io
from pstats import SortKey
pr = cProfile.Profile()
pr.enable()
Generate(verbose=True)
pr.disable()
s = io.StringIO()
ps = pstats.Stats(pr, stream=s).sort_stats(SortKey.CUMULATIVE)
ps.print_stats()
print(s.getvalue())
|
<commit_before><commit_msg>Add script for profiling the generate process.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from logya.generate import Generate
import cProfile, pstats, io
from pstats import SortKey
pr = cProfile.Profile()
pr.enable()
Generate(verbose=True)
pr.disable()
s = io.StringIO()
ps = pstats.Stats(pr, stream=s).sort_stats(SortKey.CUMULATIVE)
ps.print_stats()
print(s.getvalue())
|
|
5073d3da07cff017cc567ebc73f06d862c21c839
|
tests/extmod/zlib_decompress.py
|
tests/extmod/zlib_decompress.py
|
try:
import zlib
except ImportError:
import zlibd as zlib
PATTERNS = [
# Packed results produced by CPy's zlib.compress()
(b'0', b'x\x9c3\x00\x00\x001\x001'),
(b'a', b'x\x9cK\x04\x00\x00b\x00b'),
(b'0' * 100, b'x\x9c30\xa0=\x00\x00\xb3q\x12\xc1'),
(bytes(range(64)), b'x\x9cc`dbfaec\xe7\xe0\xe4\xe2\xe6\xe1\xe5\xe3\x17\x10\x14\x12\x16\x11\x15\x13\x97\x90\x94\x92\x96\x91\x95\x93WPTRVQUS\xd7\xd0\xd4\xd2\xd6\xd1\xd5\xd370426153\xb7\xb0\xb4\xb2\xb6\xb1\xb5\xb3\x07\x00\xaa\xe0\x07\xe1'),
]
for unpacked, packed in PATTERNS:
assert zlib.decompress(packed) == unpacked
print(unpacked)
|
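Because the fixtures are plain zlib streams, they can be regenerated under CPython with the sketch below; the exact bytes may differ across zlib builds or compression levels, but any such stream still decompresses to the same input:
import zlib
# Prints (input, packed) pairs matching the fixture table above.
for unpacked in [b'0', b'a', b'0' * 100, bytes(range(64))]:
    print(unpacked, zlib.compress(unpacked))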
Add test for zlibd module.
|
tests: Add test for zlibd module.
|
Python
|
mit
|
turbinenreiter/micropython,cwyark/micropython,ryannathans/micropython,alex-robbins/micropython,pfalcon/micropython,dhylands/micropython,warner83/micropython,henriknelson/micropython,AriZuu/micropython,mhoffma/micropython,adafruit/micropython,noahchense/micropython,adafruit/micropython,AriZuu/micropython,tobbad/micropython,ruffy91/micropython,kostyll/micropython,rubencabrera/micropython,slzatz/micropython,oopy/micropython,lowRISC/micropython,henriknelson/micropython,KISSMonX/micropython,KISSMonX/micropython,ChuckM/micropython,omtinez/micropython,chrisdearman/micropython,HenrikSolver/micropython,feilongfl/micropython,dmazzella/micropython,noahchense/micropython,mhoffma/micropython,adamkh/micropython,cloudformdesign/micropython,drrk/micropython,torwag/micropython,selste/micropython,micropython/micropython-esp32,trezor/micropython,pozetroninc/micropython,ruffy91/micropython,adamkh/micropython,EcmaXp/micropython,toolmacher/micropython,warner83/micropython,micropython/micropython-esp32,SHA2017-badge/micropython-esp32,skybird6672/micropython,supergis/micropython,ahotam/micropython,xhat/micropython,methoxid/micropystat,skybird6672/micropython,trezor/micropython,swegener/micropython,kostyll/micropython,oopy/micropython,mgyenik/micropython,lbattraw/micropython,matthewelse/micropython,pfalcon/micropython,pramasoul/micropython,danicampora/micropython,neilh10/micropython,micropython/micropython-esp32,vitiral/micropython,aethaniel/micropython,kostyll/micropython,kerneltask/micropython,praemdonck/micropython,chrisdearman/micropython,ganshun666/micropython,MrSurly/micropython-esp32,deshipu/micropython,tralamazza/micropython,TDAbboud/micropython,ceramos/micropython,rubencabrera/micropython,warner83/micropython,bvernoux/micropython,PappaPeppar/micropython,bvernoux/micropython,lowRISC/micropython,adafruit/micropython,omtinez/micropython,turbinenreiter/micropython,tralamazza/micropython,HenrikSolver/micropython,pramasoul/micropython,mpalomer/micropython,lbattraw/micropython,alex-march/micropython,heisewangluo/micropython,xyb/micropython,henriknelson/micropython,swegener/micropython,firstval/micropython,Timmenem/micropython,KISSMonX/micropython,henriknelson/micropython,mhoffma/micropython,pfalcon/micropython,SungEun-Steve-Kim/test-mp,slzatz/micropython,martinribelotta/micropython,tuc-osg/micropython,matthewelse/micropython,SungEun-Steve-Kim/test-mp,lbattraw/micropython,ernesto-g/micropython,kostyll/micropython,paul-xxx/micropython,ganshun666/micropython,mianos/micropython,ahotam/micropython,MrSurly/micropython-esp32,orionrobots/micropython,pramasoul/micropython,MrSurly/micropython-esp32,PappaPeppar/micropython,Vogtinator/micropython,blmorris/micropython,omtinez/micropython,PappaPeppar/micropython,martinribelotta/micropython,xuxiaoxin/micropython,MrSurly/micropython,lbattraw/micropython,aethaniel/micropython,noahwilliamsson/micropython,galenhz/micropython,adamkh/micropython,AriZuu/micropython,galenhz/micropython,mpalomer/micropython,adamkh/micropython,danicampora/micropython,cnoviello/micropython,EcmaXp/micropython,redbear/micropython,alex-march/micropython,kerneltask/micropython,ganshun666/micropython,adafruit/circuitpython,martinribelotta/micropython,dxxb/micropython,ruffy91/micropython,heisewangluo/micropython,jlillest/micropython,tralamazza/micropython,jlillest/micropython,aethaniel/micropython,mpalomer/micropython,ruffy91/micropython,dxxb/micropython,SHA2017-badge/micropython-esp32,deshipu/micropython,TDAbboud/micropython,TDAbboud/micropython,neilh10/micropython,oopy/micropython,vitiral/micropython,TDAbboud
/micropython,selste/micropython,infinnovation/micropython,puuu/micropython,blazewicz/micropython,vriera/micropython,praemdonck/micropython,cwyark/micropython,paul-xxx/micropython,blmorris/micropython,Peetz0r/micropython-esp32,xuxiaoxin/micropython,Peetz0r/micropython-esp32,ryannathans/micropython,infinnovation/micropython,supergis/micropython,ernesto-g/micropython,hiway/micropython,EcmaXp/micropython,stonegithubs/micropython,adafruit/micropython,ericsnowcurrently/micropython,skybird6672/micropython,MrSurly/micropython,warner83/micropython,matthewelse/micropython,dhylands/micropython,ryannathans/micropython,omtinez/micropython,dhylands/micropython,misterdanb/micropython,HenrikSolver/micropython,oopy/micropython,noahchense/micropython,praemdonck/micropython,ceramos/micropython,ernesto-g/micropython,firstval/micropython,mpalomer/micropython,neilh10/micropython,martinribelotta/micropython,mgyenik/micropython,MrSurly/micropython,vriera/micropython,PappaPeppar/micropython,xyb/micropython,utopiaprince/micropython,danicampora/micropython,slzatz/micropython,xuxiaoxin/micropython,feilongfl/micropython,SungEun-Steve-Kim/test-mp,alex-march/micropython,cwyark/micropython,dhylands/micropython,trezor/micropython,redbear/micropython,ericsnowcurrently/micropython,methoxid/micropystat,noahwilliamsson/micropython,MrSurly/micropython-esp32,feilongfl/micropython,redbear/micropython,utopiaprince/micropython,galenhz/micropython,deshipu/micropython,jmarcelino/pycom-micropython,emfcamp/micropython,paul-xxx/micropython,pfalcon/micropython,alex-march/micropython,ericsnowcurrently/micropython,swegener/micropython,pozetroninc/micropython,bvernoux/micropython,deshipu/micropython,blmorris/micropython,ChuckM/micropython,warner83/micropython,aethaniel/micropython,Timmenem/micropython,ganshun666/micropython,hosaka/micropython,swegener/micropython,dhylands/micropython,stonegithubs/micropython,henriknelson/micropython,Timmenem/micropython,adafruit/circuitpython,lowRISC/micropython,tuc-osg/micropython,mgyenik/micropython,emfcamp/micropython,drrk/micropython,dinau/micropython,noahwilliamsson/micropython,kerneltask/micropython,jlillest/micropython,dmazzella/micropython,Vogtinator/micropython,micropython/micropython-esp32,HenrikSolver/micropython,danicampora/micropython,ruffy91/micropython,adafruit/circuitpython,pramasoul/micropython,suda/micropython,mianos/micropython,hiway/micropython,TDAbboud/micropython,ernesto-g/micropython,tdautc19841202/micropython,dmazzella/micropython,MrSurly/micropython,mhoffma/micropython,infinnovation/micropython,hosaka/micropython,methoxid/micropystat,hosaka/micropython,orionrobots/micropython,xuxiaoxin/micropython,alex-robbins/micropython,KISSMonX/micropython,emfcamp/micropython,dxxb/micropython,ChuckM/micropython,SungEun-Steve-Kim/test-mp,vitiral/micropython,suda/micropython,tuc-osg/micropython,trezor/micropython,pozetroninc/micropython,galenhz/micropython,tralamazza/micropython,oopy/micropython,SHA2017-badge/micropython-esp32,dmazzella/micropython,selste/micropython,matthewelse/micropython,deshipu/micropython,praemdonck/micropython,danicampora/micropython,skybird6672/micropython,adafruit/circuitpython,blazewicz/micropython,ganshun666/micropython,dinau/micropython,selste/micropython,tdautc19841202/micropython,EcmaXp/micropython,tobbad/micropython,feilongfl/micropython,ernesto-g/micropython,utopiaprince/micropython,cwyark/micropython,cnoviello/micropython,KISSMonX/micropython,turbinenreiter/micropython,galenhz/micropython,MrSurly/micropython,bvernoux/micropython,EcmaXp/micropython,tobbad/micropython
,ahotam/micropython,PappaPeppar/micropython,neilh10/micropython,dxxb/micropython,xhat/micropython,hiway/micropython,orionrobots/micropython,torwag/micropython,toolmacher/micropython,trezor/micropython,turbinenreiter/micropython,jimkmc/micropython,suda/micropython,utopiaprince/micropython,rubencabrera/micropython,ChuckM/micropython,lowRISC/micropython,infinnovation/micropython,AriZuu/micropython,xyb/micropython,toolmacher/micropython,mgyenik/micropython,hosaka/micropython,cnoviello/micropython,tuc-osg/micropython,supergis/micropython,cloudformdesign/micropython,alex-march/micropython,firstval/micropython,ericsnowcurrently/micropython,jmarcelino/pycom-micropython,redbear/micropython,firstval/micropython,noahchense/micropython,misterdanb/micropython,selste/micropython,SHA2017-badge/micropython-esp32,dinau/micropython,emfcamp/micropython,ericsnowcurrently/micropython,mhoffma/micropython,vriera/micropython,ChuckM/micropython,matthewelse/micropython,ceramos/micropython,redbear/micropython,skybird6672/micropython,cloudformdesign/micropython,noahchense/micropython,kerneltask/micropython,ahotam/micropython,supergis/micropython,jlillest/micropython,matthewelse/micropython,xhat/micropython,slzatz/micropython,slzatz/micropython,tdautc19841202/micropython,puuu/micropython,Vogtinator/micropython,hosaka/micropython,kerneltask/micropython,adamkh/micropython,orionrobots/micropython,ceramos/micropython,pfalcon/micropython,dxxb/micropython,rubencabrera/micropython,SungEun-Steve-Kim/test-mp,alex-robbins/micropython,turbinenreiter/micropython,kostyll/micropython,drrk/micropython,drrk/micropython,jimkmc/micropython,HenrikSolver/micropython,Peetz0r/micropython-esp32,mianos/micropython,micropython/micropython-esp32,puuu/micropython,praemdonck/micropython,Peetz0r/micropython-esp32,mianos/micropython,ahotam/micropython,infinnovation/micropython,torwag/micropython,tuc-osg/micropython,adafruit/circuitpython,MrSurly/micropython-esp32,blazewicz/micropython,chrisdearman/micropython,mpalomer/micropython,blazewicz/micropython,jimkmc/micropython,chrisdearman/micropython,AriZuu/micropython,emfcamp/micropython,jimkmc/micropython,firstval/micropython,methoxid/micropystat,jmarcelino/pycom-micropython,paul-xxx/micropython,hiway/micropython,jlillest/micropython,dinau/micropython,adafruit/circuitpython,stonegithubs/micropython,vriera/micropython,Timmenem/micropython,suda/micropython,Peetz0r/micropython-esp32,ryannathans/micropython,jimkmc/micropython,lbattraw/micropython,rubencabrera/micropython,jmarcelino/pycom-micropython,Vogtinator/micropython,suda/micropython,omtinez/micropython,pramasoul/micropython,xhat/micropython,adafruit/micropython,heisewangluo/micropython,SHA2017-badge/micropython-esp32,paul-xxx/micropython,Vogtinator/micropython,xyb/micropython,hiway/micropython,methoxid/micropystat,supergis/micropython,toolmacher/micropython,blmorris/micropython,heisewangluo/micropython,xyb/micropython,Timmenem/micropython,heisewangluo/micropython,swegener/micropython,noahwilliamsson/micropython,jmarcelino/pycom-micropython,cwyark/micropython,puuu/micropython,pozetroninc/micropython,cloudformdesign/micropython,tobbad/micropython,ceramos/micropython,vriera/micropython,blmorris/micropython,stonegithubs/micropython,vitiral/micropython,tdautc19841202/micropython,cloudformdesign/micropython,alex-robbins/micropython,torwag/micropython,cnoviello/micropython,noahwilliamsson/micropython,mgyenik/micropython,orionrobots/micropython,misterdanb/micropython,xhat/micropython,tobbad/micropython,bvernoux/micropython,alex-robbins/micropython,tdautc1984
1202/micropython,chrisdearman/micropython,misterdanb/micropython,blazewicz/micropython,stonegithubs/micropython,utopiaprince/micropython,aethaniel/micropython,drrk/micropython,vitiral/micropython,lowRISC/micropython,cnoviello/micropython,toolmacher/micropython,puuu/micropython,dinau/micropython,pozetroninc/micropython,mianos/micropython,misterdanb/micropython,torwag/micropython,ryannathans/micropython,neilh10/micropython,feilongfl/micropython,xuxiaoxin/micropython,martinribelotta/micropython
|
tests: Add test for zlibd module.
|
try:
import zlib
except ImportError:
import zlibd as zlib
PATTERNS = [
# Packed results produced by CPy's zlib.compress()
(b'0', b'x\x9c3\x00\x00\x001\x001'),
(b'a', b'x\x9cK\x04\x00\x00b\x00b'),
(b'0' * 100, b'x\x9c30\xa0=\x00\x00\xb3q\x12\xc1'),
(bytes(range(64)), b'x\x9cc`dbfaec\xe7\xe0\xe4\xe2\xe6\xe1\xe5\xe3\x17\x10\x14\x12\x16\x11\x15\x13\x97\x90\x94\x92\x96\x91\x95\x93WPTRVQUS\xd7\xd0\xd4\xd2\xd6\xd1\xd5\xd370426153\xb7\xb0\xb4\xb2\xb6\xb1\xb5\xb3\x07\x00\xaa\xe0\x07\xe1'),
]
for unpacked, packed in PATTERNS:
assert zlib.decompress(packed) == unpacked
print(unpacked)
|
<commit_before><commit_msg>tests: Add test for zlibd module.<commit_after>
|
try:
import zlib
except ImportError:
import zlibd as zlib
PATTERNS = [
# Packed results produced by CPy's zlib.compress()
(b'0', b'x\x9c3\x00\x00\x001\x001'),
(b'a', b'x\x9cK\x04\x00\x00b\x00b'),
(b'0' * 100, b'x\x9c30\xa0=\x00\x00\xb3q\x12\xc1'),
(bytes(range(64)), b'x\x9cc`dbfaec\xe7\xe0\xe4\xe2\xe6\xe1\xe5\xe3\x17\x10\x14\x12\x16\x11\x15\x13\x97\x90\x94\x92\x96\x91\x95\x93WPTRVQUS\xd7\xd0\xd4\xd2\xd6\xd1\xd5\xd370426153\xb7\xb0\xb4\xb2\xb6\xb1\xb5\xb3\x07\x00\xaa\xe0\x07\xe1'),
]
for unpacked, packed in PATTERNS:
assert zlib.decompress(packed) == unpacked
print(unpacked)
|
tests: Add test for zlibd module.try:
import zlib
except ImportError:
import zlibd as zlib
PATTERNS = [
# Packed results produced by CPy's zlib.compress()
(b'0', b'x\x9c3\x00\x00\x001\x001'),
(b'a', b'x\x9cK\x04\x00\x00b\x00b'),
(b'0' * 100, b'x\x9c30\xa0=\x00\x00\xb3q\x12\xc1'),
(bytes(range(64)), b'x\x9cc`dbfaec\xe7\xe0\xe4\xe2\xe6\xe1\xe5\xe3\x17\x10\x14\x12\x16\x11\x15\x13\x97\x90\x94\x92\x96\x91\x95\x93WPTRVQUS\xd7\xd0\xd4\xd2\xd6\xd1\xd5\xd370426153\xb7\xb0\xb4\xb2\xb6\xb1\xb5\xb3\x07\x00\xaa\xe0\x07\xe1'),
]
for unpacked, packed in PATTERNS:
assert zlib.decompress(packed) == unpacked
print(unpacked)
|
<commit_before><commit_msg>tests: Add test for zlibd module.<commit_after>try:
import zlib
except ImportError:
import zlibd as zlib
PATTERNS = [
# Packed results produced by CPy's zlib.compress()
(b'0', b'x\x9c3\x00\x00\x001\x001'),
(b'a', b'x\x9cK\x04\x00\x00b\x00b'),
(b'0' * 100, b'x\x9c30\xa0=\x00\x00\xb3q\x12\xc1'),
(bytes(range(64)), b'x\x9cc`dbfaec\xe7\xe0\xe4\xe2\xe6\xe1\xe5\xe3\x17\x10\x14\x12\x16\x11\x15\x13\x97\x90\x94\x92\x96\x91\x95\x93WPTRVQUS\xd7\xd0\xd4\xd2\xd6\xd1\xd5\xd370426153\xb7\xb0\xb4\xb2\xb6\xb1\xb5\xb3\x07\x00\xaa\xe0\x07\xe1'),
]
for unpacked, packed in PATTERNS:
assert zlib.decompress(packed) == unpacked
print(unpacked)
|
|
1d2463d7aa476608b95dc1ca37ced23e7dcb13d4
|
tests/handlers/test_analyses.py
|
tests/handlers/test_analyses.py
|
import pytest
@pytest.mark.parametrize("not_found", [False, True], ids=["200", "404"])
async def test_get(mocker, not_found, spawn_client):
client = await spawn_client(authorize=True)
document = {
"_id": "foobar",
"formatted": False
}
if not not_found:
await client.db.analyses.insert_one(document)
m = mocker.stub(name="format_analysis")
return_value = dict(document, formatted=True)
async def format_analysis(db, document):
m(db, document)
return return_value
mocker.patch("virtool.sample_analysis.format_analysis", new=format_analysis)
resp = await client.get("/api/analyses/foobar")
if not_found:
assert resp.status == 404
else:
assert resp.status == 200
assert await resp.json() == {
"id": "foobar",
"formatted": True
}
assert m.call_args[0] == (
client.db,
document
)
@pytest.mark.parametrize("has_sample", [True, False], ids=["with_sample", "without_sample"])
@pytest.mark.parametrize("status", [204, 404, 409])
async def test_remove(has_sample, status, spawn_client, resp_is, test_dispatch):
client = await spawn_client(authorize=True)
sample_document = None
if has_sample:
sample_document = {
"_id": "baz",
"name": "Baz"
}
await client.db.samples.insert_one(sample_document)
if status != 404:
analysis_document = {
"_id": "foobar",
"ready": status == 204,
"sample": {
"id": "baz",
"name": "Baz"
},
"job": {
"id": "hello"
}
}
await client.db.analyses.insert_one(analysis_document)
resp = await client.delete("/api/analyses/foobar")
assert resp.status == status
if status == 409:
assert await resp_is.conflict(resp, "Analysis is still running. Cancel job 'hello' instead")
elif status == 404:
assert await resp_is.not_found(resp)
else:
if has_sample:
assert test_dispatch.stub.call_args[0] == (
"samples",
"update",
{
"id": "baz",
"name": "Baz"
}
)
else:
assert not test_dispatch.stub.called
|
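`resp_is` and `test_dispatch` are project fixtures, so their shape is assumed here; a hypothetical sketch of the two checks the tests rely on (not virtool's actual implementation):
class RespIs:
    async def conflict(self, resp, message):
        # 409 with a JSON body carrying the message text.
        return resp.status == 409 and (await resp.json())["message"] == message

    async def not_found(self, resp):
        return resp.status == 404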
Add some tests for analyses handlers
|
Add some tests for analyses handlers
|
Python
|
mit
|
igboyes/virtool,virtool/virtool,igboyes/virtool,virtool/virtool
|
Add some tests for analyses handlers
|
import pytest
@pytest.mark.parametrize("not_found", [False, True], ids=["200", "404"])
async def test_get(mocker, not_found, spawn_client):
client = await spawn_client(authorize=True)
document = {
"_id": "foobar",
"formatted": False
}
if not not_found:
await client.db.analyses.insert_one(document)
m = mocker.stub(name="format_analysis")
return_value = dict(document, formatted=True)
async def format_analysis(db, document):
m(db, document)
return return_value
mocker.patch("virtool.sample_analysis.format_analysis", new=format_analysis)
resp = await client.get("/api/analyses/foobar")
if not_found:
assert resp.status == 404
else:
assert resp.status == 200
assert await resp.json() == {
"id": "foobar",
"formatted": True
}
assert m.call_args[0] == (
client.db,
document
)
@pytest.mark.parametrize("has_sample", [True, False], ids=["with_sample", "without_sample"])
@pytest.mark.parametrize("status", [204, 404, 409])
async def test_remove(has_sample, status, spawn_client, resp_is, test_dispatch):
client = await spawn_client(authorize=True)
sample_document = None
if has_sample:
sample_document = {
"_id": "baz",
"name": "Baz"
}
await client.db.samples.insert_one(sample_document)
if status != 404:
analysis_document = {
"_id": "foobar",
"ready": status == 204,
"sample": {
"id": "baz",
"name": "Baz"
},
"job": {
"id": "hello"
}
}
await client.db.analyses.insert_one(analysis_document)
resp = await client.delete("/api/analyses/foobar")
assert resp.status == status
if status == 409:
assert await resp_is.conflict(resp, "Analysis is still running. Cancel job 'hello' instead")
elif status == 404:
assert await resp_is.not_found(resp)
else:
if has_sample:
assert test_dispatch.stub.call_args[0] == (
"samples",
"update",
{
"id": "baz",
"name": "Baz"
}
)
else:
assert not test_dispatch.stub.called
|
<commit_before><commit_msg>Add some tests for analyses handlers<commit_after>
|
import pytest
@pytest.mark.parametrize("not_found", [False, True], ids=["200", "404"])
async def test_get(mocker, not_found, spawn_client):
client = await spawn_client(authorize=True)
document = {
"_id": "foobar",
"formatted": False
}
if not not_found:
await client.db.analyses.insert_one(document)
m = mocker.stub(name="format_analysis")
return_value = dict(document, formatted=True)
async def format_analysis(db, document):
m(db, document)
return return_value
mocker.patch("virtool.sample_analysis.format_analysis", new=format_analysis)
resp = await client.get("/api/analyses/foobar")
if not_found:
assert resp.status == 404
else:
assert resp.status == 200
assert await resp.json() == {
"id": "foobar",
"formatted": True
}
assert m.call_args[0] == (
client.db,
document
)
@pytest.mark.parametrize("has_sample", [True, False], ids=["with_sample", "without_sample"])
@pytest.mark.parametrize("status", [204, 404, 409])
async def test_remove(has_sample, status, spawn_client, resp_is, test_dispatch):
client = await spawn_client(authorize=True)
sample_document = None
if has_sample:
sample_document = {
"_id": "baz",
"name": "Baz"
}
await client.db.samples.insert_one(sample_document)
if status != 404:
analysis_document = {
"_id": "foobar",
"ready": status == 204,
"sample": {
"id": "baz",
"name": "Baz"
},
"job": {
"id": "hello"
}
}
await client.db.analyses.insert_one(analysis_document)
resp = await client.delete("/api/analyses/foobar")
assert resp.status == status
if status == 409:
assert await resp_is.conflict(resp, "Analysis is still running. Cancel job 'hello' instead")
elif status == 404:
assert await resp_is.not_found(resp)
else:
if has_sample:
assert test_dispatch.stub.call_args[0] == (
"samples",
"update",
{
"id": "baz",
"name": "Baz"
}
)
else:
assert not test_dispatch.stub.called
|
Add some tests for analyses handlersimport pytest
@pytest.mark.parametrize("not_found", [False, True], ids=["200", "404"])
async def test_get(mocker, not_found, spawn_client):
client = await spawn_client(authorize=True)
document = {
"_id": "foobar",
"formatted": False
}
if not not_found:
await client.db.analyses.insert_one(document)
m = mocker.stub(name="format_analysis")
return_value = dict(document, formatted=True)
async def format_analysis(db, document):
m(db, document)
return return_value
mocker.patch("virtool.sample_analysis.format_analysis", new=format_analysis)
resp = await client.get("/api/analyses/foobar")
if not_found:
assert resp.status == 404
else:
assert resp.status == 200
assert await resp.json() == {
"id": "foobar",
"formatted": True
}
assert m.call_args[0] == (
client.db,
document
)
@pytest.mark.parametrize("has_sample", [True, False], ids=["with_sample", "without_sample"])
@pytest.mark.parametrize("status", [204, 404, 409])
async def test_remove(has_sample, status, spawn_client, resp_is, test_dispatch):
client = await spawn_client(authorize=True)
sample_document = None
if has_sample:
sample_document = {
"_id": "baz",
"name": "Baz"
}
await client.db.samples.insert_one(sample_document)
if status != 404:
analysis_document = {
"_id": "foobar",
"ready": status == 204,
"sample": {
"id": "baz",
"name": "Baz"
},
"job": {
"id": "hello"
}
}
await client.db.analyses.insert_one(analysis_document)
resp = await client.delete("/api/analyses/foobar")
assert resp.status == status
if status == 409:
assert await resp_is.conflict(resp, "Analysis is still running. Cancel job 'hello' instead")
elif status == 404:
assert await resp_is.not_found(resp)
else:
if has_sample:
assert test_dispatch.stub.call_args[0] == (
"samples",
"update",
{
"id": "baz",
"name": "Baz"
}
)
else:
assert not test_dispatch.stub.called
|
<commit_before><commit_msg>Add some tests for analyses handlers<commit_after>import pytest
@pytest.mark.parametrize("not_found", [False, True], ids=["200", "404"])
async def test_get(mocker, not_found, spawn_client):
client = await spawn_client(authorize=True)
document = {
"_id": "foobar",
"formatted": False
}
if not not_found:
await client.db.analyses.insert_one(document)
m = mocker.stub(name="format_analysis")
return_value = dict(document, formatted=True)
async def format_analysis(db, document):
m(db, document)
return return_value
mocker.patch("virtool.sample_analysis.format_analysis", new=format_analysis)
resp = await client.get("/api/analyses/foobar")
if not_found:
assert resp.status == 404
else:
assert resp.status == 200
assert await resp.json() == {
"id": "foobar",
"formatted": True
}
assert m.call_args[0] == (
client.db,
document
)
@pytest.mark.parametrize("has_sample", [True, False], ids=["with_sample", "without_sample"])
@pytest.mark.parametrize("status", [204, 404, 409])
async def test_remove(has_sample, status, spawn_client, resp_is, test_dispatch):
client = await spawn_client(authorize=True)
sample_document = None
if has_sample:
sample_document = {
"_id": "baz",
"name": "Baz"
}
await client.db.samples.insert_one(sample_document)
if status != 404:
analysis_document = {
"_id": "foobar",
"ready": status == 204,
"sample": {
"id": "baz",
"name": "Baz"
},
"job": {
"id": "hello"
}
}
await client.db.analyses.insert_one(analysis_document)
resp = await client.delete("/api/analyses/foobar")
assert resp.status == status
if status == 409:
assert await resp_is.conflict(resp, "Analysis is still running. Cancel job 'hello' instead")
elif status == 404:
assert await resp_is.not_found(resp)
else:
if has_sample:
assert test_dispatch.stub.call_args[0] == (
"samples",
"update",
{
"id": "baz",
"name": "Baz"
}
)
else:
assert not test_dispatch.stub.called
|
|
06906b820f312bbc0a59eea7518470856df478ac
|
pytoon/tests/test_brick_connection.py
|
pytoon/tests/test_brick_connection.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_pytoon
----------------------------------
Tests for `pytoon` module.
"""
import unittest
from mock import patch, Mock
from pytoon.connection import BrickConnection
@patch('pytoon.connection.IPConnection')
class TestConnection(unittest.TestCase):
def test_main_loop(self, mock_class):
host = "192.168.178.35"
port = 4223
bc = BrickConnection(host, port)
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_pytoon
----------------------------------
Tests for `pytoon` module.
"""
import unittest
from mock import patch, Mock
from tinkerforge.ip_connection import IPConnection
from tinkerforge.bricklet_hall_effect import HallEffect
from tinkerforge.bricklet_ambient_light import AmbientLight
from tinkerforge.bricklet_line import Line
from pytoon.connection import BrickConnection
class TestConnection(unittest.TestCase):
def setUp(self):
self.patch_ip_connection = patch('pytoon.connection.IPConnection')
self.patch_ip_connection.start()
host = "192.168.178.35"
port = 4223
self.bc = BrickConnection(host, port)
def test_cb_connected(self):
self.bc.connection.reset_mock()
self.bc.cb_connected('testing')
self.bc.connection.enumerate.assert_called_once_with()
def test_cb_enumerate_hall(self):
self.assertIsNone(self.bc.hall)
self.patch_ip_connection.stop()
self.bc.cb_enumerate('1', None, None, None, None, device_identifier=240,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
self.assertIsNotNone(self.bc.hall)
self.patch_ip_connection.start()
def test_cb_enumerate_line(self):
self.assertIsNone(self.bc.line)
self.patch_ip_connection.stop()
self.bc.cb_enumerate('2', None, None, None, None, device_identifier=241,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
self.assertIsNotNone(self.bc.line)
self.patch_ip_connection.start()
def test_cb_enumerate_ambient(self):
self.bc.connection.reset_mock()
        self.assertIsNone(self.bc.ambient)
self.patch_ip_connection.stop()
self.bc.cb_enumerate('3', None, None, None, None, device_identifier=21,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
self.assertIsNotNone(self.bc.ambient)
self.patch_ip_connection.start()
def tearDown(self):
self.patch_ip_connection.stop()
if __name__ == '__main__':
unittest.main()
|
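The three enumerate tests pin Tinkerforge device identifiers to attributes (240 -> hall, 241 -> line, 21 -> ambient). A hypothetical sketch of the dispatch they exercise (not pytoon's actual implementation):
DEVICE_MAP = {240: "hall", 241: "line", 21: "ambient"}

def handle_enumerate(connection, device_identifier, connected):
    # On a CONNECTED enumeration, attach a bricklet object for that device.
    if connected and device_identifier in DEVICE_MAP:
        setattr(connection, DEVICE_MAP[device_identifier], object())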
Add tests for connecting to three different sensors
|
Add tests for connecting to three different sensors
|
Python
|
bsd-3-clause
|
marcofinalist/pytoon,marcoplaisier/pytoon,marcoplaisier/pytoon,marcofinalist/pytoon
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_pytoon
----------------------------------
Tests for `pytoon` module.
"""
import unittest
from mock import patch, Mock
from pytoon.connection import BrickConnection
@patch('pytoon.connection.IPConnection')
class TestConnection(unittest.TestCase):
def test_main_loop(self, mock_class):
host = "192.168.178.35"
port = 4223
bc = BrickConnection(host, port)
if __name__ == '__main__':
unittest.main()Add tests for connecting to three different sensors
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_pytoon
----------------------------------
Tests for `pytoon` module.
"""
import unittest
from mock import patch, Mock
from tinkerforge.ip_connection import IPConnection
from tinkerforge.bricklet_hall_effect import HallEffect
from tinkerforge.bricklet_ambient_light import AmbientLight
from tinkerforge.bricklet_line import Line
from pytoon.connection import BrickConnection
class TestConnection(unittest.TestCase):
def setUp(self):
self.patch_ip_connection = patch('pytoon.connection.IPConnection')
self.patch_ip_connection.start()
host = "192.168.178.35"
port = 4223
self.bc = BrickConnection(host, port)
def test_cb_connected(self):
self.bc.connection.reset_mock()
self.bc.cb_connected('testing')
self.bc.connection.enumerate.assert_called_once_with()
def test_cb_enumerate_hall(self):
self.assertIsNone(self.bc.hall)
self.patch_ip_connection.stop()
self.bc.cb_enumerate('1', None, None, None, None, device_identifier=240,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
self.assertIsNotNone(self.bc.hall)
self.patch_ip_connection.start()
def test_cb_enumerate_line(self):
self.assertIsNone(self.bc.line)
self.patch_ip_connection.stop()
self.bc.cb_enumerate('2', None, None, None, None, device_identifier=241,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
self.assertIsNotNone(self.bc.line)
self.patch_ip_connection.start()
def test_cb_enumerate_ambient(self):
self.bc.connection.reset_mock()
        self.assertIsNone(self.bc.ambient)
self.patch_ip_connection.stop()
self.bc.cb_enumerate('3', None, None, None, None, device_identifier=21,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
self.assertIsNotNone(self.bc.ambient)
self.patch_ip_connection.start()
def tearDown(self):
self.patch_ip_connection.stop()
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_pytoon
----------------------------------
Tests for `pytoon` module.
"""
import unittest
from mock import patch, Mock
from pytoon.connection import BrickConnection
@patch('pytoon.connection.IPConnection')
class TestConnection(unittest.TestCase):
def test_main_loop(self, mock_class):
host = "192.168.178.35"
port = 4223
bc = BrickConnection(host, port)
if __name__ == '__main__':
unittest.main()<commit_msg>Add tests for connecting to three different sensors<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_pytoon
----------------------------------
Tests for `pytoon` module.
"""
import unittest
from mock import patch, Mock
from tinkerforge.ip_connection import IPConnection
from tinkerforge.bricklet_hall_effect import HallEffect
from tinkerforge.bricklet_ambient_light import AmbientLight
from tinkerforge.bricklet_line import Line
from pytoon.connection import BrickConnection
class TestConnection(unittest.TestCase):
def setUp(self):
self.patch_ip_connection = patch('pytoon.connection.IPConnection')
self.patch_ip_connection.start()
host = "192.168.178.35"
port = 4223
self.bc = BrickConnection(host, port)
def test_cb_connected(self):
self.bc.connection.reset_mock()
self.bc.cb_connected('testing')
self.bc.connection.enumerate.assert_called_once_with()
def test_cb_enumerate_hall(self):
self.assertIsNone(self.bc.hall)
self.patch_ip_connection.stop()
self.bc.cb_enumerate('1', None, None, None, None, device_identifier=240,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
self.assertIsNotNone(self.bc.hall)
self.patch_ip_connection.start()
def test_cb_enumerate_line(self):
self.assertIsNone(self.bc.line)
self.patch_ip_connection.stop()
self.bc.cb_enumerate('2', None, None, None, None, device_identifier=241,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
self.assertIsNotNone(self.bc.line)
self.patch_ip_connection.start()
def test_cb_enumerate_ambient(self):
self.bc.connection.reset_mock()
        self.assertIsNone(self.bc.ambient)
self.patch_ip_connection.stop()
self.bc.cb_enumerate('3', None, None, None, None, device_identifier=21,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
self.assertIsNotNone(self.bc.ambient)
self.patch_ip_connection.start()
def tearDown(self):
self.patch_ip_connection.stop()
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_pytoon
----------------------------------
Tests for `pytoon` module.
"""
import unittest
from mock import patch, Mock
from pytoon.connection import BrickConnection
@patch('pytoon.connection.IPConnection')
class TestConnection(unittest.TestCase):
def test_main_loop(self, mock_class):
host = "192.168.178.35"
port = 4223
bc = BrickConnection(host, port)
if __name__ == '__main__':
unittest.main()Add tests for connecting to three different sensors#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_pytoon
----------------------------------
Tests for `pytoon` module.
"""
import unittest
from mock import patch, Mock
from tinkerforge.ip_connection import IPConnection
from tinkerforge.bricklet_hall_effect import HallEffect
from tinkerforge.bricklet_ambient_light import AmbientLight
from tinkerforge.bricklet_line import Line
from pytoon.connection import BrickConnection
class TestConnection(unittest.TestCase):
def setUp(self):
self.patch_ip_connection = patch('pytoon.connection.IPConnection')
self.patch_ip_connection.start()
host = "192.168.178.35"
port = 4223
self.bc = BrickConnection(host, port)
def test_cb_connected(self):
self.bc.connection.reset_mock()
self.bc.cb_connected('testing')
self.bc.connection.enumerate.assert_called_once_with()
def test_cb_enumerate_hall(self):
self.assertIsNone(self.bc.hall)
self.patch_ip_connection.stop()
self.bc.cb_enumerate('1', None, None, None, None, device_identifier=240,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
self.assertIsNotNone(self.bc.hall)
self.patch_ip_connection.start()
def test_cb_enumerate_line(self):
self.assertIsNone(self.bc.line)
self.patch_ip_connection.stop()
self.bc.cb_enumerate('2', None, None, None, None, device_identifier=241,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
self.assertIsNotNone(self.bc.line)
self.patch_ip_connection.start()
def test_cb_enumerate_ambient(self):
self.bc.connection.reset_mock()
        self.assertIsNone(self.bc.ambient)
self.patch_ip_connection.stop()
self.bc.cb_enumerate('3', None, None, None, None, device_identifier=21,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
self.assertIsNotNone(self.bc.ambient)
self.patch_ip_connection.start()
def tearDown(self):
self.patch_ip_connection.stop()
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_pytoon
----------------------------------
Tests for `pytoon` module.
"""
import unittest
from mock import patch, Mock
from pytoon.connection import BrickConnection
@patch('pytoon.connection.IPConnection')
class TestConnection(unittest.TestCase):
def test_main_loop(self, mock_class):
host = "192.168.178.35"
port = 4223
bc = BrickConnection(host, port)
if __name__ == '__main__':
unittest.main()<commit_msg>Add tests for connecting to three different sensors<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_pytoon
----------------------------------
Tests for `pytoon` module.
"""
import unittest
from mock import patch, Mock
from tinkerforge.ip_connection import IPConnection
from tinkerforge.bricklet_hall_effect import HallEffect
from tinkerforge.bricklet_ambient_light import AmbientLight
from tinkerforge.bricklet_line import Line
from pytoon.connection import BrickConnection
class TestConnection(unittest.TestCase):
def setUp(self):
self.patch_ip_connection = patch('pytoon.connection.IPConnection')
self.patch_ip_connection.start()
host = "192.168.178.35"
port = 4223
self.bc = BrickConnection(host, port)
def test_cb_connected(self):
self.bc.connection.reset_mock()
self.bc.cb_connected('testing')
self.bc.connection.enumerate.assert_called_once_with()
def test_cb_enumerate_hall(self):
self.assertIsNone(self.bc.hall)
self.patch_ip_connection.stop()
self.bc.cb_enumerate('1', None, None, None, None, device_identifier=240,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
self.assertIsNotNone(self.bc.hall)
self.patch_ip_connection.start()
def test_cb_enumerate_line(self):
self.assertIsNone(self.bc.line)
self.patch_ip_connection.stop()
self.bc.cb_enumerate('2', None, None, None, None, device_identifier=241,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
self.assertIsNotNone(self.bc.line)
self.patch_ip_connection.start()
def test_cb_enumerate_ambient(self):
self.bc.connection.reset_mock()
        self.assertIsNone(self.bc.ambient)
self.patch_ip_connection.stop()
self.bc.cb_enumerate('3', None, None, None, None, device_identifier=21,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
self.assertIsNotNone(self.bc.ambient)
self.patch_ip_connection.start()
def tearDown(self):
self.patch_ip_connection.stop()
if __name__ == '__main__':
unittest.main()
|
0b9771c782394af2850161fab1e4947fc3c40cca
|
qregexeditor/api/match_highlighter.py
|
qregexeditor/api/match_highlighter.py
|
import re
from pyqode.core.qt import QtGui
class MatchHighlighter(QtGui.QSyntaxHighlighter):
def __init__(self, document):
super().__init__(document)
self.prog = None
self._format = QtGui.QTextCharFormat()
self._format.setBackground(QtGui.QBrush(QtGui.QColor('#bbfcbb')))
def highlightBlock(self, text):
if self.prog:
match = self.prog.search(text)
while match:
start, end = match.span()
self.setFormat(start, end - start, self._format)
match = self.prog.search(text, match.end())
|
import re
from pyqode.core.qt import QtGui
class MatchHighlighter(QtGui.QSyntaxHighlighter):
def __init__(self, document):
super().__init__(document)
self.prog = None
self._format = QtGui.QTextCharFormat()
self._format.setBackground(QtGui.QBrush(QtGui.QColor('#bbfcbb')))
def highlightBlock(self, text):
if self.prog and text:
match = self.prog.search(text)
if match:
start, end = match.span()
while match and end > start:
self.setFormat(start, end - start, self._format)
match = self.prog.search(text, match.end())
if match:
start, end = match.span()
|
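The `end > start` guard is what terminates the loop: a pattern that can match the empty string returns a zero-width match, and searching again from `match.end()` finds the very same match, so the old `while match:` version never advanced. A minimal reproduction:
import re
prog = re.compile(r'a*')            # may match the empty string
m = prog.search('bbb')
assert m.span() == (0, 0)           # zero-width match at position 0
m2 = prog.search('bbb', m.end())
assert m2.span() == (0, 0)          # identical match again -> infinite loop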
Fix infinite loop in highlighter
|
Fix infinite loop in highlighter
|
Python
|
mit
|
ColinDuquesnoy/QRegexEditor
|
import re
from pyqode.core.qt import QtGui
class MatchHighlighter(QtGui.QSyntaxHighlighter):
def __init__(self, document):
super().__init__(document)
self.prog = None
self._format = QtGui.QTextCharFormat()
self._format.setBackground(QtGui.QBrush(QtGui.QColor('#bbfcbb')))
def highlightBlock(self, text):
if self.prog:
match = self.prog.search(text)
while match:
start, end = match.span()
self.setFormat(start, end - start, self._format)
match = self.prog.search(text, match.end())
Fix infinite loop in highlighter
|
import re
from pyqode.core.qt import QtGui
class MatchHighlighter(QtGui.QSyntaxHighlighter):
def __init__(self, document):
super().__init__(document)
self.prog = None
self._format = QtGui.QTextCharFormat()
self._format.setBackground(QtGui.QBrush(QtGui.QColor('#bbfcbb')))
def highlightBlock(self, text):
if self.prog and text:
match = self.prog.search(text)
if match:
start, end = match.span()
while match and end > start:
self.setFormat(start, end - start, self._format)
match = self.prog.search(text, match.end())
if match:
start, end = match.span()
|
<commit_before>import re
from pyqode.core.qt import QtGui
class MatchHighlighter(QtGui.QSyntaxHighlighter):
def __init__(self, document):
super().__init__(document)
self.prog = None
self._format = QtGui.QTextCharFormat()
self._format.setBackground(QtGui.QBrush(QtGui.QColor('#bbfcbb')))
def highlightBlock(self, text):
if self.prog:
match = self.prog.search(text)
while match:
start, end = match.span()
self.setFormat(start, end - start, self._format)
match = self.prog.search(text, match.end())
<commit_msg>Fix infinite loop in highlighter<commit_after>
|
import re
from pyqode.core.qt import QtGui
class MatchHighlighter(QtGui.QSyntaxHighlighter):
def __init__(self, document):
super().__init__(document)
self.prog = None
self._format = QtGui.QTextCharFormat()
self._format.setBackground(QtGui.QBrush(QtGui.QColor('#bbfcbb')))
def highlightBlock(self, text):
if self.prog and text:
match = self.prog.search(text)
if match:
start, end = match.span()
while match and end > start:
self.setFormat(start, end - start, self._format)
match = self.prog.search(text, match.end())
if match:
start, end = match.span()
|
import re
from pyqode.core.qt import QtGui
class MatchHighlighter(QtGui.QSyntaxHighlighter):
def __init__(self, document):
super().__init__(document)
self.prog = None
self._format = QtGui.QTextCharFormat()
self._format.setBackground(QtGui.QBrush(QtGui.QColor('#bbfcbb')))
def highlightBlock(self, text):
if self.prog:
match = self.prog.search(text)
while match:
start, end = match.span()
self.setFormat(start, end - start, self._format)
match = self.prog.search(text, match.end())
Fix infinite loop in highlighterimport re
from pyqode.core.qt import QtGui
class MatchHighlighter(QtGui.QSyntaxHighlighter):
def __init__(self, document):
super().__init__(document)
self.prog = None
self._format = QtGui.QTextCharFormat()
self._format.setBackground(QtGui.QBrush(QtGui.QColor('#bbfcbb')))
def highlightBlock(self, text):
if self.prog and text:
match = self.prog.search(text)
if match:
start, end = match.span()
while match and end > start:
self.setFormat(start, end - start, self._format)
match = self.prog.search(text, match.end())
if match:
start, end = match.span()
|
<commit_before>import re
from pyqode.core.qt import QtGui
class MatchHighlighter(QtGui.QSyntaxHighlighter):
def __init__(self, document):
super().__init__(document)
self.prog = None
self._format = QtGui.QTextCharFormat()
self._format.setBackground(QtGui.QBrush(QtGui.QColor('#bbfcbb')))
def highlightBlock(self, text):
if self.prog:
match = self.prog.search(text)
while match:
start, end = match.span()
self.setFormat(start, end - start, self._format)
match = self.prog.search(text, match.end())
<commit_msg>Fix infinite loop in highlighter<commit_after>import re
from pyqode.core.qt import QtGui
class MatchHighlighter(QtGui.QSyntaxHighlighter):
def __init__(self, document):
super().__init__(document)
self.prog = None
self._format = QtGui.QTextCharFormat()
self._format.setBackground(QtGui.QBrush(QtGui.QColor('#bbfcbb')))
def highlightBlock(self, text):
if self.prog and text:
match = self.prog.search(text)
if match:
start, end = match.span()
while match and end > start:
self.setFormat(start, end - start, self._format)
match = self.prog.search(text, match.end())
if match:
start, end = match.span()
|
696be529ceaef9a9ab6fa43d599456d92336c083
|
lglass/database/mongodb.py
|
lglass/database/mongodb.py
|
# coding: utf-8
import urllib.parse
import pymongo
import pymongo.database
import pymongo.uri_parser
import lglass.database.base
import lglass.rpsl
@lglass.database.base.register("mongodb")
class MongoDBDatabase(lglass.database.base.Database):
def __init__(self, mongo, database="lglass"):
if isinstance(mongo, str):
mongo = pymongo.MongoClient(mongo)
elif isinstance(mongo, (pymongo.MongoClient, pymongo.database.Database)):
pass
else:
raise TypeError("Expected mongo to be str, pymongo.MongoClient or pymongo.Database, got {}".format(type(mongo)))
if isinstance(mongo, pymongo.database.Database):
self.db = mongo
else:
self.db = mongo[database]
def get(self, type, primary_key):
col = self._get_col(type)
mobj = col.find_one({"_id": primary_key})
if mobj is None:
raise KeyError(type, primary_key)
obj = lglass.rpsl.Object(mobj["data"])
return obj
def list(self):
for type in self.object_types:
col = self._get_col(type)
for obj in col.find():
yield (type, obj["_id"])
def find(self, primary_key, types=None):
if types is None:
types = self.object_types
objects = []
for type in types:
try:
objects.append(self.get(type, primary_key))
except KeyError:
pass
return objects
def save(self, object):
col = self._get_col(object.type)
col.save({"_id": object.primary_key, "data": object.to_json_form()})
def delete(self, type, primary_key):
col = self._get_col(type)
col.remove({"_id": primary_key})
def _get_col(self, type):
return self.db[type.replace("-", "_")]
@classmethod
def from_url(cls, url):
rurl = list(url)
rurl[0] = "mongodb"
rurl = urllib.parse.urlunparse(rurl)
return cls(rurl)
|
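`Collection.save()` and `Collection.remove()` date this driver to pymongo 2.x; both were deprecated in pymongo 3 and removed in pymongo 4. A sketch of the two methods using the modern equivalents, `replace_one(..., upsert=True)` and `delete_one()`:
def save(self, object):
    col = self._get_col(object.type)
    col.replace_one({"_id": object.primary_key},
                    {"_id": object.primary_key, "data": object.to_json_form()},
                    upsert=True)

def delete(self, type, primary_key):
    col = self._get_col(type)
    col.delete_one({"_id": primary_key})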
Implement database driver for MongoDB
|
Implement database driver for MongoDB
|
Python
|
mit
|
fritz0705/lglass
|
Implement database driver for MongoDB
|
# coding: utf-8
import urllib.parse
import pymongo
import pymongo.database
import pymongo.uri_parser
import lglass.database.base
import lglass.rpsl
@lglass.database.base.register("mongodb")
class MongoDBDatabase(lglass.database.base.Database):
def __init__(self, mongo, database="lglass"):
if isinstance(mongo, str):
mongo = pymongo.MongoClient(mongo)
elif isinstance(mongo, (pymongo.MongoClient, pymongo.database.Database)):
pass
else:
raise TypeError("Expected mongo to be str, pymongo.MongoClient or pymongo.Database, got {}".format(type(mongo)))
if isinstance(mongo, pymongo.database.Database):
self.db = mongo
else:
self.db = mongo[database]
def get(self, type, primary_key):
col = self._get_col(type)
mobj = col.find_one({"_id": primary_key})
if mobj is None:
raise KeyError(type, primary_key)
obj = lglass.rpsl.Object(mobj["data"])
return obj
def list(self):
for type in self.object_types:
col = self._get_col(type)
for obj in col.find():
yield (type, obj["_id"])
def find(self, primary_key, types=None):
if types is None:
types = self.object_types
objects = []
for type in types:
try:
objects.append(self.get(type, primary_key))
except KeyError:
pass
return objects
def save(self, object):
col = self._get_col(object.type)
col.save({"_id": object.primary_key, "data": object.to_json_form()})
def delete(self, type, primary_key):
col = self._get_col(type)
col.remove({"_id": primary_key})
def _get_col(self, type):
return self.db[type.replace("-", "_")]
@classmethod
def from_url(cls, url):
rurl = list(url)
rurl[0] = "mongodb"
rurl = urllib.parse.urlunparse(rurl)
return cls(rurl)
|
<commit_before><commit_msg>Implement database driver for MongoDB<commit_after>
|
# coding: utf-8
import urllib.parse
import pymongo
import pymongo.database
import pymongo.uri_parser
import lglass.database.base
import lglass.rpsl
@lglass.database.base.register("mongodb")
class MongoDBDatabase(lglass.database.base.Database):
def __init__(self, mongo, database="lglass"):
if isinstance(mongo, str):
mongo = pymongo.MongoClient(mongo)
elif isinstance(mongo, (pymongo.MongoClient, pymongo.database.Database)):
pass
else:
raise TypeError("Expected mongo to be str, pymongo.MongoClient or pymongo.Database, got {}".format(type(mongo)))
if isinstance(mongo, pymongo.database.Database):
self.db = mongo
else:
self.db = mongo[database]
def get(self, type, primary_key):
col = self._get_col(type)
mobj = col.find_one({"_id": primary_key})
if mobj is None:
raise KeyError(type, primary_key)
obj = lglass.rpsl.Object(mobj["data"])
return obj
def list(self):
for type in self.object_types:
col = self._get_col(type)
for obj in col.find():
yield (type, obj["_id"])
def find(self, primary_key, types=None):
if types is None:
types = self.object_types
objects = []
for type in types:
try:
objects.append(self.get(type, primary_key))
except KeyError:
pass
return objects
def save(self, object):
col = self._get_col(object.type)
col.save({"_id": object.primary_key, "data": object.to_json_form()})
def delete(self, type, primary_key):
col = self._get_col(type)
col.remove({"_id": primary_key})
def _get_col(self, type):
return self.db[type.replace("-", "_")]
@classmethod
def from_url(cls, url):
rurl = list(url)
rurl[0] = "mongodb"
rurl = urllib.parse.urlunparse(rurl)
return cls(rurl)
|
Implement database driver for MongoDB# coding: utf-8
import urllib.parse
import pymongo
import pymongo.database
import pymongo.uri_parser
import lglass.database.base
import lglass.rpsl
@lglass.database.base.register("mongodb")
class MongoDBDatabase(lglass.database.base.Database):
def __init__(self, mongo, database="lglass"):
if isinstance(mongo, str):
mongo = pymongo.MongoClient(mongo)
elif isinstance(mongo, (pymongo.MongoClient, pymongo.database.Database)):
pass
else:
raise TypeError("Expected mongo to be str, pymongo.MongoClient or pymongo.Database, got {}".format(type(mongo)))
if isinstance(mongo, pymongo.database.Database):
self.db = mongo
else:
self.db = mongo[database]
def get(self, type, primary_key):
col = self._get_col(type)
mobj = col.find_one({"_id": primary_key})
if mobj is None:
raise KeyError(type, primary_key)
obj = lglass.rpsl.Object(mobj["data"])
return obj
def list(self):
for type in self.object_types:
col = self._get_col(type)
for obj in col.find():
yield (type, obj["_id"])
def find(self, primary_key, types=None):
if types is None:
types = self.object_types
objects = []
for type in types:
try:
objects.append(self.get(type, primary_key))
except KeyError:
pass
return objects
def save(self, object):
col = self._get_col(object.type)
col.save({"_id": object.primary_key, "data": object.to_json_form()})
def delete(self, type, primary_key):
col = self._get_col(type)
col.remove({"_id": primary_key})
def _get_col(self, type):
return self.db[type.replace("-", "_")]
@classmethod
def from_url(cls, url):
rurl = list(url)
rurl[0] = "mongodb"
rurl = urllib.parse.urlunparse(rurl)
return cls(rurl)
|
<commit_before><commit_msg>Implement database driver for MongoDB<commit_after># coding: utf-8
import urllib.parse
import pymongo
import pymongo.database
import pymongo.uri_parser
import lglass.database.base
import lglass.rpsl
@lglass.database.base.register("mongodb")
class MongoDBDatabase(lglass.database.base.Database):
def __init__(self, mongo, database="lglass"):
if isinstance(mongo, str):
mongo = pymongo.MongoClient(mongo)
elif isinstance(mongo, (pymongo.MongoClient, pymongo.database.Database)):
pass
else:
raise TypeError("Expected mongo to be str, pymongo.MongoClient or pymongo.Database, got {}".format(type(mongo)))
if isinstance(mongo, pymongo.database.Database):
self.db = mongo
else:
self.db = mongo[database]
def get(self, type, primary_key):
col = self._get_col(type)
mobj = col.find_one({"_id": primary_key})
if mobj is None:
raise KeyError(type, primary_key)
obj = lglass.rpsl.Object(mobj["data"])
return obj
def list(self):
for type in self.object_types:
col = self._get_col(type)
for obj in col.find():
yield (type, obj["_id"])
def find(self, primary_key, types=None):
if types is None:
types = self.object_types
objects = []
for type in types:
try:
objects.append(self.get(type, primary_key))
except KeyError:
pass
return objects
def save(self, object):
col = self._get_col(object.type)
col.save({"_id": object.primary_key, "data": object.to_json_form()})
def delete(self, type, primary_key):
col = self._get_col(type)
col.remove({"_id": primary_key})
def _get_col(self, type):
return self.db[type.replace("-", "_")]
@classmethod
def from_url(cls, url):
rurl = list(url)
rurl[0] = "mongodb"
rurl = urllib.parse.urlunparse(rurl)
return cls(rurl)
|
|
63bda7d2b33c2c238ef16d5dc5df782e709f0f99
|
sqlobject/tests/test_md5.py
|
sqlobject/tests/test_md5.py
|
from md5 import md5
########################################
## md5.md5
########################################
def test_md5():
assert md5('').hexdigest() == 'd41d8cd98f00b204e9800998ecf8427e'
assert md5('\n').hexdigest() == '68b329da9893e34099c7d8ad5cb9c940'
assert md5('123').hexdigest() == '202cb962ac59075b964b07152d234b70'
assert md5('123\n').hexdigest() == 'ba1f2511fc30423bdbb183fe33f3dd0f'
|
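The standalone `md5` module is Python 2 only; on Python 3 the same digests come from `hashlib`, which requires bytes input:
from hashlib import md5
# Same fixtures, Python 3 style (bytes in, hex digest out).
assert md5(b'').hexdigest() == 'd41d8cd98f00b204e9800998ecf8427e'
assert md5(b'123\n').hexdigest() == 'ba1f2511fc30423bdbb183fe33f3dd0f'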
Add a few tests for md5.md5
|
Add a few tests for md5.md5
git-svn-id: ace7fa9e7412674399eb986d17112e1377537c44@4673 95a46c32-92d2-0310-94a5-8d71aeb3d4b3
|
Python
|
lgpl-2.1
|
drnlm/sqlobject,drnlm/sqlobject,sqlobject/sqlobject,sqlobject/sqlobject
|
Add a few tests for md5.md5
git-svn-id: ace7fa9e7412674399eb986d17112e1377537c44@4673 95a46c32-92d2-0310-94a5-8d71aeb3d4b3
|
from md5 import md5
########################################
## md5.md5
########################################
def test_md5():
assert md5('').hexdigest() == 'd41d8cd98f00b204e9800998ecf8427e'
assert md5('\n').hexdigest() == '68b329da9893e34099c7d8ad5cb9c940'
assert md5('123').hexdigest() == '202cb962ac59075b964b07152d234b70'
assert md5('123\n').hexdigest() == 'ba1f2511fc30423bdbb183fe33f3dd0f'
|
<commit_before><commit_msg>Add a few tests for md5.md5
git-svn-id: ace7fa9e7412674399eb986d17112e1377537c44@4673 95a46c32-92d2-0310-94a5-8d71aeb3d4b3<commit_after>
|
from md5 import md5
########################################
## md5.md5
########################################
def test_md5():
assert md5('').hexdigest() == 'd41d8cd98f00b204e9800998ecf8427e'
assert md5('\n').hexdigest() == '68b329da9893e34099c7d8ad5cb9c940'
assert md5('123').hexdigest() == '202cb962ac59075b964b07152d234b70'
assert md5('123\n').hexdigest() == 'ba1f2511fc30423bdbb183fe33f3dd0f'
|
Add a few tests for md5.md5
git-svn-id: ace7fa9e7412674399eb986d17112e1377537c44@4673 95a46c32-92d2-0310-94a5-8d71aeb3d4b3from md5 import md5
########################################
## md5.md5
########################################
def test_md5():
assert md5('').hexdigest() == 'd41d8cd98f00b204e9800998ecf8427e'
assert md5('\n').hexdigest() == '68b329da9893e34099c7d8ad5cb9c940'
assert md5('123').hexdigest() == '202cb962ac59075b964b07152d234b70'
assert md5('123\n').hexdigest() == 'ba1f2511fc30423bdbb183fe33f3dd0f'
|
<commit_before><commit_msg>Add a few tests for md5.md5
git-svn-id: ace7fa9e7412674399eb986d17112e1377537c44@4673 95a46c32-92d2-0310-94a5-8d71aeb3d4b3<commit_after>from md5 import md5
########################################
## md5.md5
########################################
def test_md5():
assert md5('').hexdigest() == 'd41d8cd98f00b204e9800998ecf8427e'
assert md5('\n').hexdigest() == '68b329da9893e34099c7d8ad5cb9c940'
assert md5('123').hexdigest() == '202cb962ac59075b964b07152d234b70'
assert md5('123\n').hexdigest() == 'ba1f2511fc30423bdbb183fe33f3dd0f'
|
|
02512cb193372e02ac4e628631901f1ff7d0eab5
|
scripts/setup-do-node.py
|
scripts/setup-do-node.py
|
#!/usr/bin/env python
import subprocess
import re
import os.path
subprocess.check_call(['fallocate', '-l', '2G', '/swapfile'])
subprocess.check_call(['mkswap', '/swapfile'])
subprocess.check_call(['swapon', '/swapfile'])
with open('/etc/fstab', 'rb') as f:
fstab = f.read()
print(fstab)
with open('/etc/fstab', 'wb') as f:
if fstab.endswith('\n'):
fstab = fstab[:-1]
f.write(fstab + '\n/swapfile none swap sw 0 0' + '\n')
with open('/etc/default/grub', 'rb') as f:
grub_lines = f.readlines()
re_grub_line = re.compile(r'')
for i, line in enumerate(grub_lines[:]):
if line.startswith('GRUB_CMDLINE_LINUX_DEFAULT='):
m = re.match(r'^GRUB_CMDLINE_LINUX_DEFAULT="(.+)?"$', line)
        previous = m.group(1) or ''
        if previous:
            previous = previous + ' '
        grub_lines[i] = 'GRUB_CMDLINE_LINUX_DEFAULT="{}{}"\n'.format(
            previous, 'cgroup_enable=memory swapaccount=1')
with open('/etc/default/grub', 'wb') as f:
f.writelines(grub_lines)
subprocess.check_call('update-grub')
with open(os.path.expanduser('~/.bashrc'), 'rb') as f:
bashrc = f.read()
with open(os.path.expanduser('~/.bashrc'), 'wb') as f:
f.write("""{}
export LANGUAGE=en_US.UTF-8
export LC_ALL=en_US.UTF-8
export LANG=en_US.UTF-8
export LC_TYPE=en_US.UTF-8
export EDITOR=vim
""".format(bashrc))
subprocess.check_call('reboot')
|
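Rewriting `GRUB_CMDLINE_LINUX_DEFAULT` by hand is the fragile step; a sketch of the same edit done with `re.sub`, which leaves the trailing newline untouched and tolerates an empty existing value:
import re

def add_kernel_args(line, extra='cgroup_enable=memory swapaccount=1'):
    # "quiet" -> "quiet cgroup_enable=memory swapaccount=1"; "" -> just the extra args.
    def repl(m):
        existing = m.group(2)
        return m.group(1) + (existing + ' ' if existing else '') + extra + m.group(3)
    return re.sub(r'^(GRUB_CMDLINE_LINUX_DEFAULT=")(.*?)(")', repl, line)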
Add script for setting up DO nodes
|
Add script for setting up DO nodes
|
Python
|
mit
|
muzhack/muzhack,muzhack/musitechhub,muzhack/musitechhub,muzhack/muzhack,muzhack/muzhack,muzhack/muzhack,muzhack/musitechhub,muzhack/musitechhub
|
Add script for setting up DO nodes
|
#!/usr/bin/env python
import subprocess
import re
import os.path
subprocess.check_call(['fallocate', '-l', '2G', '/swapfile'])
subprocess.check_call(['mkswap', '/swapfile'])
subprocess.check_call(['swapon', '/swapfile'])
with open('/etc/fstab', 'rb') as f:
fstab = f.read()
print(fstab)
with open('/etc/fstab', 'wb') as f:
if fstab.endswith('\n'):
fstab = fstab[:-1]
f.write(fstab + '\n/swapfile none swap sw 0 0' + '\n')
with open('/etc/default/grub', 'rb') as f:
grub_lines = f.readlines()
re_grub_line = re.compile(r'')
for i, line in enumerate(grub_lines[:]):
if line.startswith('GRUB_CMDLINE_LINUX_DEFAULT='):
m = re.match(r'^GRUB_CMDLINE_LINUX_DEFAULT="(.+)?"$', line)
        previous = m.group(1) or ''
        if previous:
            previous = previous + ' '
        grub_lines[i] = 'GRUB_CMDLINE_LINUX_DEFAULT="{}{}"\n'.format(
            previous, 'cgroup_enable=memory swapaccount=1')
with open('/etc/default/grub', 'wb') as f:
f.writelines(grub_lines)
subprocess.check_call('update-grub')
with open(os.path.expanduser('~/.bashrc'), 'rb') as f:
bashrc = f.read()
with open(os.path.expanduser('~/.bashrc'), 'wb') as f:
f.write("""{}
export LANGUAGE=en_US.UTF-8
export LC_ALL=en_US.UTF-8
export LANG=en_US.UTF-8
export LC_TYPE=en_US.UTF-8
export EDITOR=vim
""".format(bashrc))
subprocess.check_call('reboot')
|
<commit_before><commit_msg>Add script for setting up DO nodes<commit_after>
|
#!/usr/bin/env python
import subprocess
import re
import os.path
subprocess.check_call(['fallocate', '-l', '2G', '/swapfile'])
subprocess.check_call(['mkswap', '/swapfile'])
subprocess.check_call(['swapon', '/swapfile'])
with open('/etc/fstab', 'rb') as f:
fstab = f.read()
print(fstab)
with open('/etc/fstab', 'wb') as f:
if fstab.endswith('\n'):
fstab = fstab[:-1]
f.write(fstab + '\n/swapfile none swap sw 0 0' + '\n')
with open('/etc/default/grub', 'rb') as f:
grub_lines = f.readlines()
re_grub_line = re.compile(r'')
for i, line in enumerate(grub_lines[:]):
if line.startswith('GRUB_CMDLINE_LINUX_DEFAULT='):
m = re.match(r'^GRUB_CMDLINE_LINUX_DEFAULT="(.+)?"$', line)
previous = m.group(1)
if previous:
previous = previous + ' '
grub_lines[i] = 'GRUB_CMDLINE_LINUX_DEFAULT="{} {}"'.format(
previous, 'cgroup_enable=memory swapaccount=1')
with open('/etc/default/grub', 'wb') as f:
f.writelines(grub_lines)
subprocess.check_call('update-grub')
with open(os.path.expanduser('~/.bashrc'), 'rb') as f:
bashrc = f.read()
with open(os.path.expanduser('~/.bashrc'), 'wb') as f:
f.write("""{}
export LANGUAGE=en_US.UTF-8
export LC_ALL=en_US.UTF-8
export LANG=en_US.UTF-8
export LC_TYPE=en_US.UTF-8
export EDITOR=vim
""".format(bashrc))
subprocess.check_call('reboot')
|
Add script for setting up DO nodes#!/usr/bin/env python
import subprocess
import re
import os.path
subprocess.check_call(['fallocate', '-l', '2G', '/swapfile'])
subprocess.check_call(['mkswap', '/swapfile'])
subprocess.check_call(['swapon', '/swapfile'])
with open('/etc/fstab', 'rb') as f:
fstab = f.read()
print(fstab)
with open('/etc/fstab', 'wb') as f:
if fstab.endswith('\n'):
fstab = fstab[:-1]
f.write(fstab + '\n/swapfile none swap sw 0 0' + '\n')
with open('/etc/default/grub', 'rb') as f:
grub_lines = f.readlines()
re_grub_line = re.compile(r'')
for i, line in enumerate(grub_lines[:]):
if line.startswith('GRUB_CMDLINE_LINUX_DEFAULT='):
m = re.match(r'^GRUB_CMDLINE_LINUX_DEFAULT="(.+)?"$', line)
previous = m.group(1)
if previous:
previous = previous + ' '
grub_lines[i] = 'GRUB_CMDLINE_LINUX_DEFAULT="{} {}"'.format(
previous, 'cgroup_enable=memory swapaccount=1')
with open('/etc/default/grub', 'wb') as f:
f.writelines(grub_lines)
subprocess.check_call('update-grub')
with open(os.path.expanduser('~/.bashrc'), 'rb') as f:
bashrc = f.read()
with open(os.path.expanduser('~/.bashrc'), 'wb') as f:
f.write("""{}
export LANGUAGE=en_US.UTF-8
export LC_ALL=en_US.UTF-8
export LANG=en_US.UTF-8
export LC_TYPE=en_US.UTF-8
export EDITOR=vim
""".format(bashrc))
subprocess.check_call('reboot')
|
<commit_before><commit_msg>Add script for setting up DO nodes<commit_after>#!/usr/bin/env python
import subprocess
import re
import os.path
subprocess.check_call(['fallocate', '-l', '2G', '/swapfile'])
subprocess.check_call(['mkswap', '/swapfile'])
subprocess.check_call(['swapon', '/swapfile'])
with open('/etc/fstab', 'rb') as f:
fstab = f.read()
print(fstab)
with open('/etc/fstab', 'wb') as f:
if fstab.endswith('\n'):
fstab = fstab[:-1]
f.write(fstab + '\n/swapfile none swap sw 0 0' + '\n')
with open('/etc/default/grub', 'rb') as f:
grub_lines = f.readlines()
re_grub_line = re.compile(r'')
for i, line in enumerate(grub_lines[:]):
if line.startswith('GRUB_CMDLINE_LINUX_DEFAULT='):
m = re.match(r'^GRUB_CMDLINE_LINUX_DEFAULT="(.+)?"$', line)
previous = m.group(1)
if previous:
previous = previous + ' '
grub_lines[i] = 'GRUB_CMDLINE_LINUX_DEFAULT="{} {}"'.format(
previous, 'cgroup_enable=memory swapaccount=1')
with open('/etc/default/grub', 'wb') as f:
f.writelines(grub_lines)
subprocess.check_call('update-grub')
with open(os.path.expanduser('~/.bashrc'), 'rb') as f:
bashrc = f.read()
with open(os.path.expanduser('~/.bashrc'), 'wb') as f:
f.write("""{}
export LANGUAGE=en_US.UTF-8
export LC_ALL=en_US.UTF-8
export LANG=en_US.UTF-8
export LC_TYPE=en_US.UTF-8
export EDITOR=vim
""".format(bashrc))
subprocess.check_call('reboot')
|
|
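Two caveats about the DO-node setup script recorded above: it is not idempotent (a second run would re-append the fstab entry and try to re-initialize an already-active swap file), and the rewritten GRUB_CMDLINE_LINUX_DEFAULT line loses its trailing newline, since readlines() preserves '\n' but the replacement string omits it. A minimal sketch of an idempotent swap-file step, under the same assumption of running as root on a Linux droplet; the SWAPFILE constant and ensure_swapfile name are illustrative, not from the repository:

import os
import subprocess

SWAPFILE = '/swapfile'  # illustrative constant
FSTAB_ENTRY = SWAPFILE + ' none swap sw 0 0'

def ensure_swapfile():
    # Create and enable the swap file only if it does not exist yet,
    # so re-running the setup script is harmless.
    if not os.path.exists(SWAPFILE):
        subprocess.check_call(['fallocate', '-l', '2G', SWAPFILE])
        os.chmod(SWAPFILE, 0o600)  # swapon warns about insecure permissions
        subprocess.check_call(['mkswap', SWAPFILE])
        subprocess.check_call(['swapon', SWAPFILE])
    with open('/etc/fstab') as f:
        fstab = f.read()
    # Append the mount entry only once.
    if FSTAB_ENTRY not in fstab:
        with open('/etc/fstab', 'a') as f:
            f.write(FSTAB_ENTRY + '\n')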
1945a200cb8d517ce16eb039ecb4c3afc67acb9b
|
bin/checkpypi.py
|
bin/checkpypi.py
|
#!/usr/bin/env python
# Adapted from http://code.activestate.com/recipes/577708-check-for-package-updates-on-pypi-works-best-in-pi/
# Changelog:
# - patch to python 3.6
# - include hidden releases
import xmlrpc
import pip
pypi = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
for dist in pip.get_installed_distributions():
available = pypi.package_releases(dist.project_name, True)
if not available:
# Try to capitalize pkg name
available = pypi.package_releases(dist.project_name.capitalize())
if not available:
msg = 'no releases at pypi'
elif available[0] != dist.version:
msg = '{} available'.format(available[0])
else:
msg = 'up to date'
pkg_info = '{dist.project_name} {dist.version}'.format(dist=dist)
print('{pkg_info:40} {msg}'.format(pkg_info=pkg_info, msg=msg))
|
Check latest version of Python modules in Pypi
|
Check latest version of Python modules in Pypi
|
Python
|
apache-2.0
|
verdimrc/linuxcfg,verdimrc/linuxcfg,verdimrc/linuxcfg
|
Check latest version of Python modules in Pypi
|
#!/usr/bin/env python
# Adapted from http://code.activestate.com/recipes/577708-check-for-package-updates-on-pypi-works-best-in-pi/
# Changelog:
# - patch to python 3.6
# - include hidden releases
import xmlrpc
import pip
pypi = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
for dist in pip.get_installed_distributions():
available = pypi.package_releases(dist.project_name, True)
if not available:
# Try to capitalize pkg name
available = pypi.package_releases(dist.project_name.capitalize())
if not available:
msg = 'no releases at pypi'
elif available[0] != dist.version:
msg = '{} available'.format(available[0])
else:
msg = 'up to date'
pkg_info = '{dist.project_name} {dist.version}'.format(dist=dist)
print('{pkg_info:40} {msg}'.format(pkg_info=pkg_info, msg=msg))
|
<commit_before><commit_msg>Check latest version of Python modules in Pypi<commit_after>
|
#!/usr/bin/env python
# Adapted from http://code.activestate.com/recipes/577708-check-for-package-updates-on-pypi-works-best-in-pi/
# Changelog:
# - patch to python 3.6
# - include hidden releases
import xmlrpc
import pip
pypi = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
for dist in pip.get_installed_distributions():
available = pypi.package_releases(dist.project_name, True)
if not available:
# Try to capitalize pkg name
available = pypi.package_releases(dist.project_name.capitalize())
if not available:
msg = 'no releases at pypi'
elif available[0] != dist.version:
msg = '{} available'.format(available[0])
else:
msg = 'up to date'
pkg_info = '{dist.project_name} {dist.version}'.format(dist=dist)
print('{pkg_info:40} {msg}'.format(pkg_info=pkg_info, msg=msg))
|
Check latest version of Python modules in Pypi#!/usr/bin/env python
# Adapted from http://code.activestate.com/recipes/577708-check-for-package-updates-on-pypi-works-best-in-pi/
# Changelog:
# - patch to python 3.6
# - include hidden releases
import xmlrpc
import pip
pypi = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
for dist in pip.get_installed_distributions():
available = pypi.package_releases(dist.project_name, True)
if not available:
# Try to capitalize pkg name
available = pypi.package_releases(dist.project_name.capitalize())
if not available:
msg = 'no releases at pypi'
elif available[0] != dist.version:
msg = '{} available'.format(available[0])
else:
msg = 'up to date'
pkg_info = '{dist.project_name} {dist.version}'.format(dist=dist)
print('{pkg_info:40} {msg}'.format(pkg_info=pkg_info, msg=msg))
|
<commit_before><commit_msg>Check latest version of Python modules in Pypi<commit_after>#!/usr/bin/env python
# Adapted from http://code.activestate.com/recipes/577708-check-for-package-updates-on-pypi-works-best-in-pi/
# Changelog:
# - patch to python 3.6
# - include hidden releases
import xmlrpc
import pip
pypi = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
for dist in pip.get_installed_distributions():
available = pypi.package_releases(dist.project_name, True)
if not available:
# Try to capitalize pkg name
available = pypi.package_releases(dist.project_name.capitalize())
if not available:
msg = 'no releases at pypi'
elif available[0] != dist.version:
msg = '{} available'.format(available[0])
else:
msg = 'up to date'
pkg_info = '{dist.project_name} {dist.version}'.format(dist=dist)
print('{pkg_info:40} {msg}'.format(pkg_info=pkg_info, msg=msg))
|
|
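As recorded, the PyPI checker has a latent import bug: in Python 3, `import xmlrpc` does not import the `xmlrpc.client` submodule, so `xmlrpc.client.ServerProxy` raises AttributeError unless `import xmlrpc.client` is used. Separately, `pip.get_installed_distributions()` was later removed from pip's public API, and PyPI has since deprecated much of its XML-RPC interface. A rough sketch of the same report against the PyPI JSON API — assuming Python 3.8+ so importlib.metadata can replace the pip call, and network access to pypi.org:

import json
import urllib.request
from importlib import metadata

for dist in metadata.distributions():
    name = dist.metadata['Name']
    url = 'https://pypi.org/pypi/{}/json'.format(name)
    try:
        # The JSON API reports the latest release under info.version.
        with urllib.request.urlopen(url) as resp:
            latest = json.load(resp)['info']['version']
    except Exception:
        msg = 'no releases at pypi'
    else:
        msg = 'up to date' if latest == dist.version else '{} available'.format(latest)
    print('{:40} {}'.format('{} {}'.format(name, dist.version), msg))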
8a9f558387fbed5442f141a70aaffee265684755
|
apps/network/tests/test_routes/test_roles.py
|
apps/network/tests/test_routes/test_roles.py
|
def test_create_role(client):
result = client.post("/roles/", data={"role": "admin"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Role created succesfully!"}
def test_get_all_roles(client):
result = client.get("/roles/")
assert result.status_code == 200
assert result.get_json() == {"roles": ["RoleA", "RoleB", "RoleC"]}
def test_get_specific_role(client):
result = client.get("/roles/654816")
assert result.status_code == 200
assert result.get_json() == {"role": {"name": "Role A", "id": "654816"}}
def test_update_role(client):
result = client.put("/roles/654816", data={"role": "{new_role_configs}"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Role was updated succesfully!"}
def test_delete_role(client):
result = client.delete("/roles/654816")
assert result.status_code == 200
assert result.get_json() == {"msg": "Role was deleted succesfully!"}
|
ADD Network roles unit tests
|
ADD Network roles unit tests
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
ADD Network roles unit tests
|
def test_create_role(client):
result = client.post("/roles/", data={"role": "admin"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Role created succesfully!"}
def test_get_all_roles(client):
result = client.get("/roles/")
assert result.status_code == 200
assert result.get_json() == {"roles": ["RoleA", "RoleB", "RoleC"]}
def test_get_specific_role(client):
result = client.get("/roles/654816")
assert result.status_code == 200
assert result.get_json() == {"role": {"name": "Role A", "id": "654816"}}
def test_update_role(client):
result = client.put("/roles/654816", data={"role": "{new_role_configs}"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Role was updated succesfully!"}
def test_delete_role(client):
result = client.delete("/roles/654816")
assert result.status_code == 200
assert result.get_json() == {"msg": "Role was deleted succesfully!"}
|
<commit_before><commit_msg>ADD Network roles unit tests<commit_after>
|
def test_create_role(client):
result = client.post("/roles/", data={"role": "admin"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Role created succesfully!"}
def test_get_all_roles(client):
result = client.get("/roles/")
assert result.status_code == 200
assert result.get_json() == {"roles": ["RoleA", "RoleB", "RoleC"]}
def test_get_specific_role(client):
result = client.get("/roles/654816")
assert result.status_code == 200
assert result.get_json() == {"role": {"name": "Role A", "id": "654816"}}
def test_update_role(client):
result = client.put("/roles/654816", data={"role": "{new_role_configs}"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Role was updated succesfully!"}
def test_delete_role(client):
result = client.delete("/roles/654816")
assert result.status_code == 200
assert result.get_json() == {"msg": "Role was deleted succesfully!"}
|
ADD Network roles unit testsdef test_create_role(client):
result = client.post("/roles/", data={"role": "admin"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Role created succesfully!"}
def test_get_all_roles(client):
result = client.get("/roles/")
assert result.status_code == 200
assert result.get_json() == {"roles": ["RoleA", "RoleB", "RoleC"]}
def test_get_specific_role(client):
result = client.get("/roles/654816")
assert result.status_code == 200
assert result.get_json() == {"role": {"name": "Role A", "id": "654816"}}
def test_update_role(client):
result = client.put("/roles/654816", data={"role": "{new_role_configs}"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Role was updated succesfully!"}
def test_delete_role(client):
result = client.delete("/roles/654816")
assert result.status_code == 200
assert result.get_json() == {"msg": "Role was deleted succesfully!"}
|
<commit_before><commit_msg>ADD Network roles unit tests<commit_after>def test_create_role(client):
result = client.post("/roles/", data={"role": "admin"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Role created succesfully!"}
def test_get_all_roles(client):
result = client.get("/roles/")
assert result.status_code == 200
assert result.get_json() == {"roles": ["RoleA", "RoleB", "RoleC"]}
def test_get_specific_role(client):
result = client.get("/roles/654816")
assert result.status_code == 200
assert result.get_json() == {"role": {"name": "Role A", "id": "654816"}}
def test_update_role(client):
result = client.put("/roles/654816", data={"role": "{new_role_configs}"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Role was updated succesfully!"}
def test_delete_role(client):
result = client.delete("/roles/654816")
assert result.status_code == 200
assert result.get_json() == {"msg": "Role was deleted succesfully!"}
|
|
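The role tests above presuppose a pytest fixture named `client`, which the record does not include. A minimal sketch of what such a fixture typically looks like for a Flask app; the `create_app` factory import is hypothetical, standing in for whatever application factory the PySyft tree actually provides:

import pytest

from app import create_app  # hypothetical application-factory import

@pytest.fixture
def client():
    app = create_app()
    app.config['TESTING'] = True
    # Flask's built-in test client exercises the routes without a server.
    with app.test_client() as client:
        yield client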
47bd7608cd67ab9657726ef024ac04a3c793aa82
|
build/android/adb_reverse_forwarder.py
|
build/android/adb_reverse_forwarder.py
|
#!/usr/bin/env python
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Command line tool for forwarding ports from a device to the host.
Allows an Android device to connect to services running on the host machine,
i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder|
to be built.
"""
import optparse
import sys
import time
from pylib import android_commands, forwarder
from pylib.utils import run_tests_helper
def main(argv):
parser = optparse.OptionParser(usage='Usage: %prog [options] device_port '
'host_port [device_port_2 host_port_2] ...',
description=__doc__)
parser.add_option('-v',
'--verbose',
dest='verbose_count',
default=0,
action='count',
help='Verbose level (multiple times for more)')
parser.add_option('--device',
help='Serial number of device we should use.')
parser.add_option('--host',
help='Host address to forward to from the host machine. '
'127.0.0.1 by default', default='127.0.0.1')
parser.add_option('--debug', action='store_const', const='Debug',
dest='build_type', default='Release',
help='Use Debug build of host tools instead of Release.')
options, args = parser.parse_args(argv)
run_tests_helper.SetLogLevel(options.verbose_count)
if len(args) < 2 or not len(args) % 2:
parser.error('Need even number of port pairs')
sys.exit(1)
try:
port_pairs = map(int, args[1:])
port_pairs = zip(port_pairs[::2], port_pairs[1::2])
except ValueError:
parser.error('Bad port number')
sys.exit(1)
adb = android_commands.AndroidCommands(options.device)
forwarder_instance = forwarder.Forwarder(adb, options.build_type)
try:
forwarder_instance.Run(port_pairs, None, options.host)
while True:
time.sleep(60)
except KeyboardInterrupt:
sys.exit(0)
finally:
forwarder_instance.Close()
if __name__ == '__main__':
main(sys.argv)
|
Add a command line tool for reverse port forwarding
|
[Android] Add a command line tool for reverse port forwarding
This patch adds a command line interface to
build/android/pylib/forwarder.py. It allows an Android device to access
services running on the host machine or elsewhere. This is essentially
the reverse of "adb forward".
Review URL: https://chromiumcodereview.appspot.com/11828051
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@176372 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
zcbenz/cefode-chromium,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,M4sse/chromium.src,anirudhSK/chromium,chuan9/chromium-crosswalk,patrickm/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,ltilve/chromium,ondra-novak/chromium.src,hujiajie/pa-chromium,ondra-novak/chromium.src,hujiajie/pa-chromium,crosswalk-project/chromium-crosswalk-efl,zcbenz/cefode-chromium,anirudhSK/chromium,dushu1203/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,mogoweb/chromium-crosswalk,littlstar/chromium.src,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,nacl-webkit/chrome_deps,Just-D/chromium-1,markYoungH/chromium.src,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,timopulkkinen/BubbleFish,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,timopulkkinen/BubbleFish,PeterWangIntel/chromium-crosswalk,pozdnyakov/chromium-crosswalk,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,mogoweb/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,anirudhSK/chromium,TheTypoMaster/chromium-crosswalk,pozdnyakov/chromium-crosswalk,timopulkkinen/BubbleFish,hujiajie/pa-chromium,dushu1203/chromium.src,littlstar/chromium.src,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,hujiajie/pa-chromium,patrickm/chromium.src,markYoungH/chromium.src,Fireblend/chromium-crosswalk,ltilve/chromium,bright-sparks/chromium-spacewalk,bright-sparks/chromium-spacewalk,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,ChromiumWebApps/chromium,Chilledheart/chromium,jaruba/chromium.src,littlstar/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,ChromiumWebApps/chromium,Just-D/chromium-1,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,dednal/chromium.src,mogoweb/chromium-crosswalk,anirudhSK/chromium,M4sse/chromium.src,jaruba/chromium.src,jaruba/chromium.src,markYoungH/chromium.src,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,pozdnyakov/chromium-crosswalk,mogoweb/chromium-crosswalk,littlstar/chromium.src,mogoweb/chromium-crosswalk,M4sse/chromium.src,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,nacl-webkit/chrome_deps,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,nacl-webkit/chrome_deps,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,zcbenz/cefode-chromium,pozdnyakov/chromium-crosswalk,M4sse/chromium.src,anirudhSK/chromium,mogoweb/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,Jonekee/chromium.src,hgl888/chromium-crosswalk,timopulkkinen/BubbleFish,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,nacl-webkit/chrome_deps,Jonekee/chromium.src,mogoweb/chromium-crosswalk,littlstar/chromium.src,crosswalk-project/chromium-crosswalk-efl,Jonekee/chromium.src,anirudhSK/chromium,timopulkkinen/BubbleFish,dushu1203/chromium.src,ondra-novak/chromium.src,nacl-webkit/chrome_deps,krieger-od/nwjs_chromium.src,timopulkkinen/BubbleFish,nacl-webkit/chrome_deps,hujiajie/pa-chromium,Jonekee/chromium.src,hujiajie/pa-chromium,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,bright-sparks/chromium-spacewalk,nacl-webkit/chrome_deps,M4sse/chromium.src,fujunwei/chromium-crosswalk,markYoungH/chromium.src,hujiajie/pa-chromium,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,hgl888/chromium-crosswalk,Chilledheart/chromium,dednal/chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,jaruba/chromium.src,dednal/chromium.src,anirudhSK/chromium,bright-sparks/chromium-spacewalk,dednal/chromium.src,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,nacl-webkit/chrome_deps,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,nacl-webkit/chrome_deps,ChromiumWebApps/chromium,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,anirudhSK/chromium,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,hujiajie/pa-chromium,axinging/chromium-crosswalk,Chilledheart/chromium,ltilve/chromium,zcbenz/cefode-chromium,jaruba/chromium.src,hujiajie/pa-chromium,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,markYoungH/chromium.src,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,pozdnyakov/chromium-crosswalk,pozdnyakov/chromium-crosswalk,patrickm/chromium.src,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,anirudhSK/chromium,hgl888/chromium-crosswalk,dushu1203/chromium.src,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,M4sse/chromium.src,dushu1203/chromium.src,dushu1203/chromium.src,chuan9/chromium-crosswalk,jaruba/chromium.src,dushu1203/chromium.src,patrickm/chromium.src,hgl888/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,dednal/chromium.src,markYoungH/chromium.src,Just-D/chromium-1,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,timopulkkinen/BubbleFish,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,zcbenz/cefode-chromium,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,zcbenz/cefode-chromium,Just-D/chromium-1,ltilve/chromium,krieger-od/nwjs_chromium.src,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,ltilve/chromium,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,Fireblend/chromium-crosswalk,Jonekee/chromium.src,pozdnyakov/chromium-crosswalk,littlstar/chromium.src,dednal/chromium.src,pozdnyakov/chromium-crosswalk,hgl888/chromium-crosswalk,hujiajie/pa-chromium,axinging/chromium-crosswalk,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,M4sse/chromium.src,ltilve/chromium,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,M4sse/chromium.src,nacl-webkit/chrome_deps,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,jaruba/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,ChromiumWebApps/chromium,patrickm/chromium.src,jaruba/chromium.src,ondra-novak/chromium.src,Chilledheart/chromium,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,ondra-novak/chromium.src,Chilledheart/chromium,zcbenz/cefode-chromium,hgl888/chromium-crosswalk-efl,zcbenz/cefode-chromium,ltilve/chromium,jaruba/chromium.src,nacl-webkit/chrome_deps,littlstar/chromium.src,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,zcbenz/cefode-chromium,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,timopulkkinen/BubbleFish,crosswalk-project/chromium-crosswalk-efl,timopulkkinen/BubbleFish,ltilve/chromium,ChromiumWebApps/chromium
|
[Android] Add a command line tool for reverse port forwarding
This patch adds a command line interface to
build/android/pylib/forwarder.py. It allows an Android device to access
services running on the host machine or elsewhere. This is essentially
the reverse of "adb forward".
Review URL: https://chromiumcodereview.appspot.com/11828051
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@176372 0039d316-1c4b-4281-b951-d872f2087c98
|
#!/usr/bin/env python
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Command line tool for forwarding ports from a device to the host.
Allows an Android device to connect to services running on the host machine,
i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder|
to be built.
"""
import optparse
import sys
import time
from pylib import android_commands, forwarder
from pylib.utils import run_tests_helper
def main(argv):
parser = optparse.OptionParser(usage='Usage: %prog [options] device_port '
'host_port [device_port_2 host_port_2] ...',
description=__doc__)
parser.add_option('-v',
'--verbose',
dest='verbose_count',
default=0,
action='count',
help='Verbose level (multiple times for more)')
parser.add_option('--device',
help='Serial number of device we should use.')
parser.add_option('--host',
help='Host address to forward to from the host machine. '
'127.0.0.1 by default', default='127.0.0.1')
parser.add_option('--debug', action='store_const', const='Debug',
dest='build_type', default='Release',
help='Use Debug build of host tools instead of Release.')
options, args = parser.parse_args(argv)
run_tests_helper.SetLogLevel(options.verbose_count)
if len(args) < 2 or not len(args) % 2:
parser.error('Need even number of port pairs')
sys.exit(1)
try:
port_pairs = map(int, args[1:])
port_pairs = zip(port_pairs[::2], port_pairs[1::2])
except ValueError:
parser.error('Bad port number')
sys.exit(1)
adb = android_commands.AndroidCommands(options.device)
forwarder_instance = forwarder.Forwarder(adb, options.build_type)
try:
forwarder_instance.Run(port_pairs, None, options.host)
while True:
time.sleep(60)
except KeyboardInterrupt:
sys.exit(0)
finally:
forwarder_instance.Close()
if __name__ == '__main__':
main(sys.argv)
|
<commit_before><commit_msg>[Android] Add a command line tool for reverse port forwarding
This patch adds a command line interface to
build/android/pylib/forwarder.py. It allows an Android device to access
services running on the host machine or elsewhere. This is essentially
the reverse of "adb forward".
Review URL: https://chromiumcodereview.appspot.com/11828051
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@176372 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
#!/usr/bin/env python
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Command line tool for forwarding ports from a device to the host.
Allows an Android device to connect to services running on the host machine,
i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder|
to be built.
"""
import optparse
import sys
import time
from pylib import android_commands, forwarder
from pylib.utils import run_tests_helper
def main(argv):
parser = optparse.OptionParser(usage='Usage: %prog [options] device_port '
'host_port [device_port_2 host_port_2] ...',
description=__doc__)
parser.add_option('-v',
'--verbose',
dest='verbose_count',
default=0,
action='count',
help='Verbose level (multiple times for more)')
parser.add_option('--device',
help='Serial number of device we should use.')
parser.add_option('--host',
help='Host address to forward to from the host machine. '
'127.0.0.1 by default', default='127.0.0.1')
parser.add_option('--debug', action='store_const', const='Debug',
dest='build_type', default='Release',
help='Use Debug build of host tools instead of Release.')
options, args = parser.parse_args(argv)
run_tests_helper.SetLogLevel(options.verbose_count)
if len(args) < 2 or not len(args) % 2:
parser.error('Need even number of port pairs')
sys.exit(1)
try:
port_pairs = map(int, args[1:])
port_pairs = zip(port_pairs[::2], port_pairs[1::2])
except ValueError:
parser.error('Bad port number')
sys.exit(1)
adb = android_commands.AndroidCommands(options.device)
forwarder_instance = forwarder.Forwarder(adb, options.build_type)
try:
forwarder_instance.Run(port_pairs, None, options.host)
while True:
time.sleep(60)
except KeyboardInterrupt:
sys.exit(0)
finally:
forwarder_instance.Close()
if __name__ == '__main__':
main(sys.argv)
|
[Android] Add a command line tool for reverse port forwarding
This patch adds a command line interface to
build/android/pylib/forwarder.py. It allows an Android device to access
services running on the host machine or elsewhere. This is essentially
the reverse of "adb forward".
Review URL: https://chromiumcodereview.appspot.com/11828051
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@176372 0039d316-1c4b-4281-b951-d872f2087c98#!/usr/bin/env python
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Command line tool for forwarding ports from a device to the host.
Allows an Android device to connect to services running on the host machine,
i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder|
to be built.
"""
import optparse
import sys
import time
from pylib import android_commands, forwarder
from pylib.utils import run_tests_helper
def main(argv):
parser = optparse.OptionParser(usage='Usage: %prog [options] device_port '
'host_port [device_port_2 host_port_2] ...',
description=__doc__)
parser.add_option('-v',
'--verbose',
dest='verbose_count',
default=0,
action='count',
help='Verbose level (multiple times for more)')
parser.add_option('--device',
help='Serial number of device we should use.')
parser.add_option('--host',
help='Host address to forward to from the host machine. '
'127.0.0.1 by default', default='127.0.0.1')
parser.add_option('--debug', action='store_const', const='Debug',
dest='build_type', default='Release',
help='Use Debug build of host tools instead of Release.')
options, args = parser.parse_args(argv)
run_tests_helper.SetLogLevel(options.verbose_count)
if len(args) < 2 or not len(args) % 2:
parser.error('Need even number of port pairs')
sys.exit(1)
try:
port_pairs = map(int, args[1:])
port_pairs = zip(port_pairs[::2], port_pairs[1::2])
except ValueError:
parser.error('Bad port number')
sys.exit(1)
adb = android_commands.AndroidCommands(options.device)
forwarder_instance = forwarder.Forwarder(adb, options.build_type)
try:
forwarder_instance.Run(port_pairs, None, options.host)
while True:
time.sleep(60)
except KeyboardInterrupt:
sys.exit(0)
finally:
forwarder_instance.Close()
if __name__ == '__main__':
main(sys.argv)
|
<commit_before><commit_msg>[Android] Add a command line tool for reverse port forwarding
This patch adds a command line interface to
build/android/pylib/forwarder.py. It allows an Android device to access
services running on the host machine or elsewhere. This is essentially
the reverse of "adb forward".
Review URL: https://chromiumcodereview.appspot.com/11828051
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@176372 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>#!/usr/bin/env python
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Command line tool for forwarding ports from a device to the host.
Allows an Android device to connect to services running on the host machine,
i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder|
to be built.
"""
import optparse
import sys
import time
from pylib import android_commands, forwarder
from pylib.utils import run_tests_helper
def main(argv):
parser = optparse.OptionParser(usage='Usage: %prog [options] device_port '
'host_port [device_port_2 host_port_2] ...',
description=__doc__)
parser.add_option('-v',
'--verbose',
dest='verbose_count',
default=0,
action='count',
help='Verbose level (multiple times for more)')
parser.add_option('--device',
help='Serial number of device we should use.')
parser.add_option('--host',
help='Host address to forward to from the host machine. '
'127.0.0.1 by default', default='127.0.0.1')
parser.add_option('--debug', action='store_const', const='Debug',
dest='build_type', default='Release',
help='Use Debug build of host tools instead of Release.')
options, args = parser.parse_args(argv)
run_tests_helper.SetLogLevel(options.verbose_count)
if len(args) < 2 or not len(args) % 2:
parser.error('Need even number of port pairs')
sys.exit(1)
try:
port_pairs = map(int, args[1:])
port_pairs = zip(port_pairs[::2], port_pairs[1::2])
except ValueError:
parser.error('Bad port number')
sys.exit(1)
adb = android_commands.AndroidCommands(options.device)
forwarder_instance = forwarder.Forwarder(adb, options.build_type)
try:
forwarder_instance.Run(port_pairs, None, options.host)
while True:
time.sleep(60)
except KeyboardInterrupt:
sys.exit(0)
finally:
forwarder_instance.Close()
if __name__ == '__main__':
main(sys.argv)
|
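One portability note on the forwarder record above: the port parsing assumes Python 2, where map() returns a list. On Python 3, map() yields an iterator, so the slices port_pairs[::2] and port_pairs[1::2] would raise TypeError. A minimal version-agnostic sketch of the same parsing (parse_port_pairs is an illustrative name, not part of the Chromium tree):

def parse_port_pairs(port_args):
    # Materialize the ints in a list so the pair-building slices
    # work under both Python 2 and Python 3.
    ports = [int(arg) for arg in port_args]
    if not ports or len(ports) % 2:
        raise ValueError('Need an even, non-zero number of ports')
    return list(zip(ports[::2], ports[1::2]))

# e.g. parse_port_pairs(['8080', '80', '8443', '443']) -> [(8080, 80), (8443, 443)]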