text stringlengths 4 1.02M | meta dict |
|---|---|
from django.contrib import admin
from .models import ConceptNode
import reversion
class ConceptNodeAdmin(reversion.VersionAdmin):
    """Admin options for ConceptNode with django-reversion history tracking."""


admin.site.register(ConceptNode, ConceptNodeAdmin)
| {
"content_hash": "e891f8d918cfee040f0a4d3495621d51",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 50,
"avg_line_length": 17.727272727272727,
"alnum_prop": 0.8205128205128205,
"repo_name": "kevincwebb/conceptum",
"id": "437bb3a261cf2c6c0556cdcaca0f2c364377722b",
"size": "195",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "conceptum/nodemanager/admin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "6337"
},
{
"name": "HTML",
"bytes": "148901"
},
{
"name": "JavaScript",
"bytes": "396"
},
{
"name": "Python",
"bytes": "498126"
},
{
"name": "Shell",
"bytes": "753"
}
],
"symlink_target": ""
} |
import logging
from django.conf import settings
from django.contrib.auth.models import Group
from django.core.mail import send_mail
from django.db import transaction
from django.db.models.signals import post_save, post_delete
from guardian.shortcuts import assign
from .models import Cohort, Project, Batch, Sample
# Name template for the per-project permission group that is created and
# removed alongside each Project.
PROJECT_GROUP_TEMPLATE = '{0} Project Team'
# When true, a batch is automatically published/unpublished based on whether
# it contains at least one published sample.
AUTO_PUBLISH_BATCH = getattr(settings, 'VARIFY_AUTO_PUBLISH_BATCH', True)
log = logging.getLogger(__name__)
@transaction.commit_on_success
def update_sample_for_autocreated_cohorts(instance, created, **kwargs):
    """Manages adding/removing samples from autocreated cohorts.

    Sample post-save handler. Keeps the autocreated 'World' cohort and the
    sample's per-project cohort in sync with the sample's `published` flag:
    published samples are added to both cohorts, unpublished samples are
    removed from both.
    """
    # World
    lookup = {'batch': None, 'project': None, 'autocreated': True,
              'name': 'World'}
    try:
        world_cohort = Cohort.objects.get(**lookup)
    except Cohort.DoesNotExist:
        # Lazily create the World cohort unless the setting disallows it.
        if getattr(settings, 'VARIFY_AUTO_CREATE_COHORT', True):
            world_cohort = Cohort(**lookup)
            world_cohort.save()
        else:
            log.info("World cohort was not found and was not created because "
                     "VARIFY_AUTO_CREATE_COHORT setting is False.")
            # NOTE(review): returning here also skips maintenance of the
            # project cohort below -- confirm that is intended.
            return
    project = instance.project
    # Project
    lookup = {'batch': None, 'project': project, 'autocreated': True}
    try:
        project_cohort = Cohort.objects.get(**lookup)
    except Cohort.DoesNotExist:
        # Python 2 `unicode`: the cohort is named after the project.
        project_cohort = Cohort(name=unicode(project), **lookup)
        project_cohort.save()
    if instance.published:
        world_cohort.add(instance, added=False)
        project_cohort.add(instance, added=False)
    else:
        world_cohort.remove(instance, delete=True)
        project_cohort.remove(instance, delete=True)
@transaction.commit_on_success
def auto_delete_cohort(instance, **kwargs):
    """Delete the auto-created cohort(s) associated with `instance`.

    post_delete handler for Project and Batch; any other sender type is
    silently ignored.
    """
    if isinstance(instance, Project):
        field = 'project'
    elif isinstance(instance, Batch):
        field = 'batch'
    else:
        return
    doomed = Cohort.objects.filter(autocreated=True, **{field: instance})
    num_deleted = doomed.count()
    doomed.delete()
    log.info('Delete {0} autocreated cohorts for {1}'.format(num_deleted,
                                                             instance))
def auto_create_project_group(instance, created, **kwargs):
    """Project post-save handler that creates a permission group.

    Ensures a '<project> Project Team' group exists, grants it the
    'view_project' permission on the project, and emails support as a
    reminder to set up user permissions for the new group.
    """
    name = PROJECT_GROUP_TEMPLATE.format(instance.name)
    # Renamed from `created` so it does not shadow the signal's `created`
    # parameter -- this flag reflects whether the *group* is new.
    group, group_created = Group.objects.get_or_create(name=name)
    # TODO change this to queue up an email rather than doing it in process..
    if group_created:
        assign('view_project', group, instance)
        try:
            # Renamed from `kwargs` so it does not shadow the handler's
            # own **kwargs parameter.
            mail_kwargs = {
                'subject': '{0}Project "{1}" Created'.format(
                    settings.EMAIL_SUBJECT_PREFIX, name),
                'message': 'The "{0}" Project Group has been created. This is '
                           'a reminder to setup any permissions for the '
                           'associated users.'.format(name),
                'from_email': settings.NO_REPLY_EMAIL,
                'recipient_list': [settings.SUPPORT_EMAIL],
            }
            # Since we aren't passing the fail_silently kwarg to the send_mail
            # method, it will throw any errors back in our face so we catch
            # them here and log them rather than letting it propagate.
            send_mail(**mail_kwargs)
        except Exception:
            log.exception("Error sending project group notification email")
        log.info('Autocreate project group {0}'.format(group))
def auto_delete_project_group(instance, **kwargs):
    """Remove the permission group that was auto-created for `instance`."""
    group_name = PROJECT_GROUP_TEMPLATE.format(instance.name)
    Group.objects.filter(name=group_name).delete()
    log.info('Delete autocreated project group {0}'.format(group_name))
def update_batch_count(instance, **kwargs):
    """Sample post-save handler to update the sample's batch count.

    Batches are unpublished by default (to prevent publishing empty batches).
    If the `AUTO_PUBLISH_BATCH` setting is true, the batch will be published
    automatically when at least one published sample is in the batch.
    """
    batch = instance.batch
    # Number of *published* samples currently in this batch.
    count = batch.samples.filter(published=True).count()
    # Only hit the database when the denormalized count is stale; the
    # published flag is likewise only refreshed under this same condition.
    if count != batch.count:
        batch.count = count
        if AUTO_PUBLISH_BATCH:
            batch.published = bool(count)
        batch.save()
# Signal wiring: keep each sample's auto-created cohorts and its batch count
# in sync, create per-project permission groups, and clean up auto-created
# cohorts when their parent Batch/Project is deleted.
post_save.connect(update_sample_for_autocreated_cohorts, sender=Sample)
post_save.connect(update_batch_count, sender=Sample)
post_save.connect(auto_create_project_group, sender=Project)
# NOTE(review): auto_delete_project_group is defined above but never
# connected -- confirm whether Project post_delete should also use it.
post_delete.connect(auto_delete_cohort, sender=Batch)
post_delete.connect(auto_delete_cohort, sender=Project)
| {
"content_hash": "a8b857b33a48c1577393352be3dcfb97",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 79,
"avg_line_length": 36.68,
"alnum_prop": 0.6649945474372956,
"repo_name": "chop-dbhi/varify-data-warehouse",
"id": "5d87fe640f6a1f3c1c285671b2039e1d4af32d38",
"size": "4585",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vdw/samples/receivers.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Puppet",
"bytes": "14864"
},
{
"name": "Python",
"bytes": "1796480"
},
{
"name": "Shell",
"bytes": "37"
}
],
"symlink_target": ""
} |
"""
FILE: php.py
AUTHOR: Cody Precord
@summary: Lexer configuration module for PHP.
"""
__author__ = "Cody Precord <cprecord@editra.org>"
__svnid__ = "$Id: _php.py 68798 2011-08-20 17:17:05Z CJP $"
__revision__ = "$Revision: 68798 $"
#-----------------------------------------------------------------------------#
# Imports
import wx.stc as stc
# Local Imports
import synglob
import syndata
import _html
from _cpp import AutoIndenter
#-----------------------------------------------------------------------------#
#---- Keyword Specifications ----#
# PHP Keywords (language keywords plus language constructs). This is a
# single space-separated word list consumed by the Scintilla HTML lexer as
# keyword set 4; it is data, not code, so entries are not sorted.
PHP_KEYWORDS = ("__LINE__ __FILE__ __FUNCTION__ __CLASS__ __METHOD__ declare "
                "else enddeclare endswitch elseif endif if switch as do endfor "
                "endforeach endwhile for foreach while case default switch "
                "break continue var bool boolean int integer real "
                "double float string array NULL extends global static "
                "new true false function "
                "class object self final public private protected try catch "
                "throw abstract parent interface implements "
                # Language Constructs
                "die echo empty exit eval include include_once isset list "
                "require require_once return print unset")
# PHP Standard Functions/Methods
# (roughly based off of PHP Pocket Reference by O'Reilly)
# NOTE: like PHP_KEYWORDS this is a single space-separated word list handed
# to the lexer; it is data, so entries are neither sorted nor de-duplicated.
PHP_FUNC = ("__construct __autoload __destruct __get __set __isset __unset "
            "__call __sleep __wakeup __toString __set_state __clone "
            "apache_child_terminate apache_lookup_uri apache_note "
            "apache_request_headers apache_response_headers apache_setenv "
            "ascii2ebcdic ebcdic2ascii getallheaders virtual jewishtojd "
            "array_change_key_case array_chunk array_count_values "
            "array_diff_assoc array_diff array_fill array_filter array_flip "
            "array_intersect_assoc array_intersect array_key_exists array_keys "
            "array_map array_merge_recursive array_merge array_multisort "
            "array_pad array_pop array_push array_rand array_reduce array "
            "array_reverse array_search array_shift array_slice array_splice "
            "array_sum array_unique array_unshift array_values array_walk "
            "arsort asort compact count current each end extract in_array key "
            "krsort ksort natcasesort natsort next pos prev range reset "
            "rsort shuffle sizeof sort uasort uksort usort aspell_check "
            "aspell_new aspell_suggest bcadd bccomp bcdiv bcmod bcmul bcpow "
            "bcpowmod bcscale bcsqrt bcsub bzclose bzcompress bzdecompress "
            "bzerrno bzerror bzerrstr bzflush bzopen bzread bzwrite "
            "cal_days_in_month cal_from_jd cal_info cal_to_jd easter_date "
            "easter_days frenchtojd gregoriantojd jddayofweek jdmonthname "
            "jdtofrench jdtogregorian jdtojewish jdtojulian jdtounix "
            "juliantojd unixtojd ccvs_add ccvs_auth ccvs_command ccvs_count "
            "ccvs_delete ccvs_done ccvs_init ccvs_lookup ccvs_new ccvs_report "
            "ccvs_return ccvs_reverse ccvs_sale ccvs_status ccvs_textvalue "
            "ccvs_void call_user_method_array call_user_method class_exists "
            "get_class_methods get_class_vars get_class get_declared_classes "
            "get_object_vars get_parent_class is_a is_subclass_of com_load "
            "com_addref com_get com_invoke com_isenum com_load_typelib "
            "com_propget com_propput com_propset com_release com_set "
            "cpdf_add_annotation cpdf_add_outline cpdf_arc cpdf_begin_text "
            "cpdf_circle cpdf_clip cpdf_close cpdf_closepath_fill_stroke "
            "cpdf_closepath_stroke cpdf_closepath cpdf_continue_text "
            "cpdf_end_text cpdf_fill_stroke cpdf_fill cpdf_finalize_page "
            "cpdf_finalize cpdf_global_set_document_limits cpdf_import_jpeg "
            "cpdf_lineto cpdf_moveto cpdf_newpath cpdf_open cpdf_output_buffer "
            "cpdf_page_init cpdf_place_inline_image cpdf_rect cpdf_restore "
            "cpdf_rlineto cpdf_rmoveto cpdf_rotate_text cpdf_rotate "
            "cpdf_save_to_file cpdf_save cpdf_scale cpdf_set_action_url "
            "cpdf_set_char_spacing cpdf_set_creator cpdf_set_current_page "
            "cpdf_set_font_directories cpdf_set_font_map_file cpdf_set_font "
            "cpdf_set_horiz_scaling cpdf_set_keywords cpdf_set_leading "
            "cpdf_set_page_animation cpdf_set_subject cpdf_set_text_matrix "
            "cpdf_set_text_pos cpdf_set_text_rendering cpdf_set_text_rise "
            "cpdf_set_title cpdf_set_viewer_preferences cpdf_set_word_spacing "
            "cpdf_setdash cpdf_setflat cpdf_setgray_fill cpdf_setgray_stroke "
            "cpdf_setgray cpdf_setlinecap cpdf_setlinejoin cpdf_setlinewidth "
            "cpdf_setmiterlimit cpdf_setrgbcolor_fill cpdf_setrgbcolor_stroke "
            "cpdf_setrgbcolor cpdf_show_xy cpdf_show cpdf_stringwidth "
            "cpdf_text cpdf_translate crack_check crack_closedict cpdf_curveto "
            "crack_getlastmessage crack_opendict ctype_alnum ctype_alpha "
            "ctype_cntrl ctype_digit ctype_graph ctype_lower ctype_print "
            "ctype_punct ctype_space ctype_upper ctype_xdigit curl_close "
            "curl_errno curl_error curl_exec curl_getinfo curl_init chgrp "
            "curl_version checkdate date getdate gettimeofday gmdate gmmktime "
            "gmstrftime localtime microtime mktime strftime strtotime time "
            "dba_close dba_delete dba_exists dba_fetch dba_firstkey filetype "
            "dba_insert dba_list dba_nextkey dba_open dba_optimize dba_popen "
            "dba_replace dba_sync dbase_add_record dbase_close dbase_create "
            "dbase_delete_record dbase_get_record_with_names dbase_get_record "
            "dbase_numfields dbase_numrecords dbase_open dbase_pack filectime "
            "dbase_replace_record dblist dbmclose dbmdelete dbmexists dbmfetch "
            "dbmfirstkey dbminsert dbmnextkey dbmopen dbmreplace basename "
            "chmod chown clearstatcache copy delete dirname disk_free_space "
            "disk_total_space diskfreespace fclose feof fflush fgetc fgetcsv "
            "fgets fgetss file_exists file_get_contents file fileatime ftell "
            "filegroup fileinode filemtime fileowner fileperms filesize popen "
            "flock fnmatch fopen fpassthru fputs fread fscanf fseek fstat stat "
            "ftruncate fwrite glob is_dir is_executable is_file is_link "
            "is_readable is_uploaded_file is_writable is_writeable link "
            "lstat mkdir move_uploaded_file parse_ini_file pathinfo pclose "
            "readfile readlink realpath rename rewind rmdir set_file_buffer "
            "symlink tempnam tmpfile touch umask unlink ftp_cdup ftp_chdir "
            "ftp_close ftp_connect ftp_delete ftp_exec ftp_fget ftp_fput "
            "ftp_get_option ftp_get ftp_login ftp_mdtm ftp_mkdir textdomain "
            "ftp_nb_fget ftp_nb_fput ftp_nb_get ftp_nb_put ftp_nlist ftp_pasv "
            "ftp_put ftp_pwd ftp_quit ftp_rawlist ftp_rename ftp_rmdir checkin "
            "ftp_set_option ftp_site ftp_size ftp_ssl_connect ftp_systype "
            "call_user_func_array call_user_func create_function func_get_arg "
            "func_get_args func_num_args function_exists get_defined_functions "
            "register_shutdown_function register_tick_function method_exists "
            "unregister_tick_function bind_textdomain_codeset bindtextdomain "
            "dcgettext dcngettext dgettext dngettext gettext ngettext "
            "gmp_abs gmp_add gmp_and gmp_clrbit gmp_cmp gmp_com gmp_div_q "
            "gmp_div_qr gmp_div_r gmp_div gmp_divexact gmp_fact gmp_gcd "
            "gmp_hamdist gmp_init gmp_intval gmp_invert gmp_jacobi gmp_gcdext "
            "gmp_mod gmp_mul gmp_neg gmp_or gmp_perfect_square gmp_popcount "
            "gmp_pow gmp_powm gmp_prob_prime gmp_random gmp_scan0 gmp_scan1 "
            "gmp_setbit gmp_sign gmp_sqrt gmp_sqrtrm gmp_strval gmp_sub "
            "header headers_sent setcookie hw_api_attribute hwapi_hgcsp "
            "hw_api_content hw_api_object key langdepvalue value values insert "
            "checkout children mimetype read content copy dbstat dcstat "
            "dstofsrcanchors count reason find ftstat hwstat identify info "
            "insertanchor insertcollection insertdocument link lock move "
            "attreditable count insert remove title value object dstanchors "
            "parents description type remove replace setcommitedversion assign "
            "srcanchors srcsofdst unlock user userlist iconv_get_encoding "
            "iconv_set_encoding iconv ob_iconv_handler exif_imagetype gmp_xor "
            "exif_read_data exif_thumbnail gd_info getimagesize cpdf_stroke "
            "image_type_to_mime_type image2wbmp imagealphablending imagearc "
            "imagechar imagecharup imagecolorallocate imagecolorallocatealpha "
            "imagecolorat imagecolorclosest imagecolorclosestalpha curl_setopt "
            "imagecolorclosesthwb imagecolordeallocate imagecolorexact "
            "imagecolorexactalpha imagecolorresolve imagecolorresolvealpha "
            "imagecolorset imagecolorsforindex imagecolorstotal dba_handlers "
            "imagecolortransparent imagecopy imagecopymerge imagecopymergegray "
            "imagecopyresampled imagecopyresized imagecreate objectbyanchor "
            "imagecreatefromgd2part imagecreatefromgd imagecreatefromgif "
            "imagecreatefromjpeg imagecreatefrompng imagecreatefromstring "
            "imagecreatefromwbmp imagecreatefromxbm imagecreatefromxpm "
            "imagecreatetruecolor imagedashedline imagedestroy imageellipse "
            "imagefill imagefilledarc imagefilledellipse imagefilledpolygon "
            "imagefilledrectangle imagefilltoborder imagefontheight "
            "imageftbbox imagefttext imagegammacorrect imagegd2 imagegd "
            "imageinterlace imagejpeg imageline imageloadfont imagepalettecopy "
            "imagepng imagepolygon imagepsbbox imagepscopyfont imagefontwidth "
            "imagepsextendfont imagepsfreefont imagepsloadfont gmp_legendre "
            "imagepstext imagerectangle imagerotate imagesetbrush imagegif "
            "imagesetstyle imagesetthickness imagesettile imagestring "
            "imagestringup imagesx imagesy imagetruecolortopalette "
            "imagettftext imagetypes imagewbmp iptcembed iptcparse jpeg2wbmp "
            "png2wbmp read_exif_data imap_8bit imap_alerts imap_append "
            "imap_binary imap_body imap_bodystruct imap_check imap_base64 "
            "imap_close imap_createmailbox imap_delete imap_deletemailbox "
            "imap_errors imap_expunge imap_fetch_overview imap_fetchbody "
            "imap_fetchheader imap_fetchstructure imap_get_quota imagettfbbox "
            "imap_getmailboxes imap_getsubscribed imap_header imap_headerinfo "
            "imap_headers imap_last_error imap_list imap_listmailbox "
            "imap_listsubscribed imap_lsub imap_mail_compose imap_mail_copy "
            "imap_mail_move imap_mail imap_mailboxmsginfo imap_listscan "
            "imap_msgno imap_num_msg imap_num_recent imap_open imap_ping "
            "imap_renamemailbox imap_reopen imap_rfc822_parse_adrlist linkinfo "
            "imap_rfc822_parse_headers imap_rfc822_write_address imap_qprint "
            "imap_search imap_set_quota imap_setacl imap_setflag_full "
            "imap_status imap_subscribe imap_thread imap_uid imap_undelete "
            "imap_unsubscribe imap_utf7_decode imap_utf7_encode imap_utf8 "
            "assert_options assert dl extension_loaded get_cfg_var imap_sort "
            "get_defined_constants get_extension_funcs get_included_files "
            "get_loaded_extensions get_magic_quotes_gpc get_current_user "
            "get_required_files getenv getlastmod getmygid getmyinode getmypid "
            "getmyuid getopt getrusage ini_alter ini_get_all ini_get "
            "ini_set php_ini_scanned_files php_logo_guid php_sapi_name "
            "phpcredits phpinfo phpversion putenv set_magic_quotes_runtime "
            "set_time_limit version_compare zend_logo_guid zend_version "
            "ldap_8859_to_t61 ldap_add ldap_bind ldap_close ldap_compare "
            "ldap_connect ldap_count_entries ldap_delete ldap_dn2ufn php_uname "
            "ldap_errno ldap_error ldap_explode_dn ldap_first_attribute "
            "ldap_first_entry ldap_first_reference ldap_free_result "
            "ldap_get_attributes ldap_get_dn ldap_get_entries ldap_get_option "
            "ldap_get_values_len ldap_get_values ldap_list ldap_mod_add "
            "ldap_mod_del ldap_mod_replace ldap_modify ldap_next_attribute "
            "ldap_next_entry ldap_next_reference ldap_parse_reference hypot "
            "ldap_parse_result ldap_read ldap_rename ldap_search ldap_err2str "
            "ldap_set_option ldap_set_rebind_proc ldap_sort ldap_start_tls "
            "ldap_t61_to_8859 ldap_unbind ezmlm_hash mail abs acos acosh asin "
            "asinh atan2 atan atanh base_convert bindec ceil cos cosh decbin "
            "dechex decoct deg2rad exp expm1 floor fmod getrandmax hexdec "
            "is_finite is_infinite is_nan lcg_value log10 log1p log max min "
            "mt_getrandmax mt_rand mt_srand octdec pi pow rad2deg rand round "
            "sin sinh sqrt srand tan tanh mb_convert_case mb_convert_encoding "
            "mb_convert_kana mb_convert_variables mb_decode_mimeheader "
            "mb_decode_numericentity mb_detect_encoding mb_detect_order "
            "mb_encode_mimeheader mb_encode_numericentity mb_ereg_match "
            "mb_ereg_replace mb_ereg_search_getpos mb_ereg_search_getregs "
            "mb_ereg_search_init mb_ereg_search_pos mb_ereg_search_regs "
            "mb_ereg_search_setpos mb_ereg_search mb_ereg mb_eregi_replace "
            "mb_eregi mb_get_info mb_http_input mb_http_output ini_restore "
            "mb_internal_encoding mb_language mb_output_handler mb_parse_str "
            "mb_preferred_mime_name mb_regex_encoding mb_regex_set_options "
            "mb_send_mail mb_split mb_strcut mb_strimwidth mb_strlen mb_strpos "
            "mb_strrpos mb_strtolower mb_strtoupper mb_strwidth imagesetpixel "
            "mb_substitute_character mb_substr_count mb_substr mcrypt_cbc "
            "mcrypt_cfb mcrypt_create_iv mcrypt_decrypt mcrypt_ecb "
            "mcrypt_enc_get_algorithms_name mcrypt_enc_get_block_size "
            "mcrypt_enc_get_iv_size mcrypt_enc_get_key_size ftp_nb_continue "
            "mcrypt_enc_get_modes_name mcrypt_enc_get_supported_key_sizes "
            "mcrypt_enc_is_block_algorithm_mode mcrypt_enc_is_block_algorithm "
            "mcrypt_enc_is_block_mode mcrypt_enc_self_test mcrypt_encrypt "
            "mcrypt_generic_deinit mcrypt_generic_end mcrypt_generic_init "
            "mcrypt_generic mcrypt_get_block_size mcrypt_get_cipher_name "
            "mcrypt_get_iv_size mcrypt_get_key_size mcrypt_list_algorithms "
            "mcrypt_list_modes mcrypt_module_close imap_scanmailbox "
            "mcrypt_module_get_algo_key_size imap_get_quotaroot "
            "mcrypt_module_is_block_algorithm_mode imap_mime_header_decode "
            "mcrypt_module_is_block_mode mcrypt_module_open imagecreatefromgd2 "
            "mcrypt_ofb mdecrypt_generic mhash_count mhash_get_block_size "
            "mhash_get_hash_name mhash_keygen_s2k mhash mime_content_type "
            "connection_aborted connection_status connection_timeout constant "
            "defined get_browser highlight_file highlight_string "
            "ignore_user_abort pack show_source sleep uniqid unpack usleep "
            "msql_affected_rows msql_close msql_connect msql_create_db define "
            "msql_data_seek msql_dbname msql_drop_db msql_dropdb msql_error "
            "msql_fetch_array msql_fetch_field msql_fetch_object msql_createdb "
            "msql_field_seek msql_fieldflags msql_fieldlen msql_fieldname "
            "msql_fieldtable msql_fieldtype msql_free_result msql_freeresult "
            "msql_list_dbs msql_list_fields msql_list_tables msql_listdbs "
            "msql_listfields msql_listtables msql_num_fields msql_num_rows "
            "msql_numfields msql_numrows msql_pconnect msql_query msql_regcase "
            "msql_result msql_select_db msql_selectdb msql_tablename msql "
            "checkdnsrr closelog debugger_off debugger_on gethostbyaddr "
            "dns_check_record dns_get_mx dns_get_record fsockopen "
            "gethostbyname gethostbynamel getmxrr getprotobyname "
            "getservbyname getservbyport ip2long long2ip openlog pfsockopen "
            "socket_get_status socket_set_blocking socket_set_timeout syslog "
            "ocibindbyname ocicancel OCICollAppend ocicollassign "
            "ocicollgetelem ocicollmax ocicollsize ocicolltrim ocicolumnisnull "
            "ocicolumnname ocicolumnprecision ocicolumnscale ocicolumnsize "
            "ocicolumntype ocicolumntyperaw ocicommit ocidefinebyname ocierror "
            "ociexecute ocifetch ocifetchinto ocifetchstatement msql_fetch_row "
            "ocifreecursor OCIFreeDesc ocifreestatement ociinternaldebug "
            "ocilogoff ocilogon ocinewcollection ocinewcursor ocinewdescriptor "
            "ocinlogon ocinumcols ociparse ociplogon ociresult ocirollback "
            "ocirowcount ocisavelob ocisavelobfile ociserverversion ociloadlob "
            "ocisetprefetch ocistatementtype ociwritelobtofile flush ob_clean "
            "ob_end_clean ob_end_flush ob_flush ob_get_contents ob_get_length "
            "ob_get_level ob_get_status ob_gzhandler ob_implicit_flush "
            "overload pcntl_exec pcntl_fork pcntl_signal pcntl_waitpid "
            "pcntl_wexitstatus pcntl_wifexited pcntl_wifsignaled ob_start "
            "pcntl_wstopsig pcntl_wtermsig preg_grep preg_match_all preg_match "
            "preg_quote preg_replace_callback preg_replace preg_split "
            "pdf_add_annotation pdf_add_bookmark pdf_add_launchlink "
            "pdf_add_note pdf_add_outline pdf_add_pdflink pdf_add_thumbnail "
            "pdf_add_weblink pdf_arc pdf_arcn pdf_attach_file pdf_begin_page "
            "pdf_begin_pattern pdf_begin_template pdf_circle pdf_add_locallink "
            "pdf_close_pdi_page pdf_close_pdi pdf_close pcntl_wifstopped "
            "pdf_closepath_stroke pdf_closepath pdf_concat pdf_continue_text "
            "pdf_curveto pdf_delete pdf_end_page pdf_end_pattern "
            "pdf_endpath pdf_fill_stroke pdf_fill pdf_findfont pdf_get_buffer "
            "pdf_get_font pdf_get_fontname pdf_get_fontsize pdf_open_pdi_page "
            "pdf_get_image_width pdf_get_majorversion pdf_get_minorversion "
            "pdf_get_parameter pdf_get_pdi_parameter pdf_get_pdi_value "
            "pdf_initgraphics pdf_lineto pdf_makespotcolor pdf_moveto pdf_new "
            "pdf_open_CCITT pdf_open_file pdf_open_gif pdf_open_image_file "
            "pdf_open_image pdf_open_jpeg pdf_open_memory_image "
            "pdf_open_pdi pdf_open_png pdf_open_tiff pdf_open pdf_place_image "
            "pdf_place_pdi_page pdf_rect pdf_restore pdf_rotate pdf_get_value "
            "pdf_set_border_color pdf_set_border_dash pdf_set_border_style "
            "pdf_set_char_spacing pdf_set_duration pdf_set_font "
            "pdf_set_info_author pdf_set_info_creator pdf_set_info_keywords "
            "pdf_set_info_subject pdf_set_info_title pdf_set_info "
            "pdf_set_parameter pdf_set_text_matrix pdf_set_text_pos "
            "pdf_set_text_rendering pdf_set_text_rise pdf_set_value "
            "pdf_set_word_spacing pdf_setcolor pdf_setdash pdf_setflat "
            "pdf_setgray_fill pdf_setgray_stroke pdf_setgray pdf_setlinecap "
            "pdf_setlinejoin pdf_setlinewidth pdf_setmatrix pdf_setmiterlimit "
            "pdf_setpolydash pdf_setrgbcolor_fill pdf_setrgbcolor_stroke "
            "pdf_setrgbcolor pdf_show_boxed pdf_show_xy pdf_show pdf_skew "
            "pdf_stringwidth pdf_stroke pdf_translate pg_affected_rows "
            "pg_cancel_query pg_client_encoding pg_close pg_connect "
            "pg_connection_busy pg_connection_reset pg_connection_status "
            "pg_copy_from pg_copy_to pg_dbname pg_delete pg_end_copy "
            "pg_escape_string pg_fetch_all pg_fetch_array pg_fetch_assoc "
            "pg_fetch_object pg_fetch_result pg_fetch_row pg_field_is_null "
            "pg_field_name pg_field_num pg_field_prtlen pg_field_size "
            "pg_free_result pg_get_notify pg_get_pid pg_get_result pg_host "
            "pg_last_error pg_last_notice pg_last_oid pg_lo_close pg_lo_create "
            "pg_lo_export pg_lo_import pg_lo_open pg_lo_read_all pg_lo_read "
            "pg_lo_seek pg_lo_tell pg_lo_unlink pg_lo_write pg_meta_data "
            "pg_num_fields pg_num_rows pg_options pg_pconnect pg_ping pg_port "
            "pg_put_line pg_query pg_result_error pg_result_seek pg_field_type "
            "pg_select pg_send_query pg_set_client_encoding pg_trace pg_tty "
            "pg_unescape_bytea pg_untrace pg_update posix_ctermid posix_getcwd "
            "posix_getegid posix_geteuid posix_getgid posix_getgrgid pg_insert "
            "posix_getgroups posix_getlogin posix_getpgid posix_getpgrp "
            "posix_getppid posix_getpwnam posix_getpwuid posix_getrlimit "
            "posix_getuid posix_isatty posix_kill posix_mkfifo posix_setegid "
            "posix_seteuid posix_setgid posix_setpgid posix_setsid pdf_setfont "
            "posix_times posix_ttyname posix_uname pspell_add_to_personal "
            "pspell_add_to_session pspell_check pspell_clear_session "
            "pspell_config_create pspell_config_ignore pspell_config_mode "
            "pspell_config_personal pspell_config_repl posix_setuid "
            "pspell_config_save_repl pspell_new_config pspell_new_personal "
            "pspell_new pspell_save_wordlist pspell_store_replacement "
            "recode_file recode_string recode ereg_replace ereg eregi_replace "
            "split spliti sql_regcase ftok msg_get_queue msg_receive "
            "msg_send msg_set_queue msg_stat_queue sem_acquire sem_get "
            "sem_remove shm_attach shm_detach shm_get_var shm_put_var "
            "shm_remove session_cache_expire session_cache_limiter sem_release "
            "session_destroy session_encode session_get_cookie_params eregi "
            "session_is_registered session_module_name session_name session_id "
            "session_register session_save_path session_set_cookie_params "
            "session_set_save_handler session_start session_unregister "
            "session_write_close snmp_get_quick_print snmp_set_quick_print "
            "snmprealwalk snmpset snmpwalk snmpwalkoid socket_accept snmpget "
            "socket_clear_error socket_close socket_connect session_unset "
            "socket_create_pair socket_create socket_get_option socket_bind "
            "socket_getsockname socket_iovec_add socket_iovec_alloc "
            "socket_iovec_delete socket_iovec_fetch socket_iovec_free "
            "socket_iovec_set socket_last_error socket_listen socket_read "
            "socket_readv socket_recv socket_recvfrom socket_recvmsg "
            "socket_send socket_sendmsg socket_sendto socket_set_nonblock "
            "socket_set_option socket_shutdown socket_strerror socket_write "
            "socket_writev stream_context_create stream_context_get_options "
            "stream_context_set_option stream_context_set_params socket_select "
            "stream_filter_prepend stream_get_filters stream_get_meta_data "
            "stream_get_wrappers stream_register_filter stream_filter_append "
            "stream_select stream_set_blocking stream_set_timeout posix_getpid "
            "stream_set_write_buffer addcslashes addslashes bin2hex chop chr "
            "chunk_split convert_cyr_string count_chars crc32 crypt "
            "fprintf get_html_translation_table hebrev hebrevc explode "
            "htmlentities htmlspecialchars implode join levenshtein localeconv "
            "md5_file md5 metaphone money_format nl_langinfo nl2br pg_convert "
            "parse_str printf quoted_printable_decode quotemeta rtrim "
            "setlocale sha1_file sha1 similar_text soundex sprintf sscanf ord "
            "str_repeat str_replace str_rot13 str_shuffle str_word_count ltrim "
            "strchr strcmp strcoll strcspn strip_tags stripcslashes strcasecmp "
            "stristr strlen strnatcasecmp strnatcmp strncasecmp strncmp strpos "
            "strrchr strrev strrpos strspn strstr strtok strtolower strtoupper "
            "substr_count substr_replace substr trim ucfirst ucwords vprintf "
            "wordwrap base64_decode base64_encode get_meta_tags parse_url "
            "rawurldecode rawurlencode urldecode urlencode doubleval "
            "get_defined_vars get_resource_type gettype stripslashes str_pad "
            "intval is_array is_bool is_callable is_double is_float is_int "
            "is_long is_null is_numeric is_object is_real is_resource floatval "
            "is_string print_r serialize settype strval unserialize "
            "var_dump var_export utf8_decode utf8_encode xml_error_string "
            "xml_get_current_byte_index xml_get_current_column_number "
            "xml_get_current_line_number xml_get_error_code is_scalar vsprintf "
            "xml_parse xml_parser_create_ns xml_parser_create xml_parser_free "
            "xml_parser_get_option xml_parser_set_option number_format "
            "xml_set_default_handler xml_set_element_handler is_integer "
            "xml_set_end_namespace_decl_handler xml_parse_into_struct strtr "
            "xml_set_notation_decl_handler xml_set_object html_entity_decode "
            "xml_set_processing_instruction_handler stream_register_wrapper "
            "xml_set_unparsed_entity_decl_handler xslt_create xslt_errno "
            "xslt_free xslt_output_process xslt_set_base xslt_set_encoding "
            "xslt_set_error_handler xslt_set_log xslt_set_sax_handler "
            "xslt_set_sax_handlers xslt_set_scheme_handler socket_getpeername "
            "zip_close zip_entry_close zip_entry_compressedsize xslt_error "
            "zip_entry_compressionmethod zip_entry_filesize zip_entry_name "
            "zip_entry_open zip_entry_read zip_open zip_read session_decode "
            "get_magic_quotes_runtime xslt_set_scheme_handlers pspell_suggest "
            "xml_set_start_namespace_decl_handler import_request_variables "
            "xml_set_external_entity_ref_handler socket_create_listen "
            "xml_set_character_data_handler session_readonly shm_remove_var "
            "msg_remove_queue pspell_config_runtogether posix_getsid "
            "posix_getgrnam pg_result_status pg_escape_bytea pdf_set_leading "
            "pdf_set_horiz_scaling pdf_save pdf_scale pdf_get_image_height "
            "pdf_end_template pdf_closepath_fill_stroke ocicollassignelem "
            "pdf_clip pdf_close_image ocifreecollection getprotobynumber "
            "mcrypt_module_self_test define_syslog_variables "
            "mcrypt_module_get_supported_key_sizes imap_clearflag_full "
            "mcrypt_module_is_block_algorithm imagepsencodefont "
            "mcrypt_module_get_algo_block_size imagepsslantfont count ")
#---- Syntax Style Specs ----#
# Mapping of wx.stc PHP lexer style ids to Editra style tag names; these are
# appended to the HTML items in GetSyntaxSpec.
SYNTAX_ITEMS = [ (stc.STC_HPHP_DEFAULT, 'default_style'),
                 (stc.STC_HPHP_COMMENT, 'comment_style'),
                 (stc.STC_HPHP_COMMENTLINE, 'comment_style'),
                 (stc.STC_HPHP_COMPLEX_VARIABLE, 'pre_style'), # TODO: give this a dedicated style
                 (stc.STC_HPHP_HSTRING, 'string_style'),
                 (stc.STC_HPHP_HSTRING_VARIABLE, 'scalar_style'), # TODO: give this a dedicated style
                 (stc.STC_HPHP_NUMBER, 'number_style'),
                 (stc.STC_HPHP_OPERATOR, 'operator_style'),
                 (stc.STC_HPHP_SIMPLESTRING, 'string_style'),
                 (stc.STC_HPHP_VARIABLE, 'pre2_style'),
                 (stc.STC_HPHP_WORD, 'keyword_style') ]
#------------------------------------------------------------------------------#
class SyntaxData(syndata.SyntaxDataBase):
    """Syntax configuration data for PHP documents.

    PHP is highlighted through Scintilla's HTML lexer, so the embedded HTML
    syntax data is extended with the PHP keyword and style items defined in
    this module.
    """

    def __init__(self, langid):
        super(SyntaxData, self).__init__(langid)
        # PHP is handled as a sub-language of the HTML lexer.
        self.SetLexer(stc.STC_LEX_HTML)
        self.RegisterFeature(synglob.FEATURE_AUTOINDENT, AutoIndenter)

    def GetKeywords(self):
        """Return the keyword specifications (HTML sets plus PHP)."""
        # Start from the embedded-HTML keyword sets, then add the PHP
        # keywords as keyword set 4.
        spec = _html.SyntaxData(synglob.ID_LANG_HTML).GetKeywords()
        spec.append((4, PHP_KEYWORDS))
        return spec

    def GetSyntaxSpec(self):
        """Return the syntax specification (HTML items plus PHP items)."""
        return _html.SYNTAX_ITEMS + SYNTAX_ITEMS

    def GetProperties(self):
        """Return extra lexer properties to set (folding flags)."""
        return [_html.FOLD, _html.FLD_HTML]

    def GetCommentPattern(self):
        """Return the characters used to comment a block of code.

        @note: assuming pure php code for comment character(s)
        """
        return [u'//']
#---- Syntax Modules Internal Functions ----#
def KeywordString(option=0):
    """Return the requested keyword string.

    Only the PHP keyword list is available from this module; ``option`` is
    accepted for API compatibility with other syntax modules and ignored.
    """
    return PHP_KEYWORDS
#---- End Syntax Modules Internal Functions ----#
| {
"content_hash": "27fe1f192ed6844fdc3db10c0079eec0",
"timestamp": "",
"source": "github",
"line_count": 441,
"max_line_length": 80,
"avg_line_length": 68.28344671201815,
"alnum_prop": 0.6546674193869757,
"repo_name": "ktan2020/legacy-automation",
"id": "cb4f5721716d1420695cbd062dcab4b65b4f1a21",
"size": "30680",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "win/Lib/site-packages/wx-3.0-msw/wx/tools/Editra/src/syntax/_php.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "913"
},
{
"name": "Ada",
"bytes": "289"
},
{
"name": "Assembly",
"bytes": "687"
},
{
"name": "Boo",
"bytes": "540"
},
{
"name": "C",
"bytes": "40116"
},
{
"name": "C#",
"bytes": "474"
},
{
"name": "C++",
"bytes": "393"
},
{
"name": "CSS",
"bytes": "70883"
},
{
"name": "ColdFusion",
"bytes": "1012"
},
{
"name": "Common Lisp",
"bytes": "1034"
},
{
"name": "D",
"bytes": "1858"
},
{
"name": "Eiffel",
"bytes": "426"
},
{
"name": "Erlang",
"bytes": "9243"
},
{
"name": "FORTRAN",
"bytes": "1810"
},
{
"name": "Forth",
"bytes": "182"
},
{
"name": "Groovy",
"bytes": "2366"
},
{
"name": "Haskell",
"bytes": "816"
},
{
"name": "Haxe",
"bytes": "455"
},
{
"name": "Java",
"bytes": "1155"
},
{
"name": "JavaScript",
"bytes": "69444"
},
{
"name": "Lua",
"bytes": "795"
},
{
"name": "Matlab",
"bytes": "1278"
},
{
"name": "OCaml",
"bytes": "350"
},
{
"name": "Objective-C++",
"bytes": "885"
},
{
"name": "PHP",
"bytes": "1411"
},
{
"name": "Pascal",
"bytes": "388"
},
{
"name": "Perl",
"bytes": "252651"
},
{
"name": "Pike",
"bytes": "589"
},
{
"name": "Python",
"bytes": "42085780"
},
{
"name": "R",
"bytes": "1156"
},
{
"name": "Ruby",
"bytes": "480"
},
{
"name": "Scheme",
"bytes": "282"
},
{
"name": "Shell",
"bytes": "30518"
},
{
"name": "Smalltalk",
"bytes": "926"
},
{
"name": "Squirrel",
"bytes": "697"
},
{
"name": "Stata",
"bytes": "302"
},
{
"name": "SystemVerilog",
"bytes": "3145"
},
{
"name": "Tcl",
"bytes": "1039"
},
{
"name": "TeX",
"bytes": "1746"
},
{
"name": "VHDL",
"bytes": "985"
},
{
"name": "Vala",
"bytes": "664"
},
{
"name": "Verilog",
"bytes": "439"
},
{
"name": "Visual Basic",
"bytes": "2142"
},
{
"name": "XSLT",
"bytes": "152770"
},
{
"name": "ooc",
"bytes": "890"
},
{
"name": "xBase",
"bytes": "769"
}
],
"symlink_target": ""
} |
import mysql.connector
from model.group import Group
from model.contact import Contact
import re
class DbFixture:
    """Read-only helpers for querying the addressbook MySQL database in tests.

    Improvements over the previous revision: local variables no longer shadow
    the builtin ``list``, and commented-out debug prints were removed.
    """

    def __init__(self, host, name, user, password):
        self.host = host
        self.name = name
        self.user = user
        self.password = password
        self.connection = mysql.connector.connect(host=host, database=name, user=user, password=password)
        # Autocommit so reads always observe the application's latest writes.
        self.connection.autocommit = True

    def get_groups_list(self):
        """Return every group as a Group object (ids converted to str)."""
        groups = []
        cursor = self.connection.cursor()
        try:
            cursor.execute("select group_id, group_name, group_header, group_footer from group_list")
            for (group_id, group_name, header, footer) in cursor:
                groups.append(Group(id=str(group_id), name=group_name, header=header, footer=footer))
        finally:
            cursor.close()
        return groups

    def get_contacts_list(self):
        """Return all non-deleted contacts with only id/firstname/lastname set."""
        contacts = []
        cursor = self.connection.cursor()
        try:
            cursor.execute("select id, firstname, lastname from addressbook where deprecated='0000-00-00 00:00:00'")
            for (contact_id, firstname, lastname) in cursor:
                contacts.append(Contact(id=str(contact_id), firstname=firstname, lastname=lastname))
        finally:
            cursor.close()
        return contacts

    def get_groups_with_contacts(self):
        """Return Group stubs (id only) for groups that contain contacts.

        NOTE(review): the query does not de-duplicate group ids, so a group
        holding several contacts appears several times — confirm callers
        expect that.
        """
        groups = []
        cursor = self.connection.cursor()
        try:
            cursor.execute("select group_id from address_in_groups")
            for (group_id,) in cursor:
                groups.append(Group(id=str(group_id)))
        finally:
            cursor.close()
        return groups

    def get_contacts_list_like_homepage(self):
        """Return contacts with phones/e-mails merged the way the home page shows them."""
        contacts = []
        cursor = self.connection.cursor()
        try:
            cursor.execute(
                "select id, firstname, lastname, address, home, mobile, work, phone2, email, email2, email3 from addressbook where deprecated='0000-00-00 00:00:00'")
            for (contact_id, firstname, lastname, address, home, mobile, work, phone2, email, email2, email3) in cursor:
                # Normalize every present value with clear() and drop empties,
                # mirroring the concatenated multi-line home page fields.
                cleaned_phones = [clear(p) for p in (home, mobile, work, phone2) if p is not None]
                all_phones_from_home_page = "\n".join(p for p in cleaned_phones if p != "")
                cleaned_emails = [clear(e) for e in (email, email2, email3) if e is not None]
                all_emails_from_home_page = "\n".join(e for e in cleaned_emails if e != "")
                contacts.append(Contact(id=str(contact_id), firstname=firstname, lastname=lastname, address=address,
                                        all_emails_from_home_page=all_emails_from_home_page,
                                        all_phones_from_home_page=all_phones_from_home_page))
        finally:
            cursor.close()
        return contacts

    def destroy(self):
        """Close the underlying database connection."""
        self.connection.close()
def clear(s):
    # Strip parentheses, spaces and dashes so phone/e-mail strings compare
    # equal to the normalized form shown on the application's home page.
    return re.sub("[() -]", "", s) | {
"content_hash": "cf73e577b19e14a3f17718fcb43465b9",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 165,
"avg_line_length": 40.51764705882353,
"alnum_prop": 0.502903600464576,
"repo_name": "Falschberg/python_testing_davydiuk",
"id": "b3c80a97c3e75188964aa4d4357adad2ffec6bfc",
"size": "3468",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fixture/db.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "54601"
}
],
"symlink_target": ""
} |
from maths import Arithmetic, Number
from search import *
# Smoke-test the project's Arithmetic wrapper: each operation takes Number
# operands and the result exposes its raw value via `.value`.
print("Do some arithmetic")
print("2 + 3 = %d" % Arithmetic().Add(Number(2), Number(3)).value)
print("8 - 3 = %d" % Arithmetic().Subtract(Number(8), Number(3)).value)
print("4 * 5 = %d" % Arithmetic().Multiply(Number(4), Number(5)).value)
print("16 / 4 = %d" % Arithmetic().Divide(Number(16), Number(4)).value)
print("Equality Tests")
print("2 == 2 is %s" % Arithmetic().Equals(Number(2), Number(2)))
print("1 == 2 is %s" % Arithmetic().Equals(Number(1), Number(2)))
def print_graph_details(algorithm):
    """Dump the algorithm's vertices and adjacency data to stdout."""
    print("Vertices: ")
    for node_id in algorithm.nodes:
        vertex = algorithm.nodes[node_id]
        print("{}: {} ({},{})".format(vertex.id, vertex.name, vertex.position.lat, vertex.position.lon))
    print("Edges: ")
    for node_id in algorithm.graph:
        edges = algorithm.graph[node_id]
        print("[{}] {} ".format(algorithm.nodes[node_id].id, algorithm.nodes[node_id].name), edges)
import time

# Random graphs are not guaranteed to connect start to finish; retry with a
# fresh graph until every algorithm finds a path.
try_again = True
while try_again:
    try_again = False
    algorithm = Backtrack()
    print_graph_details(algorithm)
    start = list(algorithm.nodes)[0]
    finish = list(algorithm.nodes)[-1]
    try:
        # Bug fix: time.clock() was deprecated since 3.3 and removed in
        # Python 3.8; perf_counter() is the recommended benchmark timer.
        start_time = time.perf_counter()
        print("Solution via Backtracking: {result} [{elapsed}]".format(result = algorithm.search(start, finish), elapsed = time.perf_counter() - start_time))
        algorithm = Dijkstra(algorithm.nodes, algorithm.graph)
        start_time = time.perf_counter()
        print("Solution via Dijkstra: {result} [{elapsed}]".format(result = algorithm.search(start, finish)[1], elapsed = time.perf_counter() - start_time))
        algorithm = AStar(algorithm.nodes, algorithm.graph)
        start_time = time.perf_counter()
        print("Solution via A*: {result} [{elapsed}]".format(result = algorithm.search(start, finish), elapsed = time.perf_counter() - start_time))
    except SearchError:
        print("No solution to random data set. Retrying")
        try_again = True

# Bug fix: the original compared strings with `is`/`is not`, which tests
# identity, not value; compare by value instead.
var = input("Run Search Speed Test: [y/n]")
while var != 'y' and var != 'n':
    var = input("Run Search Speed Test: [y/n]")

nodes = 100
while nodes < 1000 and var == 'y':
    print("Searching a Graph with {} nodes".format(nodes))
    algorithm = Backtrack()
    # Thin out edges as the graph grows so the edge count stays manageable.
    connectivity = 1 / (2 + math.sqrt((nodes - 100)))
    algorithm.generate_graph(nodes, connectivity)
    print("connectivity ", connectivity)
    start = list(algorithm.nodes)[0]
    finish = list(algorithm.nodes)[-1]
    print("Start: {}, Finish: {}".format(algorithm.nodes[start], algorithm.nodes[finish]))
    start_time = time.perf_counter()
    print("Solution via Backtracking: {result} [{elapsed}]".format(result = algorithm.search(start, finish), elapsed = time.perf_counter() - start_time))
    algorithm = Dijkstra(algorithm.nodes, algorithm.graph)
    start_time = time.perf_counter()
    print("Solution via Dijkstra: {result} [{elapsed}]".format(result = algorithm.search(start, finish)[1], elapsed = time.perf_counter() - start_time))
    start_time = time.perf_counter()
    algorithm = AStar(algorithm.nodes, algorithm.graph)
    print("Solution via A*: {result} [{elapsed}]".format(result = algorithm.search(start, finish), elapsed = time.perf_counter() - start_time))
    nodes = math.floor(nodes * 1.1)

# Solve one shared 100-node graph with each algorithm and export the paths.
algorithm = AStar()
algorithm.generate_graph(100, .1)
start = list(algorithm.nodes)[0]
finish = list(algorithm.nodes)[-1]
path_set = {}
path_nodes = []
## Solve
path = algorithm.search(start, finish)
for v in path:
    path_nodes.append(algorithm.nodes[v])
## Add Result
path_set["AStar"] = path_nodes
## Solve
algorithm = Dijkstra(algorithm.nodes, algorithm.graph)
path = algorithm.search(start, finish)[1]
#Add result
path_nodes = []
for v in path:
    path_nodes.append(algorithm.nodes[v])
path_set["Dijkstra"] = path_nodes
## Solve
algorithm = Backtrack(algorithm.nodes, algorithm.graph)
path = algorithm.search(start, finish)
#Add result
path_nodes = []
for v in path:
    path_nodes.append(algorithm.nodes[v])
#path_set["Backtrack"] = path_nodes
output = CSVFile('search.csv')
output.write(list(algorithm.nodes.values()), path_set)
output = KMLFile('search.kml')
output.write(list(algorithm.nodes.values()), path_set)
#print("Get some projects from Github")
#from github_demo import github_list_repo
| {
"content_hash": "319c3fb486e47ad4aa88bcf31f801737",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 146,
"avg_line_length": 32.198412698412696,
"alnum_prop": 0.6837564702982499,
"repo_name": "kindasimple/play",
"id": "2a95cc3228a0aee8d641fe4b5a7b798ac29cfaba",
"size": "4076",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/demo.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "0"
},
{
"name": "JavaScript",
"bytes": "1052"
},
{
"name": "Perl",
"bytes": "311"
},
{
"name": "Python",
"bytes": "1587121"
},
{
"name": "R",
"bytes": "6217"
},
{
"name": "SQL",
"bytes": "233"
},
{
"name": "Scala",
"bytes": "3222"
},
{
"name": "Shell",
"bytes": "2632"
}
],
"symlink_target": ""
} |
from sure import expect
from tests.integration import VCRIntegrationTest, vcr
class TestTimer(VCRIntegrationTest):
    """Integration test for simpleflow timers, replayed from a VCR cassette."""
    @vcr.use_cassette
    def test_timer_and_cancel(self):
        # Run the workflow standalone and inspect the emitted event history.
        events = self.run_standalone("tests.integration.workflow.TimerWorkflow")
        # Three timers start: "timer 2", "timer 1" and the internal wake-up timer.
        list_timer_started = [e for e in events if e["eventType"] == "TimerStarted"]
        expect(len(list_timer_started)).to.equal(3)
        expect(
            [e["timerStartedEventAttributes"]["timerId"] for e in list_timer_started]
        ).to.equal(["timer 2", "timer 1", "_simpleflow_wake_up_timer"])
        # Two timers fire; "timer 1" fires first.
        list_timer_fired = [e for e in events if e["eventType"] == "TimerFired"]
        expect(len(list_timer_fired)).to.equal(2)
        expect(list_timer_fired[0]["timerFiredEventAttributes"]["timerId"]).to.equal(
            "timer 1"
        )
        # Exactly one timer is canceled: "timer 2".
        list_timer_canceled = [e for e in events if e["eventType"] == "TimerCanceled"]
        expect(len(list_timer_canceled)).to.equal(1)
        expect(
            list_timer_canceled[0]["timerCanceledEventAttributes"]["timerId"]
        ).to.equal("timer 2")
| {
"content_hash": "35169ceb6abcf0d5510e6f580e09393c",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 86,
"avg_line_length": 45.083333333333336,
"alnum_prop": 0.6414048059149723,
"repo_name": "botify-labs/simpleflow",
"id": "0e2d5fa0fa69bdb82875a431063daf18592003ec",
"size": "1082",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/integration/test_timer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "298"
},
{
"name": "Python",
"bytes": "801698"
},
{
"name": "Shell",
"bytes": "4481"
}
],
"symlink_target": ""
} |
import logging
from pyOlog.OlogHandler import OlogHandler
# Module-level logger wired to both the Olog service and the console.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# Handler that records log entries in Olog.
olog = OlogHandler()
olog.setLevel(logging.DEBUG)
olog.setFormatter(formatter)
# Plain console handler for local visibility; note it gets no formatter.
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
logger.addHandler(olog)
logger.addHandler(ch)
def main():
    """Emit one message at each severity level to exercise both handlers."""
    logger.debug('Debug message')
    logger.info('Info Message')
    # Bug fix: Logger.warn() is a deprecated alias; use warning() instead.
    logger.warning('Warn Message')
    logger.error('Error Message')
    logger.critical('Critical Message')

if __name__ == "__main__":
    main()
| {
"content_hash": "866eff7718d729d987b50d277b5386ba",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 85,
"avg_line_length": 22.59259259259259,
"alnum_prop": 0.7262295081967213,
"repo_name": "ericdill/pyOlog",
"id": "b3b65c3b18df064a211f7184a7c1a2b653ef020b",
"size": "610",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "test/logger.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "70852"
},
{
"name": "Shell",
"bytes": "64"
}
],
"symlink_target": ""
} |
import sys
from setuptools import setup, find_packages
from version import get_version
from commands import preconfigure, configure, create_bucket_types, \
setup_security, enable_security, disable_security
# Base runtime dependencies; version-specific extras are added below.
install_requires = ['six >= 1.8.0']
requires = ['six(>=1.8.0)']

# Interpreters older than 2.7.9 lack modern ssl features, so pull in pyOpenSSL.
if sys.version_info < (2, 7, 9):
    install_requires += ["pyOpenSSL >= 0.14"]
    requires += ["pyOpenSSL(>=0.14)"]

# The protobuf bindings are published under different names for py2 and py3.
pb_package = "riak_pb" if sys.version_info < (3, ) else "python3_riak_pb"
install_requires += ["%s >=2.0.0" % pb_package]
requires += ["%s(>=2.0.0)" % pb_package]

# unittest2 backports newer unittest features to pre-2.7 interpreters.
tests_require = [] if sys.version_info >= (2, 7) else ["unittest2"]
# Package metadata plus the custom commands CI uses to prepare a Riak node.
setup(
    name='riak',
    version=get_version(),
    packages=find_packages(),
    requires=requires,
    install_requires=install_requires,
    tests_require=tests_require,
    # Ship the Erlang MapReduce sources alongside the Python package.
    package_data={'riak': ['erl_src/*']},
    description='Python client for Riak',
    zip_safe=True,
    options={'easy_install': {'allow_hosts': 'pypi.python.org'}},
    include_package_data=True,
    license='Apache 2',
    platforms='Platform Independent',
    author='Basho Technologies',
    author_email='clients@basho.com',
    test_suite='riak.tests.suite',
    url='https://github.com/basho/riak-python-client',
    # Commands defined in commands.py for configuring a test cluster.
    cmdclass={'create_bucket_types': create_bucket_types,
              'setup_security': setup_security,
              'preconfigure': preconfigure,
              'configure': configure,
              'enable_security': enable_security,
              'disable_security': disable_security},
    classifiers=['License :: OSI Approved :: Apache Software License',
                 'Intended Audience :: Developers',
                 'Operating System :: OS Independent',
                 'Programming Language :: Python :: 2.6',
                 'Programming Language :: Python :: 2.7',
                 'Programming Language :: Python :: 3.3',
                 'Programming Language :: Python :: 3.4',
                 'Topic :: Database']
)
| {
"content_hash": "e58e3ee8af94e18870f55e86d0d8ffaa",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 70,
"avg_line_length": 38.05555555555556,
"alnum_prop": 0.618978102189781,
"repo_name": "GabrielNicolasAvellaneda/riak-python-client",
"id": "549f279945d4aef752206050537d21c6311e2569",
"size": "2077",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Erlang",
"bytes": "32681"
},
{
"name": "Makefile",
"bytes": "5145"
},
{
"name": "Python",
"bytes": "583815"
},
{
"name": "Shell",
"bytes": "1786"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from awesome_thirdparty_library import AwesomeClass
import Pyro4
# create adapter class that only exposes what should be accessible,
# and calls into the library class from there:
class AwesomeAdapterClass(AwesomeClass):
    """Adapter that exposes only selected AwesomeClass methods to Pyro clients,
    converting awkward return types into wire-friendly values."""
    @Pyro4.expose
    def method(self, arg):
        # Delegates unchanged; the print is just a server-side trace.
        print("Adapter class is called...")
        return super(AwesomeAdapterClass, self).method(arg)
    @Pyro4.expose
    def weird(self):
        result = super(AwesomeAdapterClass, self).weird()
        # we have full control over what is returned and can turn the custom
        # result class into a normal string value that has no issues traveling over the wire
        return "weird " + result.value
with Pyro4.Daemon() as daemon:
    # register the adapter class instead of the library class itself:
    uri = daemon.register(AwesomeAdapterClass, "example.thirdpartylib")
    print("adapter class registered, uri: ", uri)
    # Blocks here serving remote calls until the daemon is shut down.
    daemon.requestLoop()
| {
"content_hash": "d2d27f250c46906ef06dd6c869586d3f",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 92,
"avg_line_length": 34.607142857142854,
"alnum_prop": 0.718266253869969,
"repo_name": "irmen/Pyro4",
"id": "ed76174e26ab5b7febd20743bd01b5f4c29a6817",
"size": "969",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/thirdpartylib/server2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1283"
},
{
"name": "Python",
"bytes": "618799"
},
{
"name": "Shell",
"bytes": "2394"
}
],
"symlink_target": ""
} |
from __future__ import division, print_function, absolute_import
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for the sparsetools package."""
    import numpy
    from numpy.distutils.misc_util import Configuration

    config = Configuration('sparsetools', parent_package, top_path)

    # One SWIG-wrapped C++ extension per sparse matrix format.
    for name in ('csr', 'csc', 'coo', 'bsr', 'dia', 'csgraph'):
        config.add_extension(
            '_' + name,
            sources=[name + '_wrap.cxx'],
            define_macros=[('__STDC_FORMAT_MACROS', 1)],
            depends=[name + '.h'])

    return config
if __name__ == '__main__':
    # Allow building this subpackage standalone via numpy.distutils.
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
| {
"content_hash": "117b91ccbb70a657e396ce5070600d57",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 65,
"avg_line_length": 33.6,
"alnum_prop": 0.6235119047619048,
"repo_name": "sargas/scipy",
"id": "0c754a1e3bf28fc03ed8181d19d777095d3b4996",
"size": "694",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "scipy/sparse/sparsetools/setup.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4420309"
},
{
"name": "C++",
"bytes": "7675576"
},
{
"name": "FORTRAN",
"bytes": "5900194"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Objective-C",
"bytes": "25240"
},
{
"name": "Python",
"bytes": "6611452"
},
{
"name": "Shell",
"bytes": "1793"
}
],
"symlink_target": ""
} |
import json
import re
import sys
import urllib3
from urllib3.util import retry
# List of projects having tempest plugin stale or unmaintained for a long time
# (6 months or more)
# TODO(masayukig): Some of these can be removed from BLACKLIST in the future
# when the patches are merged.
BLACKLIST = [
'x/gce-api', # It looks gce-api doesn't support python3 yet.
'x/glare', # To avoid sanity-job failure
'x/group-based-policy', # It looks this doesn't support python3 yet.
'x/intel-nfv-ci-tests', # https://review.opendev.org/#/c/634640/
'openstack/networking-generic-switch',
# https://review.opendev.org/#/c/634846/
'openstack/networking-l2gw-tempest-plugin',
# https://review.opendev.org/#/c/635093/
'openstack/networking-midonet', # https://review.opendev.org/#/c/635096/
'x/networking-plumgrid', # https://review.opendev.org/#/c/635096/
'x/networking-spp', # https://review.opendev.org/#/c/635098/
'openstack/neutron-dynamic-routing',
# https://review.opendev.org/#/c/637718/
'openstack/neutron-vpnaas', # https://review.opendev.org/#/c/637719/
'x/tap-as-a-service', # To avoid sanity-job failure
'x/valet', # https://review.opendev.org/#/c/638339/
'x/kingbird', # https://bugs.launchpad.net/kingbird/+bug/1869722
# vmware-nsx is blacklisted since https://review.opendev.org/#/c/736952
'x/vmware-nsx-tempest-plugin',
]
url = 'https://review.opendev.org/projects/'
# This is what a project looks like
'''
"openstack-attic/akanda": {
"id": "openstack-attic%2Fakanda",
"state": "READ_ONLY"
},
'''
http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED')
retries = retry.Retry(status_forcelist=[500], backoff_factor=1.0)
def has_tempest_plugin(proj):
    """Return True if *proj* declares a tempest plugin entry point in the
    setup.cfg of its master branch, False otherwise.

    Raises urllib3.exceptions.MaxRetryError on persistent network failures.
    """
    try:
        r = http.request('GET', "https://opendev.org/%s/raw/branch/"
                         "master/setup.cfg" % proj, retries=retries)
        if r.status == 404:
            # No setup.cfg at all, so the project cannot register a plugin.
            return False
    except urllib3.exceptions.MaxRetryError as err:
        # We should not ignore non 404 errors.
        raise err
    p = re.compile(r'^tempest\.test_plugins', re.M)
    # Bug fix: the original ended with a bare `False` expression statement in
    # the else branch, implicitly returning None; return a real boolean.
    return bool(p.findall(r.data.decode('utf-8')))
# CLI mode: `blacklist` prints the static blacklist and exits immediately.
if len(sys.argv) > 1 and sys.argv[1] == 'blacklist':
    for black_plugin in BLACKLIST:
        print(black_plugin)
    # We just need BLACKLIST when we use this `blacklist` option.
    # So, this exits here.
    sys.exit()
# Fetch the full project list from Gerrit.
r = http.request('GET', url, retries=retries)
# Gerrit prepends 4 garbage octets to the JSON, in order to counter
# cross-site scripting attacks. Therefore we must discard it so the
# json library won't choke.
content = r.data.decode('utf-8')[4:]
projects = sorted(json.loads(content))
# Retrieve projects having no deployment tool repo (such as deb,
# puppet, ansible, etc.), infra repos, ui or spec namespace as those
# namespaces do not contains tempest plugins.
projects_list = [i for i in projects if not (
    i.startswith('openstack-dev/') or
    i.startswith('openstack-infra/') or
    i.startswith('openstack/ansible-') or
    i.startswith('openstack/charm-') or
    i.startswith('openstack/cookbook-openstack-') or
    i.startswith('openstack/devstack-') or
    i.startswith('openstack/fuel-') or
    i.startswith('openstack/deb-') or
    i.startswith('openstack/puppet-') or
    i.startswith('openstack/openstack-ansible-') or
    i.startswith('x/deb-') or
    i.startswith('x/fuel-') or
    i.startswith('x/python-') or
    i.startswith('zuul/') or
    i.endswith('-ui') or
    i.endswith('-specs'))]
# Probe each remaining project's setup.cfg for a tempest plugin entry point.
found_plugins = list(filter(has_tempest_plugin, projects_list))
# We have tempest plugins not only in 'openstack/' namespace but also the
# other name spaces such as 'airship/', 'x/', etc.
# So, we print all of them here.
for project in found_plugins:
    print(project)
| {
"content_hash": "42bb2d9d06ea96293baeb88ff4f1ab4a",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 78,
"avg_line_length": 36.41904761904762,
"alnum_prop": 0.6725941422594143,
"repo_name": "cisco-openstack/tempest",
"id": "530ce5eae860b8e8841c6c43bed93d417aa42ad2",
"size": "4796",
"binary": false,
"copies": "1",
"ref": "refs/heads/proposed",
"path": "tools/generate-tempest-plugins-list.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4431271"
},
{
"name": "Shell",
"bytes": "7435"
}
],
"symlink_target": ""
} |
from setuptools import setup
# Console entry points expose the installer under two command names.
# NOTE(review): open('LICENSE.txt').read() never closes the file handle;
# consider pathlib.Path('LICENSE.txt').read_text() instead.
setup(name='brightway2-conda-installer',
      packages=['brightway2_conda_installer'],
      version='0.0.1',
      author="Adrian Haas",
      license=open('LICENSE.txt').read(),
      description='Installation of brightway2 and all of its dependencies in a new anaconda environment.',
      entry_points={
          'console_scripts': [
              'create_new_brightway2_env = brightway2_conda_installer.__main__:create_new_brightway2_env',
              'bw_env = brightway2_conda_installer.__main__:create_new_brightway2_env'
          ]
      },
      ) | {
"content_hash": "23cd49dac9a0a941c5a6a84d7637519c",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 106,
"avg_line_length": 39.46666666666667,
"alnum_prop": 0.6351351351351351,
"repo_name": "haasad/brightway2-conda-installer",
"id": "dfa66801c0253ec93accb38655ab7947584ca44f",
"size": "592",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "51"
},
{
"name": "Python",
"bytes": "2737"
},
{
"name": "Shell",
"bytes": "68"
}
],
"symlink_target": ""
} |
from model.group import Group
# Static fixtures: input data for data-driven group creation tests.
data_for_group = [
    Group(name="name1n", header="header2", footer="footer1"),
    Group(name="name2f", header="header2", footer="footer2")
] | {
"content_hash": "be9ea9838a25e07f4e7589f52d5a2a76",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 61,
"avg_line_length": 29,
"alnum_prop": 0.6839080459770115,
"repo_name": "kochetov-a/python_training",
"id": "f186aa22798a66ec39af0e71550e948104602020",
"size": "199",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data/groups.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "59129"
}
],
"symlink_target": ""
} |
import argparse
import imp
#import cv2
#import os
from tensorpack.dataflow import serve_data
# CLI: positional `config` is a path to a script exposing get_config();
# -p/--port is the TCP port to serve the dataset on.
parser = argparse.ArgumentParser()
parser.add_argument(dest='config')
parser.add_argument('-p', '--port', help='port', type=int, required=True)
args = parser.parse_args()
# Load the config module from an arbitrary file path (imp is deprecated in
# favour of importlib, kept as-is here).
get_config_func = imp.load_source('config_script', args.config).get_config
config = get_config_func()
ds = config.dataset
# Serve the dataflow over all interfaces at the requested port.
serve_data(ds, "tcp://*:{}".format(args.port))
| {
"content_hash": "e3eed93b4d1928db31adfc6b6eaa1e8b",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 74,
"avg_line_length": 27.25,
"alnum_prop": 0.731651376146789,
"repo_name": "yinglanma/AI-project",
"id": "315ef39c9655d31e6100a5b66de62a415f510359",
"size": "546",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/serve-data.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "319384"
}
],
"symlink_target": ""
} |
import json
import requests
HEADERS = {'Accept': 'application/json', 'Content-Type':
'application/json', 'Accept-encoding': 'application/json'}
def get_client_info_all(auth, url):
    """
    function takes no input to RESTFUL call to HP IMC
    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
    :return: list of dictionaries where each element of the list represents one client as discovered by the HPE IMC
    Wireless Services Management module.
    :rtype: list
    >>> from pyhpeimc.auth import *
    >>> from pyhpeimc.wsm.clientinfo import *
    >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
    >>> all_client_info = get_client_info_all(auth.creds, auth.url)
    >>> assert type(all_client_info) is list
    >>> assert len(all_client_info[0]) == 16
    >>> assert 'acDevId' in all_client_info[0]
    >>> assert 'acLabel' in all_client_info[0]
    >>> assert 'apIpAddress' in all_client_info[0]
    >>> assert 'apLabel' in all_client_info[0]
    >>> assert 'apMacAddress' in all_client_info[0]
    >>> assert 'apSerialId' in all_client_info[0]
    >>> assert 'channel' in all_client_info[0]
    >>> assert 'ipAddress' in all_client_info[0]
    >>> assert 'location' in all_client_info[0]
    >>> assert 'mac' in all_client_info[0]
    >>> assert 'radioType' in all_client_info[0]
    >>> assert 'signalStrength' in all_client_info[0]
    >>> assert 'ssid' in all_client_info[0]
    >>> assert 'upTime' in all_client_info[0]
    >>> assert 'userName' in all_client_info[0]
    """
    get_client_info_all_url = "/imcrs/wlan/clientInfo/queryAllClientBasicInfo"
    f_url = url + get_client_info_all_url
    try:
        # Bug fix: the request must be inside the try block, otherwise the
        # RequestException handler below can never fire. The unused `payload`
        # local was also removed.
        r = requests.get(f_url, auth=auth, headers=HEADERS)
        if r.status_code == 200:
            if len(r.text) > 0:
                return json.loads(r.text)['clientBasicInfo']
    except requests.exceptions.RequestException as e:
        return "Error:\n" + str(e) + " get_client_info_all: An Error has occured"
def get_client_online_history_all(auth, url):
    """
    function takes no input to RESTFUL call to HP IMC
    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
    :return: list of dictionaries where each element of the list represents one client as discovered by the HPE IMC
    Wireless Services Management module.
    :rtype: list
    >>> from pyhpeimc.auth import *
    >>> from pyhpeimc.wsm.clientinfo import *
    >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
    >>> online_client_info = get_client_online_history_all(auth.creds, auth.url)
    >>> assert type(online_client_info) is list
    >>> assert len(online_client_info[0]) == 21
    >>> assert 'acDevId' in online_client_info[0]
    >>> assert 'apName' in online_client_info[0]
    >>> assert 'apSerialId' in online_client_info[0]
    >>> assert 'authenMode' in online_client_info[0]
    >>> assert 'channel' in online_client_info[0]
    >>> assert 'ciphers' in online_client_info[0]
    >>> assert 'faultTime' in online_client_info[0]
    >>> assert 'ip' in online_client_info[0]
    >>> assert 'loginTime' in online_client_info[0]
    >>> assert 'mac' in online_client_info[0]
    >>> assert 'onlineTime' in online_client_info[0]
    >>> assert 'position' in online_client_info[0]
    >>> assert 'radioId' in online_client_info[0]
    >>> assert 'radioType' in online_client_info[0]
    >>> assert 'rxByte' in online_client_info[0]
    >>> assert 'rxNoise' in online_client_info[0]
    >>> assert 'rxSnr' in online_client_info[0]
    >>> assert 'singalStrength' in online_client_info[0]
    >>> assert 'ssid' in online_client_info[0]
    >>> assert 'txByte' in online_client_info[0]
    >>> assert 'userName' in online_client_info[0]
    """
    get_client_online_history_all_url = "/imcrs/wlan/clientInfo/queryClientOnlineHistoryInfo"
    f_url = url + get_client_online_history_all_url
    # NOTE(review): `payload` is never used, and requests.get runs outside
    # the try block below, so the RequestException handler is unreachable.
    payload = None
    r = requests.get(f_url, auth=auth,
                     headers=HEADERS)  # creates the URL using the payload variable as the contents
    # print(r.status_code)
    try:
        if r.status_code == 200:
            if len(r.text) > 0:
                return json.loads(r.text)['clientOnlineHistoryInfo']
    except requests.exceptions.RequestException as e:
        return "Error:\n" + str(e) + " get_client_online_history_all: An Error has occured" | {
"content_hash": "9364e71568ef84c87cb772faaa292b12",
"timestamp": "",
"source": "github",
"line_count": 159,
"max_line_length": 115,
"avg_line_length": 30.0251572327044,
"alnum_prop": 0.6436950146627566,
"repo_name": "netmanchris/PYHPEIMC",
"id": "833f15d534c7aa80ffbd084aabd7ee365746e291",
"size": "4865",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "build/lib/pyhpeimc/wsm/clientinfo.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "772580"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals, division, absolute_import
from builtins import * # pylint: disable=unused-import, redefined-builtin
from argparse import ArgumentParser, ArgumentTypeError
from sqlalchemy.orm.exc import NoResultFound
from flexget import options
from flexget.event import event
from flexget.manager import Session
from flexget.terminal import TerminalTable, TerminalTableError, table_parser, console
from flexget.plugins.list.entry_list import get_entry_lists, get_list_by_exact_name, get_entries_by_list_id, \
get_entry_by_id, get_entry_by_title, EntryListList, EntryListEntry
def attribute_type(attribute):
    """Parse a single `key=value` CLI token into a one-item dict.

    Raises ArgumentTypeError when the token does not contain exactly one '='.
    """
    if attribute.count('=') != 1:
        raise ArgumentTypeError('Received attribute in wrong format: %s, '
                                'should be in keyword format like `imdb_id=tt1234567`' % attribute)
    key, _, value = attribute.partition('=')
    return {key: value}
def do_cli(manager, options):
    """Handle entry-list subcommand"""
    # Dispatch table keyed by sub-action; unknown actions are a no-op,
    # matching the original fall-through behavior.
    handlers = {
        'all': entry_list_lists,
        'list': entry_list_list,
        'show': entry_list_show,
        'add': entry_list_add,
        'del': entry_list_del,
        'purge': entry_list_purge,
    }
    handler = handlers.get(options.list_action)
    if handler is not None:
        handler(options)
def entry_list_lists(options):
    """Print a table of every stored entry list."""
    with Session() as session:
        table_data = [['#', 'List Name']]
        for stored_list in get_entry_lists(session=session):
            table_data.append([stored_list.id, stored_list.name])
        table = TerminalTable(options.table_type, table_data)
        try:
            console(table.output)
        except TerminalTableError as e:
            console('ERROR: %s' % str(e))
def entry_list_list(options):
    """Print every entry of one entry list, most recently added first."""
    with Session() as session:
        try:
            entry_list = get_list_by_exact_name(options.list_name, session=session)
        except NoResultFound:
            console('Could not find entry list with name {}'.format(options.list_name))
            return
        table_data = [['#', 'Title', '# of fields']]
        stored_entries = get_entries_by_list_id(entry_list.id, order_by='added', descending=True, session=session)
        for stored_entry in stored_entries:
            table_data.append([stored_entry.id, stored_entry.title, len(stored_entry.entry)])
        table = TerminalTable(options.table_type, table_data)
        try:
            console(table.output)
        except TerminalTableError as e:
            console('ERROR: %s' % str(e))
def entry_list_show(options):
    # Show every stored field of one entry; `options.entry` may be either a
    # numeric entry ID or an entry title.
    with Session() as session:
        try:
            entry_list = get_list_by_exact_name(options.list_name, session=session)
        except NoResultFound:
            console('Could not find entry list with name {}'.format(options.list_name))
            return
        try:
            # First interpret options.entry as a numeric ID...
            entry = get_entry_by_id(entry_list.id, int(options.entry), session=session)
        except NoResultFound:
            console(
                'Could not find matching entry with ID {} in list `{}`'.format(int(options.entry), options.list_name))
            return
        except ValueError:
            # ...and fall back to a title lookup when int() fails.
            entry = get_entry_by_title(entry_list.id, options.entry, session=session)
            if not entry:
                console(
                    'Could not find matching entry with title `{}` in list `{}`'.format(options.entry,
                                                                                        options.list_name))
                return

        header = ['Field name', 'Value']
        table_data = [header]
        for k, v in sorted(entry.entry.items()):
            table_data.append([k, str(v)])
        # Wrap only the value column; field names stay centered on one line.
        table = TerminalTable(options.table_type, table_data, wrap_columns=[1])
        table.table.justify_columns[0] = 'center'
        try:
            console(table.output)
        except TerminalTableError as e:
            console('ERROR: %s' % str(e))
def entry_list_add(options):
    # Add an entry to a list (creating the list on demand), or update an
    # existing entry's attributes when the title already exists.
    with Session() as session:
        try:
            entry_list = get_list_by_exact_name(options.list_name, session=session)
        except NoResultFound:
            # The list does not exist yet: create and persist it immediately
            # so entry_list.id is available below.
            console('Could not find entry list with name `{}`, creating'.format(options.list_name))
            entry_list = EntryListList(name=options.list_name)
            session.add(entry_list)
            session.merge(entry_list)
            session.commit()
        title = options.entry_title
        entry = {'title': options.entry_title, 'original_url': options.original_url}
        db_entry = get_entry_by_title(list_id=entry_list.id, title=title, session=session)
        if db_entry:
            console("Entry with the title `{}` already exist with list `{}`. Will replace identifiers if given".format(
                title, entry_list.name))
            output = 'Successfully updated entry `{}` to entry list `{}` '.format(title, entry_list.name)
        else:
            console("Adding entry with title `{}` to list `{}`".format(title, entry_list.name))
            db_entry = EntryListEntry(entry=entry, entry_list_id=entry_list.id)
            session.add(db_entry)
            output = 'Successfully added entry `{}` to entry list `{}` '.format(title, entry_list.name)
        if options.attributes:
            # Merge the key=value attributes into the stored entry dict and
            # reassign it so the change is detected and persisted.
            console('Adding attributes to entry `{}`'.format(title))
            for identifier in options.attributes:
                for k, v in identifier.items():
                    entry[k] = v
            db_entry.entry = entry
        console(output)
def entry_list_del(options):
    """Remove a single entry from an entry list.

    ``options.entry`` may be either a numeric entry ID or an entry title;
    an ID lookup is attempted first, with a title lookup as fallback.
    """
    with Session() as session:
        try:
            # Pass the active session for consistency with the other
            # entry-list commands (show/add), so the lookup and the delete
            # happen in the same unit of work.
            entry_list = get_list_by_exact_name(options.list_name, session=session)
        except NoResultFound:
            console('Could not find entry list with name `{}`'.format(options.list_name))
            return
        try:
            db_entry = get_entry_by_id(entry_list.id, int(options.entry), session=session)
        except NoResultFound:
            console(
                'Could not find matching entry with ID {} in list `{}`'.format(int(options.entry), options.list_name))
            return
        except ValueError:
            # ``options.entry`` was not an integer: fall back to a title lookup.
            db_entry = get_entry_by_title(entry_list.id, options.entry, session=session)
            if not db_entry:
                console(
                    'Could not find matching entry with title `{}` in list `{}`'.format(options.entry,
                                                                                       options.list_name))
                return
        # NOTE(review): assumes EntryListEntry exposes a ``title`` attribute —
        # confirm against the model definition.
        console('Removing entry `{}` from list {}'.format(db_entry.title, options.list_name))
        session.delete(db_entry)
def entry_list_purge(options):
    """Delete an entire entry list together with all of its entries."""
    with Session() as session:
        try:
            # Pass the active session for consistency with the other
            # entry-list commands, so lookup and delete share one session.
            entry_list = get_list_by_exact_name(options.list_name, session=session)
        except NoResultFound:
            console('Could not find entry list with name `{}`'.format(options.list_name))
            return
        console('Deleting list {}'.format(options.list_name))
        session.delete(entry_list)
@event('options.register')
def register_parser_arguments():
    """Wire up the ``entry-list`` CLI command and all of its subcommands."""
    # Parent parsers holding options shared by several subcommands.
    new_entry_parser = ArgumentParser(add_help=False)
    new_entry_parser.add_argument('entry_title', help="Title of the entry")
    new_entry_parser.add_argument('original_url', help="URL of the entry")
    existing_entry_parser = ArgumentParser(add_help=False)
    existing_entry_parser.add_argument('entry', help='Can be entry title or ID')
    attributes_parser = ArgumentParser(add_help=False)
    attributes_parser.add_argument('--attributes', metavar='<attributes>', nargs='+', type=attribute_type,
                                   help='Can be a string or a list of string with the format imdb_id=XXX,'
                                        ' tmdb_id=XXX, etc')
    list_name_parser = ArgumentParser(add_help=False)
    list_name_parser.add_argument('list_name', nargs='?', default='entries', help='Name of entry list to operate on')
    # Register the top-level command, then one subparser per action.
    parser = options.register_command('entry-list', do_cli, help='view and manage entry lists')
    subparsers = parser.add_subparsers(title='actions', metavar='<action>', dest='list_action')
    subparsers.add_parser('all', help='Shows all existing entry lists', parents=[table_parser])
    subparsers.add_parser('list', parents=[list_name_parser, table_parser], help='List entries from a list')
    subparsers.add_parser('show', parents=[list_name_parser, existing_entry_parser, table_parser],
                          help='Show entry fields.')
    subparsers.add_parser('add', parents=[list_name_parser, new_entry_parser, attributes_parser],
                          help='Add an entry to a list')
    subparsers.add_parser('del', parents=[list_name_parser, existing_entry_parser],
                          help='Remove an entry from a list using its title or ID')
    subparsers.add_parser('purge', parents=[list_name_parser],
                          help='Removes an entire list with all of its entries. Use this with caution')
| {
"content_hash": "af358ecd61caa4a8ebb77e5a9cec191d",
"timestamp": "",
"source": "github",
"line_count": 213,
"max_line_length": 119,
"avg_line_length": 43.201877934272304,
"alnum_prop": 0.6099760921538796,
"repo_name": "oxc/Flexget",
"id": "b1ed609482f0f7d8175fb4e4959a5e1aa7a143f7",
"size": "9202",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "flexget/plugins/cli/entry_list.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "9267"
},
{
"name": "HTML",
"bytes": "49610"
},
{
"name": "JavaScript",
"bytes": "239825"
},
{
"name": "Python",
"bytes": "2749010"
},
{
"name": "SRecode Template",
"bytes": "3"
}
],
"symlink_target": ""
} |
"""
.. module:: soap.transformer.utils
:synopsis: Useful utility functions to simplify calls to
ArithTreeTransformer.
"""
import collections
from soap import logger
from soap.expression import operators, is_expression
from soap.semantics import MetaState
from soap.transformer.core import TreeTransformer
from soap.transformer.arithmetic import (
associativity_addition, associativity_multiplication,
distributivity_distribute_multiplication,
distributivity_distribute_division, ArithTreeTransformer
)
from soap.analysis import frontier, thick_frontier
def closure(expr, **kwargs):
    """The full transitive closure."""
    transformer = ArithTreeTransformer(expr, **kwargs)
    return transformer.closure()
def full_closure(expr, **kwargs):
    """The same as :func:`closure`, ignoring the `kwargs` stuff."""
    # ``kwargs`` is accepted for interface compatibility but deliberately
    # dropped: the full closure takes no options.
    return closure(expr)
def _plugin_closure(
        plugin_func, expr, state, out_vars, recurrences=None, **kwargs):
    """Run a transitive closure, pruning each step with *plugin_func*.

    The same pruning is applied once more to the final closure result.
    """
    def plugin(expr_set):
        pruned = plugin_func(
            expr_set, state, out_vars, recurrences=recurrences)
        return [item.expression for item in pruned]
    closure_set = ArithTreeTransformer(
        expr, step_plugin=plugin, **kwargs).closure()
    return plugin(closure_set)
def greedy_frontier_closure(
        expr, state, out_vars=None, recurrences=None, **kwargs):
    """Our greedy transitive closure, pruned with :func:`frontier`.

    :param expr: The expression(s) under transform.
    :type expr:
        :class:`soap.expression.Expression` or
        :class:`soap.semantics.state.MetaState`
    :param state: The ranges of input variables, a mapping from variables to
        :class:`soap.semantics.error.Interval` instances.
    :param out_vars: The output variables of the metastate.
    :type out_vars: :class:`collections.Sequence`
    :param recurrences: Information about loop recurrences.
    :type recurrences: dict
    """
    return _plugin_closure(
        frontier, expr, state, out_vars, recurrences, **kwargs)
def thick_frontier_closure(
        expr, state, out_vars=None, recurrences=None, **kwargs):
    """Our thick frontier transitive closure, pruned with
    :func:`thick_frontier`.

    :param expr: The expression(s) under transform.
    :type expr:
        :class:`soap.expression.Expression` or
        :class:`soap.semantics.state.MetaState`
    :param state: The ranges of input variables, a mapping from variables to
        :class:`soap.semantics.error.Interval` instances.
    :param out_vars: The output variables of the metastate.
    :type out_vars: :class:`collections.Sequence`
    :param recurrences: Information about loop recurrences.
    :type recurrences: dict
    """
    return _plugin_closure(
        thick_frontier, expr, state, out_vars, recurrences, **kwargs)
def transform(expr, reduction_rules=None, transform_rules=None,
              step_plugin=None, reduce_plugin=None, depth=None,
              multiprocessing=True):
    """One liner for :class:`soap.transformer.TreeTransformer`."""
    transformer = TreeTransformer(
        expr,
        transform_rules=transform_rules,
        reduction_rules=reduction_rules,
        step_plugin=step_plugin,
        reduce_plugin=reduce_plugin,
        depth=depth,
        multiprocessing=multiprocessing)
    return transformer.closure()
def expand(expr, *args, **kwargs):
    """Fully expands the expression expr by distributivity.

    :param expr: The expression expr.
    :type expr: :class:`soap.expression.Expression` or str
    :returns: A fully expanded expr.
    """
    def keep_one(candidates):
        # Collapse the working set to a single element at each reduce step.
        if not candidates:
            return candidates
        return [candidates.pop()]
    expansion_rules = [
        distributivity_distribute_multiplication,
        distributivity_distribute_division,
    ]
    closure_set = transform(
        expr, reduction_rules=expansion_rules, reduce_plugin=keep_one,
        multiprocessing=False)
    return closure_set.pop()
def reduce(expr, *args, **kwargs):
    """Transforms expr by reduction rules only.

    :param expr: The expression expr.
    :type expr: :class:`soap.expression.Expression` or str
    :returns: A new expression expr.
    """
    # Strings and expressions: apply the reduction rules directly.
    if isinstance(expr, str) or is_expression(expr):
        t = transform(expr, ArithTreeTransformer.reduction_rules,
                      multiprocessing=False)
        s = set(t)
        # Prefer a reduced form over the original when one exists; if more
        # than one candidate remains after dropping the original, fall
        # through to the container handling below.
        if len(s) > 1:
            s.remove(expr)
        if len(s) == 1:
            return s.pop()
    # Containers: reduce element-wise, quieting the logger while recursing.
    # NOTE(review): ``collections.Mapping``/``Iterable`` moved to
    # ``collections.abc`` in Python 3.3 and the aliases were removed in
    # 3.10 — confirm the targeted Python version.
    with logger.local_context(level=logger.levels.info):
        if isinstance(expr, collections.Mapping):
            return MetaState({v: reduce(e) for v, e in expr.items()})
        if isinstance(expr, collections.Iterable):
            return {reduce(t) for t in expr}
    raise TypeError('Do not know how to reduce {!r}'.format(expr))
def parsings(expr, *args, **kwargs):
    """Generates all possible parsings of the same expr by associativity.

    :param expr: The expression expr.
    :type expr: :class:`soap.expression.Expression` or str
    :returns: A set of exprs.
    """
    associativity_rules = {
        operators.ADD_OP: [associativity_addition],
        operators.MULTIPLY_OP: [associativity_multiplication],
    }
    return transform(
        expr, reduction_rules=None, transform_rules=associativity_rules,
        depth=1000)
| {
"content_hash": "a5ee31fd14700abee050378dc2ad9b28",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 79,
"avg_line_length": 35.18493150684932,
"alnum_prop": 0.676464862760366,
"repo_name": "admk/soap",
"id": "5e868272071f0b0d64326ef65698b21e02b978bd",
"size": "5137",
"binary": false,
"copies": "1",
"ref": "refs/heads/xitong/master",
"path": "soap/transformer/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Isabelle",
"bytes": "8132"
},
{
"name": "Python",
"bytes": "461377"
},
{
"name": "VHDL",
"bytes": "1728"
}
],
"symlink_target": ""
} |
from PyQt4 import QtCore, QtGui
# pyuic4-generated compatibility shims: on PyQt4 API v2 (and Python 3)
# QString does not exist, so fall back to identity/plain translate calls.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        # API v2: strings are already unicode; pass through unchanged.
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer PyQt4 builds drop the encoding argument from translate().
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_Form(object):
    """Auto-generated (pyuic4) UI definition for the STDP analyzer control
    panel: trace-display options, PSP analysis regions, and conditioning
    analysis fields. Do not edit by hand — regenerate from the .ui file.
    """
    def setupUi(self, Form):
        """Build the widget tree and layouts on *Form*."""
        Form.setObjectName(_fromUtf8("Form"))
        Form.resize(358, 751)
        self.gridLayout_7 = QtGui.QGridLayout(Form)
        self.gridLayout_7.setMargin(1)
        self.gridLayout_7.setSpacing(3)
        self.gridLayout_7.setObjectName(_fromUtf8("gridLayout_7"))
        self.createSummaryBtn = QtGui.QPushButton(Form)
        self.createSummaryBtn.setObjectName(_fromUtf8("createSummaryBtn"))
        self.gridLayout_7.addWidget(self.createSummaryBtn, 5, 0, 1, 1)
        # Trace display group: averaging, AP exclusion, display toggles.
        self.traceDisplayGroup = QtGui.QGroupBox(Form)
        self.traceDisplayGroup.setObjectName(_fromUtf8("traceDisplayGroup"))
        self.gridLayout_3 = QtGui.QGridLayout(self.traceDisplayGroup)
        self.gridLayout_3.setMargin(3)
        self.gridLayout_3.setSpacing(1)
        self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
        self.averageCheck = QtGui.QCheckBox(self.traceDisplayGroup)
        self.averageCheck.setObjectName(_fromUtf8("averageCheck"))
        self.gridLayout_3.addWidget(self.averageCheck, 0, 0, 1, 2)
        self.verticalLayout = QtGui.QVBoxLayout()
        self.verticalLayout.setSpacing(2)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        self.excludeAPsCheck = QtGui.QGroupBox(self.traceDisplayGroup)
        self.excludeAPsCheck.setCheckable(True)
        self.excludeAPsCheck.setObjectName(_fromUtf8("excludeAPsCheck"))
        self.horizontalLayout = QtGui.QHBoxLayout(self.excludeAPsCheck)
        self.horizontalLayout.setMargin(1)
        self.horizontalLayout.setSpacing(0)
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem)
        self.label_8 = QtGui.QLabel(self.excludeAPsCheck)
        self.label_8.setObjectName(_fromUtf8("label_8"))
        self.horizontalLayout.addWidget(self.label_8)
        self.startExcludeAPsSpin = SpinBox(self.excludeAPsCheck)
        self.startExcludeAPsSpin.setObjectName(_fromUtf8("startExcludeAPsSpin"))
        self.horizontalLayout.addWidget(self.startExcludeAPsSpin)
        self.label_9 = QtGui.QLabel(self.excludeAPsCheck)
        self.label_9.setObjectName(_fromUtf8("label_9"))
        self.horizontalLayout.addWidget(self.label_9)
        self.endExcludeAPsSpin = SpinBox(self.excludeAPsCheck)
        self.endExcludeAPsSpin.setProperty("value", 0.25)
        self.endExcludeAPsSpin.setObjectName(_fromUtf8("endExcludeAPsSpin"))
        self.horizontalLayout.addWidget(self.endExcludeAPsSpin)
        self.verticalLayout.addWidget(self.excludeAPsCheck)
        self.averageAnalysisCheck = QtGui.QCheckBox(self.traceDisplayGroup)
        self.averageAnalysisCheck.setMinimumSize(QtCore.QSize(0, 20))
        self.averageAnalysisCheck.setChecked(True)
        self.averageAnalysisCheck.setObjectName(_fromUtf8("averageAnalysisCheck"))
        self.verticalLayout.addWidget(self.averageAnalysisCheck)
        self.displayTracesCheck = QtGui.QCheckBox(self.traceDisplayGroup)
        self.displayTracesCheck.setMinimumSize(QtCore.QSize(0, 20))
        self.displayTracesCheck.setObjectName(_fromUtf8("displayTracesCheck"))
        self.verticalLayout.addWidget(self.displayTracesCheck)
        self.gridLayout_3.addLayout(self.verticalLayout, 3, 1, 1, 1)
        spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.gridLayout_3.addItem(spacerItem1, 4, 0, 1, 3)
        self.gridLayout_8 = QtGui.QGridLayout()
        self.gridLayout_8.setHorizontalSpacing(0)
        self.gridLayout_8.setVerticalSpacing(2)
        self.gridLayout_8.setObjectName(_fromUtf8("gridLayout_8"))
        self.averageTimeRadio = QtGui.QRadioButton(self.traceDisplayGroup)
        self.averageTimeRadio.setChecked(True)
        self.averageTimeRadio.setObjectName(_fromUtf8("averageTimeRadio"))
        self.gridLayout_8.addWidget(self.averageTimeRadio, 0, 0, 1, 1)
        self.averageTimeSpin = SpinBox(self.traceDisplayGroup)
        self.averageTimeSpin.setObjectName(_fromUtf8("averageTimeSpin"))
        self.gridLayout_8.addWidget(self.averageTimeSpin, 0, 1, 1, 1)
        self.averageNumberRadio = QtGui.QRadioButton(self.traceDisplayGroup)
        self.averageNumberRadio.setObjectName(_fromUtf8("averageNumberRadio"))
        self.gridLayout_8.addWidget(self.averageNumberRadio, 1, 0, 1, 1)
        self.averageNumberSpin = SpinBox(self.traceDisplayGroup)
        self.averageNumberSpin.setDecimals(0)
        self.averageNumberSpin.setMaximum(1000.0)
        self.averageNumberSpin.setProperty("value", 5.0)
        self.averageNumberSpin.setObjectName(_fromUtf8("averageNumberSpin"))
        self.gridLayout_8.addWidget(self.averageNumberSpin, 1, 1, 1, 1)
        self.gridLayout_8.setColumnStretch(1, 3)
        self.gridLayout_3.addLayout(self.gridLayout_8, 1, 1, 1, 1)
        self.gridLayout_7.addWidget(self.traceDisplayGroup, 0, 0, 1, 1)
        self.createBlindSummaryBtn = QtGui.QPushButton(Form)
        self.createBlindSummaryBtn.setObjectName(_fromUtf8("createBlindSummaryBtn"))
        self.gridLayout_7.addWidget(self.createBlindSummaryBtn, 6, 0, 1, 1)
        spacerItem2 = QtGui.QSpacerItem(337, 47, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.gridLayout_7.addItem(spacerItem2, 4, 0, 1, 1)
        self.storeToDBBtn = QtGui.QPushButton(Form)
        self.storeToDBBtn.setObjectName(_fromUtf8("storeToDBBtn"))
        self.gridLayout_7.addWidget(self.storeToDBBtn, 3, 0, 1, 1)
        # PSP analysis group: baseline / synaptic-event / health regions.
        self.analysisGroup = QtGui.QGroupBox(Form)
        self.analysisGroup.setObjectName(_fromUtf8("analysisGroup"))
        self.gridLayout_4 = QtGui.QGridLayout(self.analysisGroup)
        self.gridLayout_4.setMargin(1)
        self.gridLayout_4.setSpacing(1)
        self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
        self.baselineCheck = QtGui.QCheckBox(self.analysisGroup)
        self.baselineCheck.setChecked(True)
        self.baselineCheck.setObjectName(_fromUtf8("baselineCheck"))
        self.gridLayout_4.addWidget(self.baselineCheck, 0, 0, 1, 2)
        self.gridLayout = QtGui.QGridLayout()
        self.gridLayout.setSpacing(1)
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        self.label = QtGui.QLabel(self.analysisGroup)
        self.label.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label.setObjectName(_fromUtf8("label"))
        self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
        self.baselineStartSpin = SpinBox(self.analysisGroup)
        self.baselineStartSpin.setObjectName(_fromUtf8("baselineStartSpin"))
        self.gridLayout.addWidget(self.baselineStartSpin, 0, 1, 1, 1)
        self.label_2 = QtGui.QLabel(self.analysisGroup)
        self.label_2.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_2.setObjectName(_fromUtf8("label_2"))
        self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
        self.baselineEndSpin = SpinBox(self.analysisGroup)
        self.baselineEndSpin.setObjectName(_fromUtf8("baselineEndSpin"))
        self.gridLayout.addWidget(self.baselineEndSpin, 1, 1, 1, 1)
        self.gridLayout.setColumnStretch(0, 1)
        self.gridLayout.setColumnStretch(1, 2)
        self.gridLayout_4.addLayout(self.gridLayout, 1, 0, 1, 3)
        self.pspCheck = QtGui.QCheckBox(self.analysisGroup)
        self.pspCheck.setChecked(True)
        self.pspCheck.setObjectName(_fromUtf8("pspCheck"))
        self.gridLayout_4.addWidget(self.pspCheck, 2, 0, 1, 2)
        self.gridLayout_5 = QtGui.QGridLayout()
        self.gridLayout_5.setSpacing(1)
        self.gridLayout_5.setObjectName(_fromUtf8("gridLayout_5"))
        self.label_3 = QtGui.QLabel(self.analysisGroup)
        self.label_3.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_3.setObjectName(_fromUtf8("label_3"))
        self.gridLayout_5.addWidget(self.label_3, 0, 0, 1, 1)
        self.pspStartSpin = SpinBox(self.analysisGroup)
        self.pspStartSpin.setObjectName(_fromUtf8("pspStartSpin"))
        self.gridLayout_5.addWidget(self.pspStartSpin, 0, 1, 1, 2)
        self.label_4 = QtGui.QLabel(self.analysisGroup)
        self.label_4.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_4.setObjectName(_fromUtf8("label_4"))
        self.gridLayout_5.addWidget(self.label_4, 1, 0, 1, 1)
        self.pspEndSpin = SpinBox(self.analysisGroup)
        self.pspEndSpin.setObjectName(_fromUtf8("pspEndSpin"))
        self.gridLayout_5.addWidget(self.pspEndSpin, 1, 1, 1, 2)
        self.label_7 = QtGui.QLabel(self.analysisGroup)
        self.label_7.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_7.setObjectName(_fromUtf8("label_7"))
        self.gridLayout_5.addWidget(self.label_7, 2, 0, 1, 1)
        self.measureModeCombo = ComboBox(self.analysisGroup)
        self.measureModeCombo.setObjectName(_fromUtf8("measureModeCombo"))
        self.gridLayout_5.addWidget(self.measureModeCombo, 2, 1, 1, 2)
        self.gridLayout_4.addLayout(self.gridLayout_5, 3, 0, 1, 3)
        self.healthCheck = QtGui.QCheckBox(self.analysisGroup)
        self.healthCheck.setChecked(True)
        self.healthCheck.setObjectName(_fromUtf8("healthCheck"))
        self.gridLayout_4.addWidget(self.healthCheck, 4, 0, 1, 2)
        self.gridLayout_6 = QtGui.QGridLayout()
        self.gridLayout_6.setSpacing(1)
        self.gridLayout_6.setObjectName(_fromUtf8("gridLayout_6"))
        self.healthStartSpin = SpinBox(self.analysisGroup)
        self.healthStartSpin.setObjectName(_fromUtf8("healthStartSpin"))
        self.gridLayout_6.addWidget(self.healthStartSpin, 0, 1, 1, 1)
        self.label_6 = QtGui.QLabel(self.analysisGroup)
        self.label_6.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_6.setObjectName(_fromUtf8("label_6"))
        self.gridLayout_6.addWidget(self.label_6, 1, 0, 1, 1)
        self.healthEndSpin = SpinBox(self.analysisGroup)
        self.healthEndSpin.setObjectName(_fromUtf8("healthEndSpin"))
        self.gridLayout_6.addWidget(self.healthEndSpin, 1, 1, 1, 1)
        self.label_5 = QtGui.QLabel(self.analysisGroup)
        self.label_5.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_5.setObjectName(_fromUtf8("label_5"))
        self.gridLayout_6.addWidget(self.label_5, 0, 0, 1, 1)
        self.gridLayout_6.setColumnStretch(0, 1)
        self.gridLayout_6.setColumnStretch(1, 2)
        self.gridLayout_4.addLayout(self.gridLayout_6, 5, 0, 1, 3)
        spacerItem3 = QtGui.QSpacerItem(327, 9, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.gridLayout_4.addItem(spacerItem3, 6, 0, 1, 2)
        self.label_10 = QtGui.QLabel(self.analysisGroup)
        self.label_10.setObjectName(_fromUtf8("label_10"))
        self.gridLayout_4.addWidget(self.label_10, 7, 0, 1, 1)
        self.gridLayout_2 = QtGui.QGridLayout()
        self.gridLayout_2.setSpacing(1)
        self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
        self.plasticityAdjustChk = QtGui.QCheckBox(self.analysisGroup)
        self.plasticityAdjustChk.setObjectName(_fromUtf8("plasticityAdjustChk"))
        self.gridLayout_2.addWidget(self.plasticityAdjustChk, 0, 0, 1, 1)
        self.defaultRgnBtn = QtGui.QPushButton(self.analysisGroup)
        self.defaultRgnBtn.setObjectName(_fromUtf8("defaultRgnBtn"))
        self.gridLayout_2.addWidget(self.defaultRgnBtn, 0, 1, 1, 1)
        self.label_11 = QtGui.QLabel(self.analysisGroup)
        self.label_11.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_11.setObjectName(_fromUtf8("label_11"))
        self.gridLayout_2.addWidget(self.label_11, 1, 0, 1, 1)
        self.plasticityRgnStartSpin = SpinBox(self.analysisGroup)
        self.plasticityRgnStartSpin.setDecimals(1)
        self.plasticityRgnStartSpin.setProperty("value", 27.0)
        self.plasticityRgnStartSpin.setObjectName(_fromUtf8("plasticityRgnStartSpin"))
        self.gridLayout_2.addWidget(self.plasticityRgnStartSpin, 1, 1, 1, 1)
        self.label_12 = QtGui.QLabel(self.analysisGroup)
        self.label_12.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_12.setObjectName(_fromUtf8("label_12"))
        self.gridLayout_2.addWidget(self.label_12, 2, 0, 1, 1)
        self.plasticityRgnEndSpin = SpinBox(self.analysisGroup)
        self.plasticityRgnEndSpin.setDecimals(1)
        self.plasticityRgnEndSpin.setProperty("value", 47.0)
        self.plasticityRgnEndSpin.setObjectName(_fromUtf8("plasticityRgnEndSpin"))
        self.gridLayout_2.addWidget(self.plasticityRgnEndSpin, 2, 1, 1, 1)
        self.gridLayout_2.setColumnStretch(0, 1)
        self.gridLayout_2.setColumnStretch(1, 2)
        self.gridLayout_4.addLayout(self.gridLayout_2, 8, 0, 2, 3)
        self.analyzeBtn = QtGui.QPushButton(self.analysisGroup)
        self.analyzeBtn.setObjectName(_fromUtf8("analyzeBtn"))
        self.gridLayout_4.addWidget(self.analyzeBtn, 10, 0, 1, 3)
        self.gridLayout_7.addWidget(self.analysisGroup, 1, 0, 1, 1)
        # Conditioning analysis group: PSP start time / first spike peak.
        self.groupBox = QtGui.QGroupBox(Form)
        self.groupBox.setObjectName(_fromUtf8("groupBox"))
        self.gridLayout_9 = QtGui.QGridLayout(self.groupBox)
        self.gridLayout_9.setMargin(0)
        self.gridLayout_9.setSpacing(1)
        self.gridLayout_9.setObjectName(_fromUtf8("gridLayout_9"))
        self.label_13 = QtGui.QLabel(self.groupBox)
        self.label_13.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_13.setObjectName(_fromUtf8("label_13"))
        self.gridLayout_9.addWidget(self.label_13, 0, 0, 1, 1)
        self.label_14 = QtGui.QLabel(self.groupBox)
        self.label_14.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_14.setObjectName(_fromUtf8("label_14"))
        self.gridLayout_9.addWidget(self.label_14, 1, 0, 1, 1)
        self.pspStartTimeSpin = SpinBox(self.groupBox)
        self.pspStartTimeSpin.setObjectName(_fromUtf8("pspStartTimeSpin"))
        self.gridLayout_9.addWidget(self.pspStartTimeSpin, 0, 1, 1, 1)
        self.spikePeakSpin = SpinBox(self.groupBox)
        self.spikePeakSpin.setObjectName(_fromUtf8("spikePeakSpin"))
        self.gridLayout_9.addWidget(self.spikePeakSpin, 1, 1, 1, 1)
        self.gridLayout_7.addWidget(self.groupBox, 2, 0, 1, 1)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)
    def retranslateUi(self, Form):
        """Set all user-visible strings (translation hook)."""
        Form.setWindowTitle(_translate("Form", "Form", None))
        self.createSummaryBtn.setText(_translate("Form", "Create Summary Sheet", None))
        self.traceDisplayGroup.setTitle(_translate("Form", "Trace Display:", None))
        self.averageCheck.setText(_translate("Form", "Average traces based on:", None))
        self.excludeAPsCheck.setTitle(_translate("Form", "Exclude traces with action potentials", None))
        self.label_8.setText(_translate("Form", "between", None))
        self.label_9.setText(_translate("Form", "and", None))
        self.averageAnalysisCheck.setText(_translate("Form", "Use averaged traces for analysis", None))
        self.displayTracesCheck.setText(_translate("Form", "Display original traces (slow)", None))
        self.averageTimeRadio.setText(_translate("Form", "Time:", None))
        self.averageNumberRadio.setText(_translate("Form", "Number:", None))
        self.createBlindSummaryBtn.setText(_translate("Form", "Create Blind Summary Sheet", None))
        self.storeToDBBtn.setText(_translate("Form", "Store to Database", None))
        self.analysisGroup.setTitle(_translate("Form", "PSP Analysis", None))
        self.baselineCheck.setText(_translate("Form", "Baseline Region (green)", None))
        self.label.setText(_translate("Form", "Start:", None))
        self.label_2.setText(_translate("Form", "End:", None))
        self.pspCheck.setText(_translate("Form", "Synaptic Event Region (red)", None))
        self.label_3.setText(_translate("Form", "Start:", None))
        self.label_4.setText(_translate("Form", "End:", None))
        self.label_7.setText(_translate("Form", "Measure:", None))
        self.healthCheck.setText(_translate("Form", "Cell Health Region (blue)", None))
        self.label_6.setText(_translate("Form", "End:", None))
        self.label_5.setText(_translate("Form", "Start:", None))
        self.label_10.setText(_translate("Form", "Plasticity Region:", None))
        self.plasticityAdjustChk.setText(_translate("Form", "Allow adjustment", None))
        self.defaultRgnBtn.setText(_translate("Form", "Default", None))
        self.label_11.setText(_translate("Form", "Start:", None))
        self.plasticityRgnStartSpin.setSuffix(_translate("Form", " minutes", None))
        self.label_12.setText(_translate("Form", "End:", None))
        self.plasticityRgnEndSpin.setSuffix(_translate("Form", " minutes", None))
        self.analyzeBtn.setText(_translate("Form", "Analyze!", None))
        self.groupBox.setTitle(_translate("Form", "Conditioning Analysis", None))
        self.label_13.setText(_translate("Form", "PSP Start time:", None))
        self.label_14.setText(_translate("Form", "First Spike peak:", None))
from acq4.pyqtgraph.widgets.ComboBox import ComboBox
from acq4.pyqtgraph.widgets.SpinBox import SpinBox
| {
"content_hash": "626f9068705f74972fd0e056bd990b3d",
"timestamp": "",
"source": "github",
"line_count": 291,
"max_line_length": 104,
"avg_line_length": 62.23024054982818,
"alnum_prop": 0.7027444916892153,
"repo_name": "mgraupe/acq4",
"id": "90ef15c35d8c28aa2671840f4dfad075d77ab79a",
"size": "18352",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "acq4/analysis/modules/STDPAnalyzer/STDPControlTemplate.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "AMPL",
"bytes": "3037"
},
{
"name": "Batchfile",
"bytes": "247"
},
{
"name": "C",
"bytes": "757367"
},
{
"name": "C++",
"bytes": "1222891"
},
{
"name": "CSS",
"bytes": "716"
},
{
"name": "Inno Setup",
"bytes": "1606"
},
{
"name": "MATLAB",
"bytes": "1752"
},
{
"name": "Makefile",
"bytes": "30"
},
{
"name": "Processing",
"bytes": "13403"
},
{
"name": "Python",
"bytes": "6110588"
},
{
"name": "Shell",
"bytes": "70"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from oscar.core.compat import AUTH_USER_MODEL, AUTH_USER_MODEL_NAME
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration: add an auto-populated ``slug`` column to the
        TicketType and TicketStatus tables (South-generated)."""
        # Adding field 'TicketType.slug'
        db.add_column(u'oscar_support_tickettype', 'slug',
                      self.gf('django_extensions.db.fields.AutoSlugField')(allow_duplicates=False, max_length=50, separator=u'-', blank=True, default='', populate_from='name', overwrite=False),
                      keep_default=False)
        # Adding field 'TicketStatus.slug'
        db.add_column(u'oscar_support_ticketstatus', 'slug',
                      self.gf('django_extensions.db.fields.AutoSlugField')(allow_duplicates=False, max_length=50, separator=u'-', blank=True, default='', populate_from='name', overwrite=False),
                      keep_default=False)
def backwards(self, orm):
# Deleting field 'TicketType.slug'
db.delete_column(u'oscar_support_tickettype', 'slug')
# Deleting field 'TicketStatus.slug'
db.delete_column(u'oscar_support_ticketstatus', 'slug')
models = {
u'address.country': {
'Meta': {'ordering': "('-display_order', 'name')", 'object_name': 'Country'},
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'is_shipping_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'iso_3166_1_a2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'iso_3166_1_a3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'db_index': 'True'}),
'iso_3166_1_numeric': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
AUTH_USER_MODEL: {
'Meta': {'object_name': AUTH_USER_MODEL_NAME},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'catalogue.attributeentity': {
'Meta': {'object_name': 'AttributeEntity'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': u"orm['catalogue.AttributeEntityType']"})
},
u'catalogue.attributeentitytype': {
'Meta': {'object_name': 'AttributeEntityType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'})
},
u'catalogue.attributeoption': {
'Meta': {'object_name': 'AttributeOption'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': u"orm['catalogue.AttributeOptionGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'catalogue.attributeoptiongroup': {
'Meta': {'object_name': 'AttributeOptionGroup'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'catalogue.category': {
'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
u'catalogue.option': {
'Meta': {'object_name': 'Option'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
},
u'catalogue.product': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.ProductAttribute']", 'through': u"orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Category']", 'through': u"orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': u"orm['catalogue.Product']"}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'products'", 'null': 'True', 'to': u"orm['catalogue.ProductClass']"}),
'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'rating': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Product']", 'symmetrical': 'False', 'through': u"orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': u"orm['catalogue.Product']"}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
u'catalogue.productattribute': {
'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': u"orm['catalogue.ProductClass']"}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
},
u'catalogue.productattributevalue': {
'Meta': {'object_name': 'ProductAttributeValue'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.ProductAttribute']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': u"orm['catalogue.Product']"}),
'value_boolean': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
'value_file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'catalogue.productcategory': {
'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Category']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Product']"})
},
u'catalogue.productclass': {
'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'track_stock': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'catalogue.productrecommendation': {
'Meta': {'object_name': 'ProductRecommendation'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': u"orm['catalogue.Product']"}),
'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Product']"})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'order.billingaddress': {
'Meta': {'object_name': 'BillingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'postcode': ('oscar.models.fields.UppercaseCharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
u'order.line': {
'Meta': {'object_name': 'Line'},
'est_dispatch_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_price_before_discounts_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_before_discounts_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lines'", 'to': u"orm['order.Order']"}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order_lines'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['partner.Partner']"}),
'partner_line_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'partner_line_reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'partner_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Product']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'stockrecord': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['partner.StockRecord']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'unit_cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_retail_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'})
},
u'order.order': {
'Meta': {'ordering': "['-date_placed']", 'object_name': 'Order'},
'basket_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'billing_address': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['order.BillingAddress']", 'null': 'True', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'GBP'", 'max_length': '12'}),
'date_placed': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'guest_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'shipping_address': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['order.ShippingAddress']", 'null': 'True', 'blank': 'True'}),
'shipping_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128', 'blank': 'True'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_method': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'total_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'total_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders'", 'null': 'True', 'to': u"orm['{0}']".format(AUTH_USER_MODEL)})
},
u'order.shippingaddress': {
'Meta': {'object_name': 'ShippingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'phone_number': ('oscar.models.fields.PhoneNumberField', [], {'max_length': '128', 'blank': 'True'}),
'postcode': ('oscar.models.fields.UppercaseCharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
u'oscar_support.attachment': {
'Meta': {'object_name': 'Attachment'},
'date_created': ('django.db.models.fields.DateTimeField', [], {}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'ticket': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attachments'", 'to': u"orm['oscar_support.Ticket']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attachments'", 'to': u"orm['{0}']".format(AUTH_USER_MODEL)}),
'uuid': ('shortuuidfield.fields.ShortUUIDField', [], {'max_length': '22', 'primary_key': 'True'})
},
u'oscar_support.message': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Message'},
'date_created': ('django.db.models.fields.DateTimeField', [], {}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {}),
'text': ('django.db.models.fields.TextField', [], {}),
'ticket': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'messages'", 'to': u"orm['oscar_support.Ticket']"}),
'type': ('django.db.models.fields.CharField', [], {'default': "u'public'", 'max_length': '30'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'messages'", 'to': u"orm['{0}']".format(AUTH_USER_MODEL)}),
'uuid': ('shortuuidfield.fields.ShortUUIDField', [], {'max_length': '22', 'primary_key': 'True'})
},
u'oscar_support.priority': {
'Meta': {'object_name': 'Priority'},
'comment': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'False'}),
'uuid': ('shortuuidfield.fields.ShortUUIDField', [], {'max_length': '22', 'primary_key': 'True'})
},
u'oscar_support.relatedorder': {
'Meta': {'object_name': 'RelatedOrder'},
'date_created': ('django.db.models.fields.DateTimeField', [], {}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ticket_related_orders'", 'to': u"orm['order.Order']"}),
'ticket': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'relatedorders'", 'to': u"orm['oscar_support.Ticket']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'relatedorders'", 'to': u"orm['{0}']".format(AUTH_USER_MODEL)}),
'uuid': ('shortuuidfield.fields.ShortUUIDField', [], {'max_length': '22', 'primary_key': 'True'})
},
u'oscar_support.relatedorderline': {
'Meta': {'object_name': 'RelatedOrderLine'},
'date_created': ('django.db.models.fields.DateTimeField', [], {}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ticket_related_order_lines'", 'to': u"orm['order.Line']"}),
'ticket': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'relatedorderlines'", 'to': u"orm['oscar_support.Ticket']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'relatedorderlines'", 'to': u"orm['{0}']".format(AUTH_USER_MODEL)}),
'uuid': ('shortuuidfield.fields.ShortUUIDField', [], {'max_length': '22', 'primary_key': 'True'})
},
u'oscar_support.relatedproduct': {
'Meta': {'object_name': 'RelatedProduct'},
'date_created': ('django.db.models.fields.DateTimeField', [], {}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ticket_related_products'", 'to': u"orm['catalogue.Product']"}),
'ticket': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'relatedproducts'", 'to': u"orm['oscar_support.Ticket']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'relatedproducts'", 'to': u"orm['{0}']".format(AUTH_USER_MODEL)}),
'uuid': ('shortuuidfield.fields.ShortUUIDField', [], {'max_length': '22', 'primary_key': 'True'})
},
u'oscar_support.ticket': {
'Meta': {'ordering': "['-date_updated']", 'unique_together': "(('number', 'subticket_id'),)", 'object_name': 'Ticket'},
'assigned_group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'tickets'", 'null': 'True', 'to': u"orm['auth.Group']"}),
'assignee': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'assigned_tickets'", 'null': 'True', 'to': u"orm['{0}']".format(AUTH_USER_MODEL)}),
'body': ('django.db.models.fields.TextField', [], {}),
'date_created': ('django.db.models.fields.DateTimeField', [], {}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {}),
'is_internal': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'subtickets'", 'null': 'True', 'to': u"orm['oscar_support.Ticket']"}),
'priority': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'tickets'", 'null': 'True', 'to': u"orm['oscar_support.Priority']"}),
'related_lines': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'tickets'", 'blank': 'True', 'through': u"orm['oscar_support.RelatedOrderLine']", 'to': u"orm['order.Line']"}),
'related_orders': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'tickets'", 'blank': 'True', 'through': u"orm['oscar_support.RelatedOrder']", 'to': u"orm['order.Order']"}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'tickets'", 'blank': 'True', 'through': u"orm['oscar_support.RelatedProduct']", 'to': u"orm['catalogue.Product']"}),
'requester': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'submitted_tickets'", 'to': u"orm['{0}']".format(AUTH_USER_MODEL)}),
'status': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tickets'", 'to': u"orm['oscar_support.TicketStatus']"}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'subticket_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tickets'", 'to': u"orm['oscar_support.TicketType']"}),
'uuid': ('shortuuidfield.fields.ShortUUIDField', [], {'max_length': '22', 'primary_key': 'True'})
},
u'oscar_support.ticketstatus': {
'Meta': {'object_name': 'TicketStatus'},
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'False'}),
'uuid': ('shortuuidfield.fields.ShortUUIDField', [], {'max_length': '22', 'primary_key': 'True'})
},
u'oscar_support.tickettype': {
'Meta': {'object_name': 'TicketType'},
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'False'}),
'uuid': ('shortuuidfield.fields.ShortUUIDField', [], {'max_length': '22', 'primary_key': 'True'})
},
u'partner.partner': {
'Meta': {'object_name': 'Partner'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'partners'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['{0}']".format(AUTH_USER_MODEL)})
},
u'partner.stockrecord': {
'Meta': {'unique_together': "(('partner', 'partner_sku'),)", 'object_name': 'StockRecord'},
'cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'low_stock_threshold': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'num_allocated': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'num_in_stock': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stockrecords'", 'to': u"orm['partner.Partner']"}),
'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'price_currency': ('django.db.models.fields.CharField', [], {'default': "'GBP'", 'max_length': '12'}),
'price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'price_retail': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stockrecords'", 'to': u"orm['catalogue.Product']"})
},
u'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['oscar_support'] | {
"content_hash": "e04db2198976b4e2025af43b719269e0",
"timestamp": "",
"source": "github",
"line_count": 386,
"max_line_length": 246,
"avg_line_length": 92.18911917098445,
"alnum_prop": 0.5618940564844738,
"repo_name": "snowball-one/django-oscar-support",
"id": "655e318e3d2dbace3fb0fabfbc24b8e855b62bbe",
"size": "35609",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oscar_support/migrations/0003_auto__add_field_tickettype_slug__add_field_ticketstatus_slug.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "21496"
},
{
"name": "JavaScript",
"bytes": "1846"
},
{
"name": "Python",
"bytes": "253792"
},
{
"name": "Shell",
"bytes": "5133"
}
],
"symlink_target": ""
} |
"""Read from and write to tar format archives.
"""
__version__ = "$Revision: 70525 $"
version = "0.9.0"
__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)"
__date__ = "$Date: 2009-03-22 15:34:29 -0500 (Sun, 22 Mar 2009) $"
__cvsid__ = "$Id: tarfile.py 70525 2009-03-22 20:34:29Z lars.gustaebel $"
__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
#---------
# Imports
#---------
import sys
import os
import shutil
import stat
import errno
import time
import struct
import copy
import re
if sys.platform == 'mac':
# This module needs work for MacOS9, especially in the area of pathname
# handling. In many places it is assumed a simple substitution of / by the
# local os.path.sep is good enough to convert pathnames, but this does not
# work with the mac rooted:path:name versus :nonrooted:path:name syntax
raise ImportError("tarfile does not work for platform==mac")
try:
import grp, pwd
except ImportError:
grp = pwd = None
# from tarfile import *
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]
from builtins import open as _open # Since 'open' is TarFile.open
#---------------------------------------------------------
# tar constants
#---------------------------------------------------------
NUL = b"\0"                     # the null character
BLOCKSIZE = 512                 # length of processing blocks
RECORDSIZE = BLOCKSIZE * 20     # length of records
GNU_MAGIC = b"ustar \0"         # magic gnu tar string
# NOTE(review): GNU tar's magic+version field is "ustar" followed by two
# spaces and a NUL -- verify the literal above has not lost a space.
POSIX_MAGIC = b"ustar\x0000"    # magic posix tar string

LENGTH_NAME = 100               # maximum length of a filename
LENGTH_LINK = 100               # maximum length of a linkname
LENGTH_PREFIX = 155             # maximum length of the prefix field

# Type flag constants, stored at offset 156 of the header block.
REGTYPE = b"0"                  # regular file
AREGTYPE = b"\0"                # regular file
LNKTYPE = b"1"                  # link (inside tarfile)
SYMTYPE = b"2"                  # symbolic link
CHRTYPE = b"3"                  # character special device
BLKTYPE = b"4"                  # block special device
DIRTYPE = b"5"                  # directory
FIFOTYPE = b"6"                 # fifo special device
CONTTYPE = b"7"                 # contiguous file

GNUTYPE_LONGNAME = b"L"         # GNU tar longname
GNUTYPE_LONGLINK = b"K"         # GNU tar longlink
GNUTYPE_SPARSE = b"S"           # GNU tar sparse file

XHDTYPE = b"x"                  # POSIX.1-2001 extended header
XGLTYPE = b"g"                  # POSIX.1-2001 global header
SOLARIS_XHDTYPE = b"X"          # Solaris extended header

USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
GNU_FORMAT = 1                  # GNU tar format
PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
DEFAULT_FORMAT = GNU_FORMAT

#---------------------------------------------------------
# tarfile constants
#---------------------------------------------------------
# File types that tarfile supports:
SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
                   SYMTYPE, DIRTYPE, FIFOTYPE,
                   CONTTYPE, CHRTYPE, BLKTYPE,
                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
                   GNUTYPE_SPARSE)

# File types that will be treated as a regular file.
REGULAR_TYPES = (REGTYPE, AREGTYPE,
                 CONTTYPE, GNUTYPE_SPARSE)

# File types that are part of the GNU tar format.
GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
             GNUTYPE_SPARSE)

# Fields from a pax header that override a TarInfo attribute.
PAX_FIELDS = ("path", "linkpath", "size", "mtime",
              "uid", "gid", "uname", "gname")

# Fields in a pax header that are numbers, all other fields
# are treated as strings.
PAX_NUMBER_FIELDS = {
    "atime": float,
    "ctime": float,
    "mtime": float,
    "uid": int,
    "gid": int,
    "size": int
}

#---------------------------------------------------------
# Bits used in the mode field, values in octal.
#---------------------------------------------------------
S_IFLNK = 0o120000        # symbolic link
S_IFREG = 0o100000        # regular file
S_IFBLK = 0o060000        # block device
S_IFDIR = 0o040000        # directory
S_IFCHR = 0o020000        # character device
S_IFIFO = 0o010000        # fifo

TSUID = 0o4000            # set UID on execution
TSGID = 0o2000            # set GID on execution
TSVTX = 0o1000            # reserved

TUREAD = 0o400            # read by owner
TUWRITE = 0o200           # write by owner
TUEXEC = 0o100            # execute/search by owner
TGREAD = 0o040            # read by group
TGWRITE = 0o020           # write by group
TGEXEC = 0o010            # execute/search by group
TOREAD = 0o004            # read by other
TOWRITE = 0o002           # write by other
TOEXEC = 0o001            # execute/search by other

#---------------------------------------------------------
# initialization
#---------------------------------------------------------
# Fall back to ASCII when the filesystem encoding cannot be determined.
ENCODING = sys.getfilesystemencoding()
if ENCODING is None:
    ENCODING = "ascii"
#---------------------------------------------------------
# Some useful functions
#---------------------------------------------------------
def stn(s, length, encoding, errors):
    """Convert a string to a null-terminated bytes object.

       The result is exactly *length* bytes long: over-long strings are
       truncated, shorter ones are padded with NUL bytes.
    """
    encoded = s.encode(encoding, errors)
    return encoded[:length].ljust(length, NUL)
def nts(s, encoding, errors):
    """Convert a null-terminated bytes object to a string.

       Everything from the first NUL byte onwards is discarded.
    """
    head, _sep, _tail = s.partition(b"\0")
    return head.decode(encoding, errors)
def nti(s):
    """Convert a number field to a python number.

       Octal fields are ASCII digits terminated by NUL; GNU tar's
       base-256 extension marks the field with a leading 0o200 byte
       followed by a big-endian binary number.

       Raises HeaderError if the field is not a valid number.
    """
    # There are two possible encodings for a number field, see
    # itn() below.
    # BUGFIX: indexing a bytes object yields an int in Python 3, so the
    # GNU marker must be compared as an integer; the old comparison
    # against chr(0o200) (a str) could never match, and ord() on an int
    # raised TypeError in the base-256 branch.
    if s[0] != 0o200:
        try:
            # Truncate at the first NUL, then parse as octal; an empty
            # field counts as 0.
            p = s.find(b"\0")
            if p != -1:
                s = s[:p]
            n = int(s.decode("ascii", "strict") or "0", 8)
        except ValueError:
            # UnicodeDecodeError is a ValueError subclass, so undecodable
            # bytes are reported as an invalid header as well.
            raise HeaderError("invalid header")
    else:
        # GNU base-256 representation: big-endian bytes after the marker.
        n = 0
        for i in range(len(s) - 1):
            n <<= 8
            n += s[i + 1]
    return n
def itn(n, digits=8, format=DEFAULT_FORMAT):
    """Convert a python number to a number field.

       Always returns a bytes object of exactly *digits* bytes.
       Raises ValueError if n does not fit into the field.
    """
    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
    # octal digits followed by a null-byte, this allows values up to
    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
    # that if necessary. A leading 0o200 byte indicates this particular
    # encoding, the following digits-1 bytes are a big-endian
    # representation. This allows values up to (256**(digits-1))-1.
    if 0 <= n < 8 ** (digits - 1):
        s = bytes("%0*o" % (digits - 1, n), "ascii") + NUL
    else:
        if format != GNU_FORMAT or n >= 256 ** (digits - 1):
            raise ValueError("overflow in number field")

        if n < 0:
            # XXX We mimic GNU tar's behaviour with negative numbers,
            # this could raise OverflowError.
            n = struct.unpack("L", struct.pack("l", n))[0]

        s = bytearray()
        for i in range(digits - 1):
            s.insert(0, n & 0o377)
            n >>= 8
        s.insert(0, 0o200)
        # BUGFIX: return bytes here as well, so both branches yield the
        # same (immutable) type instead of leaking a bytearray.
        s = bytes(s)
    return s
def calc_chksums(buf):
    """Calculate the checksum for a member's header by summing up all
       characters except for the chksum field which is treated as if
       it was filled with spaces. According to the GNU tar sources,
       some tars (Sun and NeXT) calculate chksum with signed char,
       which will be different if there are chars in the buffer with
       the high bit set. So we calculate two checksums, unsigned and
       signed.
    """
    head, tail = buf[:148], buf[156:512]
    # Iterating over bytes yields unsigned ints directly; struct
    # reinterprets them as signed chars for the second sum.  The 256
    # accounts for the eight-byte checksum field read as spaces (8 * 0x20).
    unsigned_chksum = 256 + sum(head) + sum(tail)
    signed_chksum = 256 + sum(struct.unpack("148b", head)) + sum(struct.unpack("356b", tail))
    return unsigned_chksum, signed_chksum
def copyfileobj(src, dst, length=None):
    """Copy length bytes from fileobj src to fileobj dst.
       If length is None, copy the entire content.

       Raises IOError if src runs out of data before length bytes
       could be copied.
    """
    if length == 0:
        return
    if length is None:
        shutil.copyfileobj(src, dst)
        return

    bufsize = 16 * 1024
    remaining = length
    while remaining > 0:
        want = min(bufsize, remaining)
        chunk = src.read(want)
        if len(chunk) < want:
            raise IOError("end of file reached")
        dst.write(chunk)
        remaining -= want
    return
# Mapping used by filemode(): one tuple of (bit, char) options per output
# column; within a column the first matching bit wins, otherwise "-" is
# printed.  Column 0 is the file type, then rwx triples for user, group
# and other (with setuid/setgid/sticky folded into the execute slots).
filemode_table = (
    ((S_IFLNK, "l"),
     (S_IFREG, "-"),
     (S_IFBLK, "b"),
     (S_IFDIR, "d"),
     (S_IFCHR, "c"),
     (S_IFIFO, "p")),

    ((TUREAD, "r"),),
    ((TUWRITE, "w"),),
    ((TUEXEC|TSUID, "s"),
     (TSUID, "S"),
     (TUEXEC, "x")),

    ((TGREAD, "r"),),
    ((TGWRITE, "w"),),
    ((TGEXEC|TSGID, "s"),
     (TSGID, "S"),
     (TGEXEC, "x")),

    ((TOREAD, "r"),),
    ((TOWRITE, "w"),),
    ((TOEXEC|TSVTX, "t"),
     (TSVTX, "T"),
     (TOEXEC, "x"))
)
def filemode(mode):
    """Convert a file's mode to a string of the form
       -rwxrwxrwx.
       Used by TarFile.list()
    """
    # For every column take the first matching (bit, char) option,
    # falling back to "-" when none of the bits are set.
    return "".join(
        next((char for bit, char in options if mode & bit == bit), "-")
        for options in filemode_table)
# Archive member names always use forward slashes, so on platforms whose
# path separator differs (e.g. Windows) normalize and translate it.
if os.sep != "/":
    normpath = lambda path: os.path.normpath(path).replace(os.sep, "/")
else:
    normpath = os.path.normpath
class TarError(Exception):
    """Base exception for all tarfile errors."""
    pass

class ExtractError(TarError):
    """General exception for extract errors."""
    pass

class ReadError(TarError):
    """Exception for unreadable tar archives."""
    pass

class CompressionError(TarError):
    """Exception for unavailable compression methods."""
    pass

class StreamError(TarError):
    """Exception for unsupported operations on stream-like TarFiles."""
    pass

class HeaderError(TarError):
    """Exception for invalid headers."""
    pass
#---------------------------
# internal stream interface
#---------------------------
class _LowLevelFile:
    """Low-level file object. Supports reading and writing.
       It is used instead of a regular file object for streaming
       access.
    """

    def __init__(self, name, mode):
        flags = {
            "r": os.O_RDONLY,
            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
        }[mode]
        # O_BINARY only exists on Windows; without it the CRT would
        # perform newline translation on the raw stream.
        if hasattr(os, "O_BINARY"):
            flags |= os.O_BINARY
        self.fd = os.open(name, flags)

    def close(self):
        os.close(self.fd)

    def read(self, size):
        return os.read(self.fd, size)

    def write(self, s):
        os.write(self.fd, s)
class _Stream:
    """Class that serves as an adapter between TarFile and
       a stream-like object.  The stream-like object only
       needs to have a read() or write() method and is accessed
       blockwise.  Use of gzip or bzip2 compression is possible.
       A stream-like object could be for example: sys.stdin,
       sys.stdout, a socket, a tape device etc.

       _Stream is intended to be used only internally.
    """

    def __init__(self, name, mode, comptype, fileobj, bufsize):
        """Construct a _Stream object.
        """
        self._extfileobj = True
        if fileobj is None:
            fileobj = _LowLevelFile(name, mode)
            self._extfileobj = False

        if comptype == '*':
            # Enable transparent compression detection for the
            # stream interface
            fileobj = _StreamProxy(fileobj)
            comptype = fileobj.getcomptype()

        self.name = name or ""
        self.mode = mode
        self.comptype = comptype
        self.fileobj = fileobj
        self.bufsize = bufsize
        self.buf = b""      # raw (compressed) buffer
        self.pos = 0        # logical (uncompressed) position
        self.closed = False

        if comptype == "gz":
            try:
                import zlib
            except ImportError:
                raise CompressionError("zlib module is not available")
            self.zlib = zlib
            # BUGFIX: zlib.crc32() requires bytes in Python 3; passing
            # the str "" raised TypeError.
            self.crc = zlib.crc32(b"")
            if mode == "r":
                self._init_read_gz()
            else:
                self._init_write_gz()

        if comptype == "bz2":
            try:
                import bz2
            except ImportError:
                raise CompressionError("bz2 module is not available")
            if mode == "r":
                self.dbuf = b""     # decompressed buffer (read mode only)
                self.cmp = bz2.BZ2Decompressor()
            else:
                self.cmp = bz2.BZ2Compressor()

    def __del__(self):
        # hasattr() guard: __init__ may have failed before setting closed.
        if hasattr(self, "closed") and not self.closed:
            self.close()

    def _init_write_gz(self):
        """Initialize for writing with gzip compression.
        """
        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
                                         -self.zlib.MAX_WBITS,
                                         self.zlib.DEF_MEM_LEVEL,
                                         0)
        timestamp = struct.pack("<L", int(time.time()))
        # gzip member header: magic, deflate method, FNAME flag set,
        # mtime, XFL=2, OS=255 (unknown).
        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
        if self.name.endswith(".gz"):
            self.name = self.name[:-3]
        # RFC1952 says we must use ISO-8859-1 for the FNAME field.
        self.__write(self.name.encode("iso-8859-1", "replace") + NUL)

    def write(self, s):
        """Write string s to the stream.
        """
        if self.comptype == "gz":
            self.crc = self.zlib.crc32(s, self.crc)
        self.pos += len(s)
        if self.comptype != "tar":
            s = self.cmp.compress(s)
        self.__write(s)

    def __write(self, s):
        """Write string s to the stream if a whole new block
           is ready to be written.
        """
        self.buf += s
        while len(self.buf) > self.bufsize:
            self.fileobj.write(self.buf[:self.bufsize])
            self.buf = self.buf[self.bufsize:]

    def close(self):
        """Close the _Stream object. No operation should be
           done on it afterwards.
        """
        if self.closed:
            return

        if self.mode == "w" and self.comptype != "tar":
            self.buf += self.cmp.flush()

        if self.mode == "w" and self.buf:
            self.fileobj.write(self.buf)
            self.buf = b""
            if self.comptype == "gz":
                # The native zlib crc is an unsigned 32-bit integer, but
                # the Python wrapper implicitly casts that to a signed C
                # long.  So, on a 32-bit box self.crc may "look negative",
                # while the same crc on a 64-bit box may "look positive".
                # To avoid irksome warnings from the `struct` module, force
                # it to look positive on all boxes.
                self.fileobj.write(struct.pack("<L", self.crc & 0xffffffff))
                self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))

        if not self._extfileobj:
            self.fileobj.close()

        self.closed = True

    def _init_read_gz(self):
        """Initialize for reading a gzip compressed fileobj.
        """
        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
        self.dbuf = b""

        # taken from gzip.GzipFile with some alterations
        if self.__read(2) != b"\037\213":
            raise ReadError("not a gzip file")
        if self.__read(1) != b"\010":
            raise CompressionError("unsupported compression method")

        flag = ord(self.__read(1))
        self.__read(6)

        # Skip the optional extra field, original file name, comment and
        # header CRC, as flagged in FLG.
        if flag & 4:
            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
            self.read(xlen)
        if flag & 8:
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 16:
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 2:
            self.__read(2)

    def tell(self):
        """Return the stream's file pointer position.
        """
        return self.pos

    def seek(self, pos=0):
        """Set the stream's file pointer to pos. Negative seeking
           is forbidden.
        """
        if pos - self.pos >= 0:
            # Forward seeking is implemented by reading and discarding.
            blocks, remainder = divmod(pos - self.pos, self.bufsize)
            for i in range(blocks):
                self.read(self.bufsize)
            self.read(remainder)
        else:
            raise StreamError("seeking backwards is not allowed")
        return self.pos

    def read(self, size=None):
        """Return the next size number of bytes from the stream.
           If size is not defined, return all bytes of the stream
           up to EOF.
        """
        if size is None:
            t = []
            while True:
                buf = self._read(self.bufsize)
                if not buf:
                    break
                t.append(buf)
            # BUGFIX: the chunks are bytes objects, so they must be
            # joined with a bytes separator ("".join() raised TypeError).
            buf = b"".join(t)
        else:
            buf = self._read(size)
        self.pos += len(buf)
        return buf

    def _read(self, size):
        """Return size bytes from the stream.
        """
        if self.comptype == "tar":
            return self.__read(size)

        # Decompress raw blocks until enough plain data is buffered.
        c = len(self.dbuf)
        while c < size:
            buf = self.__read(self.bufsize)
            if not buf:
                break
            try:
                buf = self.cmp.decompress(buf)
            except IOError:
                raise ReadError("invalid compressed data")
            self.dbuf += buf
            c += len(buf)
        buf = self.dbuf[:size]
        self.dbuf = self.dbuf[size:]
        return buf

    def __read(self, size):
        """Return size bytes from stream. If internal buffer is empty,
           read another block from the stream.
        """
        c = len(self.buf)
        while c < size:
            buf = self.fileobj.read(self.bufsize)
            if not buf:
                break
            self.buf += buf
            c += len(buf)
        buf = self.buf[:size]
        self.buf = self.buf[size:]
        return buf
# class _Stream
class _StreamProxy(object):
    """Small proxy class that enables transparent compression
       detection for the Stream interface (mode 'r|*').
    """

    def __init__(self, fileobj):
        self.fileobj = fileobj
        # Buffer the first block so the magic bytes can be inspected
        # without losing them for the subsequent reader.
        self.buf = self.fileobj.read(BLOCKSIZE)

    def read(self, size):
        # First call returns the buffered block, then delegate directly.
        self.read = self.fileobj.read
        return self.buf

    def getcomptype(self):
        """Guess the compression type from the magic bytes at offset 0."""
        if self.buf.startswith(b"\037\213\010"):
            return "gz"
        # BUGFIX: the bzip2 magic is "BZh" followed by the block-size
        # digit "1"-"9"; matching the full b"BZh91" only recognized files
        # written with the default block size 9.
        if self.buf.startswith(b"BZh"):
            return "bz2"
        return "tar"

    def close(self):
        self.fileobj.close()
# class StreamProxy
class _BZ2Proxy(object):
    """Small proxy class that enables external file object
       support for "r:bz2" and "w:bz2" modes. This is actually
       a workaround for a limitation in bz2 module's BZ2File
       class which (unlike gzip.GzipFile) has no support for
       a file object argument.
    """

    blocksize = 16 * 1024   # chunk size for reads from the underlying file

    def __init__(self, fileobj, mode):
        """Wrap *fileobj* for bz2 (de)compression; *mode* is "r" or "w"."""
        self.fileobj = fileobj
        self.mode = mode
        self.name = getattr(self.fileobj, "name", None)
        self.init()

    def init(self):
        """(Re)create the (de)compressor; also used by seek() to rewind."""
        import bz2
        self.pos = 0
        if self.mode == "r":
            self.bz2obj = bz2.BZ2Decompressor()
            self.fileobj.seek(0)
            # Decompressed-but-unread data; only exists in read mode.
            self.buf = b""
        else:
            self.bz2obj = bz2.BZ2Compressor()

    def read(self, size):
        """Return up to *size* decompressed bytes (read mode only)."""
        x = len(self.buf)
        while x < size:
            raw = self.fileobj.read(self.blocksize)
            if not raw:
                break
            data = self.bz2obj.decompress(raw)
            self.buf += data
            x += len(data)

        buf = self.buf[:size]
        self.buf = self.buf[size:]
        self.pos += len(buf)
        return buf

    def seek(self, pos):
        """Seek to *pos*; seeking backwards restarts decompression from
           the beginning of the file and reads forward to *pos*.
        """
        if pos < self.pos:
            self.init()
        self.read(pos - self.pos)

    def tell(self):
        # Position in the decompressed data stream.
        return self.pos

    def write(self, data):
        """Compress *data* and write it to the file (write mode only)."""
        self.pos += len(data)
        raw = self.bz2obj.compress(data)
        self.fileobj.write(raw)

    def close(self):
        # Flush pending compressed data; the underlying file object is
        # deliberately left open (it is owned by the caller).
        if self.mode == "w":
            raw = self.bz2obj.flush()
            self.fileobj.write(raw)
# class _BZ2Proxy
#------------------------
# Extraction file object
#------------------------
class _FileInFile(object):
    """A thin wrapper around an existing file object that
       provides a part of its data as an individual file
       object.
    """

    def __init__(self, fileobj, offset, size, sparse=None):
        self.fileobj = fileobj
        self.offset = offset        # start of the member data in fileobj
        self.size = size            # logical size of the member
        self.sparse = sparse        # sparse section map or None
        self.position = 0           # logical position within the member

    def seekable(self):
        if not hasattr(self.fileobj, "seekable"):
            # XXX gzip.GzipFile and bz2.BZ2File
            return True
        return self.fileobj.seekable()

    def tell(self):
        """Return the current file position."""
        return self.position

    def seek(self, position):
        """Seek to a position in the file."""
        self.position = position

    def read(self, size=None):
        """Read data from the file."""
        remaining = self.size - self.position
        size = remaining if size is None else min(size, remaining)
        reader = self.readnormal if self.sparse is None else self.readsparse
        return reader(size)

    def readnormal(self, size):
        """Read operation for regular files."""
        self.fileobj.seek(self.offset + self.position)
        self.position += size
        return self.fileobj.read(size)

    def readsparse(self, size):
        """Read operation for sparse files."""
        chunks = []
        while size > 0:
            chunk = self.readsparsesection(size)
            if not chunk:
                break
            size -= len(chunk)
            chunks.append(chunk)
        return b"".join(chunks)

    def readsparsesection(self, size):
        """Read a single section of a sparse file."""
        section = self.sparse.find(self.position)
        if section is None:
            return b""

        # Never read past the end of the current section.
        size = min(size, section.offset + section.size - self.position)

        if isinstance(section, _data):
            # Stored data: map the logical position to the real offset.
            realpos = section.realpos + self.position - section.offset
            self.fileobj.seek(self.offset + realpos)
            self.position += size
            return self.fileobj.read(size)
        # Hole: synthesize NUL bytes.
        self.position += size
        return NUL * size
#class _FileInFile
class ExFileObject(object):
    """File-like object for reading an archive member.
       Is returned by TarFile.extractfile().
    """
    # Chunk size used by readline() when scanning for b"\n".
    blocksize = 1024

    def __init__(self, tarfile, tarinfo):
        self.fileobj = _FileInFile(tarfile.fileobj,
                                   tarinfo.offset_data,
                                   tarinfo.size,
                                   tarinfo.sparse)
        self.name = tarinfo.name
        self.mode = "r"
        self.closed = False
        self.size = tarinfo.size
        self.position = 0
        self.buffer = b""       # read-ahead buffer used by readline()

    def readable(self):
        return True

    def writable(self):
        return False

    def seekable(self):
        return self.fileobj.seekable()

    def read(self, size=None):
        """Read at most size bytes from the file. If size is not
           present or None, read all data until EOF is reached.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        buf = b""
        # Serve data from the read-ahead buffer first.
        if self.buffer:
            if size is None:
                buf = self.buffer
                self.buffer = b""
            else:
                buf = self.buffer[:size]
                self.buffer = self.buffer[size:]

        if size is None:
            buf += self.fileobj.read()
        else:
            buf += self.fileobj.read(size - len(buf))

        self.position += len(buf)
        return buf

    # XXX TextIOWrapper uses the read1() method.
    read1 = read

    def readline(self, size=-1):
        """Read one entire line from the file. If size is present
           and non-negative, return a string with at most that
           size, which may be an incomplete line.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        # pos is the index one past the newline, or 0 if none buffered yet.
        pos = self.buffer.find(b"\n") + 1
        if pos == 0:
            # no newline found.
            while True:
                buf = self.fileobj.read(self.blocksize)
                self.buffer += buf
                if not buf or b"\n" in buf:
                    pos = self.buffer.find(b"\n") + 1
                    if pos == 0:
                        # no newline found: return everything up to EOF.
                        pos = len(self.buffer)
                    break

        if size != -1:
            pos = min(size, pos)

        buf = self.buffer[:pos]
        self.buffer = self.buffer[pos:]
        self.position += len(buf)
        return buf

    def readlines(self):
        """Return a list with all remaining lines.
        """
        result = []
        while True:
            line = self.readline()
            if not line: break
            result.append(line)
        return result

    def tell(self):
        """Return the current file position.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        return self.position

    def seek(self, pos, whence=os.SEEK_SET):
        """Seek to a position in the file.

           The position is clamped to the interval [0, size].
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        if whence == os.SEEK_SET:
            self.position = min(max(pos, 0), self.size)
        elif whence == os.SEEK_CUR:
            if pos < 0:
                self.position = max(self.position + pos, 0)
            else:
                self.position = min(self.position + pos, self.size)
        elif whence == os.SEEK_END:
            self.position = max(min(self.size + pos, self.size), 0)
        else:
            raise ValueError("Invalid argument")

        # Drop the read-ahead buffer: it no longer matches the position.
        self.buffer = b""
        self.fileobj.seek(self.position)

    def close(self):
        """Close the file object.
        """
        self.closed = True

    def __iter__(self):
        """Get an iterator over the file's lines.
        """
        while True:
            line = self.readline()
            if not line:
                break
            yield line
#class ExFileObject
#------------------
# Exported Classes
#------------------
class TarInfo(object):
"""Informational class which holds the details about an
archive member given by a tar header block.
TarInfo objects are returned by TarFile.getmember(),
TarFile.getmembers() and TarFile.gettarinfo() and are
usually created internally.
"""
__slots__ = ("name", "mode", "uid", "gid", "size", "mtime",
"chksum", "type", "linkname", "uname", "gname",
"devmajor", "devminor",
"offset", "offset_data", "pax_headers", "sparse",
"tarfile", "_sparse_structs", "_link_target")
    def __init__(self, name=""):
        """Construct a TarInfo object. name is the optional name
           of the member.

           All fields start out with ustar-style defaults for a regular
           file owned by root.
        """
        self.name = name        # member name
        self.mode = 0o644       # file permissions
        self.uid = 0            # user id
        self.gid = 0            # group id
        self.size = 0           # file size
        self.mtime = 0          # modification time
        self.chksum = 0         # header checksum
        self.type = REGTYPE     # member type
        self.linkname = ""      # link name
        self.uname = "root"     # user name
        self.gname = "root"     # group name
        self.devmajor = 0       # device major number
        self.devminor = 0       # device minor number

        self.offset = 0         # the tar header starts here
        self.offset_data = 0    # the file's data starts here

        self.sparse = None      # sparse member information
        self.pax_headers = {}   # pax header information
    # In pax headers the "name" and "linkname" field are called
    # "path" and "linkpath".  These properties make the pax names
    # usable as plain attribute aliases.
    def _getpath(self):
        return self.name
    def _setpath(self, name):
        self.name = name
    path = property(_getpath, _setpath)     # pax alias for "name"

    def _getlinkpath(self):
        return self.linkname
    def _setlinkpath(self, linkname):
        self.linkname = linkname
    linkpath = property(_getlinkpath, _setlinkpath)     # pax alias for "linkname"
def __repr__(self):
return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
    def get_info(self):
        """Return the TarInfo's attributes as a dictionary.

           The mode is masked down to its permission bits and a
           directory name is normalized to end with "/".
        """
        info = {
            "name": normpath(self.name),
            "mode": self.mode & 0o7777,
            "uid": self.uid,
            "gid": self.gid,
            "size": self.size,
            "mtime": self.mtime,
            "chksum": self.chksum,
            "type": self.type,
            "linkname": normpath(self.linkname) if self.linkname else "",
            "uname": self.uname,
            "gname": self.gname,
            "devmajor": self.devmajor,
            "devminor": self.devminor
        }

        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
            info["name"] += "/"

        return info
def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="strict"):
"""Return a tar header as a string of 512 byte blocks.
"""
info = self.get_info()
if format == USTAR_FORMAT:
return self.create_ustar_header(info, encoding, errors)
elif format == GNU_FORMAT:
return self.create_gnu_header(info, encoding, errors)
elif format == PAX_FORMAT:
return self.create_pax_header(info)
else:
raise ValueError("invalid format")
    def create_ustar_header(self, info, encoding, errors):
        """Return the object as a ustar header block.

           Raises ValueError if the linkname does not fit; an over-long
           file name is split across the ustar prefix/name fields.
        """
        info["magic"] = POSIX_MAGIC

        if len(info["linkname"]) > LENGTH_LINK:
            raise ValueError("linkname is too long")

        if len(info["name"]) > LENGTH_NAME:
            info["prefix"], info["name"] = self._posix_split_name(info["name"])

        return self._create_header(info, USTAR_FORMAT, encoding, errors)
    def create_gnu_header(self, info, encoding, errors):
        """Return the object as a GNU header block sequence.

           Over-long names and linknames are emitted as extra GNU
           longname/longlink pseudo-members preceding the real header.
        """
        info["magic"] = GNU_MAGIC

        buf = b""
        if len(info["linkname"]) > LENGTH_LINK:
            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)

        if len(info["name"]) > LENGTH_NAME:
            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)

        return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
    def create_pax_header(self, info):
        """Return the object as a ustar header block. If it cannot be
           represented this way, prepend a pax extended header sequence
           with supplement information.
        """
        info["magic"] = POSIX_MAGIC
        pax_headers = self.pax_headers.copy()

        # Test string fields for values that exceed the field length or cannot
        # be represented in ASCII encoding.
        for name, hname, length in (
                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
                ("uname", "uname", 32), ("gname", "gname", 32)):

            if hname in pax_headers:
                # The pax header has priority.
                continue

            # Try to encode the string as ASCII.
            try:
                info[name].encode("ascii", "strict")
            except UnicodeEncodeError:
                pax_headers[hname] = info[name]
                continue

            if len(info[name]) > length:
                pax_headers[hname] = info[name]

        # Test number fields for values that exceed the field limit or values
        # that like to be stored as float.
        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
            if name in pax_headers:
                # The pax header has priority. Avoid overflow.
                info[name] = 0
                continue

            val = info[name]
            if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
                pax_headers[name] = str(val)
                info[name] = 0

        # Create a pax extended header if necessary.
        if pax_headers:
            buf = self._create_pax_generic_header(pax_headers, XHDTYPE)
        else:
            buf = b""

        # The real header is plain ustar with unrepresentable values
        # replaced; the exact values live in the pax records above.
        return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
    @classmethod
    def create_pax_global_header(cls, pax_headers):
        """Return the object as a pax global header block sequence.

           pax_headers maps keyword strings to value strings; the
           resulting header applies to all following members.
        """
        return cls._create_pax_generic_header(pax_headers, XGLTYPE)
def _posix_split_name(self, name):
"""Split a name longer than 100 chars into a prefix
and a name part.
"""
prefix = name[:LENGTH_PREFIX + 1]
while prefix and prefix[-1] != "/":
prefix = prefix[:-1]
name = name[len(prefix):]
prefix = prefix[:-1]
if not prefix or len(name) > LENGTH_NAME:
raise ValueError("name is too long")
return prefix, name
    @staticmethod
    def _create_header(info, format, encoding, errors):
        """Return a header block. info is a dictionary with file
           information, format must be one of the *_FORMAT constants.
        """
        # Fixed ustar field layout; stn()/itn() pad each field to its
        # exact width.  The checksum field is initially filled with
        # spaces, as the checksum is computed over exactly that state.
        parts = [
            stn(info.get("name", ""), 100, encoding, errors),
            itn(info.get("mode", 0) & 0o7777, 8, format),
            itn(info.get("uid", 0), 8, format),
            itn(info.get("gid", 0), 8, format),
            itn(info.get("size", 0), 12, format),
            itn(info.get("mtime", 0), 12, format),
            b"        ", # checksum field
            info.get("type", REGTYPE),
            stn(info.get("linkname", ""), 100, encoding, errors),
            info.get("magic", POSIX_MAGIC),
            stn(info.get("uname", "root"), 32, encoding, errors),
            stn(info.get("gname", "root"), 32, encoding, errors),
            itn(info.get("devmajor", 0), 8, format),
            itn(info.get("devminor", 0), 8, format),
            stn(info.get("prefix", ""), 155, encoding, errors)
        ]

        # Pad to a full 512-byte block, then patch in the checksum:
        # it lives at offset 148 (== 512-364) as six octal digits plus
        # NUL; the byte at offset 155 (== 512-357) keeps its space.
        buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
        buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
        return buf
@staticmethod
def _create_payload(payload):
"""Return the string payload filled with zero bytes
up to the next 512 byte border.
"""
blocks, remainder = divmod(len(payload), BLOCKSIZE)
if remainder > 0:
payload += (BLOCKSIZE - remainder) * NUL
return payload
    @classmethod
    def _create_gnu_long_header(cls, name, type, encoding, errors):
        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
           for name.
        """
        # The long name is stored NUL-terminated as the pseudo-member's
        # payload, following a header with the conventional GNU name.
        name = name.encode(encoding, errors) + NUL

        info = {}
        info["name"] = "././@LongLink"
        info["type"] = type
        info["size"] = len(name)
        info["magic"] = GNU_MAGIC

        # create extended header + name blocks.
        return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
                cls._create_payload(name)
    @classmethod
    def _create_pax_generic_header(cls, pax_headers, type):
        """Return a POSIX.1-2001 extended or global header sequence
           that contains a list of keyword, value pairs. The values
           must be strings.
        """
        records = b""
        for keyword, value in pax_headers.items():
            keyword = keyword.encode("utf8")
            value = value.encode("utf8")
            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
            n = p = 0
            # The leading decimal length field counts itself, so iterate
            # until the total record length reaches a fixed point.
            while True:
                n = l + len(str(p))
                if n == p:
                    break
                p = n
            records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"

        # We use a hardcoded "././@PaxHeader" name like star does
        # instead of the one that POSIX recommends.
        info = {}
        info["name"] = "././@PaxHeader"
        info["type"] = type
        info["size"] = len(records)
        info["magic"] = POSIX_MAGIC

        # Create pax header + record blocks.
        return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
                cls._create_payload(records)
    @classmethod
    def frombuf(cls, buf, encoding, errors):
        """Construct a TarInfo object from a 512 byte bytes object.

           Raises HeaderError if the buffer is truncated, empty or has
           a bad checksum.
        """
        if len(buf) != BLOCKSIZE:
            raise HeaderError("truncated header")
        if buf.count(NUL) == BLOCKSIZE:
            raise HeaderError("empty header")

        chksum = nti(buf[148:156])
        # Accept either the unsigned or the signed checksum; some tars
        # historically summed signed chars (see calc_chksums()).
        if chksum not in calc_chksums(buf):
            raise HeaderError("bad checksum")

        obj = cls()
        # Fixed ustar field offsets within the 512-byte block.
        obj.name = nts(buf[0:100], encoding, errors)
        obj.mode = nti(buf[100:108])
        obj.uid = nti(buf[108:116])
        obj.gid = nti(buf[116:124])
        obj.size = nti(buf[124:136])
        obj.mtime = nti(buf[136:148])
        obj.chksum = chksum
        obj.type = buf[156:157]
        obj.linkname = nts(buf[157:257], encoding, errors)
        obj.uname = nts(buf[265:297], encoding, errors)
        obj.gname = nts(buf[297:329], encoding, errors)
        obj.devmajor = nti(buf[329:337])
        obj.devminor = nti(buf[337:345])
        prefix = nts(buf[345:500], encoding, errors)

        # Old V7 tar format represents a directory as a regular
        # file with a trailing slash.
        if obj.type == AREGTYPE and obj.name.endswith("/"):
            obj.type = DIRTYPE

        # The old GNU sparse format occupies some of the unused
        # space in the buffer for up to 4 sparse structures.
        # Save them for later processing in _proc_sparse().
        if obj.type == GNUTYPE_SPARSE:
            pos = 386
            structs = []
            for i in range(4):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                structs.append((offset, numbytes))
                pos += 24
            # Byte 482 flags extension blocks; 483-495 is the real size.
            isextended = bool(buf[482])
            origsize = nti(buf[483:495])
            obj._sparse_structs = (structs, isextended, origsize)

        # Remove redundant slashes from directories.
        if obj.isdir():
            obj.name = obj.name.rstrip("/")

        # Reconstruct a ustar longname.
        if prefix and obj.type not in GNU_TYPES:
            obj.name = prefix + "/" + obj.name
        return obj
    @classmethod
    def fromtarfile(cls, tarfile):
        """Return the next TarInfo object from TarFile object
           tarfile.

           Returns None at end-of-archive.  The header block is parsed
           and handed to _proc_member() for type-specific processing.
        """
        buf = tarfile.fileobj.read(BLOCKSIZE)
        if not buf:
            return
        obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
        # Record where this header started (the read advanced past it).
        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
        return obj._proc_member(tarfile)
#--------------------------------------------------------------------------
# The following are methods that are called depending on the type of a
# member. The entry point is _proc_member() which can be overridden in a
# subclass to add custom _proc_*() methods. A _proc_*() method MUST
# implement the following
# operations:
# 1. Set self.offset_data to the position where the data blocks begin,
# if there is data that follows.
# 2. Set tarfile.offset to the position where the next member's header will
# begin.
# 3. Return self or another valid TarInfo object.
def _proc_member(self, tarfile):
"""Choose the right processing method depending on
the type and call it.
"""
if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
return self._proc_gnulong(tarfile)
elif self.type == GNUTYPE_SPARSE:
return self._proc_sparse(tarfile)
elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
return self._proc_pax(tarfile)
else:
return self._proc_builtin(tarfile)
    def _proc_builtin(self, tarfile):
        """Process a builtin type or an unknown type which
           will be treated as a regular file.
        """
        self.offset_data = tarfile.fileobj.tell()
        offset = self.offset_data
        if self.isreg() or self.type not in SUPPORTED_TYPES:
            # Skip the following data blocks.
            offset += self._block(self.size)
        tarfile.offset = offset

        # Patch the TarInfo object with saved global
        # header information.
        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)

        return self
    def _proc_gnulong(self, tarfile):
        """Process the blocks that hold a GNU longname
           or longlink member.
        """
        # The real (long) name is this pseudo-member's payload.
        buf = tarfile.fileobj.read(self._block(self.size))

        # Fetch the next header and process it.
        next = self.fromtarfile(tarfile)
        if next is None:
            raise HeaderError("missing subsequent header")

        # Patch the TarInfo object from the next header with
        # the longname information.
        next.offset = self.offset
        if self.type == GNUTYPE_LONGNAME:
            next.name = nts(buf, tarfile.encoding, tarfile.errors)
        elif self.type == GNUTYPE_LONGLINK:
            next.linkname = nts(buf, tarfile.encoding, tarfile.errors)

        return next
    def _proc_sparse(self, tarfile):
        """Process a GNU sparse header plus extra headers.
        """
        # We already collected some sparse structures in frombuf().
        structs, isextended, origsize = self._sparse_structs
        del self._sparse_structs
        # Collect sparse structures from extended header blocks.
        while isextended:
            buf = tarfile.fileobj.read(BLOCKSIZE)
            pos = 0
            # Each extension block holds up to 21 (offset, numbytes)
            # pairs of 12 octal digits each.
            for i in range(21):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                structs.append((offset, numbytes))
                pos += 24
            # Byte 504 flags whether another extension block follows.
            isextended = bool(buf[504])
        # Transform the sparse structures to something we can use
        # in ExFileObject.
        self.sparse = _ringbuffer()
        lastpos = 0
        realpos = 0
        for offset, numbytes in structs:
            if offset > lastpos:
                # Gap between data sections becomes a hole.
                self.sparse.append(_hole(lastpos, offset - lastpos))
            self.sparse.append(_data(offset, numbytes, realpos))
            realpos += numbytes
            lastpos = offset + numbytes
        if lastpos < origsize:
            # Trailing hole up to the file's logical size.
            self.sparse.append(_hole(lastpos, origsize - lastpos))
        self.offset_data = tarfile.fileobj.tell()
        tarfile.offset = self.offset_data + self._block(self.size)
        # Report the logical (expanded) size, not the stored size.
        self.size = origsize
        return self
    def _proc_pax(self, tarfile):
        """Process an extended or global header as described in
           POSIX.1-2001.
        """
        # Read the header information.
        buf = tarfile.fileobj.read(self._block(self.size))
        # A pax header stores supplemental information for either
        # the following file (extended) or all following files
        # (global).
        if self.type == XGLTYPE:
            # Global header: mutate the archive-wide defaults in place.
            pax_headers = tarfile.pax_headers
        else:
            # Extended header: affects only the next member, so work on
            # a copy of the current defaults.
            pax_headers = tarfile.pax_headers.copy()
        # Parse pax header information. A record looks like that:
        # "%d %s=%s\n" % (length, keyword, value). length is the size
        # of the complete record including the length field itself and
        # the newline. keyword and value are both UTF-8 encoded strings.
        regex = re.compile(br"(\d+) ([^=]+)=")
        pos = 0
        while True:
            match = regex.match(buf, pos)
            if not match:
                break
            length, keyword = match.groups()
            length = int(length)
            # Value spans from just past '=' to just before the record's
            # trailing newline.
            value = buf[match.end(2) + 1:match.start(1) + length - 1]
            keyword = keyword.decode("utf8")
            value = value.decode("utf8")
            pax_headers[keyword] = value
            pos += length
        # Fetch the next header.
        next = self.fromtarfile(tarfile)
        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
            if next is None:
                raise HeaderError("missing subsequent header")
            # Patch the TarInfo object with the extended header info.
            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
            next.offset = self.offset
            if "size" in pax_headers:
                # If the extended header replaces the size field,
                # we need to recalculate the offset where the next
                # header starts.
                offset = next.offset_data
                if next.isreg() or next.type not in SUPPORTED_TYPES:
                    offset += next._block(next.size)
                tarfile.offset = offset
        return next
def _apply_pax_info(self, pax_headers, encoding, errors):
"""Replace fields with supplemental information from a previous
pax extended or global header.
"""
for keyword, value in pax_headers.items():
if keyword not in PAX_FIELDS:
continue
if keyword == "path":
value = value.rstrip("/")
if keyword in PAX_NUMBER_FIELDS:
try:
value = PAX_NUMBER_FIELDS[keyword](value)
except ValueError:
value = 0
setattr(self, keyword, value)
self.pax_headers = pax_headers.copy()
def _block(self, count):
"""Round up a byte count by BLOCKSIZE and return it,
e.g. _block(834) => 1024.
"""
blocks, remainder = divmod(count, BLOCKSIZE)
if remainder:
blocks += 1
return blocks * BLOCKSIZE
    def isreg(self):
        """Return True if the member is a regular file."""
        return self.type in REGULAR_TYPES
    def isfile(self):
        """Return True if the member is a regular file (alias of isreg())."""
        return self.isreg()
    def isdir(self):
        """Return True if the member is a directory."""
        return self.type == DIRTYPE
    def issym(self):
        """Return True if the member is a symbolic link."""
        return self.type == SYMTYPE
    def islnk(self):
        """Return True if the member is a hard link."""
        return self.type == LNKTYPE
    def ischr(self):
        """Return True if the member is a character device."""
        return self.type == CHRTYPE
    def isblk(self):
        """Return True if the member is a block device."""
        return self.type == BLKTYPE
    def isfifo(self):
        """Return True if the member is a FIFO."""
        return self.type == FIFOTYPE
    def issparse(self):
        """Return True if the member is a GNU sparse file."""
        return self.type == GNUTYPE_SPARSE
    def isdev(self):
        """Return True if the member is a character/block device or FIFO."""
        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
# class TarInfo
class TarFile(object):
    """The TarFile Class provides an interface to tar archives.
    """
    # The following class attributes are documented defaults; each one can
    # be overridden per instance via the corresponding constructor argument.
    debug = 0                   # May be set from 0 (no msgs) to 3 (all msgs)
    dereference = False         # If true, add content of linked file to the
                                # tar file, else the link.
    ignore_zeros = False        # If true, skips empty or invalid blocks and
                                # continues processing.
    errorlevel = 0              # If 0, fatal errors only appear in debug
                                # messages (if debug >= 0). If > 0, errors
                                # are passed to the caller as exceptions.
    format = DEFAULT_FORMAT     # The format to use when creating an archive.
    encoding = ENCODING         # Encoding for 8-bit character strings.
    errors = None               # Error handler for unicode conversion.
    tarinfo = TarInfo           # The default TarInfo class to use.
    fileobject = ExFileObject   # The default ExFileObject class to use.
    def __init__(self, name=None, mode="r", fileobj=None, format=None,
            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
            errors=None, pax_headers=None, debug=None, errorlevel=None):
        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
           read from an existing archive, 'a' to append data to an existing
           file or 'w' to create a new file overwriting an existing one. `mode'
           defaults to 'r'.
           If `fileobj' is given, it is used for reading or writing data. If it
           can be determined, `mode' is overridden by `fileobj's mode.
           `fileobj' is not closed, when TarFile is closed.
        """
        if len(mode) > 1 or mode not in "raw":
            raise ValueError("mode must be 'r', 'a' or 'w'")
        self.mode = mode
        # Low-level binary mode for the underlying file object.
        self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]
        if not fileobj:
            if self.mode == "a" and not os.path.exists(name):
                # Create nonexistent files in append mode.
                self.mode = "w"
                self._mode = "wb"
            fileobj = bltn_open(name, self._mode)
            self._extfileobj = False
        else:
            if name is None and hasattr(fileobj, "name"):
                name = fileobj.name
            if hasattr(fileobj, "mode"):
                self._mode = fileobj.mode
            # An external fileobj is never closed by close().
            self._extfileobj = True
        self.name = os.path.abspath(name) if name else None
        self.fileobj = fileobj
        # Init attributes.
        if format is not None:
            self.format = format
        if tarinfo is not None:
            self.tarinfo = tarinfo
        if dereference is not None:
            self.dereference = dereference
        if ignore_zeros is not None:
            self.ignore_zeros = ignore_zeros
        if encoding is not None:
            self.encoding = encoding
        if errors is not None:
            self.errors = errors
        elif mode == "r":
            # Be lenient when reading archives with broken encodings.
            self.errors = "replace"
        else:
            self.errors = "strict"
        if pax_headers is not None and self.format == PAX_FORMAT:
            self.pax_headers = pax_headers
        else:
            self.pax_headers = {}
        if debug is not None:
            self.debug = debug
        if errorlevel is not None:
            self.errorlevel = errorlevel
        # Init datastructures.
        self.closed = False
        self.members = []       # list of members as TarInfo objects
        self._loaded = False    # flag if all members have been read
        self.offset = self.fileobj.tell()
                                # current position in the archive file
        self.inodes = {}        # dictionary caching the inodes of
                                # archive members already added
        if self.mode == "r":
            self.firstmember = None
            # Read the first header eagerly so an invalid archive is
            # detected at construction time.
            self.firstmember = self.next()
        if self.mode == "a":
            # Move to the end of the archive,
            # before the first empty block.
            self.firstmember = None
            while True:
                if self.next() is None:
                    if self.offset > 0:
                        self.fileobj.seek(self.fileobj.tell() - BLOCKSIZE)
                    break
        if self.mode in "aw":
            self._loaded = True
            if self.pax_headers:
                # Write a pax global header that applies to all members.
                buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
                self.fileobj.write(buf)
                self.offset += len(buf)
    #--------------------------------------------------------------------------
    # Below are the classmethods which act as alternate constructors to the
    # TarFile class. The open() method is the only one that is needed for
    # public use; it is the "super"-constructor and is able to select an
    # adequate "sub"-constructor for a particular compression using the mapping
    # from OPEN_METH.
    #
    # This concept allows one to subclass TarFile without losing the comfort of
    # the super-constructor. A sub-constructor is registered and made available
    # by adding it to the mapping in OPEN_METH.
    @classmethod
    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
        """Open a tar archive for reading, writing or appending. Return
           an appropriate TarFile class.
           mode:
           'r' or 'r:*' open for reading with transparent compression
           'r:'         open for reading exclusively uncompressed
           'r:gz'       open for reading with gzip compression
           'r:bz2'      open for reading with bzip2 compression
           'a' or 'a:'  open for appending, creating the file if necessary
           'w' or 'w:'  open for writing without compression
           'w:gz'       open for writing with gzip compression
           'w:bz2'      open for writing with bzip2 compression
           'r|*'        open a stream of tar blocks with transparent compression
           'r|'         open an uncompressed stream of tar blocks for reading
           'r|gz'       open a gzip compressed stream of tar blocks
           'r|bz2'      open a bzip2 compressed stream of tar blocks
           'w|'         open an uncompressed stream for writing
           'w|gz'       open a gzip compressed stream for writing
           'w|bz2'      open a bzip2 compressed stream for writing
        """
        if not name and not fileobj:
            raise ValueError("nothing to open")
        if mode in ("r", "r:*"):
            # Find out which *open() is appropriate for opening the file.
            for comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
                if fileobj is not None:
                    # Remember the position so we can rewind after a
                    # failed attempt with the wrong decompressor.
                    saved_pos = fileobj.tell()
                try:
                    return func(name, "r", fileobj, **kwargs)
                except (ReadError, CompressionError) as e:
                    if fileobj is not None:
                        fileobj.seek(saved_pos)
                    continue
            raise ReadError("file could not be opened successfully")
        elif ":" in mode:
            filemode, comptype = mode.split(":", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"
            # Select the *open() function according to
            # given compression.
            if comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
            else:
                raise CompressionError("unknown compression type %r" % comptype)
            return func(name, filemode, fileobj, **kwargs)
        elif "|" in mode:
            filemode, comptype = mode.split("|", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"
            if filemode not in "rw":
                raise ValueError("mode must be 'r' or 'w'")
            # Stream mode: wrap the target in a _Stream that does the
            # (de)compression on the fly; the TarFile owns it.
            t = cls(name, filemode,
                    _Stream(name, filemode, comptype, fileobj, bufsize),
                    **kwargs)
            t._extfileobj = False
            return t
        elif mode in "aw":
            return cls.taropen(name, mode, fileobj, **kwargs)
        raise ValueError("undiscernible mode")
@classmethod
def taropen(cls, name, mode="r", fileobj=None, **kwargs):
"""Open uncompressed tar archive name for reading or writing.
"""
if len(mode) > 1 or mode not in "raw":
raise ValueError("mode must be 'r', 'a' or 'w'")
return cls(name, mode, fileobj, **kwargs)
@classmethod
def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
"""Open gzip compressed tar archive name for reading or writing.
Appending is not allowed.
"""
if len(mode) > 1 or mode not in "rw":
raise ValueError("mode must be 'r' or 'w'")
try:
import gzip
gzip.GzipFile
except (ImportError, AttributeError):
raise CompressionError("gzip module is not available")
if fileobj is None:
fileobj = bltn_open(name, mode + "b")
try:
t = cls.taropen(name, mode,
gzip.GzipFile(name, mode, compresslevel, fileobj),
**kwargs)
except IOError:
raise ReadError("not a gzip file")
t._extfileobj = False
return t
@classmethod
def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
"""Open bzip2 compressed tar archive name for reading or writing.
Appending is not allowed.
"""
if len(mode) > 1 or mode not in "rw":
raise ValueError("mode must be 'r' or 'w'.")
try:
import bz2
except ImportError:
raise CompressionError("bz2 module is not available")
if fileobj is not None:
fileobj = _BZ2Proxy(fileobj, mode)
else:
fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel)
try:
t = cls.taropen(name, mode, fileobj, **kwargs)
except IOError:
raise ReadError("not a bzip2 file")
t._extfileobj = False
return t
# All *open() methods are registered here.
OPEN_METH = {
"tar": "taropen", # uncompressed tar
"gz": "gzopen", # gzip compressed tar
"bz2": "bz2open" # bzip2 compressed tar
}
#--------------------------------------------------------------------------
# The public methods which TarFile provides:
def close(self):
"""Close the TarFile. In write-mode, two finishing zero blocks are
appended to the archive.
"""
if self.closed:
return
if self.mode in "aw":
self.fileobj.write(NUL * (BLOCKSIZE * 2))
self.offset += (BLOCKSIZE * 2)
# fill up the end with zero-blocks
# (like option -b20 for tar does)
blocks, remainder = divmod(self.offset, RECORDSIZE)
if remainder > 0:
self.fileobj.write(NUL * (RECORDSIZE - remainder))
if not self._extfileobj:
self.fileobj.close()
self.closed = True
def getmember(self, name):
"""Return a TarInfo object for member `name'. If `name' can not be
found in the archive, KeyError is raised. If a member occurs more
than once in the archive, its last occurrence is assumed to be the
most up-to-date version.
"""
tarinfo = self._getmember(name)
if tarinfo is None:
raise KeyError("filename %r not found" % name)
return tarinfo
def getmembers(self):
"""Return the members of the archive as a list of TarInfo objects. The
list has the same order as the members in the archive.
"""
self._check()
if not self._loaded: # if we want to obtain a list of
self._load() # all members, we first have to
# scan the whole archive.
return self.members
def getnames(self):
"""Return the members of the archive as a list of their names. It has
the same order as the list returned by getmembers().
"""
return [tarinfo.name for tarinfo in self.getmembers()]
    def gettarinfo(self, name=None, arcname=None, fileobj=None):
        """Create a TarInfo object for either the file `name' or the file
           object `fileobj' (using os.fstat on its file descriptor). You can
           modify some of the TarInfo's attributes before you add it using
           addfile(). If given, `arcname' specifies an alternative name for the
           file in the archive.
        """
        self._check("aw")
        # When fileobj is given, replace name by
        # fileobj's real name.
        if fileobj is not None:
            name = fileobj.name
        # Building the name of the member in the archive.
        # Backward slashes are converted to forward slashes,
        # Absolute paths are turned to relative paths.
        if arcname is None:
            arcname = name
        arcname = normpath(arcname)
        drv, arcname = os.path.splitdrive(arcname)
        while arcname[0:1] == "/":
            arcname = arcname[1:]
        # Now, fill the TarInfo object with
        # information specific for the file.
        tarinfo = self.tarinfo()
        tarinfo.tarfile = self
        # Use os.stat or os.lstat, depending on platform
        # and if symlinks shall be resolved.
        if fileobj is None:
            if hasattr(os, "lstat") and not self.dereference:
                statres = os.lstat(name)
            else:
                statres = os.stat(name)
        else:
            statres = os.fstat(fileobj.fileno())
        linkname = ""
        stmd = statres.st_mode
        if stat.S_ISREG(stmd):
            inode = (statres.st_ino, statres.st_dev)
            if not self.dereference and statres.st_nlink > 1 and \
                    inode in self.inodes and arcname != self.inodes[inode]:
                # Is it a hardlink to an already
                # archived file?
                type = LNKTYPE
                linkname = self.inodes[inode]
            else:
                # The inode is added only if its valid.
                # For win32 it is always 0.
                type = REGTYPE
                if inode[0]:
                    self.inodes[inode] = arcname
        elif stat.S_ISDIR(stmd):
            type = DIRTYPE
        elif stat.S_ISFIFO(stmd):
            type = FIFOTYPE
        elif stat.S_ISLNK(stmd):
            type = SYMTYPE
            linkname = os.readlink(name)
        elif stat.S_ISCHR(stmd):
            type = CHRTYPE
        elif stat.S_ISBLK(stmd):
            type = BLKTYPE
        else:
            # Unsupported file type (e.g. socket): signal to the caller
            # by returning None.
            return None
        # Fill the TarInfo object with all
        # information we can get.
        tarinfo.name = arcname
        tarinfo.mode = stmd
        tarinfo.uid = statres.st_uid
        tarinfo.gid = statres.st_gid
        if stat.S_ISREG(stmd):
            tarinfo.size = statres.st_size
        else:
            # Only regular files carry a payload in the archive.
            tarinfo.size = 0
        tarinfo.mtime = statres.st_mtime
        tarinfo.type = type
        tarinfo.linkname = linkname
        # Best effort: resolve numeric ids to names where the platform
        # provides the pwd/grp modules.
        if pwd:
            try:
                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
            except KeyError:
                pass
        if grp:
            try:
                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
            except KeyError:
                pass
        if type in (CHRTYPE, BLKTYPE):
            if hasattr(os, "major") and hasattr(os, "minor"):
                tarinfo.devmajor = os.major(statres.st_rdev)
                tarinfo.devminor = os.minor(statres.st_rdev)
        return tarinfo
    def list(self, verbose=True):
        """Print a table of contents to sys.stdout. If `verbose' is False, only
           the names of the members are printed. If it is True, an `ls -l'-like
           output is produced.
        """
        self._check()
        for tarinfo in self:
            if verbose:
                print(filemode(tarinfo.mode), end=' ')
                print("%s/%s" % (tarinfo.uname or tarinfo.uid,
                                 tarinfo.gname or tarinfo.gid), end=' ')
                if tarinfo.ischr() or tarinfo.isblk():
                    # Devices show "major,minor" instead of a size.
                    print("%10s" % ("%d,%d" \
                                    % (tarinfo.devmajor, tarinfo.devminor)), end=' ')
                else:
                    print("%10d" % tarinfo.size, end=' ')
                # mtime rendered in local time.
                print("%d-%02d-%02d %02d:%02d:%02d" \
                      % time.localtime(tarinfo.mtime)[:6], end=' ')
            # Directories get a trailing slash, like `ls -l'.
            print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ')
            if verbose:
                if tarinfo.issym():
                    print("->", tarinfo.linkname, end=' ')
                if tarinfo.islnk():
                    print("link to", tarinfo.linkname, end=' ')
            print()
def add(self, name, arcname=None, recursive=True, exclude=None):
"""Add the file `name' to the archive. `name' may be any type of file
(directory, fifo, symbolic link, etc.). If given, `arcname'
specifies an alternative name for the file in the archive.
Directories are added recursively by default. This can be avoided by
setting `recursive' to False. `exclude' is a function that should
return True for each filename to be excluded.
"""
self._check("aw")
if arcname is None:
arcname = name
# Exclude pathnames.
if exclude is not None and exclude(name):
self._dbg(2, "tarfile: Excluded %r" % name)
return
# Skip if somebody tries to archive the archive...
if self.name is not None and os.path.abspath(name) == self.name:
self._dbg(2, "tarfile: Skipped %r" % name)
return
# Special case: The user wants to add the current
# working directory.
if name == ".":
if recursive:
if arcname == ".":
arcname = ""
for f in os.listdir(name):
self.add(f, os.path.join(arcname, f), recursive, exclude)
return
self._dbg(1, name)
# Create a TarInfo object from the file.
tarinfo = self.gettarinfo(name, arcname)
if tarinfo is None:
self._dbg(1, "tarfile: Unsupported type %r" % name)
return
# Append the tar header and data to the archive.
if tarinfo.isreg():
f = bltn_open(name, "rb")
self.addfile(tarinfo, f)
f.close()
elif tarinfo.isdir():
self.addfile(tarinfo)
if recursive:
for f in os.listdir(name):
self.add(os.path.join(name, f), os.path.join(arcname, f), recursive, exclude)
else:
self.addfile(tarinfo)
    def addfile(self, tarinfo, fileobj=None):
        """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
           given, tarinfo.size bytes are read from it and added to the archive.
           You can create TarInfo objects using gettarinfo().
           On Windows platforms, `fileobj' should always be opened with mode
           'rb' to avoid irritation about the file size.
        """
        self._check("aw")
        # Work on a copy so callers can reuse their TarInfo object.
        tarinfo = copy.copy(tarinfo)
        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
        self.fileobj.write(buf)
        self.offset += len(buf)
        # If there's data to follow, append it.
        if fileobj is not None:
            copyfileobj(fileobj, self.fileobj, tarinfo.size)
            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
            if remainder > 0:
                # Zero-pad the last data block to a full BLOCKSIZE.
                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
                blocks += 1
            self.offset += blocks * BLOCKSIZE
        self.members.append(tarinfo)
    def extractall(self, path=".", members=None):
        """Extract all members from the archive to the current working
           directory and set owner, modification time and permissions on
           directories afterwards. `path' specifies a different directory
           to extract to. `members' is optional and must be a subset of the
           list returned by getmembers().
        """
        directories = []
        if members is None:
            members = self
        for tarinfo in members:
            if tarinfo.isdir():
                # Extract directories with a safe mode.
                directories.append(tarinfo)
                tarinfo = copy.copy(tarinfo)
                tarinfo.mode = 0o700
            self.extract(tarinfo, path)
        # Reverse sort directories. Fixing them up children-first keeps a
        # parent's mtime/permissions from being clobbered by extracting
        # entries inside it.
        directories.sort(key=lambda a: a.name)
        directories.reverse()
        # Set correct owner, mtime and filemode on directories.
        for tarinfo in directories:
            dirpath = os.path.join(path, tarinfo.name)
            try:
                self.chown(tarinfo, dirpath)
                self.utime(tarinfo, dirpath)
                self.chmod(tarinfo, dirpath)
            except ExtractError as e:
                # errorlevel <= 1: demote metadata errors to debug output.
                if self.errorlevel > 1:
                    raise
                else:
                    self._dbg(1, "tarfile: %s" % e)
    def extract(self, member, path=""):
        """Extract a member from the archive to the current working directory,
           using its full name. Its file information is extracted as accurately
           as possible. `member' may be a filename or a TarInfo object. You can
           specify a different directory using `path'.
        """
        self._check("r")
        if isinstance(member, str):
            tarinfo = self.getmember(member)
        else:
            tarinfo = member
        # Prepare the link target for makelink().
        if tarinfo.islnk():
            tarinfo._link_target = os.path.join(path, tarinfo.linkname)
        try:
            self._extract_member(tarinfo, os.path.join(path, tarinfo.name))
        except EnvironmentError as e:
            # OS-level failures are fatal only when errorlevel > 0.
            if self.errorlevel > 0:
                raise
            else:
                if e.filename is None:
                    self._dbg(1, "tarfile: %s" % e.strerror)
                else:
                    self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
        except ExtractError as e:
            # Non-fatal extraction problems are fatal only when errorlevel > 1.
            if self.errorlevel > 1:
                raise
            else:
                self._dbg(1, "tarfile: %s" % e)
    def extractfile(self, member):
        """Extract a member from the archive as a file object. `member' may be
           a filename or a TarInfo object. If `member' is a regular file, a
           file-like object is returned. If `member' is a link, a file-like
           object is constructed from the link's target. If `member' is none of
           the above, None is returned.
           The file-like object is read-only and provides the following
           methods: read(), readline(), readlines(), seek() and tell()
        """
        self._check("r")
        if isinstance(member, str):
            tarinfo = self.getmember(member)
        else:
            tarinfo = member
        if tarinfo.isreg():
            return self.fileobject(self, tarinfo)
        elif tarinfo.type not in SUPPORTED_TYPES:
            # If a member's type is unknown, it is treated as a
            # regular file.
            return self.fileobject(self, tarinfo)
        elif tarinfo.islnk() or tarinfo.issym():
            if isinstance(self.fileobj, _Stream):
                # A small but ugly workaround for the case that someone tries
                # to extract a (sym)link as a file-object from a non-seekable
                # stream of tar blocks.
                raise StreamError("cannot extract (sym)link as file object")
            else:
                # A (sym)link's file object is its target's file object.
                return self.extractfile(self._getmember(tarinfo.linkname,
                                                        tarinfo))
        else:
            # If there's no data associated with the member (directory, chrdev,
            # blkdev, etc.), return None instead of a file object.
            return None
    def _extract_member(self, tarinfo, targetpath):
        """Extract the TarInfo object tarinfo to a physical
           file called targetpath.
        """
        # Fetch the TarInfo object for the given name
        # and build the destination pathname, replacing
        # forward slashes to platform specific separators.
        if targetpath[-1:] == "/":
            targetpath = targetpath[:-1]
        targetpath = os.path.normpath(targetpath)
        # Create all upper directories.
        upperdirs = os.path.dirname(targetpath)
        if upperdirs and not os.path.exists(upperdirs):
            # Create directories that are not part of the archive with
            # default permissions.
            os.makedirs(upperdirs)
        if tarinfo.islnk() or tarinfo.issym():
            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
        else:
            self._dbg(1, tarinfo.name)
        # Dispatch on the member type; make*() methods can be overridden
        # in a subclass to customize extraction behaviour.
        if tarinfo.isreg():
            self.makefile(tarinfo, targetpath)
        elif tarinfo.isdir():
            self.makedir(tarinfo, targetpath)
        elif tarinfo.isfifo():
            self.makefifo(tarinfo, targetpath)
        elif tarinfo.ischr() or tarinfo.isblk():
            self.makedev(tarinfo, targetpath)
        elif tarinfo.islnk() or tarinfo.issym():
            self.makelink(tarinfo, targetpath)
        elif tarinfo.type not in SUPPORTED_TYPES:
            self.makeunknown(tarinfo, targetpath)
        else:
            self.makefile(tarinfo, targetpath)
        # Apply metadata afterwards; a symlink's own mode cannot be set.
        self.chown(tarinfo, targetpath)
        if not tarinfo.issym():
            self.chmod(tarinfo, targetpath)
            self.utime(tarinfo, targetpath)
#--------------------------------------------------------------------------
# Below are the different file methods. They are called via
# _extract_member() when extract() is called. They can be replaced in a
# subclass to implement other functionality.
def makedir(self, tarinfo, targetpath):
"""Make a directory called targetpath.
"""
try:
# Use a safe mode for the directory, the real mode is set
# later in _extract_member().
os.mkdir(targetpath, 0o700)
except EnvironmentError as e:
if e.errno != errno.EEXIST:
raise
def makefile(self, tarinfo, targetpath):
"""Make a file called targetpath.
"""
source = self.extractfile(tarinfo)
target = bltn_open(targetpath, "wb")
copyfileobj(source, target)
source.close()
target.close()
def makeunknown(self, tarinfo, targetpath):
"""Make a file from a TarInfo object with an unknown type
at targetpath.
"""
self.makefile(tarinfo, targetpath)
self._dbg(1, "tarfile: Unknown file type %r, " \
"extracted as regular file." % tarinfo.type)
def makefifo(self, tarinfo, targetpath):
"""Make a fifo called targetpath.
"""
if hasattr(os, "mkfifo"):
os.mkfifo(targetpath)
else:
raise ExtractError("fifo not supported by system")
def makedev(self, tarinfo, targetpath):
"""Make a character or block device called targetpath.
"""
if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
raise ExtractError("special devices not supported by system")
mode = tarinfo.mode
if tarinfo.isblk():
mode |= stat.S_IFBLK
else:
mode |= stat.S_IFCHR
os.mknod(targetpath, mode,
os.makedev(tarinfo.devmajor, tarinfo.devminor))
    def makelink(self, tarinfo, targetpath):
        """Make a (symbolic) link called targetpath. If it cannot be created
           (platform limitation), we try to make a copy of the referenced file
           instead of a link.
        """
        linkpath = tarinfo.linkname
        try:
            if tarinfo.issym():
                os.symlink(linkpath, targetpath)
            else:
                # See extract().
                os.link(tarinfo._link_target, targetpath)
        except AttributeError:
            # os.symlink/os.link is missing on this platform: fall back to
            # extracting or copying the link's target instead.
            if tarinfo.issym():
                linkpath = os.path.join(os.path.dirname(tarinfo.name),
                                        linkpath)
                linkpath = normpath(linkpath)
            try:
                # First try the target as an archive member...
                self._extract_member(self.getmember(linkpath), targetpath)
            except (EnvironmentError, KeyError) as e:
                # ...then as an already-extracted file on disk.
                linkpath = os.path.normpath(linkpath)
                try:
                    shutil.copy2(linkpath, targetpath)
                except EnvironmentError as e:
                    raise IOError("link could not be created")
    def chown(self, tarinfo, targetpath):
        """Set owner of targetpath according to tarinfo.
        """
        if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:
            # We have to be root to do so.
            # Prefer the symbolic uname/gname; fall back to the numeric
            # ids, then to the current process's ids.
            try:
                g = grp.getgrnam(tarinfo.gname)[2]
            except KeyError:
                try:
                    g = grp.getgrgid(tarinfo.gid)[2]
                except KeyError:
                    g = os.getgid()
            try:
                u = pwd.getpwnam(tarinfo.uname)[2]
            except KeyError:
                try:
                    u = pwd.getpwuid(tarinfo.uid)[2]
                except KeyError:
                    u = os.getuid()
            try:
                # lchown avoids following a symlink to its target.
                if tarinfo.issym() and hasattr(os, "lchown"):
                    os.lchown(targetpath, u, g)
                else:
                    if sys.platform != "os2emx":
                        os.chown(targetpath, u, g)
            except EnvironmentError as e:
                raise ExtractError("could not change owner")
def chmod(self, tarinfo, targetpath):
"""Set file permissions of targetpath according to tarinfo.
"""
if hasattr(os, 'chmod'):
try:
os.chmod(targetpath, tarinfo.mode)
except EnvironmentError as e:
raise ExtractError("could not change mode")
def utime(self, tarinfo, targetpath):
"""Set modification time of targetpath according to tarinfo.
"""
if not hasattr(os, 'utime'):
return
try:
os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
except EnvironmentError as e:
raise ExtractError("could not change modification time")
#--------------------------------------------------------------------------
    def next(self):
        """Return the next member of the archive as a TarInfo object, when
           TarFile is opened for reading. Return None if there is no more
           available.
        """
        self._check("ra")
        # Hand out the member that __init__() read eagerly, if any.
        if self.firstmember is not None:
            m = self.firstmember
            self.firstmember = None
            return m
        # Read the next block.
        self.fileobj.seek(self.offset)
        while True:
            try:
                tarinfo = self.tarinfo.fromtarfile(self)
                if tarinfo is None:
                    return
                self.members.append(tarinfo)
            except HeaderError as e:
                if self.ignore_zeros:
                    # Skip the bad block and keep scanning.
                    self._dbg(2, "0x%X: %s" % (self.offset, e))
                    self.offset += BLOCKSIZE
                    continue
                else:
                    if self.offset == 0:
                        # A broken very first header means this is not a
                        # tar archive at all.
                        raise ReadError(str(e))
                    return None
            break
        return tarinfo
#--------------------------------------------------------------------------
# Little helper methods:
def _getmember(self, name, tarinfo=None):
"""Find an archive member by name from bottom to top.
If tarinfo is given, it is used as the starting point.
"""
# Ensure that all members have been loaded.
members = self.getmembers()
if tarinfo is None:
end = len(members)
else:
end = members.index(tarinfo)
for i in range(end - 1, -1, -1):
if name == members[i].name:
return members[i]
def _load(self):
"""Read through the entire archive file and look for readable
members.
"""
while True:
tarinfo = self.next()
if tarinfo is None:
break
self._loaded = True
def _check(self, mode=None):
"""Check if TarFile is still open, and if the operation's mode
corresponds to TarFile's mode.
"""
if self.closed:
raise IOError("%s is closed" % self.__class__.__name__)
if mode is not None and self.mode not in mode:
raise IOError("bad operation for mode %r" % self.mode)
def __iter__(self):
"""Provide an iterator object.
"""
if self._loaded:
return iter(self.members)
else:
return TarIter(self)
def _dbg(self, level, msg):
"""Write debugging output to sys.stderr.
"""
if level <= self.debug:
print(msg, file=sys.stderr)
# class TarFile
class TarIter:
    """Iterator Class.
       for tarinfo in TarFile(...):
           suite...
    """
    def __init__(self, tarfile):
        """Construct a TarIter object.
        """
        self.tarfile = tarfile
        # Index into tarfile.members for the already-loaded case.
        self.index = 0
    def __iter__(self):
        """Return iterator object.
        """
        return self
    def __next__(self):
        """Return the next item using TarFile's next() method.
           When all members have been read, set TarFile as _loaded.
        """
        # Fix for SF #1100429: Under rare circumstances it can
        # happen that getmembers() is called during iteration,
        # which will cause TarIter to stop prematurely.
        if not self.tarfile._loaded:
            # Still scanning the archive: pull the next member directly.
            tarinfo = self.tarfile.next()
            if not tarinfo:
                self.tarfile._loaded = True
                raise StopIteration
        else:
            # Fully loaded: serve members from the cached list.
            try:
                tarinfo = self.tarfile.members[self.index]
            except IndexError:
                raise StopIteration
        self.index += 1
        return tarinfo
# Helper classes for sparse file support
class _section:
"""Base class for _data and _hole.
"""
def __init__(self, offset, size):
self.offset = offset
self.size = size
def __contains__(self, offset):
return self.offset <= offset < self.offset + self.size
class _data(_section):
    """A section of a sparse file that is backed by real data;
       `realpos' is its position within the stored (compacted) data.
    """
    def __init__(self, offset, size, realpos):
        super().__init__(offset, size)
        self.realpos = realpos
class _hole(_section):
    """Represent a hole section in a sparse file.
    """
    # A hole is all zero bytes; only its offset/size matter, so no extra
    # state beyond _section is needed.
    pass
class _ringbuffer(list):
"""Ringbuffer class which increases performance
over a regular list.
"""
def __init__(self):
self.idx = 0
def find(self, offset):
idx = self.idx
while True:
item = self[idx]
if offset in item:
break
idx += 1
if idx == len(self):
idx = 0
if idx == self.idx:
# End of File
return None
self.idx = idx
return item
#--------------------
# exported functions
#--------------------
def is_tarfile(name):
    """Return True if name points to a tar archive that we
       are able to handle, else return False.
    """
    try:
        archive = open(name)
        archive.close()
    except TarError:
        return False
    return True
# Preserve the builtin open() under another name (the module still needs
# it internally) and rebind the module-level open() to TarFile.open.
bltn_open = open
open = TarFile.open
| {
"content_hash": "b7eb29453894f83360413aac55fffc5a",
"timestamp": "",
"source": "github",
"line_count": 2425,
"max_line_length": 103,
"avg_line_length": 34.43628865979382,
"alnum_prop": 0.5350744838817838,
"repo_name": "MalloyPower/parsing-python",
"id": "72b3232f25d77e39bde06ca611b8f99900ca8468",
"size": "84898",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "front-end/testsuite-python-lib/Python-3.1/Lib/tarfile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1963"
},
{
"name": "Lex",
"bytes": "238458"
},
{
"name": "Makefile",
"bytes": "4513"
},
{
"name": "OCaml",
"bytes": "412695"
},
{
"name": "Python",
"bytes": "17319"
},
{
"name": "Rascal",
"bytes": "523063"
},
{
"name": "Yacc",
"bytes": "429659"
}
],
"symlink_target": ""
} |
import logging
import sys
from django.db.models.signals import post_delete
from django.utils.six.moves import input
from morango.certificates import Certificate
from morango.models import Buffer
from morango.models import DatabaseIDModel
from morango.models import DeletedModels
from morango.models import Store
from kolibri.core.auth.models import FacilityDataset
from kolibri.core.auth.models import FacilityUser
from kolibri.core.device.models import DevicePermissions
from kolibri.core.device.models import DeviceSettings
from kolibri.core.logger.models import AttemptLog
from kolibri.core.logger.models import ContentSessionLog
from kolibri.core.logger.models import ContentSummaryLog
from kolibri.core.tasks.management.commands.base import AsyncCommand
from kolibri.utils.cli import server
logger = logging.getLogger(__name__)
# Model classes purged by the deprovision command.
# NOTE(review): the ordering appears deliberate (log rows before the
# users/facilities they reference, then morango sync bookkeeping and
# device settings) -- confirm before reordering.
MODELS_TO_DELETE = [
    AttemptLog, ContentSessionLog, ContentSummaryLog, FacilityUser, FacilityDataset, Certificate,
    DatabaseIDModel, Store, Buffer, DevicePermissions, DeletedModels, DeviceSettings
]
# we want to disable the post_delete signal temporarily when deleting, so morango doesn't create DeletedModels objects
class DisablePostDeleteSignal(object):
    """Context manager that detaches every post_delete receiver for the
    duration of the block and reattaches them afterwards.
    """

    def __enter__(self):
        # Stash the registered receivers and leave the signal empty.
        self._saved_receivers = post_delete.receivers
        post_delete.receivers = []

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Restore the original receivers even if the body raised.
        post_delete.receivers = self._saved_receivers
        self._saved_receivers = None
def confirm_or_exit(message):
    """Prompt until the user types 'yes', 'n' or 'no'; terminate the
    process with exit status 1 for any answer other than 'yes'.
    """
    prompt = "{} [Type 'yes' or 'no'.] ".format(message)
    answer = ""
    while answer not in ("yes", "n", "no"):
        answer = input(prompt).lower()
    if answer != "yes":
        print("Canceled! Exiting without touching the database.")
        sys.exit(1)
class Command(AsyncCommand):
    """Management command that wipes all facility user data from the
    local database while leaving imported content untouched.
    """

    help = "Delete all facility user data from the local database, and put it back to a clean state (but leaving content as-is)."

    def deprovision(self):
        # Suppress post_delete receivers so morango does not record the
        # deletions; report progress once per model class.
        with DisablePostDeleteSignal(), self.start_progress(total=len(MODELS_TO_DELETE)) as progress_update:
            for model_class in MODELS_TO_DELETE:
                model_class.objects.all().delete()
                progress_update(1)

    def handle_async(self, *args, **options):
        # safest not to run this command while the server is running
        status_code, _ = server.get_urls()
        if status_code == server.STATUS_RUNNING:
            logger.error("The Kolibri server is currently running. Please stop it and then re-run this command.")
            sys.exit(1)
        # Require two explicit confirmations before destroying data.
        confirm_or_exit("Are you sure you wish to deprovision your database? This will DELETE ALL USER DATA!")
        confirm_or_exit("ARE YOU SURE? If you do this, there is no way to recover the user data on this device.")
        print("Proceeding with deprovisioning. Deleting all user data.")
        self.deprovision()
        print("Deprovisioning complete. All user data has been deleted.")
| {
"content_hash": "f57c5076979001dbbb90a6930c94227b",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 129,
"avg_line_length": 39.18666666666667,
"alnum_prop": 0.7199727798570943,
"repo_name": "DXCanas/kolibri",
"id": "4c367e22fc1c3f17e1cb3af15a5a7de3dd92b27e",
"size": "2939",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "kolibri/core/auth/management/commands/deprovision.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "864"
},
{
"name": "CSS",
"bytes": "32872"
},
{
"name": "Dockerfile",
"bytes": "4332"
},
{
"name": "Gherkin",
"bytes": "115979"
},
{
"name": "HTML",
"bytes": "14251"
},
{
"name": "JavaScript",
"bytes": "890295"
},
{
"name": "Makefile",
"bytes": "9885"
},
{
"name": "Python",
"bytes": "1363204"
},
{
"name": "Shell",
"bytes": "10407"
},
{
"name": "Vue",
"bytes": "944905"
}
],
"symlink_target": ""
} |
"""
WSGI config for gettingstarted project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
from SpyPartyDraft import app
if __name__ == '__main__':
app.run()
| {
"content_hash": "ecaeebbcb5bdc322b37c8fd352f96892",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 78,
"avg_line_length": 27.272727272727273,
"alnum_prop": 0.7233333333333334,
"repo_name": "LtHummus/SpyPartyDraft",
"id": "3b2b719c8d0a9418c40d5f293ca2b40356d2e8f5",
"size": "300",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11066"
},
{
"name": "Dockerfile",
"bytes": "292"
},
{
"name": "HTML",
"bytes": "14048"
},
{
"name": "JavaScript",
"bytes": "17922"
},
{
"name": "Python",
"bytes": "33750"
}
],
"symlink_target": ""
} |
import zope.interface
class IContent(zope.interface.Interface):
    """Marker interface (no attributes or methods) for content objects."""
class ITrash(zope.interface.Interface):
    """Marker interface (no attributes or methods) for trash objects."""
| {
"content_hash": "1df990f78cab51588f189b8af157d379",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 46,
"avg_line_length": 23.2,
"alnum_prop": 0.8189655172413793,
"repo_name": "sbrauer/recms",
"id": "1c1085cef7ebb04440eba82a63b38c6dea29e2dc",
"size": "116",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cms/resources/interfaces.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "32117"
},
{
"name": "Python",
"bytes": "294578"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
import multiselectfield.db.fields
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations: AlterField on the
    # interested_mentee / interested_mentor MultiSelectFields of Speaker.
    # Column definition (max_length=47, blank/null) is unchanged; only
    # the declared choices differ from the previous migration state.

    dependencies = [
        ('speakers', '0006_speaker_profile_flags'),
    ]

    operations = [
        migrations.AlterField(
            model_name='speaker',
            name='interested_mentee',
            field=multiselectfield.db.fields.MultiSelectField(blank=True, max_length=47, null=True, choices=[(b'brainstorming', b'Brainstorming'), (b'proposal_creation', b'Proposal Creation'), (b'proposal_review', b'Proposal Review')]),
        ),
        migrations.AlterField(
            model_name='speaker',
            name='interested_mentor',
            field=multiselectfield.db.fields.MultiSelectField(blank=True, max_length=47, null=True, choices=[(b'brainstorming', b'Brainstorming'), (b'proposal_creation', b'Proposal Creation'), (b'proposal_review', b'Proposal Review')]),
        ),
    ]
| {
"content_hash": "36c84341b73497348960fdd1ac7ba16b",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 236,
"avg_line_length": 40.208333333333336,
"alnum_prop": 0.6601036269430052,
"repo_name": "PyCon/pycon",
"id": "ab8e01fe97a6ffbf446851c1aad06de07fb6a0da",
"size": "989",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "symposion/speakers/migrations/0007_auto_20180921_1053.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "80909"
},
{
"name": "Dockerfile",
"bytes": "163"
},
{
"name": "HTML",
"bytes": "313093"
},
{
"name": "JavaScript",
"bytes": "161207"
},
{
"name": "Makefile",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "993540"
},
{
"name": "Shell",
"bytes": "14094"
},
{
"name": "Smarty",
"bytes": "7379"
}
],
"symlink_target": ""
} |
from django.urls import include, re_path
# Root URLconf.  Both dpaste includes are anchored at "^", so the API
# patterns are tried before the regular site patterns (Django resolves
# URLs in list order).
urlpatterns = [
    re_path(r"^", include("dpaste.urls.dpaste_api")),
    re_path(r"^", include("dpaste.urls.dpaste")),
    re_path(r"^i18n/", include("django.conf.urls.i18n")),
]
# Custom error handlers which load `dpaste/<code>.html` instead of `<code>.html`
handler404 = "dpaste.views.handler404"
handler500 = "dpaste.views.handler500"
| {
"content_hash": "a5d5c5d13733bdcc429ace6ae5e5fe2c",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 80,
"avg_line_length": 34.72727272727273,
"alnum_prop": 0.6910994764397905,
"repo_name": "bartTC/dpaste",
"id": "ad746cf37bacfa0e1f52576058ccdb124d174311",
"size": "382",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dpaste/urls/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "17102"
},
{
"name": "Dockerfile",
"bytes": "1505"
},
{
"name": "HTML",
"bytes": "11625"
},
{
"name": "JavaScript",
"bytes": "4822"
},
{
"name": "Makefile",
"bytes": "2161"
},
{
"name": "Python",
"bytes": "91387"
}
],
"symlink_target": ""
} |
import demistomock as demisto # noqa: F401
from MarkAsNoteByTag import mark_as_note
import MarkAsNoteByTag
# Fixture war-room entries: entry id '1' carries tags 'test1' and
# 'test2'; entry id '2' carries only 'test1'.
ENTRIES = [
    {
        'Metadata': {'tags': ['test1', 'test2'],
                     'id': '1'},
    },
    {
        'Metadata': {'tags': ['test1'],
                     'id': '2'},
    }
]
def test_mark_as_note(mocker):
    """
    Given:
        - Two entries that both carry the requested tag ('test1').
    When:
        - Running the mark_as_note function.
    Then:
        - Both entry IDs are forwarded to the executed command.
    """
    mocker.patch.object(MarkAsNoteByTag, 'isError', return_value=False)
    mocker.patch.object(demisto, 'results')
    mocker.patch.object(demisto, 'getArg', return_value='test1')
    execute_mock = mocker.patch.object(demisto, 'executeCommand')

    mark_as_note(ENTRIES)

    assert execute_mock.call_args[0][1] == {'entryIDs': '1,2'}
def test_mark_as_note_no_res(mocker):
    """
    Given:
        - Entries none of which carry the requested tag ('test3').
    When:
        - Running the mark_as_note function.
    Then:
        - A "No entries with ..." message is returned instead of a command call.
    """
    mocker.patch.object(MarkAsNoteByTag, 'isError', return_value=False)
    results_mock = mocker.patch.object(demisto, 'results')
    mocker.patch.object(demisto, 'getArg', return_value='test3')

    mark_as_note(ENTRIES)

    assert 'No entries with' in results_mock.call_args[0][0]['Contents']
| {
"content_hash": "f539e25defd6cb7888352990230f68e6",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 72,
"avg_line_length": 28.574468085106382,
"alnum_prop": 0.6046165301563663,
"repo_name": "demisto/content",
"id": "c60e96efc345cf68a75f931ba4979f5f89025226",
"size": "1343",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Packs/CommonScripts/Scripts/MarkAsNoteByTag/MarkAsNoteByTag_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "2146"
},
{
"name": "HTML",
"bytes": "205901"
},
{
"name": "JavaScript",
"bytes": "1584075"
},
{
"name": "PowerShell",
"bytes": "442288"
},
{
"name": "Python",
"bytes": "47881712"
},
{
"name": "Rich Text Format",
"bytes": "480911"
},
{
"name": "Shell",
"bytes": "108066"
},
{
"name": "YARA",
"bytes": "1185"
}
],
"symlink_target": ""
} |
import sys, os, sniper_lib
# Event-type IDs as stored by the stats backends; slot 0 is unused.
_, EVENT_MARKER, EVENT_THREAD_NAME, EVENT_APP_START, EVENT_APP_EXIT, EVENT_THREAD_CREATE, EVENT_THREAD_EXIT = range(7)
class SniperStatsBase:
def parse_stats(self, (k1, k2), ncores, metrics = None):
v1 = self.read_snapshot(k1, metrics = metrics)
v2 = self.read_snapshot(k2, metrics = metrics)
results = []
for metricid in self.names.keys():
name = '%s.%s' % self.names[metricid]
if metrics and name not in metrics:
continue
id_min = min(min(v2.get(metricid, {}).keys() or [0]), 0)
id_max = max(max(v2.get(metricid, {}).keys() or [0])+1, ncores)
vals1 = v1.get(metricid, {})
vals2 = v2.get(metricid, {})
results += [ (name, idx, vals2.get(idx, 0) - vals1.get(idx, 0)) for idx in range(id_min, id_max) ]
if name == 'performance_model.elapsed_time' and idx < ncores:
results += [ ('performance_model.elapsed_time_begin', idx, vals1.get(idx, 0)) for idx in range(ncores) ]
results += [ ('performance_model.elapsed_time_end', idx, vals2.get(idx, 0)) for idx in range(ncores) ]
elif name == 'barrier.global_time':
results += [ ('barrier.global_time_begin', idx, vals1.get(idx, 0)) for idx in range(ncores) ]
results += [ ('barrier.global_time_end', idx, vals2.get(idx, 0)) for idx in range(ncores) ]
return results
def get_topology(self):
raise ValueError("Topology information not available from statistics of this type")
def get_events(self):
raise ValueError("Event information not available from statistics of this type")
def get_markers(self):
markers = {}
for event, time, core, thread, arg0, arg1, s in self.get_events():
if event == EVENT_MARKER:
markers.append((time, core, thread, arg0, arg1, s))
return markers
def get_thread_names(self):
names = {}
for event, time, core, thread, arg0, arg1, s in self.get_events():
if event == EVENT_THREAD_NAME:
names[thread] = s
return names
def get_results(self, **kwds):
return sniper_lib.get_results(stats = self, **kwds)
def SniperStats(resultsdir = '.', jobid = None):
  """Construct the statistics reader matching a job id or a results
  directory, attach the parsed simulation config, and return it.
  """
  if jobid:
    import sniper_stats_jobid
    stats = sniper_stats_jobid.SniperStatsJobid(jobid)
  else:
    sqlite_file = os.path.join(resultsdir, 'sim.stats.sqlite3')
    db_file = os.path.join(resultsdir, 'sim.stats.db')
    if os.path.exists(sqlite_file):
      import sniper_stats_sqlite
      stats = sniper_stats_sqlite.SniperStatsSqlite(sqlite_file)
    elif os.path.exists(db_file):
      import sniper_stats_db
      stats = sniper_stats_db.SniperStatsDb(db_file)
    else:
      # Fall back to the legacy flat-file statistics format.
      import sniper_stats_compat
      stats = sniper_stats_compat.SniperStatsCompat(resultsdir)
  stats.config = sniper_lib.get_config(jobid, resultsdir)
  return stats
| {
"content_hash": "f48ee27d22367610dcc115558d659870",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 118,
"avg_line_length": 42.87692307692308,
"alnum_prop": 0.6594904915679942,
"repo_name": "abanaiyan/sniper",
"id": "817061d3a634c3fbe35610f77da4caa5f205270e",
"size": "2787",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tools/sniper_stats.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "90653"
},
{
"name": "C++",
"bytes": "1724797"
},
{
"name": "CSS",
"bytes": "19848"
},
{
"name": "HTML",
"bytes": "40364"
},
{
"name": "JavaScript",
"bytes": "168428"
},
{
"name": "Makefile",
"bytes": "21654"
},
{
"name": "Objective-C",
"bytes": "645"
},
{
"name": "Python",
"bytes": "1366430"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals, absolute_import, print_function
import os
import shutil
from collections import OrderedDict
from cookiecutter.prompt import read_user_yes_no
# Python 2 compatibility: prefer raw_input() for interactive prompts; on
# Python 3 the NameError leaves the built-in input() in place.
try:
    input = raw_input
except NameError:
    pass
# Per-subfolder questionnaire, asked in insertion order.  Each entry may
# define:
#   question            -- yes/no prompt for keeping the folder
#   hint                -- prompt for repeated item names written to main.yml
#   action              -- format template applied to each entered name
#   pre_hint/pre_action -- optional extra yes/no gate (used by 'meta')
# Entries without 'hint' (templates, files) are kept or removed as-is.
folders = OrderedDict()
folders['tasks']= {
    'question': '\nShould it have tasks? ',
    'hint': ' Add task name i.e (Install packages) ',
    'action': '- name: {}\n # TODO\n\n'
}
folders['handlers'] = {
    'question': '\nShould it have handlers?',
    'hint': ' Add handler name i.e (Restart uwsgi) ',
    'action': '- name: {}\n # TODO\n\n'
}
folders['defaults'] = {
    'question': '\nIt should contain default variables?: ',
    'hint': ' Add variable i.e (operator: drunken_master) ',
    'action': '{}\n\n'
}
folders['meta']= {
    'question': '\nShould it have meta info? ',
    'pre_hint': ' - Should it have dependencies? ',
    'pre_action': '\ndependencies:\n',
    'hint': ' Add dependency i.e ({role: aptsupercow, var: \'value\'}) ',
    'action': ' - {}\n'
}
folders['templates'] = {
    'question': '\nShould it have templates? ',
}
folders['files'] = {
    'question': '\nShould it have files? ',
}
def configure_role():
    """Walk the `folders` spec, asking the user which role sub-folders to
    keep, and pre-populate each kept folder's main.yml from the answers.
    """
    print('\n\nROLE CONFIGURATION:\n===================')
    for folder_name, folder in folders.items():
        if not read_user_yes_no(folder['question'], default_value=u'yes'):
            # Folder not wanted: drop it from the generated role.
            shutil.rmtree(folder_name)
            continue
        # git cannot store empty folders, so each one ships a placeholder
        # file that must be removed once the folder has real content.
        try:
            os.remove(os.path.join(folder_name, '.empty'))
        except OSError:
            pass
        if 'hint' not in folder:
            continue
        with open('{}/main.yml'.format(folder_name), 'a') as fp:
            if 'pre_hint' in folder:
                if not read_user_yes_no(folder['pre_hint'], default_value=u'yes'):
                    continue
                fp.write(folder['pre_action'])
            entry = input(folder['hint'])
            while entry:
                fp.write(folder['action'].format(entry))
                entry = input(folder['hint'])
# Entry point when executed directly (presumably as a cookiecutter
# post-generation hook -- confirm against the template layout).
if __name__ == '__main__':
    configure_role()
| {
"content_hash": "4772da5c77c0f95bf129dee3896b2ed8",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 86,
"avg_line_length": 28.14814814814815,
"alnum_prop": 0.5368421052631579,
"repo_name": "iknite/cookiecutter-ansible-role",
"id": "26a4829c617736ab401bd20eeab00bf6a026cd0f",
"size": "2302",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hooks/post_gen_project.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "2302"
}
],
"symlink_target": ""
} |
import multiprocessing as m
import time
import numpy
class StopRemoteIteration:
    """Sentinel object sent through the pipe to signal that the remote
    generator is exhausted.
    """
    pass
class ForkedIterator(m.Process):
    """Run a generator function in a forked child process and iterate
    over its yielded values in the parent through a multiprocessing
    Pipe.  Python 2 only: uses the old-style next() protocol and the
    py2 ``except IOError as (errno, strerror)`` syntax.
    """
    def __init__(self, fn, *args, **kargs):
        # Generator function and its call arguments; executed in run()
        # inside the child process.
        self.fn = fn
        self.p1, self.p2 = m.Pipe()
        self.args = args
        self.kargs = kargs
        m.Process.__init__(self)
        self.daemon = True
        self.start()
        self.p2.close() ## we will never use this end of the pipe from here; close it locally so we can detect when the pipe is fully closed
    def send(self, obj):
        # Parent -> child channel (child would need to read p2 itself).
        self.p1.send(obj)
    def run(self):
        # Child-process body: drain the generator into the pipe, then
        # send the end-of-iteration sentinel and close our end.
        self.p1.close() ## we will never use this end of the pipe from here; close it locally so we can detect when the pipe is fully closed
        #print "args", self.args, "kargs", self.kargs
        #print "fn", self.fn
        for x in self.fn(*self.args, **self.kargs):
            self.p2.send(x)
        self.p2.send(StopRemoteIteration())
        self.p2.close()
    def __iter__(self):
        return self
    def next(self):
        """Receive the next yielded value from the child; raise
        StopIteration on the sentinel, or Exception if the child died
        without sending one.
        """
        try:
            x = self.p1.recv()
            #print "recv:", x
        except EOFError: ## nothing left in pipe
            if self.is_alive():
                raise Exception("Remote process has already closed pipe (but is still alive)")
            else:
                raise Exception("Remote process has ended (and pipe is empty). (exit code %d)" % self.exitcode)
        except IOError as (errno, strerror):
            if errno == 4: ## blocking read was interrupted; try again.
                return self.next()
            else:
                raise
        if isinstance(x, StopRemoteIteration):
            #print "iteration done"
            raise StopIteration
        else:
            return x
#val = {'a': ['complex', 'object']}
#v = m.Value(dict, val)
if __name__ == '__main__':
    # Smoke test: fork a simple generator and print everything it yields
    # (Python 2 print statements).
    def gen(nMax):
        # Yields (i, nMax) pairs; the commented-out branches simulate
        # child-process crashes/exceptions for manual testing.
        for i in range(nMax):
            r = numpy.random.random()
            #if r < 0.05:
                #print "Fatal error in process"
                #import os
                #os.kill(os.getpid(), 9)
            #elif r < 0.1:
                #raise Exception("Error in process")
            yield i, nMax
        yield 3.1415, None
    fg = ForkedIterator(gen, 10)
    for x in fg:
        print x
| {
"content_hash": "59e84befafa43e3e64c1782da1d9c4bb",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 141,
"avg_line_length": 30.96,
"alnum_prop": 0.5219638242894057,
"repo_name": "hiuwo/acq4",
"id": "5c36792df7c99d9561c687d4615c8e8fb9d3ce79",
"size": "2322",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "acq4/util/ForkedIterator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "18652"
},
{
"name": "C",
"bytes": "1051646"
},
{
"name": "C++",
"bytes": "636100"
},
{
"name": "CSS",
"bytes": "716"
},
{
"name": "Matlab",
"bytes": "1752"
},
{
"name": "Processing",
"bytes": "13403"
},
{
"name": "Python",
"bytes": "4925976"
},
{
"name": "Shell",
"bytes": "64"
}
],
"symlink_target": ""
} |
'''OpenGL extension EXT.framebuffer_object
Automatically generated by the get_gl_extensions script, do not edit!
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions
from OpenGL.GL import glget
import ctypes
# Auto-generated token definitions for GL_EXT_framebuffer_object.
# Constants also registered via glget.addGLGetConstant are queryable
# through glGet* with the given result shape.
EXTENSION_NAME = 'GL_EXT_framebuffer_object'
_DEPRECATED = False
GL_INVALID_FRAMEBUFFER_OPERATION_EXT = constant.Constant( 'GL_INVALID_FRAMEBUFFER_OPERATION_EXT', 0x506 )
GL_MAX_RENDERBUFFER_SIZE_EXT = constant.Constant( 'GL_MAX_RENDERBUFFER_SIZE_EXT', 0x84E8 )
glget.addGLGetConstant( GL_MAX_RENDERBUFFER_SIZE_EXT, (1,) )
GL_FRAMEBUFFER_BINDING_EXT = constant.Constant( 'GL_FRAMEBUFFER_BINDING_EXT', 0x8CA6 )
glget.addGLGetConstant( GL_FRAMEBUFFER_BINDING_EXT, (1,) )
GL_RENDERBUFFER_BINDING_EXT = constant.Constant( 'GL_RENDERBUFFER_BINDING_EXT', 0x8CA7 )
glget.addGLGetConstant( GL_RENDERBUFFER_BINDING_EXT, (1,) )
GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE_EXT = constant.Constant( 'GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE_EXT', 0x8CD0 )
GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_EXT = constant.Constant( 'GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_EXT', 0x8CD1 )
GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_EXT = constant.Constant( 'GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_EXT', 0x8CD2 )
GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE_EXT = constant.Constant( 'GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE_EXT', 0x8CD3 )
GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_3D_ZOFFSET_EXT = constant.Constant( 'GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_3D_ZOFFSET_EXT', 0x8CD4 )
# Framebuffer completeness status values returned by glCheckFramebufferStatusEXT.
GL_FRAMEBUFFER_COMPLETE_EXT = constant.Constant( 'GL_FRAMEBUFFER_COMPLETE_EXT', 0x8CD5 )
GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT_EXT = constant.Constant( 'GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT_EXT', 0x8CD6 )
GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT_EXT = constant.Constant( 'GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT_EXT', 0x8CD7 )
GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS_EXT = constant.Constant( 'GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS_EXT', 0x8CD9 )
GL_FRAMEBUFFER_INCOMPLETE_FORMATS_EXT = constant.Constant( 'GL_FRAMEBUFFER_INCOMPLETE_FORMATS_EXT', 0x8CDA )
GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER_EXT = constant.Constant( 'GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER_EXT', 0x8CDB )
GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER_EXT = constant.Constant( 'GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER_EXT', 0x8CDC )
GL_FRAMEBUFFER_UNSUPPORTED_EXT = constant.Constant( 'GL_FRAMEBUFFER_UNSUPPORTED_EXT', 0x8CDD )
GL_MAX_COLOR_ATTACHMENTS_EXT = constant.Constant( 'GL_MAX_COLOR_ATTACHMENTS_EXT', 0x8CDF )
glget.addGLGetConstant( GL_MAX_COLOR_ATTACHMENTS_EXT, (1,) )
# Attachment points for glFramebufferTexture*/glFramebufferRenderbuffer.
GL_COLOR_ATTACHMENT0_EXT = constant.Constant( 'GL_COLOR_ATTACHMENT0_EXT', 0x8CE0 )
GL_COLOR_ATTACHMENT1_EXT = constant.Constant( 'GL_COLOR_ATTACHMENT1_EXT', 0x8CE1 )
GL_COLOR_ATTACHMENT2_EXT = constant.Constant( 'GL_COLOR_ATTACHMENT2_EXT', 0x8CE2 )
GL_COLOR_ATTACHMENT3_EXT = constant.Constant( 'GL_COLOR_ATTACHMENT3_EXT', 0x8CE3 )
GL_COLOR_ATTACHMENT4_EXT = constant.Constant( 'GL_COLOR_ATTACHMENT4_EXT', 0x8CE4 )
GL_COLOR_ATTACHMENT5_EXT = constant.Constant( 'GL_COLOR_ATTACHMENT5_EXT', 0x8CE5 )
GL_COLOR_ATTACHMENT6_EXT = constant.Constant( 'GL_COLOR_ATTACHMENT6_EXT', 0x8CE6 )
GL_COLOR_ATTACHMENT7_EXT = constant.Constant( 'GL_COLOR_ATTACHMENT7_EXT', 0x8CE7 )
GL_COLOR_ATTACHMENT8_EXT = constant.Constant( 'GL_COLOR_ATTACHMENT8_EXT', 0x8CE8 )
GL_COLOR_ATTACHMENT9_EXT = constant.Constant( 'GL_COLOR_ATTACHMENT9_EXT', 0x8CE9 )
GL_COLOR_ATTACHMENT10_EXT = constant.Constant( 'GL_COLOR_ATTACHMENT10_EXT', 0x8CEA )
GL_COLOR_ATTACHMENT11_EXT = constant.Constant( 'GL_COLOR_ATTACHMENT11_EXT', 0x8CEB )
GL_COLOR_ATTACHMENT12_EXT = constant.Constant( 'GL_COLOR_ATTACHMENT12_EXT', 0x8CEC )
GL_COLOR_ATTACHMENT13_EXT = constant.Constant( 'GL_COLOR_ATTACHMENT13_EXT', 0x8CED )
GL_COLOR_ATTACHMENT14_EXT = constant.Constant( 'GL_COLOR_ATTACHMENT14_EXT', 0x8CEE )
GL_COLOR_ATTACHMENT15_EXT = constant.Constant( 'GL_COLOR_ATTACHMENT15_EXT', 0x8CEF )
GL_DEPTH_ATTACHMENT_EXT = constant.Constant( 'GL_DEPTH_ATTACHMENT_EXT', 0x8D00 )
GL_STENCIL_ATTACHMENT_EXT = constant.Constant( 'GL_STENCIL_ATTACHMENT_EXT', 0x8D20 )
GL_FRAMEBUFFER_EXT = constant.Constant( 'GL_FRAMEBUFFER_EXT', 0x8D40 )
GL_RENDERBUFFER_EXT = constant.Constant( 'GL_RENDERBUFFER_EXT', 0x8D41 )
GL_RENDERBUFFER_WIDTH_EXT = constant.Constant( 'GL_RENDERBUFFER_WIDTH_EXT', 0x8D42 )
GL_RENDERBUFFER_HEIGHT_EXT = constant.Constant( 'GL_RENDERBUFFER_HEIGHT_EXT', 0x8D43 )
GL_RENDERBUFFER_INTERNAL_FORMAT_EXT = constant.Constant( 'GL_RENDERBUFFER_INTERNAL_FORMAT_EXT', 0x8D44 )
GL_STENCIL_INDEX1_EXT = constant.Constant( 'GL_STENCIL_INDEX1_EXT', 0x8D46 )
GL_STENCIL_INDEX4_EXT = constant.Constant( 'GL_STENCIL_INDEX4_EXT', 0x8D47 )
GL_STENCIL_INDEX8_EXT = constant.Constant( 'GL_STENCIL_INDEX8_EXT', 0x8D48 )
GL_STENCIL_INDEX16_EXT = constant.Constant( 'GL_STENCIL_INDEX16_EXT', 0x8D49 )
GL_RENDERBUFFER_RED_SIZE_EXT = constant.Constant( 'GL_RENDERBUFFER_RED_SIZE_EXT', 0x8D50 )
GL_RENDERBUFFER_GREEN_SIZE_EXT = constant.Constant( 'GL_RENDERBUFFER_GREEN_SIZE_EXT', 0x8D51 )
GL_RENDERBUFFER_BLUE_SIZE_EXT = constant.Constant( 'GL_RENDERBUFFER_BLUE_SIZE_EXT', 0x8D52 )
GL_RENDERBUFFER_ALPHA_SIZE_EXT = constant.Constant( 'GL_RENDERBUFFER_ALPHA_SIZE_EXT', 0x8D53 )
GL_RENDERBUFFER_DEPTH_SIZE_EXT = constant.Constant( 'GL_RENDERBUFFER_DEPTH_SIZE_EXT', 0x8D54 )
GL_RENDERBUFFER_STENCIL_SIZE_EXT = constant.Constant( 'GL_RENDERBUFFER_STENCIL_SIZE_EXT', 0x8D55 )
# Auto-generated ctypes wrappers for the extension's entry points; each
# is resolved lazily from the GL driver by PyOpenGL's platform layer.
glIsRenderbufferEXT = platform.createExtensionFunction(
'glIsRenderbufferEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=constants.GLboolean,
argTypes=(constants.GLuint,),
doc='glIsRenderbufferEXT(GLuint(renderbuffer)) -> constants.GLboolean',
argNames=('renderbuffer',),
deprecated=_DEPRECATED,
)
glBindRenderbufferEXT = platform.createExtensionFunction(
'glBindRenderbufferEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLuint,),
doc='glBindRenderbufferEXT(GLenum(target), GLuint(renderbuffer)) -> None',
argNames=('target','renderbuffer',),
deprecated=_DEPRECATED,
)
glDeleteRenderbuffersEXT = platform.createExtensionFunction(
'glDeleteRenderbuffersEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLsizei,arrays.GLuintArray,),
doc='glDeleteRenderbuffersEXT(GLsizei(n), GLuintArray(renderbuffers)) -> None',
argNames=('n','renderbuffers',),
deprecated=_DEPRECATED,
)
glGenRenderbuffersEXT = platform.createExtensionFunction(
'glGenRenderbuffersEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLsizei,arrays.GLuintArray,),
doc='glGenRenderbuffersEXT(GLsizei(n), GLuintArray(renderbuffers)) -> None',
argNames=('n','renderbuffers',),
deprecated=_DEPRECATED,
)
glRenderbufferStorageEXT = platform.createExtensionFunction(
'glRenderbufferStorageEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLenum,constants.GLsizei,constants.GLsizei,),
doc='glRenderbufferStorageEXT(GLenum(target), GLenum(internalformat), GLsizei(width), GLsizei(height)) -> None',
argNames=('target','internalformat','width','height',),
deprecated=_DEPRECATED,
)
glGetRenderbufferParameterivEXT = platform.createExtensionFunction(
'glGetRenderbufferParameterivEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLenum,arrays.GLintArray,),
doc='glGetRenderbufferParameterivEXT(GLenum(target), GLenum(pname), GLintArray(params)) -> None',
argNames=('target','pname','params',),
deprecated=_DEPRECATED,
)
glIsFramebufferEXT = platform.createExtensionFunction(
'glIsFramebufferEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=constants.GLboolean,
argTypes=(constants.GLuint,),
doc='glIsFramebufferEXT(GLuint(framebuffer)) -> constants.GLboolean',
argNames=('framebuffer',),
deprecated=_DEPRECATED,
)
glBindFramebufferEXT = platform.createExtensionFunction(
'glBindFramebufferEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLuint,),
doc='glBindFramebufferEXT(GLenum(target), GLuint(framebuffer)) -> None',
argNames=('target','framebuffer',),
deprecated=_DEPRECATED,
)
glDeleteFramebuffersEXT = platform.createExtensionFunction(
'glDeleteFramebuffersEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLsizei,arrays.GLuintArray,),
doc='glDeleteFramebuffersEXT(GLsizei(n), GLuintArray(framebuffers)) -> None',
argNames=('n','framebuffers',),
deprecated=_DEPRECATED,
)
glGenFramebuffersEXT = platform.createExtensionFunction(
'glGenFramebuffersEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLsizei,arrays.GLuintArray,),
doc='glGenFramebuffersEXT(GLsizei(n), GLuintArray(framebuffers)) -> None',
argNames=('n','framebuffers',),
deprecated=_DEPRECATED,
)
glCheckFramebufferStatusEXT = platform.createExtensionFunction(
'glCheckFramebufferStatusEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=constants.GLenum,
argTypes=(constants.GLenum,),
doc='glCheckFramebufferStatusEXT(GLenum(target)) -> constants.GLenum',
argNames=('target',),
deprecated=_DEPRECATED,
)
glFramebufferTexture1DEXT = platform.createExtensionFunction(
'glFramebufferTexture1DEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLenum,constants.GLenum,constants.GLuint,constants.GLint,),
doc='glFramebufferTexture1DEXT(GLenum(target), GLenum(attachment), GLenum(textarget), GLuint(texture), GLint(level)) -> None',
argNames=('target','attachment','textarget','texture','level',),
deprecated=_DEPRECATED,
)
glFramebufferTexture2DEXT = platform.createExtensionFunction(
'glFramebufferTexture2DEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLenum,constants.GLenum,constants.GLuint,constants.GLint,),
doc='glFramebufferTexture2DEXT(GLenum(target), GLenum(attachment), GLenum(textarget), GLuint(texture), GLint(level)) -> None',
argNames=('target','attachment','textarget','texture','level',),
deprecated=_DEPRECATED,
)
glFramebufferTexture3DEXT = platform.createExtensionFunction(
'glFramebufferTexture3DEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLenum,constants.GLenum,constants.GLuint,constants.GLint,constants.GLint,),
doc='glFramebufferTexture3DEXT(GLenum(target), GLenum(attachment), GLenum(textarget), GLuint(texture), GLint(level), GLint(zoffset)) -> None',
argNames=('target','attachment','textarget','texture','level','zoffset',),
deprecated=_DEPRECATED,
)
glFramebufferRenderbufferEXT = platform.createExtensionFunction(
'glFramebufferRenderbufferEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLenum,constants.GLenum,constants.GLuint,),
doc='glFramebufferRenderbufferEXT(GLenum(target), GLenum(attachment), GLenum(renderbuffertarget), GLuint(renderbuffer)) -> None',
argNames=('target','attachment','renderbuffertarget','renderbuffer',),
deprecated=_DEPRECATED,
)
glGetFramebufferAttachmentParameterivEXT = platform.createExtensionFunction(
'glGetFramebufferAttachmentParameterivEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLenum,constants.GLenum,arrays.GLintArray,),
doc='glGetFramebufferAttachmentParameterivEXT(GLenum(target), GLenum(attachment), GLenum(pname), GLintArray(params)) -> None',
argNames=('target','attachment','pname','params',),
deprecated=_DEPRECATED,
)
glGenerateMipmapEXT = platform.createExtensionFunction(
'glGenerateMipmapEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,),
doc='glGenerateMipmapEXT(GLenum(target)) -> None',
argNames=('target',),
deprecated=_DEPRECATED,
)
def glInitFramebufferObjectEXT():
    '''Return boolean indicating whether the GL_EXT_framebuffer_object
    extension is available in the current GL context.'''
    return extensions.hasGLExtension( EXTENSION_NAME )
| {
"content_hash": "f947de3fed6912527e457720d854f28d",
"timestamp": "",
"source": "github",
"line_count": 239,
"max_line_length": 142,
"avg_line_length": 49.76569037656904,
"alnum_prop": 0.7956953085589373,
"repo_name": "Universal-Model-Converter/UMC3.0a",
"id": "3a754429866496bf6e446c84b87592c01031d74e",
"size": "11894",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "data/Python/x86/Lib/site-packages/OpenGL/raw/GL/EXT/framebuffer_object.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "226"
},
{
"name": "C",
"bytes": "1082640"
},
{
"name": "C#",
"bytes": "8440"
},
{
"name": "C++",
"bytes": "3621086"
},
{
"name": "CSS",
"bytes": "6226"
},
{
"name": "F#",
"bytes": "2310"
},
{
"name": "FORTRAN",
"bytes": "7795"
},
{
"name": "Forth",
"bytes": "506"
},
{
"name": "GLSL",
"bytes": "1040"
},
{
"name": "Groff",
"bytes": "5943"
},
{
"name": "HTML",
"bytes": "1196266"
},
{
"name": "Java",
"bytes": "5793"
},
{
"name": "Makefile",
"bytes": "1109"
},
{
"name": "Mask",
"bytes": "969"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Python",
"bytes": "33351557"
},
{
"name": "R",
"bytes": "1370"
},
{
"name": "Shell",
"bytes": "6931"
},
{
"name": "Tcl",
"bytes": "2084458"
},
{
"name": "Visual Basic",
"bytes": "481"
}
],
"symlink_target": ""
} |
"""
Print to file differences in two directory structures, ignoring file contents.
Looking recursively in two directories (which are defined below),
print to file the differences found in the subdirectory names,
highlighting odd subdirectories.
Reason for this:
SyncBackFree is a powerful synchronisation program, but with two drawbacks:
* big changes in directory structure are hard to interpret and act upon
* it seems to sync the existence of empty directories without reporting
So when I've changed a lot on my netbook, and wish to sync my directory
with my portable hard drive, I can run this module first, and use the output
to guide me through some manual directory synching before running SyncBackFree.
This program does not change anything, but just (re)writes the result file,
which is automatically named after this program.
"""
import datetime
import os
import re
import socket
import sys
import time
# Get the start time, to be able to check against later:
start = time.time()
# Get the datetime (used in the report header):
startd = datetime.datetime.now().isoformat(' ')
# Specify the parent directory's location
# (the third value is for special case when this file is Current.py):
drv = ['D:/', 'E:/', 'F:/']
# List of empty directories not to be flagged up
# (add regex's to this as required; each entry is matched with re.search
# against the absolute path of a subdirectory):
known_empty = [r'\.dropbox\.cache',
               r'Cross-platform\\ImageMagick',
               # anonymized Firefox profile folders:
               r'IT\\Cross-platform\\Browsing\\Firefox\\profile\\',
               r'Cross-platform\\pandoc-master\\data\\templates',
               r'GT-S7560 Camera',
               # from my notes "to reduce":
               r'Gigabyte GA M57SLI-S4\\',
               # my Firefox profile folders:
               r'MSWinUser\\.*\\.*\.default\\',
               # my LibreOffice profile folders:
               r'MSWinUser\\.*\\user\\',
               r'MSWin\\Notepad\+\+\\npp.6.7.7.bin\\plugins\\doc',
               r'MSWin\\AV\\ImageMagick-6.8.9\\',
               r'\\Q-Dir\\Favoriten\\Quick-Link',
               r'Sharon\'s Music\\',
               # Canon PowerShot A470\Software\CD:
               r'SOFTWARE\\DOTNET\\ENGLISH',
               r'Unix-like\\GNU-Linux\\Arch\\package-query.*',
               r'WriteBackup',
               r'\\CDExPortable\\',
               r'\\tempera\\admin\\images\\schemes',
               r'\\\.git\\',
               r'\\\.vim\\',
               r'\\vimfiles\\',
               r'backup-.*_josep396\\',
               r'debian-7.5.0-i386-xfce-CD-1\\Snapshots',
               r'config\\soffice\.cfg\\modules\\']
# Get this script's directory path
# ([3:] strips the 'D:/'-style drive prefix from the current directory):
sdp = os.getcwd()[3:]
if(sdp):
    sdp = sdp + '/'
# Get this script's filename without extension,
# for use as the name of the directory to be synched:
dtbs = os.path.basename(os.path.splitext(sys.argv[0])[0])
# and for use as the name of the output file:
outfile = dtbs + '.txt'
print('When this is done, you should open', outfile, '\n')
def dirlister(dirTolist):
    """Walk *dirTolist* and collect its subdirectories as relative paths.

    Progress is printed while walking.  Returns a 3-tuple
    ``(normal_dirs, normal_count, odd_dirs)``; the first element of each
    list is a title line derived from the walked directory's path.
    Directories that are empty (and not listed in the module-level
    ``known_empty`` patterns) or that contain an un-decodable character
    go into ``odd_dirs``.
    """
    print('Looking at contents of', dirTolist, ':')
    total = 0       # every subdirectory seen
    odd_count = 0   # subdirectories classified as odd or empty
    # Both lists start with the base folder path (turned into a title below):
    normal = [dirTolist]
    odd = [dirTolist]
    for parent, subdirs, _files in os.walk(dirTolist):
        for name in subdirs:
            total += 1
            # Report a progress count:
            print('\r', total, end=' ')
            # `name` is just the folder name; prepend its path:
            full = os.path.join(parent, name)
            rel = full.replace(dirTolist + "\\", "")
            if '\ufffd' in full:
                # Unwriteable Unicode escape character present -- store the
                # messed-up directory path as ASCII:
                odd.append(ascii(rel))
                odd_count += 1
            elif (os.listdir(full) or
                    any(re.search(ke, full) for ke in known_empty)):
                # Non-empty (or a known-empty exception): record normally.
                normal.append(rel)
            else:
                # Genuinely empty and not excused: record as odd.
                odd.append(rel)
                odd_count += 1
    print(' - subdirectory records loaded in')
    # Turn the first (path) items into title lines:
    dlctxt = (' ---> contains ' + str(total) + ' subdirectories, these ones ')
    odd[0] += (dlctxt + 'are odd or Empty:')
    normal[0] += (dlctxt + 'are Unmatched:')
    # unmatched list & count, and the list of odd or empty:
    return normal, (total - odd_count), odd
# Now build two lists of subdirectories, with counts.
# (The pair-lists were renamed from `list`/`empt`: the old name `list`
# shadowed the builtin.)
dir_lists = [[], []]   # matched-subdirectory lists for the two trees
odd_lists = [[], []]   # odd-or-empty lists for the two trees
# and the two counts of normal subdirectories:
sdc = [0, 0]
# Get the lists, the local directory is alongside this script:
dir_lists[0], sdc[0], odd_lists[0] = dirlister(drv[0] + sdp + dtbs)
# the external directory may have a reduced path:
if dtbs == 'Current' or dtbs == 'Pointure_23':
    dir_lists[1], sdc[1], odd_lists[1] = dirlister(drv[2] + dtbs)
elif dtbs == 'Copied' or dtbs == 'Stack':
    dir_lists[1], sdc[1], odd_lists[1] = dirlister(drv[1] + 'Dr_' + dtbs)
else:
    # or an equivalent external path:
    dir_lists[1], sdc[1], odd_lists[1] = dirlister(drv[1] + sdp + dtbs)
# Identify the index of the list to be picked through: prefer the 2nd,
# unless the 1st is shorter (fewer pops that way):
d = 1
if sdc[0] < sdc[1]:
    d = 0
# Get the index for the other (possibly longer) list:
dl = 1 - d
# Unmatched items from the picked list accumulate here; its first entry
# ends up being the picked list's title line (titles differ between the
# two trees, so the title never matches and is moved across first):
listd = []
# Pick through one list now, comparing items with the other list:
print('Cross-checking now for differences :')
for i in range(sdc[d] + 1):
    # Pull off the first item from the pick-list:
    item = dir_lists[d].pop(0)
    print('\r', i, end=' ')
    # If the item's in the other list, remove it from there too:
    try:
        dir_lists[dl].remove(item)
    # If not, save it to the (new) unmatched pick-list:
    except ValueError:
        listd.append(item)
print(' - subdirectory records compared')
# Create a nice header:
wrt1 = '\n' + socket.gethostname()+' folder changes at '+startd+'\n'
wrt2 = '\n'+'\n'.join(odd_lists[0])+'\n\n'+'\n'.join(odd_lists[1])+'\n'
# Next the two lists of unmatched items
# (automatically headed by their title lines):
wrt3 = '\n'+'\n'.join(dir_lists[dl])+'\n\n'+'\n'.join(listd)+'\n'
# And the time taken:
wrt4 = '\ntook '+str(time.time()-start)+' seconds to find the differences'
# Write the report (a `with` block guarantees the file is closed even if
# the write raises -- the old code called close() manually):
with open(outfile, 'w') as fo:
    fo.write(wrt1+wrt2+wrt3+wrt4)
| {
"content_hash": "312d53132d1d81f4f05b1082fde3d08e",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 79,
"avg_line_length": 38.86740331491713,
"alnum_prop": 0.6092395167022032,
"repo_name": "harriott/SyncPortableDrives",
"id": "152c7278b87e9662d40e2a1da50b525bd6b234ed",
"size": "7079",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CompDirStructs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PowerShell",
"bytes": "18500"
},
{
"name": "Python",
"bytes": "7079"
}
],
"symlink_target": ""
} |
import os
import pytest
import sh
import shutil
from .config import active_config, FileConfigBuilder
from .io_utils import path_from_var_dir
from .hyperparam_search import (itertools, main, HyperparamSearch,
TrainingCommand)
# Because we spawn other processes, we cannot mock anything in them. So, we
# deliberately set an invalid config path in order for the test runs fast.
# When it is needed to do the real test, set DRY_RUN to False and set
# DatasetProvider.training_steps and DatasetProvider.validation_steps to 1.
DRY_RUN = True
# Config path guaranteed not to exist, so spawned trainings fail immediately:
NOT_EXISTING_PATH = '/tmp/keras_image_captioning/this.does_not_exist'
NUM_GPUS = 2       # GPU count the mocked HyperparamSearch pretends to see
NUM_SEARCHES = 6   # iterations itertools.count is mocked to yield
EPOCHS = 2         # epochs per spawned training run
@pytest.fixture(scope='module')
def clean_up_training_result_dir():
    """Remove any leftover hpsearch training-result dir before tests run."""
    target = path_from_var_dir('flickr8k/training-results/test/hpsearch')
    if not os.path.exists(target):
        return
    shutil.rmtree(target)
@pytest.mark.usefixtures('clean_up_training_result_dir')
class TestHyperparamSearch(object):
    """Tests for HyperparamSearch.

    NOTE(review): these spawn real training subprocesses unless DRY_RUN
    short-circuits them via an invalid config path (see module comment).
    """
    def test__init__(self, mocker):
        # Patch num_gpus so the test does not depend on the host's GPUs.
        mocker.patch.object(HyperparamSearch, 'num_gpus',
                            mocker.PropertyMock(return_value=NUM_GPUS))
        search = HyperparamSearch(training_label_prefix='test/hpsearch/init1',
                                  dataset_name='flickr8k',
                                  epochs=EPOCHS)
        assert search.num_gpus == NUM_GPUS
    def test___init___with_num_gpus(self):
        # An explicit num_gpus argument must override autodetection.
        search = HyperparamSearch(training_label_prefix='test/hpsearch/init2',
                                  dataset_name='flickr8k',
                                  epochs=EPOCHS,
                                  num_gpus=NUM_GPUS + 1)
        assert search.num_gpus == NUM_GPUS + 1
    def test_run(self, mocker):
        if DRY_RUN:
            # Invalid config path makes every spawned training exit quickly.
            mocker.patch.object(TrainingCommand, 'config_filepath',
                                mocker.PropertyMock(
                                    return_value=NOT_EXISTING_PATH))
        mocker.patch.object(HyperparamSearch, 'num_gpus',
                            mocker.PropertyMock(return_value=NUM_GPUS))
        # Bound the otherwise-infinite search loop.
        mocker.patch.object(itertools, 'count', lambda: range(NUM_SEARCHES))
        search = HyperparamSearch(training_label_prefix='test/hpsearch/search',
                                  dataset_name='flickr8k',
                                  epochs=EPOCHS)
        search.run()
        # After run() returns, no spawned command may still be alive.
        assert all(not x[1].process.is_alive()[0]
                   for x in search.running_commands)
@pytest.mark.usefixtures('clean_up_training_result_dir')
class TestTrainingCommand(object):
    """Tests for TrainingCommand."""

    @pytest.fixture
    def config_used(self):
        """An active config shrunk so a (non-dry) run finishes quickly."""
        config = active_config()
        config = config._replace(epochs=2, time_limit=None, batch_size=2)
        return config

    @pytest.fixture
    def training_command(self, config_used):
        # BUGFIX: request `config_used` as a fixture argument instead of
        # calling self.config_used() directly -- calling a fixture function
        # directly raises an error on modern pytest versions.
        return TrainingCommand(training_label='test/hpsearch/training-command',
                               config=config_used,
                               gpu_index=0,
                               background=True)

    def test_execute(self, training_command):
        """The done-callback must fire exactly once, on success or failure."""
        finished = []

        def done_callback(cmd, success, exit_code):
            finished.append(True)
        training_command._done_callback = done_callback

        if DRY_RUN:
            # Point the command at a nonexistent config so it fails fast.
            training_command._config_filepath = NOT_EXISTING_PATH
            running_command = training_command.execute()
            with pytest.raises(sh.ErrorReturnCode_1):
                running_command.wait()
        else:
            running_command = training_command.execute()
            running_command.wait()

        assert len(finished) == 1 and finished[0]

    def test__init_config_filepath(self, training_command, config_used):
        """The config written to disk must round-trip unchanged."""
        training_command._init_config_filepath()
        config_builder = FileConfigBuilder(training_command._config_filepath)
        config = config_builder.build_config()
        assert config == config_used

    def test__init_log_filepath(self, training_command):
        """The log path must embed the training label."""
        training_command._init_log_filepath()
        assert training_command._log_filepath.find(
            training_command.training_label) != -1
@pytest.mark.usefixtures('clean_up_training_result_dir')
def test_main(mocker):
    """End-to-end check of main(), with GPUs and iteration count mocked."""
    if DRY_RUN:
        # Invalid config path makes every spawned training exit quickly.
        mocker.patch.object(TrainingCommand, 'config_filepath',
                            mocker.PropertyMock(
                                return_value=NOT_EXISTING_PATH))
    mocker.patch.object(HyperparamSearch, 'num_gpus',
                        mocker.PropertyMock(return_value=NUM_GPUS))
    mocker.patch.object(itertools, 'count', lambda: range(NUM_SEARCHES))
    # Inside main() if wait=False is passed to executor.shutdown(), the mock
    # will be broken. It works only for a few milliseconds, but then poof! The
    # object is restored to the original. If wait=True, the mock will work
    # perfectly.
    main(training_label_prefix='test/hpsearch/main', dataset_name='flickr8k',
         epochs=EPOCHS)
| {
"content_hash": "823c81ac2a5dfc01b582d44f2a9f335e",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 79,
"avg_line_length": 39.46456692913386,
"alnum_prop": 0.6097366320830008,
"repo_name": "danieljl/keras-image-captioning",
"id": "97b29116d4df3bf3ce389d27519b381b4c3ed761",
"size": "5012",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "keras_image_captioning/hyperparam_search_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "149892"
},
{
"name": "Shell",
"bytes": "1631"
}
],
"symlink_target": ""
} |
from pyjamas.ui.Composite import Composite
from pyjamas.ui.FlexTable import FlexTable
from pyjamas.ui.HTML import HTML
from pyjamas.ui.HorizontalPanel import HorizontalPanel
from pyjamas.ui.Widget import Widget
from pyjamas.ui.Label import Label
from pyjamas.ui import HasAlignment
from MailItems import MailItems
from MailLogger import Logger
class MailList(Composite):
    """Paged table of email headers.

    Shows up to VISIBLE_EMAIL_COUNT messages from the MailItems singleton
    at a time, with newer/older paging links and row selection that tells
    the owning mail object which message to display.
    """

    # Number of message rows shown per page.
    VISIBLE_EMAIL_COUNT = 10

    def __init__(self, mailObject):
        Composite.__init__(self)

        self.countLabel = HTML()
        self.newerButton = HTML("<a href='javascript:;'>< newer</a>", True)
        self.olderButton = HTML("<a href='javascript:;'>older ></a>", True)
        self.startIndex = 0    # index of the first message on this page
        self.selectedRow = -1  # selected row, 0-based, header excluded
        self.table = FlexTable()
        self.navBar = HorizontalPanel()
        self.mailObject = mailObject

        # Setup the table.
        self.table.setCellSpacing(0)
        self.table.setCellPadding(2)
        self.table.setWidth("100%")

        # Hook up events.
        self.table.addTableListener(self)
        self.newerButton.addClickListener(self)
        self.olderButton.addClickListener(self)

        # Create the 'navigation' bar at the upper-right.
        innerNavBar = HorizontalPanel()
        innerNavBar.setSpacing(8)
        innerNavBar.add(self.newerButton)
        innerNavBar.add(self.countLabel)
        innerNavBar.add(self.olderButton)

        self.navBar.setStyleName("mail-ListNavBar")
        self.navBar.setHorizontalAlignment(HasAlignment.ALIGN_RIGHT)
        self.navBar.add(innerNavBar)
        self.navBar.setWidth("100%")

        self.initWidget(self.table)
        self.setStyleName("mail-List")

        self.initTable()
        self.update()

    def onCellDoubleClicked(self, sender, row, cell):
        """Double-clicks are ignored."""
        pass

    def onCellClicked(self, sender, row, cell):
        """Select the row that was clicked (-1 to account for header row)."""
        if (row > 0):
            self.selectRow(row - 1)

    def onClick(self, sender):
        """Handle the newer/older paging links."""
        if (sender == self.olderButton):
            # Move forward a page.
            self.startIndex = self.startIndex + MailList.VISIBLE_EMAIL_COUNT
            if (self.startIndex >= MailItems().getMailItemCount()):
                # Already at the end: undo the move.
                self.startIndex = self.startIndex - MailList.VISIBLE_EMAIL_COUNT
            else:
                self.styleRow(self.selectedRow, False)
                self.selectedRow = -1
                self.update()
        elif (sender == self.newerButton):
            # Move back a page.
            self.startIndex = self.startIndex - MailList.VISIBLE_EMAIL_COUNT
            if (self.startIndex < 0):
                self.startIndex = 0
            else:
                self.styleRow(self.selectedRow, False)
                self.selectedRow = -1
                self.update()

    def initTable(self):
        """Create the header row and blank out the data rows."""
        # Create the header row.
        self.table.setText(0, 0, "sender")
        self.table.setText(0, 1, "email")
        self.table.setText(0, 2, "subject")
        self.table.setWidget(0, 3, self.navBar)
        self.table.getRowFormatter().setStyleName(0, "mail-ListHeader")

        # Initialize the rest of the rows.
        i = 0
        while i < MailList.VISIBLE_EMAIL_COUNT:
            self.table.setText(i + 1, 0, "")
            self.table.setText(i + 1, 1, "")
            self.table.setText(i + 1, 2, "")
            self.table.getCellFormatter().setWordWrap(i + 1, 0, False)
            self.table.getCellFormatter().setWordWrap(i + 1, 1, False)
            self.table.getCellFormatter().setWordWrap(i + 1, 2, False)
            self.table.getFlexCellFormatter().setColSpan(i + 1, 2, 2)
            i = i + 1

    def selectRow(self, row):
        """Select a data row and display its associated MailItem.

        `row` is 0-based (the header row is not counted).
        """
        item = MailItems().getMailItem(self.startIndex + row)
        if item is None:
            return

        self.styleRow(self.selectedRow, False)
        self.styleRow(row, True)

        item.read = True
        self.selectedRow = row
        self.mailObject.get().displayItem(item)

    def styleRow(self, row, selected):
        """Add or remove the selected-row style on a data row."""
        if (row != -1):
            if (selected):
                self.table.getRowFormatter().addStyleName(
                    row + 1, "mail-SelectedRow")
            else:
                self.table.getRowFormatter().removeStyleName(
                    row + 1, "mail-SelectedRow")

    def update(self):
        """Refresh the paging controls and redraw the visible rows."""
        # Update the older/newer buttons & label.
        count = MailItems().getMailItemCount()
        # BUGFIX: this local was named `max`, shadowing the builtin.
        last = self.startIndex + MailList.VISIBLE_EMAIL_COUNT
        if (last > count):
            last = count
        self.newerButton.setVisible(self.startIndex != 0)
        self.olderButton.setVisible(
            self.startIndex + MailList.VISIBLE_EMAIL_COUNT < count)
        startIndexPlusOne = self.startIndex + 1
        self.countLabel.setText("%d - %d of %d" %
                                (startIndexPlusOne, last, count))

        # Show the selected emails.
        i = 0
        while (i < MailList.VISIBLE_EMAIL_COUNT):
            # Don't read past the end.
            if (self.startIndex + i >= MailItems().getMailItemCount()):
                break

            item = MailItems().getMailItem(self.startIndex + i)

            # Add a row to the table, then set each of its columns to the
            # email's sender and subject values.
            self.table.setText(i + 1, 0, item.sender)
            self.table.setText(i + 1, 1, item.email)
            self.table.setText(i + 1, 2, item.subject)
            i = i + 1

        # Clear any remaining slots.
        while (i < MailList.VISIBLE_EMAIL_COUNT):
            self.table.setHTML(i + 1, 0, "&nbsp;")
            self.table.setHTML(i + 1, 1, "&nbsp;")
            self.table.setHTML(i + 1, 2, "&nbsp;")
            i = i + 1

        # Select the first row if none is selected.
        if (self.selectedRow == -1):
            self.selectRow(0)
| {
"content_hash": "15064d6e5e5fab734785b6c9ff23464c",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 91,
"avg_line_length": 35.56024096385542,
"alnum_prop": 0.5935964763679485,
"repo_name": "pyjs/pyjs",
"id": "e390963a00480818980641cac1b6885a52da3ffc",
"size": "5903",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "examples/mail/MailList.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4640"
},
{
"name": "Groff",
"bytes": "6633"
},
{
"name": "HTML",
"bytes": "10106"
},
{
"name": "JavaScript",
"bytes": "63385"
},
{
"name": "Makefile",
"bytes": "453"
},
{
"name": "Python",
"bytes": "5515375"
},
{
"name": "Shell",
"bytes": "4264"
}
],
"symlink_target": ""
} |
"""Callbacks: utilities called at certain points during model training.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import copy
import csv
import io
import json
import os
import re
import time
import numpy as np
import six
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.distribute import collective_all_reduce_strategy
from tensorflow.python.distribute import distribute_lib
from tensorflow.python.distribute import distributed_file_utils
from tensorflow.python.distribute import mirrored_strategy
from tensorflow.python.eager import context
from tensorflow.python.framework import ops
from tensorflow.python.keras import backend as K
from tensorflow.python.keras.distribute import worker_training_state
from tensorflow.python.keras.optimizer_v2 import learning_rate_schedule
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.keras.utils import version_utils
from tensorflow.python.keras.utils.data_utils import Sequence
from tensorflow.python.keras.utils.generic_utils import Progbar
from tensorflow.python.keras.utils.io_utils import path_to_string
from tensorflow.python.keras.utils.mode_keys import ModeKeys
from tensorflow.python.lib.io import file_io
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import summary_ops_v2
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.profiler import profiler_v2 as profiler
from tensorflow.python.saved_model import save_options as save_options_lib
from tensorflow.python.training import checkpoint_management
from tensorflow.python.training.saving import checkpoint_options as checkpoint_options_lib
from tensorflow.python.util import nest
from tensorflow.python.util.compat import collections_abc
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
# `requests` is an optional dependency: when it is not installed the name is
# bound to None, and any code using it must check for that first.
# NOTE(review): presumably consumed by an HTTP-based callback later in this
# module -- confirm before relying on it.
try:
    import requests
except ImportError:
    requests = None
def configure_callbacks(callbacks,
                        model,
                        do_validation=False,
                        batch_size=None,
                        epochs=None,
                        steps_per_epoch=None,
                        samples=None,
                        verbose=1,
                        count_mode='steps',
                        mode=ModeKeys.TRAIN):
    """Configures callbacks for use in various training loops.

    Arguments:
        callbacks: List of Callbacks (or an already-built CallbackList).
        model: Model being trained.
        do_validation: Whether or not validation loop will be run.
        batch_size: Number of samples per batch.
        epochs: Number of epochs to train.
        steps_per_epoch: Number of batches to run per training epoch.
        samples: Number of training samples.
        verbose: int, 0 or 1. Keras logging verbosity for ProgbarLogger.
        count_mode: One of 'steps' or 'samples'. Per-batch or per-sample count.
        mode: String. One of ModeKeys.TRAIN, ModeKeys.TEST, or
          ModeKeys.PREDICT. Which loop mode to configure callbacks for.

    Returns:
        Instance of CallbackList used to control all Callbacks.
    """
    # Already configured -- pass straight through.
    if isinstance(callbacks, CallbackList):
        return callbacks

    callbacks = callbacks or []

    # Training wraps the user callbacks with the standard bookkeeping ones:
    # BaseLogger first, History last, and an optional progress bar.
    if mode == ModeKeys.TRAIN:
        model.history = History()
        callbacks = [BaseLogger()] + list(callbacks) + [model.history]
        if verbose:
            callbacks.append(ProgbarLogger(count_mode))
    callback_list = CallbackList(callbacks)

    # Set callback model.
    callback_model = model._get_callback_model()  # pylint: disable=protected-access
    callback_list.set_model(callback_model)

    set_callback_parameters(
        callback_list,
        model,
        do_validation=do_validation,
        batch_size=batch_size,
        epochs=epochs,
        steps_per_epoch=steps_per_epoch,
        samples=samples,
        verbose=verbose,
        mode=mode)

    callback_list.model.stop_training = False
    return callback_list
def set_callback_parameters(callback_list,
                            model,
                            do_validation=False,
                            batch_size=None,
                            epochs=None,
                            steps_per_epoch=None,
                            samples=None,
                            verbose=1,
                            mode=ModeKeys.TRAIN):
    """Sets callback parameters.

    Arguments:
        callback_list: CallbackList instance.
        model: Model being trained.
        do_validation: Whether or not validation loop will be run.
        batch_size: Number of samples per batch.
        epochs: Number of epochs to train.
        steps_per_epoch: Number of batches to run per training epoch.
        samples: Number of training samples.
        verbose: int, 0 or 1. Keras logging verbosity for ProgbarLogger.
        mode: String. One of ModeKeys.TRAIN, ModeKeys.TEST, or
          ModeKeys.PREDICT. Which loop mode to configure callbacks for.
    """
    metric_names = model.metrics_names
    # Logger-style callbacks aggregate every metric except the leading `loss`.
    for cbk in callback_list:
        if isinstance(cbk, (BaseLogger, ProgbarLogger)):
            cbk.stateful_metrics = metric_names[1:]

    # Predict mode reports no metrics; otherwise copy the metric names and,
    # when validating, add their `val_`-prefixed counterparts.
    # (When we have a deferred-build scenario with iterator input, metrics
    # appear once the first batch of data is standardized.)
    if mode == ModeKeys.PREDICT:
        callback_metrics = []
    else:
        callback_metrics = list(metric_names)
        if do_validation:
            callback_metrics.extend('val_' + n for n in metric_names)

    callback_list.set_params({
        'batch_size': batch_size,
        'epochs': epochs,
        'steps': steps_per_epoch,
        'samples': samples,
        'verbose': verbose,
        'do_validation': do_validation,
        'metrics': callback_metrics,
    })
def _is_generator_like(data):
"""Checks if data is a generator, Sequence, or Iterator."""
return (hasattr(data, '__next__') or hasattr(data, 'next') or isinstance(
data, (Sequence, iterator_ops.Iterator, iterator_ops.OwnedIterator)))
def make_logs(model, logs, outputs, mode, prefix=''):
    """Populates `logs` with `outputs` for sending to `on_batch_end` methods.

    In TRAIN/TEST mode (with known metric names) each output is stored under
    its (optionally prefixed) metric name; otherwise the raw outputs are
    stored under the 'outputs' key.  Returns the mutated `logs` dict.
    """
    metric_names = model.metrics_names
    if mode not in {ModeKeys.TRAIN, ModeKeys.TEST} or not metric_names:
        logs['outputs'] = outputs
        return logs
    for label, value in zip(metric_names, outputs):
        logs[prefix + label] = value
    return logs
@keras_export('keras.callbacks.CallbackList')
class CallbackList(object):
"""Container abstracting a list of callbacks."""
def __init__(self,
             callbacks=None,
             add_history=False,
             add_progbar=False,
             model=None,
             **params):
    """Container for `Callback` instances.

    This object wraps a list of `Callback` instances, making it possible
    to call them all at once via a single endpoint
    (e.g. `callback_list.on_epoch_end(...)`).

    Arguments:
        callbacks: List of `Callback` instances.
        add_history: Whether a `History` callback should be added, if one
          does not already exist in the `callbacks` list.
        add_progbar: Whether a `ProgbarLogger` callback should be added, if
          one does not already exist in the `callbacks` list.
        model: The `Model` these callbacks are used with.
        **params: If provided, parameters will be passed to each `Callback`
          via `Callback.set_params`.
    """
    self.callbacks = nest.flatten(callbacks) if callbacks else []
    self._add_default_callbacks(add_history, add_progbar)

    if model:
        self.set_model(model)
    if params:
        self.set_params(params)

    # Performance optimization: determines if batch hooks need to be called.
    # pylint: disable=protected-access
    self._should_call_train_batch_hooks = any(
        cb._implements_train_batch_hooks() for cb in self.callbacks)
    self._should_call_test_batch_hooks = any(
        cb._implements_test_batch_hooks() for cb in self.callbacks)
    self._should_call_predict_batch_hooks = any(
        cb._implements_predict_batch_hooks() for cb in self.callbacks)
    # pylint: enable=protected-access

    # Performance check: Check batch hooks for slowness compared to batch
    # time. Only run check for custom callbacks (i.e. not present in this
    # file).  FIX: pass a generator to `any` instead of materializing a
    # throwaway list first.
    self._check_timing = any(
        cbk.__class__.__name__ not in globals() for cbk in self.callbacks)
    self._num_batches_for_timing_check = 5
    self._hook_times = {}
    self._batch_start_time = None
    self._batch_times = []
def _add_default_callbacks(self, add_history, add_progbar):
"""Adds `Callback`s that are always present."""
self._progbar = None
self._history = None
for cb in self.callbacks:
if isinstance(cb, ProgbarLogger):
self._progbar = cb
elif isinstance(cb, History):
self._history = cb
if self._progbar is None and add_progbar:
self._progbar = ProgbarLogger(count_mode='steps')
self.callbacks.append(self._progbar)
if self._history is None and add_history:
self._history = History()
self.callbacks.append(self._history)
def append(self, callback):
    """Adds a `Callback` to the end of the managed list.

    NOTE(review): the cached `_should_call_*_batch_hooks` flags computed in
    `__init__` are not refreshed here -- confirm that is intentional.
    """
    self.callbacks.append(callback)
def set_params(self, params):
self.params = params
for callback in self.callbacks:
callback.set_params(params)
def set_model(self, model):
self.model = model
if self._history:
model.history = self._history
for callback in self.callbacks:
callback.set_model(model)
def _call_batch_hook(self, mode, hook, batch, logs=None):
"""Helper function for all batch_{begin | end} methods."""
if not self.callbacks:
return
if hook == 'begin':
self._call_batch_begin_hook(mode, batch, logs)
elif hook == 'end':
self._call_batch_end_hook(mode, batch, logs)
else:
raise ValueError('Unrecognized hook: {}'.format(hook))
def _call_batch_begin_hook(self, mode, batch, logs):
"""Helper function for `on_*_batch_begin` methods."""
hook_name = 'on_{mode}_batch_begin'.format(mode=mode)
self._call_batch_hook_helper(hook_name, batch, logs)
if self._check_timing:
self._batch_start_time = time.time()
def _call_batch_end_hook(self, mode, batch, logs):
    """Helper function for `on_*_batch_end` methods.

    Besides invoking the hooks, this accumulates per-batch wall-clock
    timings and, once enough batches have been sampled, warns when a
    callback hook is slower than the batch itself, then disables further
    timing checks.
    """
    hook_name = 'on_{mode}_batch_end'.format(mode=mode)
    # The very first batch (batch 0) is excluded from timing -- presumably
    # because it carries one-off setup cost; confirm before changing.
    if self._check_timing and batch >= 1:
        batch_time = time.time() - self._batch_start_time
        self._batch_times.append(batch_time)

    self._call_batch_hook_helper(hook_name, batch, logs)

    if len(self._batch_times) >= self._num_batches_for_timing_check:
        end_hook_name = hook_name
        begin_hook_name = 'on_{mode}_batch_begin'.format(mode=mode)
        # Average the sampled batch and hook durations.
        avg_batch_time = sum(self._batch_times) / len(self._batch_times)
        avg_end_hook_time = sum(self._hook_times[end_hook_name]) / len(
            self._hook_times[end_hook_name])
        avg_begin_hook_time = sum(self._hook_times[begin_hook_name]) / len(
            self._hook_times[begin_hook_name])

        # A hook is "slow" when it takes longer than one whole batch.
        threshold_time = 1.0 * avg_batch_time
        warning_msg = ('Callback method `{hook}` is slow compared to '
                       'the batch time (batch time: {batch_time:.4f}s vs '
                       '`{hook}` time: {hook_time:.4f}s). Check your callbacks.')
        if avg_begin_hook_time > threshold_time:
            logging.warning(warning_msg.format(
                hook=begin_hook_name,
                batch_time=avg_batch_time,
                hook_time=avg_begin_hook_time))
        if avg_end_hook_time > threshold_time:
            logging.warning(warning_msg.format(
                hook=end_hook_name,
                batch_time=avg_batch_time,
                hook_time=avg_end_hook_time))
        # One-shot check: reset all timing state and stop measuring.
        self._check_timing = False
        self._batch_start_time = None
        self._batch_times = []
        self._hook_times = {}
def _call_batch_hook_helper(self, hook_name, batch, logs):
"""Helper function for `on_*_batch_*` methods."""
logs = logs or {}
numpy_logs = None
if self._check_timing:
start_time = time.time()
for callback in self.callbacks:
hook = getattr(callback, hook_name)
if getattr(callback, '_supports_tf_logs', False):
hook(batch, logs)
else:
if numpy_logs is None: # Only convert once.
numpy_logs = tf_utils.to_numpy_or_python_type(logs)
hook(batch, numpy_logs)
if self._check_timing:
if hook_name not in self._hook_times:
self._hook_times[hook_name] = []
self._hook_times[hook_name].append(time.time() - start_time)
def _call_begin_hook(self, mode):
    """Routes to on_{train|test|predict}_begin for `mode`."""
    if mode == ModeKeys.TRAIN:
        self.on_train_begin()
        return
    if mode == ModeKeys.TEST:
        self.on_test_begin()
        return
    self.on_predict_begin()
def _call_end_hook(self, mode):
    """Routes to on_{train|test|predict}_end for `mode`."""
    if mode == ModeKeys.TRAIN:
        self.on_train_end()
        return
    if mode == ModeKeys.TEST:
        self.on_test_end()
        return
    self.on_predict_end()
def on_batch_begin(self, batch, logs=None):
if self._should_call_train_batch_hooks:
self._call_batch_hook(ModeKeys.TRAIN, 'begin', batch, logs=logs)
def on_batch_end(self, batch, logs=None):
if self._should_call_train_batch_hooks:
self._call_batch_hook(ModeKeys.TRAIN, 'end', batch, logs=logs)
def on_epoch_begin(self, epoch, logs=None):
"""Calls the `on_epoch_begin` methods of its callbacks.
This function should only be called during TRAIN mode.
Arguments:
epoch: Integer, index of epoch.
logs: Dict. Currently no data is passed to this argument for this method
but that may change in the future.
"""
logs = logs or {}
numpy_logs = None
for callback in self.callbacks:
if getattr(callback, '_supports_tf_logs', False):
callback.on_epoch_begin(epoch, logs)
else:
if numpy_logs is None: # Only convert once.
numpy_logs = tf_utils.to_numpy_or_python_type(logs)
callback.on_epoch_begin(epoch, numpy_logs)
def on_epoch_end(self, epoch, logs=None):
"""Calls the `on_epoch_end` methods of its callbacks.
This function should only be called during TRAIN mode.
Arguments:
epoch: Integer, index of epoch.
logs: Dict, metric results for this training epoch, and for the
validation epoch if validation is performed. Validation result keys
are prefixed with `val_`.
"""
logs = logs or {}
numpy_logs = None
for callback in self.callbacks:
if getattr(callback, '_supports_tf_logs', False):
callback.on_epoch_end(epoch, logs)
else:
if numpy_logs is None: # Only convert once.
numpy_logs = tf_utils.to_numpy_or_python_type(logs)
callback.on_epoch_end(epoch, numpy_logs)
def on_train_batch_begin(self, batch, logs=None):
"""Calls the `on_train_batch_begin` methods of its callbacks.
Arguments:
batch: Integer, index of batch within the current epoch.
logs: Dict, contains the return value of `model.train_step`. Typically,
the values of the `Model`'s metrics are returned. Example:
`{'loss': 0.2, 'accuracy': 0.7}`.
"""
if self._should_call_train_batch_hooks:
self._call_batch_hook(ModeKeys.TRAIN, 'begin', batch, logs=logs)
def on_train_batch_end(self, batch, logs=None):
"""Calls the `on_train_batch_end` methods of its callbacks.
Arguments:
batch: Integer, index of batch within the current epoch.
logs: Dict. Aggregated metric results up until this batch.
"""
if self._should_call_train_batch_hooks:
self._call_batch_hook(ModeKeys.TRAIN, 'end', batch, logs=logs)
def on_test_batch_begin(self, batch, logs=None):
"""Calls the `on_test_batch_begin` methods of its callbacks.
Arguments:
batch: Integer, index of batch within the current epoch.
logs: Dict, contains the return value of `model.test_step`. Typically,
the values of the `Model`'s metrics are returned. Example:
`{'loss': 0.2, 'accuracy': 0.7}`.
"""
if self._should_call_test_batch_hooks:
self._call_batch_hook(ModeKeys.TEST, 'begin', batch, logs=logs)
def on_test_batch_end(self, batch, logs=None):
"""Calls the `on_test_batch_end` methods of its callbacks.
Arguments:
batch: Integer, index of batch within the current epoch.
logs: Dict. Aggregated metric results up until this batch.
"""
if self._should_call_test_batch_hooks:
self._call_batch_hook(ModeKeys.TEST, 'end', batch, logs=logs)
def on_predict_batch_begin(self, batch, logs=None):
"""Calls the `on_predict_batch_begin` methods of its callbacks.
Arguments:
batch: Integer, index of batch within the current epoch.
logs: Dict, contains the return value of `model.predict_step`,
it typically returns a dict with a key 'outputs' containing
the model's outputs.
"""
if self._should_call_predict_batch_hooks:
self._call_batch_hook(ModeKeys.PREDICT, 'begin', batch, logs=logs)
def on_predict_batch_end(self, batch, logs=None):
"""Calls the `on_predict_batch_end` methods of its callbacks.
Arguments:
batch: Integer, index of batch within the current epoch.
logs: Dict. Aggregated metric results up until this batch.
"""
if self._should_call_predict_batch_hooks:
self._call_batch_hook(ModeKeys.PREDICT, 'end', batch, logs=logs)
def on_train_begin(self, logs=None):
"""Calls the `on_train_begin` methods of its callbacks.
Arguments:
logs: Dict. Currently no data is passed to this argument for this method
but that may change in the future.
"""
logs = logs or {}
numpy_logs = None
for callback in self.callbacks:
if getattr(callback, '_supports_tf_logs', False):
callback.on_train_begin(logs)
else:
if numpy_logs is None: # Only convert once.
numpy_logs = tf_utils.to_numpy_or_python_type(logs)
callback.on_train_begin(numpy_logs)
def on_train_end(self, logs=None):
"""Calls the `on_train_end` methods of its callbacks.
Arguments:
logs: Dict. Currently no data is passed to this argument for this method
but that may change in the future.
"""
logs = logs or {}
numpy_logs = None
for callback in self.callbacks:
if getattr(callback, '_supports_tf_logs', False):
callback.on_train_end(logs)
else:
if numpy_logs is None: # Only convert once.
numpy_logs = tf_utils.to_numpy_or_python_type(logs)
callback.on_train_end(numpy_logs)
def on_test_begin(self, logs=None):
  """Runs the `on_test_begin` hook of every registered callback.

  Arguments:
    logs: Dict. Currently no data is passed to this argument for this method
      but that may change in the future.
  """
  logs = logs or {}
  converted = None
  for cb in self.callbacks:
    if getattr(cb, '_supports_tf_logs', False):
      # Callback can consume raw (possibly tensor-valued) logs directly.
      cb.on_test_begin(logs)
      continue
    if converted is None:
      # Convert to numpy/python values lazily, at most once.
      converted = tf_utils.to_numpy_or_python_type(logs)
    cb.on_test_begin(converted)
def on_test_end(self, logs=None):
  """Runs the `on_test_end` hook of every registered callback.

  Arguments:
    logs: Dict. Currently no data is passed to this argument for this method
      but that may change in the future.
  """
  logs = logs or {}
  converted = None
  for cb in self.callbacks:
    if getattr(cb, '_supports_tf_logs', False):
      # Callback can consume raw (possibly tensor-valued) logs directly.
      cb.on_test_end(logs)
      continue
    if converted is None:
      # Convert to numpy/python values lazily, at most once.
      converted = tf_utils.to_numpy_or_python_type(logs)
    cb.on_test_end(converted)
def on_predict_begin(self, logs=None):
  """Runs the `on_predict_begin` hook of every registered callback.

  Arguments:
    logs: Dict. Currently no data is passed to this argument for this method
      but that may change in the future.
  """
  logs = logs or {}
  converted = None
  for cb in self.callbacks:
    if getattr(cb, '_supports_tf_logs', False):
      # Callback can consume raw (possibly tensor-valued) logs directly.
      cb.on_predict_begin(logs)
      continue
    if converted is None:
      # Convert to numpy/python values lazily, at most once.
      converted = tf_utils.to_numpy_or_python_type(logs)
    cb.on_predict_begin(converted)
def on_predict_end(self, logs=None):
  """Runs the `on_predict_end` hook of every registered callback.

  Arguments:
    logs: Dict. Currently no data is passed to this argument for this method
      but that may change in the future.
  """
  logs = logs or {}
  converted = None
  for cb in self.callbacks:
    if getattr(cb, '_supports_tf_logs', False):
      # Callback can consume raw (possibly tensor-valued) logs directly.
      cb.on_predict_end(logs)
      continue
    if converted is None:
      # Convert to numpy/python values lazily, at most once.
      converted = tf_utils.to_numpy_or_python_type(logs)
    cb.on_predict_end(converted)
def __iter__(self):
  """Iterates over the wrapped `Callback` instances."""
  return self.callbacks.__iter__()
@keras_export('keras.callbacks.Callback')
class Callback(object):
  """Abstract base class used to build new callbacks.

  Attributes:
      params: Dict. Training parameters
          (eg. verbosity, batch size, number of epochs...).
      model: Instance of `keras.models.Model`.
          Reference of the model being trained.

  The `logs` dictionary that callback methods
  take as argument will contain keys for quantities relevant to
  the current batch or epoch (see method-specific docstrings).
  """

  def __init__(self):
    self.validation_data = None  # pylint: disable=g-missing-from-attributes
    self.model = None
    # Whether this Callback should only run on the chief worker in a
    # Multi-Worker setting.
    # TODO(omalleyt): Make this attr public once solution is stable.
    self._chief_worker_only = None
    # When False, the dispatching CallbackList converts tensor-valued logs
    # to numpy before invoking this callback's hooks.
    self._supports_tf_logs = False

  def set_params(self, params):
    # Called by the framework to share training parameters
    # (verbosity, epochs, steps, ...) with this callback.
    self.params = params

  def set_model(self, model):
    # Called by the framework to attach the model being trained/evaluated.
    self.model = model

  @doc_controls.for_subclass_implementers
  @generic_utils.default
  def on_batch_begin(self, batch, logs=None):
    """A backwards compatibility alias for `on_train_batch_begin`."""

  @doc_controls.for_subclass_implementers
  @generic_utils.default
  def on_batch_end(self, batch, logs=None):
    """A backwards compatibility alias for `on_train_batch_end`."""

  @doc_controls.for_subclass_implementers
  def on_epoch_begin(self, epoch, logs=None):
    """Called at the start of an epoch.

    Subclasses should override for any actions to run. This function should only
    be called during TRAIN mode.

    Arguments:
        epoch: Integer, index of epoch.
        logs: Dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """

  @doc_controls.for_subclass_implementers
  def on_epoch_end(self, epoch, logs=None):
    """Called at the end of an epoch.

    Subclasses should override for any actions to run. This function should only
    be called during TRAIN mode.

    Arguments:
        epoch: Integer, index of epoch.
        logs: Dict, metric results for this training epoch, and for the
          validation epoch if validation is performed. Validation result keys
          are prefixed with `val_`.
    """

  @doc_controls.for_subclass_implementers
  @generic_utils.default
  def on_train_batch_begin(self, batch, logs=None):
    """Called at the beginning of a training batch in `fit` methods.

    Subclasses should override for any actions to run.

    Note that if the `steps_per_execution` argument to `compile` in
    `tf.keras.Model` is set to `N`, this method will only be called every `N`
    batches.

    Arguments:
        batch: Integer, index of batch within the current epoch.
        logs: Dict, contains the return value of `model.train_step`. Typically,
          the values of the `Model`'s metrics are returned. Example:
          `{'loss': 0.2, 'accuracy': 0.7}`.
    """
    # For backwards compatibility.
    self.on_batch_begin(batch, logs=logs)

  @doc_controls.for_subclass_implementers
  @generic_utils.default
  def on_train_batch_end(self, batch, logs=None):
    """Called at the end of a training batch in `fit` methods.

    Subclasses should override for any actions to run.

    Note that if the `steps_per_execution` argument to `compile` in
    `tf.keras.Model` is set to `N`, this method will only be called every `N`
    batches.

    Arguments:
        batch: Integer, index of batch within the current epoch.
        logs: Dict. Aggregated metric results up until this batch.
    """
    # For backwards compatibility.
    self.on_batch_end(batch, logs=logs)

  @doc_controls.for_subclass_implementers
  @generic_utils.default
  def on_test_batch_begin(self, batch, logs=None):
    """Called at the beginning of a batch in `evaluate` methods.

    Also called at the beginning of a validation batch in the `fit`
    methods, if validation data is provided.

    Subclasses should override for any actions to run.

    Note that if the `steps_per_execution` argument to `compile` in
    `tf.keras.Model` is set to `N`, this method will only be called every `N`
    batches.

    Arguments:
        batch: Integer, index of batch within the current epoch.
        logs: Dict, contains the return value of `model.test_step`. Typically,
          the values of the `Model`'s metrics are returned. Example:
          `{'loss': 0.2, 'accuracy': 0.7}`.
    """

  @doc_controls.for_subclass_implementers
  @generic_utils.default
  def on_test_batch_end(self, batch, logs=None):
    """Called at the end of a batch in `evaluate` methods.

    Also called at the end of a validation batch in the `fit`
    methods, if validation data is provided.

    Subclasses should override for any actions to run.

    Note that if the `steps_per_execution` argument to `compile` in
    `tf.keras.Model` is set to `N`, this method will only be called every `N`
    batches.

    Arguments:
        batch: Integer, index of batch within the current epoch.
        logs: Dict. Aggregated metric results up until this batch.
    """

  @doc_controls.for_subclass_implementers
  @generic_utils.default
  def on_predict_batch_begin(self, batch, logs=None):
    """Called at the beginning of a batch in `predict` methods.

    Subclasses should override for any actions to run.

    Note that if the `steps_per_execution` argument to `compile` in
    `tf.keras.Model` is set to `N`, this method will only be called every `N`
    batches.

    Arguments:
        batch: Integer, index of batch within the current epoch.
        logs: Dict, contains the return value of `model.predict_step`,
          it typically returns a dict with a key 'outputs' containing
          the model's outputs.
    """

  @doc_controls.for_subclass_implementers
  @generic_utils.default
  def on_predict_batch_end(self, batch, logs=None):
    """Called at the end of a batch in `predict` methods.

    Subclasses should override for any actions to run.

    Note that if the `steps_per_execution` argument to `compile` in
    `tf.keras.Model` is set to `N`, this method will only be called every `N`
    batches.

    Arguments:
        batch: Integer, index of batch within the current epoch.
        logs: Dict. Aggregated metric results up until this batch.
    """

  @doc_controls.for_subclass_implementers
  def on_train_begin(self, logs=None):
    """Called at the beginning of training.

    Subclasses should override for any actions to run.

    Arguments:
        logs: Dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """

  @doc_controls.for_subclass_implementers
  def on_train_end(self, logs=None):
    """Called at the end of training.

    Subclasses should override for any actions to run.

    Arguments:
        logs: Dict. Currently the output of the last call to `on_epoch_end()`
          is passed to this argument for this method but that may change in
          the future.
    """

  @doc_controls.for_subclass_implementers
  def on_test_begin(self, logs=None):
    """Called at the beginning of evaluation or validation.

    Subclasses should override for any actions to run.

    Arguments:
        logs: Dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """

  @doc_controls.for_subclass_implementers
  def on_test_end(self, logs=None):
    """Called at the end of evaluation or validation.

    Subclasses should override for any actions to run.

    Arguments:
        logs: Dict. Currently the output of the last call to
          `on_test_batch_end()` is passed to this argument for this method
          but that may change in the future.
    """

  @doc_controls.for_subclass_implementers
  def on_predict_begin(self, logs=None):
    """Called at the beginning of prediction.

    Subclasses should override for any actions to run.

    Arguments:
        logs: Dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """

  @doc_controls.for_subclass_implementers
  def on_predict_end(self, logs=None):
    """Called at the end of prediction.

    Subclasses should override for any actions to run.

    Arguments:
        logs: Dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """

  # The `_implements_*_batch_hooks` methods below let the dispatcher skip
  # per-batch hook calls for callbacks that did not override any batch-level
  # hook. `generic_utils.is_default` detects overrides via the
  # `@generic_utils.default` decorators above.

  def _implements_train_batch_hooks(self):
    """Determines if this Callback should be called for each train batch."""
    return (not generic_utils.is_default(self.on_batch_begin) or
            not generic_utils.is_default(self.on_batch_end) or
            not generic_utils.is_default(self.on_train_batch_begin) or
            not generic_utils.is_default(self.on_train_batch_end))

  def _implements_test_batch_hooks(self):
    """Determines if this Callback should be called for each test batch."""
    return (not generic_utils.is_default(self.on_test_batch_begin) or
            not generic_utils.is_default(self.on_test_batch_end))

  def _implements_predict_batch_hooks(self):
    """Determines if this Callback should be called for each predict batch."""
    return (not generic_utils.is_default(self.on_predict_batch_begin) or
            not generic_utils.is_default(self.on_predict_batch_end))
@keras_export('keras.callbacks.BaseLogger')
class BaseLogger(Callback):
  """Callback that accumulates epoch averages of metrics.

  This callback is automatically applied to every Keras model.

  Arguments:
      stateful_metrics: Iterable of string names of metrics that
          should *not* be averaged over an epoch.
          Metrics in this list will be logged as-is in `on_epoch_end`.
          All others will be averaged in `on_epoch_end`.
  """

  def __init__(self, stateful_metrics=None):
    super(BaseLogger, self).__init__()
    self.stateful_metrics = set(stateful_metrics or [])

  def on_epoch_begin(self, epoch, logs=None):
    # Reset the running totals at the start of every epoch.
    self.seen = 0
    self.totals = {}

  def on_batch_end(self, batch, logs=None):
    logs = logs or {}
    batch_size = logs.get('size', 0)
    # In case of distribution strategy we can potentially run multiple steps
    # at the same time, we should account for that in the `seen` calculation.
    num_steps = logs.get('num_steps', 1)
    self.seen += batch_size * num_steps

    for k, v in logs.items():
      if k in self.stateful_metrics:
        # Stateful metrics are already aggregated; keep the latest value.
        self.totals[k] = v
      else:
        # Accumulate a sample-weighted sum, to be averaged at epoch end.
        if k in self.totals:
          self.totals[k] += v * batch_size
        else:
          self.totals[k] = v * batch_size

  def on_epoch_end(self, epoch, logs=None):
    if logs is not None:
      for k in self.params['metrics']:
        if k in self.totals:
          # Make value available to next callbacks.
          if k in self.stateful_metrics:
            logs[k] = self.totals[k]
          elif self.seen:
            logs[k] = self.totals[k] / self.seen
          else:
            # No samples were counted this epoch (e.g. the batch logs carried
            # no 'size' entry). Fall back to the raw total rather than
            # raising ZeroDivisionError.
            logs[k] = self.totals[k]
@keras_export('keras.callbacks.TerminateOnNaN')
class TerminateOnNaN(Callback):
  """Callback that stops training as soon as the loss becomes NaN or Inf."""

  def __init__(self):
    super(TerminateOnNaN, self).__init__()
    self._supports_tf_logs = True

  def on_batch_end(self, batch, logs=None):
    loss = (logs or {}).get('loss')
    if loss is None:
      return
    # The raw value may be a tensor; convert before the numpy checks.
    loss = tf_utils.to_numpy_or_python_type(loss)
    if np.isnan(loss) or np.isinf(loss):
      print('Batch %d: Invalid loss, terminating training' % (batch))
      self.model.stop_training = True
@keras_export('keras.callbacks.ProgbarLogger')
class ProgbarLogger(Callback):
  """Callback that prints metrics to stdout.

  Arguments:
      count_mode: One of `"steps"` or `"samples"`.
          Whether the progress bar should
          count samples seen or steps (batches) seen.
      stateful_metrics: Iterable of string names of metrics that
          should *not* be averaged over an epoch.
          Metrics in this list will be logged as-is.
          All others will be averaged over time (e.g. loss, etc).
          If not provided, defaults to the `Model`'s metrics.

  Raises:
      ValueError: In case of invalid `count_mode`.
  """

  def __init__(self, count_mode='samples', stateful_metrics=None):
    super(ProgbarLogger, self).__init__()
    self._supports_tf_logs = True
    if count_mode == 'samples':
      self.use_steps = False
    elif count_mode == 'steps':
      self.use_steps = True
    else:
      raise ValueError('Unknown `count_mode`: ' + str(count_mode))
    # Defaults to all Model's metrics except for loss.
    self.stateful_metrics = set(stateful_metrics) if stateful_metrics else None

    # Progress-bar state; (re)initialized by `set_params` and the
    # `_reset_progbar` / `_maybe_init_progbar` helpers below.
    self.seen = 0
    self.progbar = None
    self.target = None
    self.verbose = 1
    self.epochs = 1

    self._train_step, self._test_step, self._predict_step = None, None, None
    self._call_batch_hooks = True

    # Set True by `on_train_begin` so that validation inside `fit` is silent.
    self._called_in_fit = False

  def set_params(self, params):
    self.verbose = params['verbose']
    self.epochs = params['epochs']
    if self.use_steps and 'steps' in params:
      self.target = params['steps']
    elif not self.use_steps and 'samples' in params:
      self.target = params['samples']
    else:
      self.target = None  # Will be inferred at the end of the first epoch.

    self._call_batch_hooks = self.verbose == 1
    if self.target is None:
      # Fall back to the model's step counters to finalize the bar; if they
      # are unavailable, per-batch hooks are required to track progress.
      try:
        self._train_step = self.model._train_counter  # pylint: disable=protected-access
        self._test_step = self.model._test_counter  # pylint: disable=protected-access
        self._predict_step = self.model._predict_counter  # pylint: disable=protected-access
      except AttributeError:
        self._call_batch_hooks = True

  def on_train_begin(self, logs=None):
    # When this logger is called inside `fit`, validation is silent.
    self._called_in_fit = True

  def on_test_begin(self, logs=None):
    if not self._called_in_fit:
      self._reset_progbar()
      self._maybe_init_progbar()

  def on_predict_begin(self, logs=None):
    self._reset_progbar()
    self._maybe_init_progbar()

  def on_epoch_begin(self, epoch, logs=None):
    self._reset_progbar()
    self._maybe_init_progbar()
    if self.verbose and self.epochs > 1:
      print('Epoch %d/%d' % (epoch + 1, self.epochs))

  def on_train_batch_end(self, batch, logs=None):
    self._batch_update_progbar(batch, logs)

  def on_test_batch_end(self, batch, logs=None):
    if not self._called_in_fit:
      self._batch_update_progbar(batch, logs)

  def on_predict_batch_end(self, batch, logs=None):
    # Don't pass prediction results.
    self._batch_update_progbar(batch, None)

  def on_epoch_end(self, epoch, logs=None):
    self._finalize_progbar(logs, self._train_step)

  def on_test_end(self, logs=None):
    if not self._called_in_fit:
      self._finalize_progbar(logs, self._test_step)

  def on_predict_end(self, logs=None):
    self._finalize_progbar(logs, self._predict_step)

  def _reset_progbar(self):
    self.seen = 0
    self.progbar = None

  def _maybe_init_progbar(self):
    if self.stateful_metrics is None:
      if self.model:
        self.stateful_metrics = set(m.name for m in self.model.metrics)
      else:
        self.stateful_metrics = set()

    if self.progbar is None:
      self.progbar = Progbar(
          target=self.target,
          verbose=self.verbose,
          stateful_metrics=self.stateful_metrics,
          unit_name='step' if self.use_steps else 'sample')

  def _implements_train_batch_hooks(self):
    return self._call_batch_hooks

  def _implements_test_batch_hooks(self):
    return self._call_batch_hooks

  def _implements_predict_batch_hooks(self):
    return self._call_batch_hooks

  def _batch_update_progbar(self, batch, logs=None):
    """Updates the progbar."""
    logs = logs or {}
    self._maybe_init_progbar()
    if self.use_steps:
      self.seen = batch + 1  # One-indexed.
    else:
      # v1 path only.
      logs = copy.copy(logs)
      batch_size = logs.pop('size', 0)
      num_steps = logs.pop('num_steps', 1)
      logs.pop('batch', None)
      add_seen = num_steps * batch_size
      self.seen += add_seen

    if self.verbose == 1:
      # Only block async when verbose = 1.
      logs = tf_utils.to_numpy_or_python_type(logs)
      self.progbar.update(self.seen, list(logs.items()), finalize=False)

  def _finalize_progbar(self, logs, counter):
    logs = tf_utils.to_numpy_or_python_type(logs or {})
    if self.target is None:
      # Infer the target from the model's step counter (or, failing that,
      # from the number of items seen so far) so the bar can be completed.
      if counter is not None:
        counter = counter.numpy()
        if not self.use_steps:
          counter *= logs.get('size', 1)
      self.target = counter or self.seen
      self.progbar.target = self.target
    self.progbar.update(self.target, list(logs.items()), finalize=True)
@keras_export('keras.callbacks.History')
class History(Callback):
  """Callback that records events into a `History` object.

  Automatically applied to every Keras model; the resulting `History`
  instance is what the model's `fit` method returns.
  """

  def __init__(self):
    super(History, self).__init__()
    self.history = {}

  def on_train_begin(self, logs=None):
    self.epoch = []

  def on_epoch_end(self, epoch, logs=None):
    self.epoch.append(epoch)
    for key, value in (logs or {}).items():
      self.history.setdefault(key, []).append(value)

    # Re-point the model's history at the end of each epoch so the recorded
    # state is always the most recent one.
    self.model.history = self
@keras_export('keras.callbacks.ModelCheckpoint')
class ModelCheckpoint(Callback):
  """Callback to save the Keras model or model weights at some frequency.

  `ModelCheckpoint` callback is used in conjunction with training using
  `model.fit()` to save a model or weights (in a checkpoint file) at some
  interval, so the model or weights can be loaded later to continue the training
  from the state saved.

  A few options this callback provides include:

  - Whether to only keep the model that has achieved the "best performance" so
    far, or whether to save the model at the end of every epoch regardless of
    performance.
  - Definition of 'best'; which quantity to monitor and whether it should be
    maximized or minimized.
  - The frequency it should save at. Currently, the callback supports saving at
    the end of every epoch, or after a fixed number of training batches.
  - Whether only weights are saved, or the whole model is saved.

  Example:

  ```python
  EPOCHS = 10
  checkpoint_filepath = '/tmp/checkpoint'
  model_checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(
      filepath=checkpoint_filepath,
      save_weights_only=True,
      monitor='val_acc',
      mode='max',
      save_best_only=True)

  # Model weights are saved at the end of every epoch, if it's the best seen
  # so far.
  model.fit(epochs=EPOCHS, callbacks=[model_checkpoint_callback])

  # The model weights (that are considered the best) are loaded into the model.
  model.load_weights(checkpoint_filepath)
  ```

  Arguments:
      filepath: string or `PathLike`, path to save the model file. `filepath`
        can contain named formatting options, which will be filled the value of
        `epoch` and keys in `logs` (passed in `on_epoch_end`). For example: if
        `filepath` is `weights.{epoch:02d}-{val_loss:.2f}.hdf5`, then the model
        checkpoints will be saved with the epoch number and the validation loss
        in the filename.
      monitor: quantity to monitor.
      verbose: verbosity mode, 0 or 1.
      save_best_only: if `save_best_only=True`, the latest best model according
        to the quantity monitored will not be overwritten.
        If `filepath` doesn't contain formatting options like `{epoch}` then
        `filepath` will be overwritten by each new better model.
      mode: one of {auto, min, max}. If `save_best_only=True`, the decision to
        overwrite the current save file is made based on either the maximization
        or the minimization of the monitored quantity. For `val_acc`, this
        should be `max`, for `val_loss` this should be `min`, etc. In `auto`
        mode, the direction is automatically inferred from the name of the
        monitored quantity.
      save_weights_only: if True, then only the model's weights will be saved
        (`model.save_weights(filepath)`), else the full model is saved
        (`model.save(filepath)`).
      save_freq: `'epoch'` or integer. When using `'epoch'`, the callback saves
        the model after each epoch. When using integer, the callback saves the
        model at end of this many batches. If the `Model` is compiled with
        `steps_per_execution=N`, then the saving criteria will be
        checked every Nth batch. Note that if the saving isn't aligned to
        epochs, the monitored metric may potentially be less reliable (it
        could reflect as little as 1 batch, since the metrics get reset every
        epoch). Defaults to `'epoch'`.
      options: Optional `tf.train.CheckpointOptions` object if
        `save_weights_only` is true or optional `tf.saved_model.SavedOptions`
        object if `save_weights_only` is false.
      **kwargs: Additional arguments for backwards compatibility. Possible key
        is `period`.
  """

  def __init__(self,
               filepath,
               monitor='val_loss',
               verbose=0,
               save_best_only=False,
               save_weights_only=False,
               mode='auto',
               save_freq='epoch',
               options=None,
               **kwargs):
    super(ModelCheckpoint, self).__init__()
    self._supports_tf_logs = True
    self.monitor = monitor
    self.verbose = verbose
    self.filepath = path_to_string(filepath)
    self.save_best_only = save_best_only
    self.save_weights_only = save_weights_only
    self.save_freq = save_freq
    self.epochs_since_last_save = 0
    self._batches_seen_since_last_saving = 0
    self._last_batch_seen = 0

    if save_weights_only:
      if options is None or isinstance(
          options, checkpoint_options_lib.CheckpointOptions):
        self._options = options or checkpoint_options_lib.CheckpointOptions()
      else:
        # NOTE: a trailing space is required on the first literal so the
        # implicitly-concatenated message reads "must be either", not
        # "must beeither".
        raise TypeError('If save_weights_only is True, then `options` must be '
                        'either None or a tf.train.CheckpointOptions')
    else:
      if options is None or isinstance(options, save_options_lib.SaveOptions):
        self._options = options or save_options_lib.SaveOptions()
      else:
        raise TypeError('If save_weights_only is False, then `options` must be '
                        'either None or a tf.saved_model.SaveOptions')

    # Deprecated field `load_weights_on_restart` is for loading the checkpoint
    # file from `filepath` at the start of `model.fit()`
    # TODO(rchao): Remove the arg during next breaking release.
    if 'load_weights_on_restart' in kwargs:
      self.load_weights_on_restart = kwargs['load_weights_on_restart']
      logging.warning('`load_weights_on_restart` argument is deprecated. '
                      'Please use `model.load_weights()` for loading weights '
                      'before the start of `model.fit()`.')
    else:
      self.load_weights_on_restart = False

    # Deprecated field `period` is for the number of epochs between which
    # the model is saved.
    if 'period' in kwargs:
      self.period = kwargs['period']
      logging.warning('`period` argument is deprecated. Please use `save_freq` '
                      'to specify the frequency in number of batches seen.')
    else:
      self.period = 1

    if mode not in ['auto', 'min', 'max']:
      logging.warning('ModelCheckpoint mode %s is unknown, '
                      'fallback to auto mode.', mode)
      mode = 'auto'

    if mode == 'min':
      self.monitor_op = np.less
      self.best = np.Inf
    elif mode == 'max':
      self.monitor_op = np.greater
      self.best = -np.Inf
    else:
      # 'auto' mode: infer the direction from the monitored quantity's name.
      if 'acc' in self.monitor or self.monitor.startswith('fmeasure'):
        self.monitor_op = np.greater
        self.best = -np.Inf
      else:
        self.monitor_op = np.less
        self.best = np.Inf

    if self.save_freq != 'epoch' and not isinstance(self.save_freq, int):
      raise ValueError('Unrecognized save_freq: {}'.format(self.save_freq))

    # Only the chief worker writes model checkpoints, but all workers
    # restore checkpoint at on_train_begin().
    self._chief_worker_only = False

  def set_model(self, model):
    self.model = model
    # Use name matching rather than `isinstance` to avoid circular dependencies.
    if (not self.save_weights_only and
        not model._is_graph_network and  # pylint: disable=protected-access
        model.__class__.__name__ != 'Sequential'):
      self.save_weights_only = True

  def on_train_begin(self, logs=None):
    if self.load_weights_on_restart:
      filepath_to_load = (
          self._get_most_recently_modified_file_matching_pattern(self.filepath))
      if (filepath_to_load is not None and
          self._checkpoint_exists(filepath_to_load)):
        try:
          # `filepath` may contain placeholders such as `{epoch:02d}`, and
          # thus it attempts to load the most recently modified file with file
          # name matching the pattern.
          self.model.load_weights(filepath_to_load)
        except (IOError, ValueError) as e:
          raise ValueError('Error loading file from {}. Reason: {}'.format(
              filepath_to_load, e))

  def _implements_train_batch_hooks(self):
    # Only call batch hooks when saving on batch
    return self.save_freq != 'epoch'

  def on_train_batch_end(self, batch, logs=None):
    if self._should_save_on_batch(batch):
      self._save_model(epoch=self._current_epoch, logs=logs)

  def on_epoch_begin(self, epoch, logs=None):
    self._current_epoch = epoch

  def on_epoch_end(self, epoch, logs=None):
    self.epochs_since_last_save += 1
    # pylint: disable=protected-access
    if self.save_freq == 'epoch':
      self._save_model(epoch=epoch, logs=logs)

  def _should_save_on_batch(self, batch):
    """Handles batch-level saving logic, supports steps_per_execution."""
    if self.save_freq == 'epoch':
      return False

    if batch <= self._last_batch_seen:  # New epoch.
      add_batches = batch + 1  # batches are zero-indexed.
    else:
      add_batches = batch - self._last_batch_seen
    self._batches_seen_since_last_saving += add_batches
    self._last_batch_seen = batch

    if self._batches_seen_since_last_saving >= self.save_freq:
      self._batches_seen_since_last_saving = 0
      return True
    return False

  def _save_model(self, epoch, logs):
    """Saves the model.

    Arguments:
        epoch: the epoch this iteration is in.
        logs: the `logs` dict passed in to `on_batch_end` or `on_epoch_end`.
    """
    logs = logs or {}

    if isinstance(self.save_freq,
                  int) or self.epochs_since_last_save >= self.period:
      # Block only when saving interval is reached.
      logs = tf_utils.to_numpy_or_python_type(logs)
      self.epochs_since_last_save = 0
      filepath = self._get_file_path(epoch, logs)

      try:
        if self.save_best_only:
          current = logs.get(self.monitor)
          if current is None:
            logging.warning('Can save best model only with %s available, '
                            'skipping.', self.monitor)
          else:
            if self.monitor_op(current, self.best):
              if self.verbose > 0:
                print('\nEpoch %05d: %s improved from %0.5f to %0.5f,'
                      ' saving model to %s' % (epoch + 1, self.monitor,
                                               self.best, current, filepath))
              self.best = current
              if self.save_weights_only:
                self.model.save_weights(
                    filepath, overwrite=True, options=self._options)
              else:
                self.model.save(filepath, overwrite=True, options=self._options)
            else:
              if self.verbose > 0:
                print('\nEpoch %05d: %s did not improve from %0.5f' %
                      (epoch + 1, self.monitor, self.best))
        else:
          if self.verbose > 0:
            print('\nEpoch %05d: saving model to %s' % (epoch + 1, filepath))
          if self.save_weights_only:
            self.model.save_weights(
                filepath, overwrite=True, options=self._options)
          else:
            self.model.save(filepath, overwrite=True, options=self._options)

        self._maybe_remove_file()
      except IOError as e:
        # `e.errno` appears to be `None` so checking the content of `e.args[0]`.
        if 'is a directory' in six.ensure_str(e.args[0]).lower():
          raise IOError('Please specify a non-directory filepath for '
                        'ModelCheckpoint. Filepath used is an existing '
                        'directory: {}'.format(filepath))
        # Re-throw the error for any other causes, preserving the traceback.
        raise

  def _get_file_path(self, epoch, logs):
    """Returns the file path for checkpoint."""
    # pylint: disable=protected-access
    try:
      # `filepath` may contain placeholders such as `{epoch:02d}` and
      # `{mape:.2f}`. A mismatch between logged metrics and the path's
      # placeholders can cause formatting to fail.
      file_path = self.filepath.format(epoch=epoch + 1, **logs)
    except KeyError as e:
      raise KeyError('Failed to format this callback filepath: "{}". '
                     'Reason: {}'.format(self.filepath, e))
    self._write_filepath = distributed_file_utils.write_filepath(
        file_path, self.model.distribute_strategy)
    return self._write_filepath

  def _maybe_remove_file(self):
    # Remove the checkpoint directory in multi-worker training where this worker
    # should not checkpoint. It is a dummy directory previously saved for sync
    # distributed training.
    distributed_file_utils.remove_temp_dir_with_filepath(
        self._write_filepath, self.model.distribute_strategy)

  def _checkpoint_exists(self, filepath):
    """Returns whether the checkpoint `filepath` refers to exists."""
    if filepath.endswith('.h5'):
      return file_io.file_exists_v2(filepath)
    tf_saved_model_exists = file_io.file_exists_v2(filepath)
    tf_weights_only_checkpoint_exists = file_io.file_exists_v2(
        filepath + '.index')
    return tf_saved_model_exists or tf_weights_only_checkpoint_exists

  def _get_most_recently_modified_file_matching_pattern(self, pattern):
    """Returns the most recently modified filepath matching pattern.

    Pattern may contain python formatting placeholder. If
    `tf.train.latest_checkpoint()` does not return None, use that; otherwise,
    check for most recently modified one that matches the pattern.

    In the rare case where there are more than one pattern-matching file having
    the same modified time that is most recent among all, return the filepath
    that is largest (by `>` operator, lexicographically using the numeric
    equivalents). This provides a tie-breaker when multiple files are most
    recent. Note that a larger `filepath` can sometimes indicate a later time of
    modification (for instance, when epoch/batch is used as formatting option),
    but not necessarily (when accuracy or loss is used). The tie-breaker is
    put in the logic as best effort to return the most recent, and to avoid
    undeterministic result.

    Modified time of a file is obtained with `os.path.getmtime()`.

    This utility function is best demonstrated via an example:

    ```python
    file_pattern = 'f.batch{batch:02d}epoch{epoch:02d}.h5'
    test_dir = self.get_temp_dir()
    path_pattern = os.path.join(test_dir, file_pattern)
    file_paths = [
        os.path.join(test_dir, file_name) for file_name in
        ['f.batch03epoch02.h5', 'f.batch02epoch02.h5', 'f.batch01epoch01.h5']
    ]
    for file_path in file_paths:
      # Write something to each of the files
    self.assertEqual(
        _get_most_recently_modified_file_matching_pattern(path_pattern),
        file_paths[-1])
    ```

    Arguments:
        pattern: The file pattern that may optionally contain python placeholder
            such as `{epoch:02d}`.

    Returns:
        The most recently modified file's full filepath matching `pattern`. If
        `pattern` does not contain any placeholder, this returns the filepath
        that
        exactly matches `pattern`. Returns `None` if no match is found.
    """
    dir_name = os.path.dirname(pattern)
    base_name = os.path.basename(pattern)
    base_name_regex = '^' + re.sub(r'{.*}', r'.*', base_name) + '$'

    # If tf.train.latest_checkpoint tells us there exists a latest checkpoint,
    # use that as it is more robust than `os.path.getmtime()`.
    latest_tf_checkpoint = checkpoint_management.latest_checkpoint(dir_name)
    if latest_tf_checkpoint is not None and re.match(
        base_name_regex, os.path.basename(latest_tf_checkpoint)):
      return latest_tf_checkpoint

    latest_mod_time = 0
    file_path_with_latest_mod_time = None
    n_file_with_latest_mod_time = 0
    file_path_with_largest_file_name = None

    if file_io.file_exists_v2(dir_name):
      for file_name in os.listdir(dir_name):
        # Only consider if `file_name` matches the pattern.
        if re.match(base_name_regex, file_name):
          file_path = os.path.join(dir_name, file_name)
          mod_time = os.path.getmtime(file_path)
          if (file_path_with_largest_file_name is None or
              file_path > file_path_with_largest_file_name):
            file_path_with_largest_file_name = file_path
          if mod_time > latest_mod_time:
            latest_mod_time = mod_time
            file_path_with_latest_mod_time = file_path
            # In the case a file with later modified time is found, reset
            # the counter for the number of files with latest modified time.
            n_file_with_latest_mod_time = 1
          elif mod_time == latest_mod_time:
            # In the case a file has modified time tied with the most recent,
            # increment the counter for the number of files with latest modified
            # time by 1.
            n_file_with_latest_mod_time += 1

      if n_file_with_latest_mod_time == 1:
        # Return the sole file that has most recent modified time.
        return file_path_with_latest_mod_time
      else:
        # If there are more than one file having latest modified time, return
        # the file path with the largest file name.
        return file_path_with_largest_file_name
@keras_export('keras.callbacks.experimental.BackupAndRestore', v1=[])
class BackupAndRestore(Callback):
  """Callback to back up and restore the training state.

  `BackupAndRestore` is intended to recover from interruptions that happen in
  the middle of a `model.fit` execution: it backs up the training state in a
  temporary checkpoint file (based on TF CheckpointManager) at the end of each
  epoch. If training is restarted before completion, the training state and
  model are restored to the most recently saved state at the beginning of a
  new `model.fit()` run. Note that the user is responsible for bringing jobs
  back up.

  This callback provides the backup/restore mechanism for fault tolerance.
  The model being restored from a previous checkpoint is expected to be the
  same as the one used to back it up; if the arguments passed to `compile` or
  `fit` change, the checkpoint saved for fault tolerance can become invalid.

  Note:
  1. This callback is not compatible with disabling eager execution.
  2. A checkpoint is saved at the end of each epoch; when restoring, any
  partial work from an unfinished epoch is redone (so work done before an
  interruption doesn't affect the final model state).
  3. This works for both single-worker and multi-worker mode; only
  MirroredStrategy and MultiWorkerMirroredStrategy are supported for now.

  Example:

  >>> class InterruptingCallback(tf.keras.callbacks.Callback):
  ...   def on_epoch_begin(self, epoch, logs=None):
  ...     if epoch == 4:
  ...       raise RuntimeError('Interrupting!')
  >>> callback = tf.keras.callbacks.experimental.BackupAndRestore(
  ... backup_dir="/tmp")
  >>> model = tf.keras.models.Sequential([tf.keras.layers.Dense(10)])
  >>> model.compile(tf.keras.optimizers.SGD(), loss='mse')
  >>> try:
  ...   model.fit(np.arange(100).reshape(5, 20), np.zeros(5), epochs=10,
  ...             batch_size=1, callbacks=[callback, InterruptingCallback()],
  ...             verbose=0)
  ... except:
  ...   pass
  >>> history = model.fit(np.arange(100).reshape(5, 20), np.zeros(5), epochs=10,
  ...             batch_size=1, callbacks=[callback], verbose=0)
  >>> # Only 6 more epochs are run, since first trainning got interrupted at
  >>> # zero-indexed epoch 4, second training will continue from 4 to 9.
  >>> len(history.history['loss'])
  6

  Arguments:
    backup_dir: String, path to save the model file. This is the directory in
      which the system stores temporary files to recover the model from jobs
      terminated unexpectedly. The directory cannot be reused elsewhere to
      store other checkpoints, e.g. by a `BackupAndRestore` callback of
      another training run, or by another callback (`ModelCheckpoint`) of the
      same training.
  """

  def __init__(self, backup_dir):
    super(BackupAndRestore, self).__init__()
    self.backup_dir = backup_dir
    self._supports_tf_logs = True
    self._supported_strategies = (
        distribute_lib._DefaultDistributionStrategy,
        mirrored_strategy.MirroredStrategy,
        collective_all_reduce_strategy.CollectiveAllReduceStrategy)

    # This callback only works in eager mode; reject graph mode up front.
    if not context.executing_eagerly():
      if ops.inside_function():
        raise ValueError('This Callback\'s method contains Python state and '
                         'should be called outside of `tf.function`s.')
      # Legacy graph mode:
      raise ValueError(
          'BackupAndRestore only supports eager mode. In graph '
          'mode, consider using ModelCheckpoint to manually save '
          'and restore weights with `model.load_weights()` and by '
          'providing `initial_epoch` in `model.fit()` for fault tolerance.')

    # Only the chief worker writes model checkpoints, but all workers
    # restore checkpoint at on_train_begin().
    self._chief_worker_only = False

  def set_model(self, model):
    self.model = model

  def on_train_begin(self, logs=None):
    """Creates the worker training state and restores any prior backup."""
    # TrainingState manages the training state needed for failure-recovery
    # of a worker in training.
    # pylint: disable=protected-access
    if not isinstance(self.model.distribute_strategy,
                      self._supported_strategies):
      raise NotImplementedError(
          'Currently only support empty strategy, MirroredStrategy and '
          'MultiWorkerMirroredStrategy.')
    state = worker_training_state.WorkerTrainingState(self.model,
                                                      self.backup_dir)
    self.model._training_state = state
    self._training_state = state
    state.restore()

  def on_train_end(self, logs=None):
    """Deletes the backup file and drops the training-state references."""
    # pylint: disable=protected-access
    # On exit of training, delete the training state backup file that was
    # saved for the purpose of worker recovery, then clean up the state.
    self._training_state.delete_backup()
    del self._training_state
    del self.model._training_state

  def on_epoch_end(self, epoch, logs=None):
    # Back up the model and current epoch for possible future recovery.
    self._training_state.back_up(epoch)
@keras_export('keras.callbacks.EarlyStopping')
class EarlyStopping(Callback):
  """Stop training when a monitored metric has stopped improving.

  Assuming the goal of a training is to minimize the loss. With this, the
  metric to be monitored would be `'loss'`, and mode would be `'min'`. A
  `model.fit()` training loop will check at end of every epoch whether
  the loss is no longer decreasing, considering the `min_delta` and
  `patience` if applicable. Once it's found no longer decreasing,
  `model.stop_training` is marked True and the training terminates.

  The quantity to be monitored needs to be available in `logs` dict.
  To make it so, pass the loss or metrics at `model.compile()`.

  Arguments:
    monitor: Quantity to be monitored.
    min_delta: Minimum change in the monitored quantity
        to qualify as an improvement, i.e. an absolute
        change of less than min_delta, will count as no
        improvement.
    patience: Number of epochs with no improvement
        after which training will be stopped.
    verbose: verbosity mode.
    mode: One of `{"auto", "min", "max"}`. In `min` mode,
        training will stop when the quantity
        monitored has stopped decreasing; in `"max"`
        mode it will stop when the quantity
        monitored has stopped increasing; in `"auto"`
        mode, the direction is automatically inferred
        from the name of the monitored quantity.
    baseline: Baseline value for the monitored quantity.
        Training will stop if the model doesn't show improvement over the
        baseline.
    restore_best_weights: Whether to restore model weights from
        the epoch with the best value of the monitored quantity.
        If False, the model weights obtained at the last step of
        training are used.

  Example:

  >>> callback = tf.keras.callbacks.EarlyStopping(monitor='loss', patience=3)
  >>> # This callback will stop the training when there is no improvement in
  >>> # the validation loss for three consecutive epochs.
  >>> model = tf.keras.models.Sequential([tf.keras.layers.Dense(10)])
  >>> model.compile(tf.keras.optimizers.SGD(), loss='mse')
  >>> history = model.fit(np.arange(100).reshape(5, 20), np.zeros(5),
  ...                     epochs=10, batch_size=1, callbacks=[callback],
  ...                     verbose=0)
  >>> len(history.history['loss'])  # Only 4 epochs are run.
  4
  """

  def __init__(self,
               monitor='val_loss',
               min_delta=0,
               patience=0,
               verbose=0,
               mode='auto',
               baseline=None,
               restore_best_weights=False):
    super(EarlyStopping, self).__init__()

    self.monitor = monitor
    self.patience = patience
    self.verbose = verbose
    self.baseline = baseline
    self.min_delta = abs(min_delta)
    self.wait = 0
    self.stopped_epoch = 0
    self.restore_best_weights = restore_best_weights
    # Weights from the best epoch seen so far; only tracked when
    # `restore_best_weights` is True.
    self.best_weights = None

    if mode not in ['auto', 'min', 'max']:
      logging.warning('EarlyStopping mode %s is unknown, '
                      'fallback to auto mode.', mode)
      mode = 'auto'

    if mode == 'min':
      self.monitor_op = np.less
    elif mode == 'max':
      self.monitor_op = np.greater
    else:
      # 'auto' mode: accuracy-like metrics should increase; everything else
      # is treated like a loss and should decrease.
      if 'acc' in self.monitor:
        self.monitor_op = np.greater
      else:
        self.monitor_op = np.less

    # Sign `min_delta` so that `current - self.min_delta` always shifts the
    # value towards "no improvement" before the comparison against `best`.
    if self.monitor_op == np.greater:
      self.min_delta *= 1
    else:
      self.min_delta *= -1

  def on_train_begin(self, logs=None):
    # Allow instances to be re-used.
    self.wait = 0
    self.stopped_epoch = 0
    if self.baseline is not None:
      self.best = self.baseline
    else:
      self.best = np.Inf if self.monitor_op == np.less else -np.Inf
    self.best_weights = None

  def on_epoch_end(self, epoch, logs=None):
    current = self.get_monitor_value(logs)
    if current is None:
      return
    if self.restore_best_weights and self.best_weights is None:
      # Record the weights of the first epoch so there is always a state to
      # restore. Without this, a `baseline` that is never beaten would leave
      # `best_weights` as None and `set_weights(None)` would crash below.
      self.best_weights = self.model.get_weights()
    if self.monitor_op(current - self.min_delta, self.best):
      self.best = current
      self.wait = 0
      if self.restore_best_weights:
        self.best_weights = self.model.get_weights()
    else:
      self.wait += 1
      if self.wait >= self.patience:
        self.stopped_epoch = epoch
        self.model.stop_training = True
        if self.restore_best_weights and self.best_weights is not None:
          if self.verbose > 0:
            print('Restoring model weights from the end of the best epoch.')
          self.model.set_weights(self.best_weights)

  def on_train_end(self, logs=None):
    if self.stopped_epoch > 0 and self.verbose > 0:
      print('Epoch %05d: early stopping' % (self.stopped_epoch + 1))

  def get_monitor_value(self, logs):
    """Returns the monitored value from `logs`, warning if it is missing."""
    logs = logs or {}
    monitor_value = logs.get(self.monitor)
    if monitor_value is None:
      logging.warning('Early stopping conditioned on metric `%s` '
                      'which is not available. Available metrics are: %s',
                      self.monitor, ','.join(list(logs.keys())))
    return monitor_value
@keras_export('keras.callbacks.RemoteMonitor')
class RemoteMonitor(Callback):
  """Callback used to stream events to a server.

  Requires the `requests` library. Events are sent to
  `root + '/publish/epoch/end/'` by default. Calls are HTTP POST, with a
  `data` argument which is a JSON-encoded dictionary of event data.
  If `send_as_json=True`, the content type of the request will be
  `"application/json"`; otherwise the serialized JSON will be sent within
  a form.

  Arguments:
    root: String; root url of the target server.
    path: String; path relative to `root` to which the events will be sent.
    field: String; JSON field under which the data will be stored.
        The field is used only if the payload is sent within a form
        (i.e. send_as_json is set to False).
    headers: Dictionary; optional custom HTTP headers.
    send_as_json: Boolean; whether the request should be
        sent as `"application/json"`.
  """

  def __init__(self,
               root='http://localhost:9000',
               path='/publish/epoch/end/',
               field='data',
               headers=None,
               send_as_json=False):
    super(RemoteMonitor, self).__init__()
    self.root = root
    self.path = path
    self.field = field
    self.headers = headers
    self.send_as_json = send_as_json

  def on_epoch_end(self, epoch, logs=None):
    """POSTs the epoch number and logs to the configured endpoint."""
    if requests is None:
      raise ImportError('RemoteMonitor requires the `requests` library.')
    # Build a JSON-serializable payload. np.ndarray and np.generic are not
    # scalar types, so their scalar values are unwrapped via `.item()`.
    # An 'epoch' key present in `logs` intentionally wins over the argument,
    # matching the insertion order used historically.
    payload = {'epoch': epoch}
    for key, value in (logs or {}).items():
      if isinstance(value, (np.ndarray, np.generic)):
        payload[key] = value.item()
      else:
        payload[key] = value
    target = self.root + self.path
    try:
      if self.send_as_json:
        requests.post(target, json=payload, headers=self.headers)
      else:
        requests.post(target, {self.field: json.dumps(payload)},
                      headers=self.headers)
    except requests.exceptions.RequestException:
      logging.warning('Warning: could not reach RemoteMonitor '
                      'root server at ' + str(self.root))
@keras_export('keras.callbacks.LearningRateScheduler')
class LearningRateScheduler(Callback):
  """Learning rate scheduler.

  At the beginning of every epoch, this callback gets the updated learning
  rate value from the `schedule` function provided at `__init__`, with the
  current epoch and current learning rate, and applies the updated learning
  rate on the optimizer.

  Arguments:
    schedule: a function that takes an epoch index (integer, indexed from 0)
        and current learning rate (float) as inputs and returns a new
        learning rate as output (float).
    verbose: int. 0: quiet, 1: update messages.

  Example:

  >>> # This function keeps the initial learning rate for the first ten epochs
  >>> # and decreases it exponentially after that.
  >>> def scheduler(epoch, lr):
  ...   if epoch < 10:
  ...     return lr
  ...   else:
  ...     return lr * tf.math.exp(-0.1)
  >>>
  >>> model = tf.keras.models.Sequential([tf.keras.layers.Dense(10)])
  >>> model.compile(tf.keras.optimizers.SGD(), loss='mse')
  >>> round(model.optimizer.lr.numpy(), 5)
  0.01
  >>> callback = tf.keras.callbacks.LearningRateScheduler(scheduler)
  >>> history = model.fit(np.arange(100).reshape(5, 20), np.zeros(5),
  ...                     epochs=15, callbacks=[callback], verbose=0)
  >>> round(model.optimizer.lr.numpy(), 5)
  0.00607
  """

  def __init__(self, schedule, verbose=0):
    super(LearningRateScheduler, self).__init__()
    self.schedule = schedule
    self.verbose = verbose

  def on_epoch_begin(self, epoch, logs=None):
    """Queries `schedule` for the new rate and applies it to the optimizer."""
    if not hasattr(self.model.optimizer, 'lr'):
      raise ValueError('Optimizer must have a "lr" attribute.')
    try:
      # New API: schedule(epoch, current_lr).
      current_lr = float(K.get_value(self.model.optimizer.lr))
      scheduled_lr = self.schedule(epoch, current_lr)
    except TypeError:
      # Old API, kept for backward compatibility: schedule(epoch).
      scheduled_lr = self.schedule(epoch)
    # Validate the schedule output before touching the optimizer.
    if not isinstance(scheduled_lr, (ops.Tensor, float, np.float32,
                                     np.float64)):
      raise ValueError('The output of the "schedule" function '
                       'should be float.')
    if isinstance(scheduled_lr, ops.Tensor) and not scheduled_lr.dtype.is_floating:
      raise ValueError('The dtype of Tensor should be float')
    K.set_value(self.model.optimizer.lr, K.get_value(scheduled_lr))
    if self.verbose > 0:
      print('\nEpoch %05d: LearningRateScheduler reducing learning '
            'rate to %s.' % (epoch + 1, scheduled_lr))

  def on_epoch_end(self, epoch, logs=None):
    # Expose the effective learning rate in the epoch logs.
    logs = logs or {}
    logs['lr'] = K.get_value(self.model.optimizer.lr)
@keras_export('keras.callbacks.TensorBoard', v1=[])
class TensorBoard(Callback, version_utils.TensorBoardVersionSelector):
  # pylint: disable=line-too-long
  """Enable visualizations for TensorBoard.

  TensorBoard is a visualization tool provided with TensorFlow.

  This callback logs events for TensorBoard, including:

  * Metrics summary plots
  * Training graph visualization
  * Activation histograms
  * Sampled profiling

  If you have installed TensorFlow with pip, you should be able
  to launch TensorBoard from the command line:

  ```
  tensorboard --logdir=path_to_your_logs
  ```

  You can find more information about TensorBoard
  [here](https://www.tensorflow.org/get_started/summaries_and_tensorboard).

  Example (Basic):

  ```python
  tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir="./logs")
  model.fit(x_train, y_train, epochs=2, callbacks=[tensorboard_callback])
  # run the tensorboard command to view the visualizations.
  ```

  Example (Profile):

  ```python
  # profile a single batch, e.g. the 5th batch.
  tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir='./logs',
                                                        profile_batch=5)
  model.fit(x_train, y_train, epochs=2, callbacks=[tensorboard_callback])
  # Now run the tensorboard command to view the visualizations (profile plugin).

  # profile a range of batches, e.g. from 10 to 20.
  tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir='./logs',
                                                        profile_batch='10,20')
  model.fit(x_train, y_train, epochs=2, callbacks=[tensorboard_callback])
  # Now run the tensorboard command to view the visualizations (profile plugin).
  ```

  Arguments:
    log_dir: the path of the directory where to save the log files to be
      parsed by TensorBoard.
    histogram_freq: frequency (in epochs) at which to compute activation and
      weight histograms for the layers of the model. If set to 0, histograms
      won't be computed. Validation data (or split) must be specified for
      histogram visualizations.
    write_graph: whether to visualize the graph in TensorBoard. The log file
      can become quite large when write_graph is set to True.
    write_images: whether to write model weights to visualize as image in
      TensorBoard.
    update_freq: `'batch'` or `'epoch'` or integer. When using `'batch'`,
      writes the losses and metrics to TensorBoard after each batch. The same
      applies for `'epoch'`. If using an integer, let's say `1000`, the
      callback will write the metrics and losses to TensorBoard every 1000
      batches. Note that writing too frequently to TensorBoard can slow down
      your training.
    profile_batch: Profile the batch(es) to sample compute characteristics.
      profile_batch must be a non-negative integer or a tuple of integers.
      A pair of positive integers signify a range of batches to profile.
      By default, it will profile the second batch. Set profile_batch=0
      to disable profiling.
    embeddings_freq: frequency (in epochs) at which embedding layers will be
      visualized. If set to 0, embeddings won't be visualized.
    embeddings_metadata: a dictionary which maps layer name to a file name in
      which metadata for this embedding layer is saved. See the
      [details](
        https://www.tensorflow.org/how_tos/embedding_viz/#metadata_optional)
      about metadata files format. In case if the same metadata file is
      used for all embedding layers, string can be passed.

  Raises:
    ValueError: If histogram_freq is set and no validation data is provided.
  """
  # pylint: enable=line-too-long

  def __init__(self,
               log_dir='logs',
               histogram_freq=0,
               write_graph=True,
               write_images=False,
               update_freq='epoch',
               profile_batch=2,
               embeddings_freq=0,
               embeddings_metadata=None,
               **kwargs):
    super(TensorBoard, self).__init__()
    self._supports_tf_logs = True
    self._validate_kwargs(kwargs)
    self.log_dir = path_to_string(log_dir)
    self.histogram_freq = histogram_freq
    self.write_graph = write_graph
    self.write_images = write_images
    # 'batch' is normalized to the integer 1 (write every batch).
    self.update_freq = 1 if update_freq == 'batch' else update_freq
    self.embeddings_freq = embeddings_freq
    self.embeddings_metadata = embeddings_metadata
    self._init_profile_batch(profile_batch)
    self._epoch = 0
    self._global_train_batch = 0
    # Lazily initialized in order to avoid creating event files when
    # not needed.
    self._writers = {}
    # Used to restore any existing `SummaryWriter` after training ends.
    self._prev_summary_state = []

  def _validate_kwargs(self, kwargs):
    """Handles arguments that were supported in V1; warns and ignores them."""
    if kwargs.get('write_grads', False):
      logging.warning('`write_grads` will be ignored in TensorFlow 2.0 '
                      'for the `TensorBoard` Callback.')
    if kwargs.get('batch_size', False):
      logging.warning('`batch_size` is no longer needed in the '
                      '`TensorBoard` Callback and will be ignored '
                      'in TensorFlow 2.0.')
    if kwargs.get('embeddings_layer_names', False):
      logging.warning('`embeddings_layer_names` is not supported in '
                      'TensorFlow 2.0. Instead, all `Embedding` layers '
                      'will be visualized.')
    if kwargs.get('embeddings_data', False):
      logging.warning('`embeddings_data` is not supported in TensorFlow '
                      '2.0. Instead, all `Embedding` variables will be '
                      'visualized.')
    unrecognized_kwargs = set(kwargs.keys()) - {
        'write_grads', 'embeddings_layer_names', 'embeddings_data', 'batch_size'
    }
    # Only allow kwargs that were supported in V1.
    if unrecognized_kwargs:
      raise ValueError('Unrecognized arguments in `TensorBoard` '
                       'Callback: ' + str(unrecognized_kwargs))

  def set_model(self, model):
    """Sets Keras model and writes graph if specified."""
    self.model = model
    self._log_write_dir = self._get_log_write_dir()
    # Train and validation summaries go to separate subdirectories, driven
    # by the model's own train/test step counters.
    self._train_dir = os.path.join(self._log_write_dir, 'train')
    self._train_step = self.model._train_counter  # pylint: disable=protected-access
    self._val_dir = os.path.join(self._log_write_dir, 'validation')
    self._val_step = self.model._test_counter  # pylint: disable=protected-access
    self._writers = {}  # Resets writers.
    self._should_write_train_graph = False
    if self.write_graph:
      self._write_keras_model_summary()
      # The train_function graph itself is written lazily on the first train
      # batch (see `on_train_batch_end`), once the function exists.
      self._should_write_train_graph = True
    if self.embeddings_freq:
      self._configure_embeddings()

  @property
  def _train_writer(self):
    # Lazily creates (and caches) the file writer for training summaries.
    if 'train' not in self._writers:
      self._writers['train'] = summary_ops_v2.create_file_writer_v2(
          self._train_dir)
    return self._writers['train']

  @property
  def _val_writer(self):
    # Lazily creates (and caches) the file writer for validation summaries.
    if 'val' not in self._writers:
      self._writers['val'] = summary_ops_v2.create_file_writer_v2(self._val_dir)
    return self._writers['val']

  def _get_log_write_dir(self):
    """For multi-worker, only chief should write, others write to '/tmp'."""
    return distributed_file_utils.write_dirpath(self.log_dir,
                                                self.model.distribute_strategy)

  def _delete_tmp_write_dir(self):
    """Deletes tmp write directories for multi-worker."""
    distributed_file_utils.remove_temp_dirpath(self.log_dir,
                                               self.model.distribute_strategy)

  def _write_keras_model_train_graph(self):
    """Writes Keras model train_function graph to TensorBoard."""
    with self._train_writer.as_default():
      with summary_ops_v2.always_record_summaries():
        train_fn = self.model.train_function
        # If the train_function is a `tf.function`, we can write out a graph
        if hasattr(train_fn, 'function_spec'):
          summary_ops_v2.graph(train_fn._concrete_stateful_fn.graph, step=0)  # pylint: disable=protected-access

  def _write_keras_model_summary(self):
    """Writes Keras graph network summary to TensorBoard."""
    with self._train_writer.as_default():
      with summary_ops_v2.always_record_summaries():
        # Only functional (graph-network) and Sequential models have a
        # serializable Keras model summary; subclassed models are skipped.
        summary_writable = (
            self.model._is_graph_network or  # pylint: disable=protected-access
            self.model.__class__.__name__ == 'Sequential')  # pylint: disable=protected-access
        if summary_writable:
          summary_ops_v2.keras_model('keras', self.model, step=0)

  def _configure_embeddings(self):
    """Configure the Projector for embeddings."""
    # TODO(omalleyt): Add integration tests.
    from google.protobuf import text_format
    from tensorflow.python.keras.layers import embeddings
    from tensorflow.python.keras.protobuf import projector_config_pb2
    config = projector_config_pb2.ProjectorConfig()
    for layer in self.model.layers:
      if isinstance(layer, embeddings.Embedding):
        embedding = config.embeddings.add()
        # Embeddings are always the first layer, so this naming should be
        # consistent in any keras models checkpoints.
        name = 'layer_with_weights-0/embeddings/.ATTRIBUTES/VARIABLE_VALUE'
        embedding.tensor_name = name
        if self.embeddings_metadata is not None:
          if isinstance(self.embeddings_metadata, str):
            embedding.metadata_path = self.embeddings_metadata
          else:
            if layer.name in self.embeddings_metadata.keys():
              embedding.metadata_path = self.embeddings_metadata.pop(layer.name)
    # Any dict entries left over at this point name layers that don't exist.
    if self.embeddings_metadata and not isinstance(self.embeddings_metadata,
                                                   str):
      raise ValueError('Unrecognized `Embedding` layer names passed to '
                       '`keras.callbacks.TensorBoard` `embeddings_metadata` '
                       'argument: ' + str(self.embeddings_metadata.keys()))
    config_pbtxt = text_format.MessageToString(config)
    path = os.path.join(self._log_write_dir, 'projector_config.pbtxt')
    with open(path, 'w') as f:
      f.write(config_pbtxt)

  def _push_writer(self, writer, step):
    """Sets the default writer for custom batch-level summaries."""
    # Batch-level summaries are only written for non-'epoch' update_freq.
    if self.update_freq == 'epoch':
      return
    # Save the current global summary state so it can be restored by
    # `_pop_writer` when this scope ends.
    summary_state = summary_ops_v2._summary_state  # pylint: disable=protected-access
    self._prev_summary_state.append({
        'is_recording': summary_state.is_recording,
        'writer': summary_state.writer,
        'step': summary_state.step
    })
    # NOTE(review): this branch looks unreachable — the early return above
    # already exits when update_freq == 'epoch'. Kept as-is; confirm before
    # removing.
    if self.update_freq == 'epoch':
      should_record = False
      writer = None
    else:
      # Record only on steps that are multiples of `update_freq`.
      should_record = lambda: math_ops.equal(step % self.update_freq, 0)
    summary_state.is_recording = should_record
    summary_state.writer = writer
    # TODO(b/151339474): Fix deadlock when not using .value() here.
    summary_ops_v2.set_step(step.value())

  def _pop_writer(self):
    """Pops the current writer."""
    if self.update_freq == 'epoch':
      return
    # Restore the summary state saved by the matching `_push_writer` call.
    prev_state = self._prev_summary_state.pop()
    summary_state = summary_ops_v2._summary_state  # pylint: disable=protected-access
    summary_state.is_recording = prev_state['is_recording']
    summary_state.writer = prev_state['writer']
    summary_ops_v2.set_step(prev_state['step'])

  def _close_writers(self):
    # Closes every file writer that was lazily created.
    for writer in self._writers.values():
      writer.close()

  def _init_profile_batch(self, profile_batch):
    """Validate profile_batch value and set the range of batches to profile.

    Arguments:
      profile_batch: The range of batches to profile. Should be a non-negative
        integer or a comma separated string of pair of positive integers. A
        pair of positive integers signify a range of batches to profile.

    Returns:
      A pair of non-negative integers specifying the start and stop batch to
      profile.

    Raises:
      ValueError: If profile_batch is not an integer or a comma seperated pair
        of positive integers.
    """
    profile_batch_error_message = (
        'profile_batch must be a non-negative integer or 2-tuple of positive '
        'integers. A pair of positive integers signifies a range of batches '
        'to profile. Found: {}'.format(profile_batch))
    # Support legacy way of specifying "start,stop" or "start" as str.
    if isinstance(profile_batch, six.string_types):
      profile_batch = str(profile_batch).split(',')
      profile_batch = nest.map_structure(int, profile_batch)
    if isinstance(profile_batch, int):
      # A single batch: start and stop at the same batch index.
      self._start_batch = profile_batch
      self._stop_batch = profile_batch
    elif isinstance(profile_batch, (tuple, list)) and len(profile_batch) == 2:
      self._start_batch, self._stop_batch = profile_batch
    else:
      raise ValueError(profile_batch_error_message)
    if self._start_batch < 0 or self._stop_batch < self._start_batch:
      raise ValueError(profile_batch_error_message)
    if self._start_batch > 0:
      # Warm up and improve the profiling accuracy.
      profiler.start('')
      profiler.stop(save=False)
    # True when a trace is running.
    self._is_tracing = False
    # Setting `profile_batch=0` disables profiling.
    self._should_trace = not (self._start_batch == 0 and self._stop_batch == 0)

  def on_train_begin(self, logs=None):
    # Reset the global batch counter and install the train writer as the
    # default for batch-level summaries.
    self._global_train_batch = 0
    self._push_writer(self._train_writer, self._train_step)

  def on_train_end(self, logs=None):
    self._pop_writer()

    # Finish any profiler trace that is still running at the end of training.
    if self._is_tracing:
      self._stop_trace()

    self._close_writers()
    self._delete_tmp_write_dir()

  def on_test_begin(self, logs=None):
    self._push_writer(self._val_writer, self._val_step)

  def on_test_end(self, logs=None):
    self._pop_writer()

  def _implements_train_batch_hooks(self):
    return self._should_trace  # Only call batch hooks when tracing is enabled

  def on_train_batch_begin(self, batch, logs=None):
    self._global_train_batch += 1
    if not self._should_trace:
      return

    # Start profiling when the global batch counter reaches the configured
    # start batch.
    if self._global_train_batch == self._start_batch:
      self._start_trace()

  def on_train_batch_end(self, batch, logs=None):
    if self._should_write_train_graph:
      # Written here (not in set_model) because train_function only exists
      # after the first batch has run.
      self._write_keras_model_train_graph()
      self._should_write_train_graph = False
    if not self._should_trace:
      return

    if self._is_tracing and self._global_train_batch >= self._stop_batch:
      self._stop_trace()

  def on_epoch_begin(self, epoch, logs=None):
    # Keeps track of epoch for profiling.
    self._epoch = epoch

  def on_epoch_end(self, epoch, logs=None):
    """Runs metrics and histogram summaries at epoch end."""
    self._log_epoch_metrics(epoch, logs)

    if self.histogram_freq and epoch % self.histogram_freq == 0:
      self._log_weights(epoch)

    if self.embeddings_freq and epoch % self.embeddings_freq == 0:
      self._log_embeddings(epoch)

  def _start_trace(self):
    # Begins graph tracing and the profiler; ended by `_stop_trace`.
    summary_ops_v2.trace_on(graph=True, profiler=False)
    profiler.start(logdir=self._train_dir)
    self._is_tracing = True

  def _stop_trace(self, batch=None):
    """Logs the trace graph to TensorBoard."""
    if batch is None:
      batch = self._stop_batch
    with self._train_writer.as_default():
      with summary_ops_v2.always_record_summaries():
        # TODO(b/126388999): Remove step info in the summary name.
        summary_ops_v2.trace_export(name='batch_%d' % batch, step=batch)
    profiler.stop()
    self._is_tracing = False

  def _collect_learning_rate(self, logs):
    # Adds the current learning rate to `logs` when the optimizer uses a
    # LearningRateSchedule (so it shows up as an epoch scalar).
    lr_schedule = getattr(self.model.optimizer, 'lr', None)
    if isinstance(lr_schedule, learning_rate_schedule.LearningRateSchedule):
      logs['learning_rate'] = lr_schedule(self.model.optimizer.iterations)
    return logs

  def _log_epoch_metrics(self, epoch, logs):
    """Writes epoch metrics out as scalar summaries.

    Arguments:
      epoch: Int. The global step to use for TensorBoard.
      logs: Dict. Keys are scalar summary names, values are scalars.
    """
    if not logs:
      return

    # Metrics prefixed with 'val_' go to the validation writer; the rest go
    # to the train writer.
    train_logs = {k: v for k, v in logs.items() if not k.startswith('val_')}
    val_logs = {k: v for k, v in logs.items() if k.startswith('val_')}
    train_logs = self._collect_learning_rate(train_logs)

    with summary_ops_v2.always_record_summaries():
      if train_logs:
        with self._train_writer.as_default():
          for name, value in train_logs.items():
            summary_ops_v2.scalar('epoch_' + name, value, step=epoch)
      if val_logs:
        with self._val_writer.as_default():
          for name, value in val_logs.items():
            name = name[4:]  # Remove 'val_' prefix.
            summary_ops_v2.scalar('epoch_' + name, value, step=epoch)

  def _log_weights(self, epoch):
    """Logs the weights of the Model to TensorBoard."""
    with self._train_writer.as_default():
      with summary_ops_v2.always_record_summaries():
        for layer in self.model.layers:
          for weight in layer.weights:
            # ':' is not a valid summary-name character; normalize it.
            weight_name = weight.name.replace(':', '_')
            summary_ops_v2.histogram(weight_name, weight, step=epoch)
            if self.write_images:
              self._log_weight_as_image(weight, weight_name, epoch)
        self._train_writer.flush()

  def _log_weight_as_image(self, weight, weight_name, epoch):
    """Logs a weight as a TensorBoard image."""
    # Reshape the weight into a 4-D [batch, height, width, channels] tensor
    # as required by the image summary, handling each rank separately.
    w_img = array_ops.squeeze(weight)
    shape = K.int_shape(w_img)
    if len(shape) == 1:  # Bias case
      w_img = array_ops.reshape(w_img, [1, shape[0], 1, 1])
    elif len(shape) == 2:  # Dense layer kernel case
      if shape[0] > shape[1]:
        w_img = array_ops.transpose(w_img)
        shape = K.int_shape(w_img)
      w_img = array_ops.reshape(w_img, [1, shape[0], shape[1], 1])
    elif len(shape) == 3:  # ConvNet case
      if K.image_data_format() == 'channels_last':
        # Switch to channels_first to display every kernel as a separate
        # image.
        w_img = array_ops.transpose(w_img, perm=[2, 0, 1])
        shape = K.int_shape(w_img)
      w_img = array_ops.reshape(w_img, [shape[0], shape[1], shape[2], 1])
    shape = K.int_shape(w_img)
    # Not possible to handle 3D convnets etc.
    if len(shape) == 4 and shape[-1] in [1, 3, 4]:
      summary_ops_v2.image(weight_name, w_img, step=epoch)

  def _log_embeddings(self, epoch):
    # Saves the full model weights as the embedding checkpoint for this epoch.
    embeddings_ckpt = os.path.join(self._log_write_dir, 'train',
                                   'keras_embedding.ckpt-{}'.format(epoch))
    self.model.save_weights(embeddings_ckpt)
@keras_export('keras.callbacks.ReduceLROnPlateau')
class ReduceLROnPlateau(Callback):
"""Reduce learning rate when a metric has stopped improving.
Models often benefit from reducing the learning rate by a factor
of 2-10 once learning stagnates. This callback monitors a
quantity and if no improvement is seen for a 'patience' number
of epochs, the learning rate is reduced.
Example:
```python
reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.2,
patience=5, min_lr=0.001)
model.fit(X_train, Y_train, callbacks=[reduce_lr])
```
Arguments:
monitor: quantity to be monitored.
factor: factor by which the learning rate will be reduced.
`new_lr = lr * factor`.
patience: number of epochs with no improvement after which learning rate
will be reduced.
verbose: int. 0: quiet, 1: update messages.
mode: one of `{'auto', 'min', 'max'}`. In `'min'` mode,
the learning rate will be reduced when the
quantity monitored has stopped decreasing; in `'max'` mode it will be
reduced when the quantity monitored has stopped increasing; in `'auto'`
mode, the direction is automatically inferred from the name of the
monitored quantity.
min_delta: threshold for measuring the new optimum, to only focus on
significant changes.
cooldown: number of epochs to wait before resuming normal operation after
lr has been reduced.
min_lr: lower bound on the learning rate.
"""
def __init__(self,
monitor='val_loss',
factor=0.1,
patience=10,
verbose=0,
mode='auto',
min_delta=1e-4,
cooldown=0,
min_lr=0,
**kwargs):
super(ReduceLROnPlateau, self).__init__()
self.monitor = monitor
if factor >= 1.0:
raise ValueError('ReduceLROnPlateau ' 'does not support a factor >= 1.0.')
if 'epsilon' in kwargs:
min_delta = kwargs.pop('epsilon')
logging.warning('`epsilon` argument is deprecated and '
'will be removed, use `min_delta` instead.')
self.factor = factor
self.min_lr = min_lr
self.min_delta = min_delta
self.patience = patience
self.verbose = verbose
self.cooldown = cooldown
self.cooldown_counter = 0 # Cooldown counter.
self.wait = 0
self.best = 0
self.mode = mode
self.monitor_op = None
self._reset()
def _reset(self):
"""Resets wait counter and cooldown counter.
"""
if self.mode not in ['auto', 'min', 'max']:
logging.warning('Learning rate reduction mode %s is unknown, '
'fallback to auto mode.', self.mode)
self.mode = 'auto'
if (self.mode == 'min' or
(self.mode == 'auto' and 'acc' not in self.monitor)):
self.monitor_op = lambda a, b: np.less(a, b - self.min_delta)
self.best = np.Inf
else:
self.monitor_op = lambda a, b: np.greater(a, b + self.min_delta)
self.best = -np.Inf
self.cooldown_counter = 0
self.wait = 0
def on_train_begin(self, logs=None):
self._reset()
def on_epoch_end(self, epoch, logs=None):
    """Check the monitored metric and reduce the learning rate on plateau."""
    logs = logs or {}
    # Expose the current learning rate to downstream callbacks/loggers.
    logs['lr'] = K.get_value(self.model.optimizer.lr)
    current = logs.get(self.monitor)
    if current is None:
        logging.warning('Learning rate reduction is conditioned on metric `%s` '
                        'which is not available. Available metrics are: %s',
                        self.monitor, ','.join(list(logs.keys())))
    else:
        # While cooling down, count the epoch but do not accumulate patience.
        if self.in_cooldown():
            self.cooldown_counter -= 1
            self.wait = 0
        if self.monitor_op(current, self.best):
            # Improvement: remember it and restart the patience countdown.
            self.best = current
            self.wait = 0
        elif not self.in_cooldown():
            self.wait += 1
            if self.wait >= self.patience:
                old_lr = float(K.get_value(self.model.optimizer.lr))
                if old_lr > self.min_lr:
                    # Reduce, but never below the configured floor.
                    new_lr = old_lr * self.factor
                    new_lr = max(new_lr, self.min_lr)
                    K.set_value(self.model.optimizer.lr, new_lr)
                    if self.verbose > 0:
                        print('\nEpoch %05d: ReduceLROnPlateau reducing learning '
                              'rate to %s.' % (epoch + 1, new_lr))
                    self.cooldown_counter = self.cooldown
                    self.wait = 0
def in_cooldown(self):
    """Return True while the post-reduction cooldown period is active."""
    remaining = self.cooldown_counter
    return remaining > 0
@keras_export('keras.callbacks.CSVLogger')
class CSVLogger(Callback):
    """Callback that streams epoch results to a CSV file.

    Supports all values that can be represented as a string,
    including 1D iterables such as `np.ndarray`.

    Example:
    ```python
    csv_logger = CSVLogger('training.log')
    model.fit(X_train, Y_train, callbacks=[csv_logger])
    ```

    Arguments:
      filename: Filename of the CSV file, e.g. `'run/log.csv'`.
      separator: String used to separate elements in the CSV file.
      append: Boolean. True: append if file exists (useful for continuing
        training). False: overwrite existing file.
    """
    def __init__(self, filename, separator=',', append=False):
        self.sep = separator
        self.filename = path_to_string(filename)
        self.append = append
        self.writer = None  # csv.DictWriter, created lazily on first epoch end.
        self.keys = None  # Sorted metric names, frozen after the first epoch.
        self.append_header = True
        # Python 2's csv module needs binary mode; Python 3 needs text mode
        # with explicit newline handling.
        if six.PY2:
            self.file_flags = 'b'
            self._open_args = {}
        else:
            self.file_flags = ''
            self._open_args = {'newline': '\n'}
        super(CSVLogger, self).__init__()
    def on_train_begin(self, logs=None):
        # When appending to an existing non-empty file, skip the header row.
        if self.append:
            if file_io.file_exists_v2(self.filename):
                with open(self.filename, 'r' + self.file_flags) as f:
                    self.append_header = not bool(len(f.readline()))
            mode = 'a'
        else:
            mode = 'w'
        self.csv_file = io.open(self.filename,
                                mode + self.file_flags,
                                **self._open_args)
    def on_epoch_end(self, epoch, logs=None):
        logs = logs or {}
        def handle_value(k):
            # Render 1-D iterables as a quoted "[a, b, ...]" cell; strings and
            # scalars (including 0-d ndarrays) pass through unchanged.
            is_zero_dim_ndarray = isinstance(k, np.ndarray) and k.ndim == 0
            if isinstance(k, six.string_types):
                return k
            elif isinstance(k, collections_abc.Iterable) and not is_zero_dim_ndarray:
                return '"[%s]"' % (', '.join(map(str, k)))
            else:
                return k
        if self.keys is None:
            self.keys = sorted(logs.keys())
        if self.model.stop_training:
            # We set NA so that csv parsers do not fail for this last epoch.
            logs = dict((k, logs[k]) if k in logs else (k, 'NA') for k in self.keys)
        if not self.writer:
            # First row: build the writer with a dialect using the separator.
            class CustomDialect(csv.excel):
                delimiter = self.sep
            fieldnames = ['epoch'] + self.keys
            if six.PY2:
                fieldnames = [unicode(x) for x in fieldnames]
            self.writer = csv.DictWriter(
                self.csv_file,
                fieldnames=fieldnames,
                dialect=CustomDialect)
            if self.append_header:
                self.writer.writeheader()
        row_dict = collections.OrderedDict({'epoch': epoch})
        row_dict.update((key, handle_value(logs[key])) for key in self.keys)
        self.writer.writerow(row_dict)
        # Flush so the log survives a crash mid-training.
        self.csv_file.flush()
    def on_train_end(self, logs=None):
        self.csv_file.close()
        self.writer = None
@keras_export('keras.callbacks.LambdaCallback')
class LambdaCallback(Callback):
    r"""Callback for creating simple, custom callbacks on-the-fly.

    Each constructor argument, when given, is installed as the hook of the
    same name; hooks left as None become do-nothing functions, so every
    hook is always safe to call.  The hooks take positional arguments:

    - `on_epoch_begin` and `on_epoch_end`: `epoch`, `logs`
    - `on_batch_begin` and `on_batch_end`: `batch`, `logs`
    - `on_train_begin` and `on_train_end`: `logs`

    Example:

    ```python
    # Print the batch number at the beginning of every batch.
    batch_print_callback = LambdaCallback(
        on_batch_begin=lambda batch, logs: print(batch))

    # Stream the epoch loss to a file in JSON format (one object per line).
    import json
    json_log = open('loss_log.json', mode='wt', buffering=1)
    json_logging_callback = LambdaCallback(
        on_epoch_end=lambda epoch, logs: json_log.write(
            json.dumps({'epoch': epoch, 'loss': logs['loss']}) + '\n'),
        on_train_end=lambda logs: json_log.close()
    )

    model.fit(...,
              callbacks=[batch_print_callback,
                         json_logging_callback])
    ```
    """

    def __init__(self,
                 on_epoch_begin=None,
                 on_epoch_end=None,
                 on_batch_begin=None,
                 on_batch_end=None,
                 on_train_begin=None,
                 on_train_end=None,
                 **kwargs):
        super(LambdaCallback, self).__init__()
        self.__dict__.update(kwargs)
        # Install each user hook, or a no-op with the matching arity.
        self.on_epoch_begin = (on_epoch_begin if on_epoch_begin is not None
                               else lambda epoch, logs: None)
        self.on_epoch_end = (on_epoch_end if on_epoch_end is not None
                             else lambda epoch, logs: None)
        self.on_batch_begin = (on_batch_begin if on_batch_begin is not None
                               else lambda batch, logs: None)
        self.on_batch_end = (on_batch_end if on_batch_end is not None
                             else lambda batch, logs: None)
        self.on_train_begin = (on_train_begin if on_train_begin is not None
                               else lambda logs: None)
        self.on_train_end = (on_train_end if on_train_end is not None
                             else lambda logs: None)
| {
"content_hash": "b5ae00635a1373d0a45a97be772da8b9",
"timestamp": "",
"source": "github",
"line_count": 2653,
"max_line_length": 112,
"avg_line_length": 37.59027516019601,
"alnum_prop": 0.6502150871880233,
"repo_name": "aldian/tensorflow",
"id": "7b1cc291be6b1dd086596cac2048c177fd2f9aa6",
"size": "100498",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/keras/callbacks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "8458"
},
{
"name": "C",
"bytes": "201402"
},
{
"name": "C++",
"bytes": "29667924"
},
{
"name": "CMake",
"bytes": "647100"
},
{
"name": "Go",
"bytes": "976514"
},
{
"name": "Java",
"bytes": "412117"
},
{
"name": "Jupyter Notebook",
"bytes": "1833675"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "38128"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "63210"
},
{
"name": "Perl",
"bytes": "6715"
},
{
"name": "Protocol Buffer",
"bytes": "275733"
},
{
"name": "PureBasic",
"bytes": "24932"
},
{
"name": "Python",
"bytes": "26424665"
},
{
"name": "Ruby",
"bytes": "327"
},
{
"name": "Shell",
"bytes": "373109"
}
],
"symlink_target": ""
} |
'''
Copyright (C) 2022, WAFW00F Developers.
See the LICENSE file for copying permission.
'''
# Human-readable product name reported when this WAF is detected.
NAME = 'ServerDefender VP (Port80 Software)'

def is_waf(self):
    """Detect ServerDefender VP via its X-Pint response header.

    The regex matches header values containing "p80" or "port-80".
    """
    schemes = [
        self.matchHeader(('X-Pint', r'p(ort\-)?80'))
    ]
    if any(i for i in schemes):
        return True
    return False | {
"content_hash": "60b44b6383a457c40fe7033a03ee5909",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 52,
"avg_line_length": 20.133333333333333,
"alnum_prop": 0.6225165562913907,
"repo_name": "EnableSecurity/wafw00f",
"id": "e46182136778cb9f9907017cebe47654541763e9",
"size": "324",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wafw00f/plugins/serverdefender.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "97"
},
{
"name": "Makefile",
"bytes": "339"
},
{
"name": "Python",
"bytes": "109477"
}
],
"symlink_target": ""
} |
'''
dns_sprockets_version - Contains version for dns_sprockets.
-----------------------------------------------------------
.. Copyright (c) 2015 Neustar, Inc. All rights reserved.
.. See COPYRIGHT.txt for full notice. See LICENSE.txt for terms and conditions.
'''
# Single source of truth for the package version string.
VERSION = '2.0.0'
# end of file
| {
"content_hash": "8133def4a2fb7866622fbe6c28b1f0ef",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 80,
"avg_line_length": 27.363636363636363,
"alnum_prop": 0.5681063122923588,
"repo_name": "ultradns/dns_sprockets",
"id": "ad7ef352584c2b30e6ce5e886d83f2cd30410eb3",
"size": "301",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dns_sprockets_version.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "DIGITAL Command Language",
"bytes": "2920"
},
{
"name": "Python",
"bytes": "127740"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import unittest
import numpy as np
import pydrake
import os.path
class TestRBTCoM(unittest.TestCase):
    """Checks center-of-mass computation on the bundled Pendulum model."""
    def testCoM0(self):
        # Build the rigid-body tree from the Pendulum example URDF.
        r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(), "examples/Pendulum/Pendulum.urdf"))
        # Kinematics at zero positions and velocities (7-element vectors).
        kinsol = r.doKinematics(np.zeros((7,1)), np.zeros((7,1)))
        c = r.centerOfMass(kinsol)
        # Expected CoM for the model at the zero configuration, to 1e-4.
        self.assertTrue(np.allclose(c.flat, [0.0, 0.0, -0.2425], atol=1e-4))
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main() | {
"content_hash": "b994a7d3bffbac69d5f09adad0371c5d",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 113,
"avg_line_length": 26.736842105263158,
"alnum_prop": 0.6633858267716536,
"repo_name": "hanssusilo/drake",
"id": "69035c415ffe72b6a8eb5b076ce6e80cba78799c",
"size": "508",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "drake/bindings/python/pydrake/test/testRBTCoM.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "21125"
},
{
"name": "C++",
"bytes": "1740586"
},
{
"name": "CMake",
"bytes": "74933"
},
{
"name": "CSS",
"bytes": "8598"
},
{
"name": "HTML",
"bytes": "19158"
},
{
"name": "Java",
"bytes": "29789"
},
{
"name": "M",
"bytes": "19640"
},
{
"name": "Makefile",
"bytes": "4987"
},
{
"name": "Mathematica",
"bytes": "121"
},
{
"name": "Matlab",
"bytes": "4503264"
},
{
"name": "Objective-C",
"bytes": "2755"
},
{
"name": "Perl",
"bytes": "22865"
},
{
"name": "Python",
"bytes": "26868"
},
{
"name": "Shell",
"bytes": "6996"
},
{
"name": "TeX",
"bytes": "45762"
}
],
"symlink_target": ""
} |
import os
import re
import shutil
import subprocess
import sys
import tempfile
import time
import traceback

import tornado.ioloop
import tornado.web

from gitsync import GitSync
from gdrivesync import GDriveSync
# Last directory browsed, relative to cfg['home_folder'].  Module-global
# mutable state shared by the Browse and Upload handlers.
rel_dir = '/'
def read_config():
    """Load the tornado config dict from conf/tornado.conf.

    NOTE(review): the file is parsed with eval(), which executes arbitrary
    expressions -- acceptable only for a trusted local config file.
    """
    with open("conf/tornado.conf") as handle:
        config = eval(handle.read())
    config['home_folder'] = os.path.expanduser(config['home_folder'])
    return config
def rendertpl(rqst, tpl, **kwargs):
    """Render template `tpl` from the www directory on handler `rqst`."""
    template_path = "../www/" + tpl
    rqst.render(template_path, **kwargs)
def log_info(s):
    """Print `s` to stdout prefixed with a UTC timestamp, then flush."""
    timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
    print(timestamp + " " + s)
    sys.stdout.flush()
class PingHandler(tornado.web.RequestHandler):
    """Health-check endpoint: renders the static ping template."""
    def get(self):
        log_info('pinghandler ' + str(self.request.uri))
        rendertpl(self, "ping.tpl")
class BrowseHandler(tornado.web.RequestHandler):
    """Directory listing and file download under cfg['home_folder'].

    GET with ?fetch=<name> streams the named file from the last browsed
    directory; otherwise renders the listing for ?rel_dir=<path>.
    """
    def get(self):
        log_info('browsehandler ' + str(self.request.uri))
        # rel_dir is module-global state: the listing view updates it and
        # the download/upload paths reuse the last browsed directory.
        global rel_dir
        fetch_file = self.get_argument('fetch', None)
        if None != fetch_file:
            # NOTE(review): stripping '..' and '//' is a weak path-traversal
            # guard; prefer os.path.normpath plus a prefix check on the
            # resolved path.
            fetch_file = fetch_file.replace('..', '')
            fetch_file = fetch_file.replace('//', '/')
            file_name = os.path.join(cfg['home_folder'], rel_dir[1:], fetch_file)
            self.set_header('Content-Type', 'application/octet-stream')
            self.set_header('Content-Disposition', 'attachment; filename=' + fetch_file)
            # Stream the file in 12 KB chunks to bound memory use.
            with open(file_name, 'r') as f:
                while True:
                    data = f.read(1024 * 12)
                    if not data:
                        break
                    self.write(data)
            self.finish()
        else:
            rel_dir = self.get_argument('rel_dir', '/')
            rel_dir = rel_dir.replace('..', '')
            rel_dir = rel_dir.replace('//', '/')
            if rel_dir != '/':
                # Compute the parent-directory link shown in the listing.
                prev_dir_comps = filter(None, rel_dir.split('/'))
                l = len(prev_dir_comps)
                if l > 0:
                    prev_dir_comps.pop()
                    l -= 1
                prev_dir = '/'
                if l > 0:
                    prev_dir = prev_dir + '/'.join(prev_dir_comps) + '/'
            else:
                prev_dir = ''
            wdir = os.path.join(cfg['home_folder'], rel_dir[1:])
            # Split visible entries into files and folders, hiding dotfiles.
            files = []
            folders = []
            for fname in os.listdir(wdir):
                if fname.startswith('.'):
                    continue
                full_fname = os.path.join(wdir, fname)
                if os.path.isdir(full_fname):
                    folders.append(fname)
                else:
                    files.append(fname)
            rendertpl(self, "upload.tpl", prevdir=prev_dir, currdir=rel_dir, files=files, folders=folders,
                      nfolders=len(folders), nfiles=len(files))
class UploadHandler(tornado.web.RequestHandler):
    """Saves uploaded files into the directory last browsed (module rel_dir)."""
    def post(self):
        log_info('uploadhandler ' + str(self.request.uri))
        for f in self.request.files['file']:
            file_name = f.filename
            file_contents = f.body
            # NOTE(review): file_name comes from the client and is joined
            # into the path unsanitized -- verify traversal is handled
            # elsewhere before trusting this endpoint.
            with open(os.path.join(cfg['home_folder'], rel_dir[1:], file_name), "wb") as fw:
                fw.write(file_contents)
        self.finish()
class SSHKeyHandler(tornado.web.RequestHandler):
    """Returns the user's SSH public key as JSON: {'code': 0, 'data': key}."""
    def get(self):
        with open(os.path.expanduser('~/.ssh/id_rsa.pub'), "r") as f:
            response = {
                'code': 0,
                'data': f.read()
            }
        self.write(response)
class PkgInfoHandler(tornado.web.RequestHandler):
    """Returns the package list for a Julia version as JSON."""
    def get(self):
        # NOTE(review): `ver` is user-supplied and interpolated into the
        # path unsanitized -- a traversal-style value could read other files.
        ver = self.get_argument('ver')
        with open('/opt/julia_packages/' + ver + '_packages.txt', "r") as f:
            response = {
                'code': 0,
                'data': f.read()
            }
        self.write(response)
class SyncHandler(tornado.web.RequestHandler):
    """Add/delete/sync git and Google Drive repositories under LOC.

    GET renders the sync page; POST dispatches the action named by the
    `action` argument and replies with JSON {'code': retcode, 'data': ''}.
    """

    # Root folder under which repositories are cloned and scanned.
    LOC = '~/'
    # LOC = '/tmp/x'
    DEFAULT_BRANCH = 'master'

    def get(self):
        """Render the sync page listing all discovered git/gdrive repos."""
        log_info('synchandler ' + str(self.request.uri))
        gitrepos = SyncHandler.get_git_repos()
        gdrive_repos = SyncHandler.get_gdrive_repos()
        rendertpl(self, "sync.tpl", gitrepos=gitrepos, gdrive_repos=gdrive_repos)

    def post(self):
        """Dispatch one of the add/del/sync actions; -1 signals failure."""
        log_info('synchandler ' + str(self.request.uri))
        action = self.get_argument('action', None)
        retcode = 0
        if action is not None:
            try:
                if action == 'addgdrive':
                    retcode = self.action_addgdrive()
                elif action == 'delgdrive':
                    retcode = self.action_delgdrive()
                elif action == 'syncgdrive':
                    retcode = self.action_syncgdrive()
                elif action == "addgit":
                    retcode = self.action_addgit()
                elif action == 'delgit':
                    retcode = self.action_delgit()
                elif action == 'syncgit':
                    retcode = self.action_syncgit()
            except:
                # TODO: handle auth tok expiry and send out separate error code
                traceback.print_exc()
                retcode = -1
        response = {'code': retcode, 'data': ''}
        self.write(response)

    def action_addgdrive(self):
        """Clone a Google Drive folder below LOC."""
        self.set_gdrive_auth_tok()
        retcode = 0
        gfolder = self.get_argument('repo', '').strip()
        loc = SyncHandler.sanitize_loc(self.get_argument('loc', '').strip())
        loc = os.path.join(os.path.expanduser(SyncHandler.LOC), loc)
        GDriveSync.clone(gfolder, loc, True)
        return retcode

    def action_delgdrive(self):
        """Delete a cloned Google Drive folder from disk."""
        self.set_gdrive_auth_tok()
        repo_id = self.get_argument('repo', None)
        repo = SyncHandler.get_gdrive_repo(repo_id)
        if (repo is not None) and os.path.exists(repo.loc):
            shutil.rmtree(repo.loc)
        return 0

    def action_syncgdrive(self):
        """Synchronize one Google Drive folder."""
        self.set_gdrive_auth_tok()
        retcode = 0
        repo_id = self.get_argument('repo', None)
        repo = SyncHandler.get_gdrive_repo(repo_id)
        if repo is not None:
            repo.sync()
        return retcode

    def action_syncgit(self):
        """Synchronize one git repo; returns 1 when the sync had conflicts."""
        retcode = 0
        repo_id = self.get_argument('repo', None)
        gitrepo = self.get_git_repo(repo_id)
        if gitrepo is not None:
            if gitrepo.sync():
                log_info('conflicts during sync of repo ' + gitrepo.repo_name())
                retcode = 1  # has conflicts
        return retcode

    def action_delgit(self):
        """Delete a cloned git repo from disk."""
        repo_id = self.get_argument('repo', None)
        gitrepo = self.get_git_repo(repo_id)
        if (gitrepo is not None) and os.path.exists(gitrepo.loc):
            shutil.rmtree(gitrepo.loc)
        return 0

    def action_addgit(self):
        """Clone a git repo below LOC and check out the requested branch.

        Returns -1 on error, 1 for https URLs, 0 otherwise.
        """
        retcode = 0
        git_url = self.get_argument('repo', '').strip()
        git_branch = self.get_argument('branch', '').strip()
        loc = SyncHandler.sanitize_loc(self.get_argument('loc', '').strip())
        if len(git_url) == 0:
            retcode = -1
        # ssh URLs need the host key registered before git can connect.
        if (retcode == 0) and (not git_url.startswith('https://')) and (SyncHandler.add_to_ssh_knownhosts(git_url) < 0):
            retcode = -1
        if retcode == 0:
            if len(git_branch) == 0:
                git_branch = SyncHandler.DEFAULT_BRANCH
            if len(loc) == 0:
                # Default checkout folder: repo name taken from the URL.
                loc = git_url[(git_url.rindex('/') + 1):git_url.rindex('.')]
            loc = os.path.join(os.path.expanduser(SyncHandler.LOC), loc)
            gs = GitSync.clone(git_url, loc, True)
            gs.checkout(git_branch, from_remote=True)
            if git_url.startswith('https://'):
                retcode = 1
        return retcode

    @staticmethod
    def add_to_ssh_knownhosts(git_url):
        """Ensure the git host's keys are in ~/.ssh/known_hosts.

        Returns 1 if the host was already present, 0 after adding it,
        -1 if ssh-keyscan produced no output.
        """
        hostname = git_url.split('@')[1].split(':')[0]
        khfile = os.path.expanduser('~/.ssh/known_hosts')
        fopenmode = 'w'
        if os.path.exists(khfile):
            fopenmode = 'a'
            # Bug fix: the file used to be opened for reading even when it
            # did not exist, raising IOError on a machine with no
            # known_hosts file.  Only read it when it is actually there.
            with open(khfile) as f:
                lines = f.readlines()
            for line in lines:
                if hostname in line:
                    return 1
        # List-form invocation avoids shell injection via the user-supplied
        # repo URL (the previous os.popen call interpolated hostname into a
        # shell command line).
        keyscan = subprocess.Popen(['ssh-keyscan', '-t', 'rsa,dsa', hostname],
                                   stdout=subprocess.PIPE)
        hostname_lines = keyscan.stdout.readlines()
        keyscan.wait()
        if len(hostname_lines) == 0:
            log_info('ssh-keyscan failed')
            return -1
        with open(khfile, fopenmode) as f:
            for line in hostname_lines:
                f.write(line)
        if fopenmode == 'w':
            os.chmod(khfile, 0o644)  # 0o644 literal is valid on Python 2.6+ and 3.
        return 0

    @staticmethod
    def get_git_repos():
        """Scan LOC and return {repo_hash: GitSync} for every git repo."""
        gitrepo_paths = GitSync.scan_repo_paths([os.path.expanduser(SyncHandler.LOC)])
        gitrepos = {}
        for repopath in gitrepo_paths:
            gs = GitSync(repopath)
            gitrepos[gs.repo_hash()] = gs
        return gitrepos

    @staticmethod
    def get_git_repo(repokey, gitrepos=None):
        """Return the GitSync for repokey, or None if unknown."""
        if gitrepos is None:
            gitrepos = SyncHandler.get_git_repos()
        return gitrepos.get(repokey)

    @staticmethod
    def get_gdrive_repos():
        """Scan LOC and return {repo_hash: GDriveSync} for every gdrive repo."""
        gdriverepo_paths = GDriveSync.scan_repo_paths([os.path.expanduser(SyncHandler.LOC)])
        gdriverepos = {}
        for repopath in gdriverepo_paths:
            gs = GDriveSync(repopath)
            gdriverepos[gs.repo_hash()] = gs
        return gdriverepos

    @staticmethod
    def get_gdrive_repo(repokey, gdriverepos=None):
        """Return the GDriveSync for repokey, or None if unknown."""
        if gdriverepos is None:
            gdriverepos = SyncHandler.get_gdrive_repos()
        return gdriverepos.get(repokey)

    def set_gdrive_auth_tok(self):
        """Refresh Google Drive credentials from the `gauth` argument, if any."""
        gauth = self.get_argument('gauth', '')
        if len(gauth) > 0:
            GDriveSync.init_creds(gauth)

    @staticmethod
    def sanitize_loc(loc):
        """Strip leading dots, slashes and backslashes from a checkout location."""
        return re.sub(r'^[\.\\\/]*', '', loc)
# Parsed configuration, loaded once at import time.
cfg = read_config()

if __name__ == "__main__":
    # Route table for the file-management web app.
    application = tornado.web.Application([
        (r"/file-upload", UploadHandler),
        (r"/file-list", BrowseHandler),
        (r"/sync", SyncHandler),
        (r"/sshkey", SSHKeyHandler),
        (r"/pkginfo", PkgInfoHandler),
        (r"/", PingHandler)
    ])
    application.listen(cfg['port'])
    tornado.ioloop.IOLoop.instance().start()
| {
"content_hash": "37bf6e1027caa78f3c88311548db4d26",
"timestamp": "",
"source": "github",
"line_count": 302,
"max_line_length": 120,
"avg_line_length": 33.61258278145695,
"alnum_prop": 0.5332479558664172,
"repo_name": "mr-justin/JuliaBox",
"id": "f2c3b3cc60d53a15ed19991b901914dbb112728e",
"size": "10170",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "container/interactive/IJulia/tornado/src/fmanage.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6283"
},
{
"name": "HTML",
"bytes": "3261"
},
{
"name": "JavaScript",
"bytes": "54121"
},
{
"name": "Julia",
"bytes": "294"
},
{
"name": "Lua",
"bytes": "10378"
},
{
"name": "Makefile",
"bytes": "135"
},
{
"name": "Python",
"bytes": "408486"
},
{
"name": "Shell",
"bytes": "20351"
},
{
"name": "Smarty",
"bytes": "59656"
}
],
"symlink_target": ""
} |
def get_adaptor(wildcards):
    """Look up the adaptor configured for this population/library pair."""
    pop_match = samples["population"] == wildcards.population
    lib_match = samples["library"] == wildcards.library
    selection = samples[pop_match & lib_match]
    return selection["adaptor"].values.tolist()[0]
def get_phred(wildcards):
    """Look up the phred encoding configured for this population/library pair."""
    pop_match = samples["population"] == wildcards.population
    lib_match = samples["library"] == wildcards.library
    selection = samples[pop_match & lib_match]
    return selection["phred"].values.tolist()[0]
def get_trimmomatic_params(wildcards):
    """Return the extra Trimmomatic CLI flags from the params mapping."""
    extra_flags = params["trimmomatic"]["extra"]
    # The YAML file introduces a trailing newline; strip it off.
    return extra_flags.strip()
rule qc_trimmomatic:
    """Run Trimmomatic in paired-end mode.

    Eliminates Illumina adaptors and removes low-quality regions and reads.

    Inputs _1 and _2 are decompressed through gzip process substitutions.
    All four outputs (_1/_2 paired, _3/_4 unpaired) are recompressed with
    `gzip --fast`.  Sequences will be stored permanently later on as CRAM.
    """
    input:
        fwd = RAW + "{population}.{library}_1.fq.gz",
        rev = RAW + "{population}.{library}_2.fq.gz"
    output:
        # temp(): intermediates, deleted once downstream rules are done.
        fwd = temp(QC + "{population}.{library}_1.fq.gz"),
        rev = temp(QC + "{population}.{library}_2.fq.gz"),
        fwd_unp = temp(QC + "{population}.{library}_3.fq.gz"),
        rev_unp = temp(QC + "{population}.{library}_4.fq.gz")
    params:
        adaptor = get_adaptor,
        phred = get_phred,
        trimmomatic_params = get_trimmomatic_params
    log:
        QC + "{population}.{library}.trimmomatic_pe.log"
    benchmark:
        QC + "{population}.{library}.trimmomatic_pe.json"
    threads:
        4
    priority:
        50  # Do this and later the mappings
    conda:
        "qc.yml"
    shell:
        """
        trimmomatic PE \
            -threads {threads} \
            -{params.phred} \
            <(gzip --decompress --stdout {input.fwd}) \
            <(gzip --decompress --stdout {input.rev}) \
            >(gzip --fast > {output.fwd}) \
            >(gzip --fast > {output.fwd_unp}) \
            >(gzip --fast > {output.rev}) \
            >(gzip --fast > {output.rev_unp}) \
            ILLUMINACLIP:{params.adaptor}:2:30:10 \
            {params.trimmomatic_params} \
            2> {log} 1>&2
        """
rule qc:
    # Aggregate target: request the trimmed _1 file for every
    # (population, library) pair declared in the `samples` table.
    input:
        [
            QC + population + "." + library + "_1.fq.gz"
            for population, library in (
                samples
                [["population", "library"]]
                .values
                .tolist()
            )
        ]
| {
"content_hash": "ae8f375240105e4120b9a01f30f20cd7",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 75,
"avg_line_length": 28.50526315789474,
"alnum_prop": 0.5332348596750369,
"repo_name": "jlanga/smsk_popoolation",
"id": "93231866ee8bdd1be632573c299c8a326dac04e6",
"size": "2708",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/snakefiles/qc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "15487"
},
{
"name": "Perl",
"bytes": "607182"
},
{
"name": "Python",
"bytes": "39328"
},
{
"name": "R",
"bytes": "3647"
},
{
"name": "Shell",
"bytes": "2635"
}
],
"symlink_target": ""
} |
r"""
Format biological sequences (:mod:`skbio.format.sequences`)
===========================================================
.. currentmodule:: skbio.format.sequences
This module provides functions for writing sequence files in a variety of
different formats, the available formatters are listed below.
Functions
---------
.. autosummary::
:toctree: generated/
fasta_from_sequences
fasta_from_alignment
format_fastq_record
"""
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from .fasta import fasta_from_sequences, fasta_from_alignment
from .fastq import format_fastq_record
# Public API of this subpackage.
__all__ = ['fasta_from_sequences', 'fasta_from_alignment',
           'format_fastq_record']

# NOTE(review): numpy.testing.Tester was removed in later numpy releases;
# this test hook only works with numpy versions contemporary to this code.
from numpy.testing import Tester
test = Tester().test
| {
"content_hash": "b9e7cc29ca31b998b9ab7be26930ac53",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 78,
"avg_line_length": 27.736842105263158,
"alnum_prop": 0.5815939278937381,
"repo_name": "JWDebelius/scikit-bio",
"id": "343e40db52e5f9e0998315e0a27b3a2addd54562",
"size": "1054",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "skbio/format/sequences/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import os
import random
import tempfile
import unittest
from yalign.datatypes import Sentence
from yalign.evaluation import F_score
from yalign.wordpairscore import WordPairScore
from yalign.sequencealigner import SequenceAligner
from yalign.sentencepairscore import SentencePairScore
from yalign.input_conversion import parallel_corpus_to_documents
from yalign.train_data_generation import training_scrambling_from_documents, \
training_alignments_from_documents
from yalign.yalignmodel import YalignModel, random_sampling_maximizer, \
best_threshold, apply_threshold
class TestYalignModel(unittest.TestCase):
    """End-to-end tests for YalignModel built from a small parallel corpus."""
    def setUp(self):
        # Fixed seed so the scrambling/sampling below is reproducible.
        random.seed(hash("Y U NO?"))
        base_path = os.path.dirname(os.path.abspath(__file__))
        word_scores = os.path.join(base_path, "data", "test_word_scores_big.csv")
        parallel_corpus = os.path.join(base_path, "data", "parallel-en-es.txt")
        A, B = parallel_corpus_to_documents(parallel_corpus)
        # Keep the fixture small so the tests stay fast.
        A = A[:25]
        B = B[:25]
        self.alignments = list(training_alignments_from_documents(A, B))
        self.A, self.B, self.correct_alignments = \
            list(training_scrambling_from_documents(A, B))
        # Word score
        word_pair_score = WordPairScore(word_scores)
        # Sentence Score
        sentence_pair_score = SentencePairScore()
        sentence_pair_score.train(self.alignments, word_pair_score)
        # Yalign model
        self.min_ = sentence_pair_score.min_bound
        self.max_ = sentence_pair_score.max_bound
        gap_penalty = (self.min_ + self.max_) / 2.0
        document_aligner = SequenceAligner(sentence_pair_score, gap_penalty)
        self.model = YalignModel(document_aligner, 1)
    def test_save_file_created(self):
        # Saving must produce both the pickled aligner and its metadata file.
        tmp_folder = tempfile.mkdtemp()
        self.model.save(tmp_folder)
        model_path = os.path.join(tmp_folder, "aligner.pickle")
        metadata_path = os.path.join(tmp_folder, "metadata.json")
        self.assertTrue(os.path.exists(model_path))
        self.assertTrue(os.path.exists(metadata_path))
    def test_save_load_and_align(self):
        # A save/load round trip must preserve alignment results and the
        # tuned threshold/penalty parameters.
        doc1 = [Sentence([u"House"]),
                Sentence([u"asoidfhuioasgh"])]
        doc2 = [Sentence([u"Casa"])]
        result_before_save = self.model.align(doc1, doc2)
        # Save
        tmp_folder = tempfile.mkdtemp()
        self.model.save(tmp_folder)
        # Load
        new_model = YalignModel.load(tmp_folder)
        result_after_load = new_model.align(doc1, doc2)
        self.assertEqual(result_before_save, result_after_load)
        self.assertEqual(self.model.threshold, new_model.threshold)
        self.assertEqual(self.model.document_pair_aligner.penalty,
                         new_model.document_pair_aligner.penalty)
    def test_reasonable_alignment(self):
        # "House" should align with "Casa" given the word-scores fixture.
        doc1 = [Sentence([u"House"]),
                Sentence([u"asoidfhuioasgh"])]
        doc2 = [Sentence([u"Casa"])]
        result = self.model.align(doc1, doc2)
        result = [(list(x), list(y)) for x, y in result]
        self.assertIn((list(doc1[0]), list(doc2[0])), result)
    def test_optimize_gap_penalty_and_threshold_finishes(self):
        # Smoke test: optimization should terminate without raising.
        self.model.optimize_gap_penalty_and_threshold(self.A, self.B,
                                                      self.correct_alignments)
    def test_optimize_gap_penalty_and_threshold_is_best(self):
        def evaluate(penalty, threshold):
            # F-score of the model under the given knob settings.
            self.model.document_pair_aligner.penalty = penalty
            self.model.threshold = threshold
            predicted = self.model.align_indexes(self.A, self.B)
            return F_score(predicted, self.correct_alignments)[0]
        random.seed(hash("12345"))
        self.model.optimize_gap_penalty_and_threshold(self.A, self.B,
                                                      self.correct_alignments)
        best_score = evaluate(self.model.document_pair_aligner.penalty,
                              self.model.threshold)
        # No random probe should beat the optimized configuration.
        for _ in xrange(50):
            penalty = random.uniform(self.min_, self.max_ / 2.0)
            threshold = random.uniform(self.min_, self.max_)
            score = evaluate(penalty, threshold)
            self.assertGreaterEqual(best_score, score)
class TestOptimizers(unittest.TestCase):
    """Tests for random_sampling_maximizer, best_threshold, apply_threshold."""
    def test_random_sampling_maximizer_maximizes(self):
        def F(x):
            return x * x * x + 1
        random.seed(hash("Knock knock motherfucker"))
        # With enough samples the maximizer should land near the upper bound.
        score, x = random_sampling_maximizer(F, -1, 1, n=100)
        self.assertGreater(x, 0.9)
        self.assertGreater(score, F(0.9))
    def test_random_sampling_maximizer_more_is_better(self):
        def F(x):
            return -(x ** 0.5)
        random.seed(hash("Want some? get some!"))
        # More samples should yield a better (here: strictly greater) best.
        score_20, _ = random_sampling_maximizer(F, 5, 10, n=20)
        score_100, _ = random_sampling_maximizer(F, 5, 10, n=100)
        self.assertGreater(score_100, score_20)
    def test_best_threshold1(self):
        # Must not raise on an empty "real" alignment list.
        best_threshold([], [(0, 0, 0), (1, 1, 1)])
    def test_best_threshold2(self):
        score, threshold = best_threshold([(0, 0)],
                                          [(0, 0, 0), (1, 1, 1)])
        self.assertLess(threshold, 1)
        self.assertGreater(score, 0)
    def test_best_threshold3(self):
        # The score returned must dominate any randomly probed threshold.
        random.seed(hash("Son de plata y de acero, sivlerrrrhaaawks!"))
        real = [(i, i, random.random()) for i in xrange(100)]
        guess = random.sample(real, 50)
        real = [(a, b) for a, b, _ in real]
        best, _ = best_threshold(real, guess)
        for i in range(100):
            threshold = random.random()
            score = F_score(apply_threshold(guess, threshold), real)[0]
            self.assertLessEqual(score, best)
# Run this module's tests when executed directly.
if __name__ == "__main__":
    unittest.main() | {
| {
"content_hash": "e0f358573077f0aeae4b2df20ea2c937",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 81,
"avg_line_length": 41.85507246376812,
"alnum_prop": 0.614612188365651,
"repo_name": "pombredanne/yalign",
"id": "b073a267cf197d709472f75674f2b9401dcd6722",
"size": "5801",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "tests/test_yalignmodel.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "55117"
},
{
"name": "Python",
"bytes": "102053"
}
],
"symlink_target": ""
} |
"""
Baxter RSDK Inverse Kinematics Example
"""
import argparse
import struct
import sys
import rospy
from geometry_msgs.msg import (
PoseStamped,
Pose,
Point,
Quaternion,
)
from std_msgs.msg import Header
from baxter_core_msgs.srv import (
SolvePositionIK,
SolvePositionIKRequest,
)
def ik_test(limb):
    """Call Baxter's IK service with a canned Cartesian pose for `limb`.

    Prints the joint solution when one is found.  Returns 0 on a completed
    service call (valid or invalid pose), 1 when the service call fails.
    Note: Python 2 source (old-style except clause and print statements).
    """
    rospy.init_node("rsdk_ik_service_client")
    ns = "ExternalTools/" + limb + "/PositionKinematicsNode/IKService"
    iksvc = rospy.ServiceProxy(ns, SolvePositionIK)
    ikreq = SolvePositionIKRequest()
    hdr = Header(stamp=rospy.Time.now(), frame_id='base')
    # Hard-coded sample poses for each arm, expressed in the 'base' frame.
    poses = {
        'left': PoseStamped(
            header=hdr,
            pose=Pose(
                position=Point(
                    x=0.657579481614,
                    y=0.851981417433,
                    z=0.0388352386502,
                ),
                orientation=Quaternion(
                    x=-0.366894936773,
                    y=0.885980397775,
                    z=0.108155782462,
                    w=0.262162481772,
                ),
            ),
        ),
        'right': PoseStamped(
            header=hdr,
            pose=Pose(
                position=Point(
                    x=0.656982770038,
                    y=-0.852598021641,
                    z=0.0388609422173,
                ),
                orientation=Quaternion(
                    x=0.367048116303,
                    y=0.885911751787,
                    z=-0.108908281936,
                    w=0.261868353356,
                ),
            ),
        ),
    }
    ikreq.pose_stamp.append(poses[limb])
    try:
        rospy.wait_for_service(ns, 5.0)
        resp = iksvc(ikreq)
    except (rospy.ServiceException, rospy.ROSException), e:
        rospy.logerr("Service call failed: %s" % (e,))
        return 1
    # Check if result valid, and type of seed ultimately used to get solution
    # convert rospy's string representation of uint8[]'s to int's
    resp_seeds = struct.unpack('<%dB' % len(resp.result_type),
                               resp.result_type)
    if (resp_seeds[0] != resp.RESULT_INVALID):
        seed_str = {
            ikreq.SEED_USER: 'User Provided Seed',
            ikreq.SEED_CURRENT: 'Current Joint Angles',
            ikreq.SEED_NS_MAP: 'Nullspace Setpoints',
        }.get(resp_seeds[0], 'None')
        print("SUCCESS - Valid Joint Solution Found from Seed Type: %s" %
              (seed_str,))
        # Format solution into Limb API-compatible dictionary
        limb_joints = dict(zip(resp.joints[0].name, resp.joints[0].position))
        print "\nIK Joint Solution:\n", limb_joints
        print "------------------"
        print "Response Message:\n", resp
    else:
        print("INVALID POSE - No Valid Joint Solution Found.")
    return 0
def main():
    """RSDK Inverse Kinematics Example
    A simple example of using the Rethink Inverse Kinematics
    Service which returns the joint angles and validity for
    a requested Cartesian Pose.
    Run this example, passing the *limb* to test, and the
    example will call the Service with a sample Cartesian
    Pose, pre-defined in the example code, printing the
    response of whether a valid joint solution was found,
    and if so, the corresponding joint angles.
    """
    arg_fmt = argparse.RawDescriptionHelpFormatter
    parser = argparse.ArgumentParser(formatter_class=arg_fmt,
                                     description=main.__doc__)
    parser.add_argument(
        '-l', '--limb', choices=['left', 'right'], required=True,
        help="the limb to test"
    )
    # rospy.myargv strips ROS remapping arguments before argparse sees them.
    args = parser.parse_args(rospy.myargv()[1:])
    return ik_test(args.limb)
if __name__ == '__main__':
    # Exit with ik_test's return code (0 = success, 1 = service failure).
    sys.exit(main())
| {
"content_hash": "262de4b7ada61a193de037bf3b326811",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 77,
"avg_line_length": 31.15702479338843,
"alnum_prop": 0.553050397877984,
"repo_name": "u3099811/BaxterTictacToe",
"id": "d9850bbdd4fc57756cd49a862b23ff98a8f36138",
"size": "5347",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "src/baxter_examples/scripts/ik_service_client.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1667815"
},
{
"name": "C++",
"bytes": "768325"
},
{
"name": "CMake",
"bytes": "48663"
},
{
"name": "CSS",
"bytes": "11972"
},
{
"name": "HTML",
"bytes": "21143"
},
{
"name": "JavaScript",
"bytes": "31105"
},
{
"name": "Python",
"bytes": "503056"
},
{
"name": "Shell",
"bytes": "12242"
}
],
"symlink_target": ""
} |
import glob
import io
import re
from os.path import basename
from os.path import dirname
from os.path import join
from os.path import splitext
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
    """``python setup.py test`` command that delegates to py.test.

    Extra py.test arguments can be forwarded with
    ``python setup.py test -a "<args>"``.
    """
    user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
    def initialize_options(self):
        TestCommand.initialize_options(self)
        # Default: no extra arguments passed on to py.test.
        self.pytest_args = []
    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True
    def run_tests(self):
        # import here, cause outside the eggs aren't loaded
        import pytest
        # Exit with py.test's status code so CI sees failures.
        errno = pytest.main(self.pytest_args)
        sys.exit(errno)
def read(*names, **kwargs):
    """Read and return the full text of a file relative to this script.

    Args:
        *names: Path components joined onto this file's directory.
        **kwargs: Optional; supports ``encoding`` (default ``"utf8"``).

    Returns:
        The file's decoded contents as a single string.
    """
    # Use a context manager so the file handle is closed deterministically;
    # the original opened the file and never closed it.
    with io.open(
        join(dirname(__file__), *names),
        encoding=kwargs.get("encoding", "utf8")
    ) as handle:
        return handle.read()
# Package metadata and build configuration for the `envtool` distribution.
setup(
    name="envtool",
    version="0.1.0",
    license="BSD",
    description="A tool for managing envdirs and env files.",
    # Long description is README + CHANGELOG with Sphinx :obj: roles
    # rewritten as plain double-backtick literals for PyPI rendering.
    long_description="%s\n%s" % (read("README.rst"), re.sub(":obj:`~?(.*?)`", r"``\1``", read("CHANGELOG.rst"))),
    author="Mark Smith",
    author_email="mark.smith@practicalpoetry.co.uk",
    url="https://github.com/judy2k/envtool",
    # Ship every top-level .py file in the repo root as a module.
    py_modules=[splitext(basename(i))[0] for i in glob.glob("*.py")],
    include_package_data=True,
    zip_safe=False,
    classifiers=[
        # complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers
        "Development Status :: 2 - Pre-Alpha",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Operating System :: Unix",
        "Operating System :: POSIX",
        # "Operating System :: Microsoft :: Windows",
        "Environment :: Console",
        # "Intended Audience :: System Administrator",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: Implementation :: CPython",
        # "Programming Language :: Python :: Implementation :: PyPy",
        "Topic :: Utilities",
    ],
    keywords=[
        "environment", "envdir", "honcho", "foreman", "env",
    ],
    install_requires=[
        "future>=0.15.0",
        "click>=4.0.0",
    ],
    extras_require={
        # eg: 'rst': ["docutils>=0.11"],
    },
    # Expose the `envtool` console command.
    entry_points={
        "console_scripts": [
            "envtool=envtool:main",
        ]
    },
    # Wire `setup.py test` to the py.test runner defined above.
    cmdclass={'test': PyTest},
    tests_require=[
        "pytest>=2.7.2",
    ]
)
| {
"content_hash": "a40f133e2669debc9bc089fb6d5f4508",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 113,
"avg_line_length": 30.252747252747252,
"alnum_prop": 0.5935343261896113,
"repo_name": "judy2k/envtool",
"id": "6e986a9cbaaac1f5b4539a85733dd50434df02ce",
"size": "2779",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "8715"
}
],
"symlink_target": ""
} |
from chainer import backend
from chainer import function_node
from chainer.utils import type_check
class FlipLR(function_node.FunctionNode):
    """Flip array in the left/right direction (reverse along axis 1)."""
    def check_type_forward(self, in_types):
        # Expect exactly one input 'a': a float array with at least two
        # dimensions (axis 1 must exist to be flipped).
        type_check._argname(in_types, ('a',))
        a_type = in_types[0]
        type_check.expect(
            a_type.dtype.kind == 'f',
            a_type.ndim >= 2
        )
    def forward(self, inputs):
        # Dispatch to numpy or cupy depending on where the input lives.
        xp = backend.get_array_module(*inputs)
        return xp.fliplr(inputs[0]),
    def backward(self, indexes, grad_outputs):
        # fliplr is its own adjoint: the gradient is the incoming
        # gradient flipped back.
        return FlipLR().apply(grad_outputs)
def fliplr(a):
    """Flip array in the left/right direction.

    Args:
        a (~chainer.Variable): Input variable. Must be at least
            two-dimensional (checked by :class:`FlipLR`).

    Returns:
        ~chainer.Variable: Output variable with axis 1 reversed.
    """
    return FlipLR().apply((a,))[0]
| {
"content_hash": "eded300ab47b9e23647d75e1a1495808",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 49,
"avg_line_length": 23.027027027027028,
"alnum_prop": 0.6103286384976526,
"repo_name": "ktnyt/chainer",
"id": "97550c5d7e0a59f00ee514519c77809f57c271e1",
"size": "852",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chainer/functions/array/fliplr.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3368"
},
{
"name": "C",
"bytes": "70"
},
{
"name": "C++",
"bytes": "1440363"
},
{
"name": "CMake",
"bytes": "42822"
},
{
"name": "Cuda",
"bytes": "53858"
},
{
"name": "Dockerfile",
"bytes": "1242"
},
{
"name": "PowerShell",
"bytes": "7197"
},
{
"name": "Python",
"bytes": "5128330"
},
{
"name": "Shell",
"bytes": "19475"
}
],
"symlink_target": ""
} |
"""
Copyright 2014 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""A tool to show the difference between URLs.
This tool is inspired by the UNIX utility `diff`.
"""
import argparse
import base64
import copy
import logging
import sys
class Error(Exception):
  """Root of the url_diff exception hierarchy."""


class ParamDiffTypeError(Error):
  """Signals that an unrecognized diff-type constant was supplied."""


class HostnameParseError(Error):
  """Signals that a hostname could not be extracted from a URL."""
# TODO(macpd): investigate making this a namedtuple
class ParamDiffEntry(object):
  """Represents the difference of two URL params with the same name.

  Holds the parameter name, the values present only on the left, the
  values present only on the right, and a diff-type constant saying which
  side(s) differ.  ``str()`` renders a diff-like report where ``<`` lines
  come from the left URL and ``>`` lines from the right.
  """

  LEFT_ONLY = 1
  RIGHT_ONLY = 2
  BOTH_DIFFER = 3
  LEFT_HEADER_FORMAT = '{0}\n< {1}'
  LEFT_ADDITIONAL_SEPARATOR = '\n< '
  RIGHT_HEADER_FORMAT = '{0}\n> {1}'
  RIGHT_ADDITIONAL_SEPARATOR = '\n> '

  def __init__(self, name, left_value, right_value, diff_type):
    self._name = name
    self._left_val = list(left_value) if left_value else []
    self._right_val = list(right_value) if right_value else []
    try:
      if self._valid_diff_type(diff_type):
        self._type = diff_type
    except ParamDiffTypeError:
      # Fall back to the most verbose rendering instead of failing.
      logging.error("Incorrect diff type: %s", diff_type)
      self._type = self.BOTH_DIFFER

  def _valid_diff_type(self, diff_type):
    """Returns True if diff_type is a known constant, else raises."""
    if (diff_type != self.LEFT_ONLY and diff_type != self.RIGHT_ONLY and
        diff_type != self.BOTH_DIFFER):
      # BUG FIX: actually interpolate the offending value into the message.
      # The original passed the format string and the value as two separate
      # constructor arguments, so the '%s' was never filled in.
      raise ParamDiffTypeError('%s is not a valid diff type.' % (diff_type,))
    return True

  @property
  def name(self):
    """The URL parameter name this entry describes."""
    return self._name

  def __str__(self):
    ret = self._name
    if self._type == self.LEFT_ONLY or self._type == self.BOTH_DIFFER:
      ret = self.LEFT_HEADER_FORMAT.format(
          ret, self.LEFT_ADDITIONAL_SEPARATOR.join(self._left_val))
    if self._type == self.RIGHT_ONLY or self._type == self.BOTH_DIFFER:
      ret = self.RIGHT_HEADER_FORMAT.format(
          ret, self.RIGHT_ADDITIONAL_SEPARATOR.join(self._right_val))
    return ret
class UrlDiffer(object):
  """Object to diff URLs.

  Diffs the URLs upon initialization; inspect the result through
  ``str()``, ``are_different()`` and the ``diff`` property.
  """
  PATH_DELIM = '?'
  PARAM_DELIM = '&'
  NAME_VAL_DELIM = '='
  SCHEME_DELIM = '://'
  UNIX_SLASH = '/'
  URL_ESCAPE_CHAR = '%'
  URL_ESCAPE_SEQ_LEN = 3  # expected length of URL escape sequences, aka len('%25')

  def __init__(self, left_url, right_url, names_only=False, hostnames=False,
               url_decode_params=False, case_insensitive=False):
    """Initializes object and performs URL diffing.

    Args:
      left_url: String; first URL (the diff "left" side).
      right_url: String; second URL (the diff "right" side).
      names_only: Bool; if True, str() lists only differing param names.
      hostnames: Bool; if True, also diff the URL hostnames.
      url_decode_params: Bool; if True, %XX escapes in parameter names and
        values are decoded before comparison.
      case_insensitive: Bool; if True, both URLs are lowercased first.
    """
    self._left_url = self._normalize_url(left_url)
    self._right_url = self._normalize_url(right_url)
    self._names_only = names_only
    self._wants_hostname_diff = hostnames
    self._url_decode_params = url_decode_params
    self._case_insensitive = case_insensitive
    self._diffs = []
    self._do_diff()

  def __str__(self):
    ret = []
    for diff in self._diffs:
      if self._names_only:
        ret.append(diff.name)
      else:
        ret.append(str(diff))
    join_delim = '\n' if self._names_only else '\n\n'
    return join_delim.join(ret)

  def _normalize_url(self, url):
    """Strips surrounding whitespace and drops the fragment (chars after #)."""
    ret = url.strip()
    if '#' in ret:
      idx = ret.index('#')
      ret = ret[:idx]
    return ret

  def _get_hostname(self, url):
    """Parses the hostname from a URL.

    Finds the hostname between the scheme and the first unix slash.
    """
    if self.SCHEME_DELIM in url:
      scheme_idx = url.index(self.SCHEME_DELIM)
      hostname_begin = scheme_idx + len(self.SCHEME_DELIM)
    else:
      hostname_begin = 0
    if self.UNIX_SLASH in url[hostname_begin:]:
      hostname_end = url.index(self.UNIX_SLASH, hostname_begin)
    else:
      hostname_end = hostname_begin + len(url[hostname_begin:])
    return url[hostname_begin:hostname_end]

  def _diff_hostnames(self, left, right):
    """Diffs hostnames, if different appends ParamDiffEntry to diffs list.

    Args:
      left: String; left hostname.
      right: String; right hostname.

    Returns:
      Bool; True if different, else False.
    """
    if left == right:
      self._hostnames_differ = False
    else:
      self._hostnames_differ = True
      self._diffs.append(ParamDiffEntry('Hostname', [left], [right],
                                        ParamDiffEntry.BOTH_DIFFER))
    return self._hostnames_differ

  def _get_params(self, url):
    """Returns a dict of the url params.

    Args:
      url: String; URL to get parameter names and values from.

    Returns:
      Dict mapping parameter names to the list of their values.
    """
    param_dict = {}
    if self.PATH_DELIM not in url:
      return param_dict
    params_pos = url.find(self.PATH_DELIM) + 1
    for token in url[params_pos:].split(self.PARAM_DELIM):
      if not token:
        continue
      # CONSISTENCY FIX: use the class-level delimiter constant here;
      # the original hard-coded '=' while every other check used it.
      if self.NAME_VAL_DELIM not in token:
        token_key = token
        token_value = ''
      else:
        partitioned_param = token.partition(self.NAME_VAL_DELIM)
        token_key = partitioned_param[0]
        token_value = partitioned_param[2]
      if self._url_decode_params:
        token_key = self._url_decode(token_key)
        token_value = self._url_decode(token_value)
      value_list = param_dict.get(token_key, [])
      value_list.append(token_value)
      param_dict[token_key] = value_list
    return param_dict

  def _diff_params(self, left_params, right_params):
    """Returns a list of the difference between dicts on key/values.

    First all keys that exist in both URLs are compared, then keys only in the
    left, followed by keys only in the right.

    Args:
      left_params: dict; param name -> values dict of the left URL.
      right_params: dict; param name -> values dict of the right URL.

    Returns:
      List of ParamDiffEntry of differences between the left and right params.
    """
    diffs = []
    left_key_set = frozenset(left_params.keys())
    right_key_set = frozenset(right_params.keys())
    left_key_diff = left_key_set.difference(right_key_set)
    right_key_diff = right_key_set.difference(left_key_set)
    key_intersection = left_key_set.intersection(right_key_set)
    for common_key in key_intersection:
      left_val_set = set(left_params[common_key])
      right_val_set = set(right_params[common_key])
      left_diff = left_val_set.difference(right_val_set)
      right_diff = right_val_set.difference(left_val_set)
      if left_diff and right_diff:
        diff_type = ParamDiffEntry.BOTH_DIFFER
      elif left_diff:
        diff_type = ParamDiffEntry.LEFT_ONLY
      elif right_diff:
        diff_type = ParamDiffEntry.RIGHT_ONLY
      else:
        # if no diff skip to next iteration
        continue
      diffs.append(
          ParamDiffEntry(
              common_key,
              left_val_set.difference(right_val_set),
              right_val_set.difference(left_val_set),
              diff_type))
    for left_key in left_key_diff:
      diffs.append(ParamDiffEntry(
          left_key, left_params[left_key], None, ParamDiffEntry.LEFT_ONLY))
    for right_key in right_key_diff:
      diffs.append(ParamDiffEntry(
          right_key, None, right_params[right_key], ParamDiffEntry.RIGHT_ONLY))
    return diffs

  def _do_diff(self):
    """Performs all appropriate diffing operations."""
    if self._case_insensitive:
      self._left_url, self._right_url = (self._left_url.lower(),
                                         self._right_url.lower())
    if self._wants_hostname_diff:
      self._left_hostname = self._get_hostname(self._left_url)
      self._right_hostname = self._get_hostname(self._right_url)
      self._diff_hostnames(self._left_hostname, self._right_hostname)
    self._left_params_dict = self._get_params(self._left_url)
    self._right_params_dict = self._get_params(self._right_url)
    if not self._left_params_dict == self._right_params_dict:
      self._diffs.extend(self._diff_params(
          self._left_params_dict, self._right_params_dict))

  def _url_decode(self, token):
    """URL decodes provided string.

    Replaces all instances of %NN with the ascii value of hex(NN).

    Args:
      token: String to be decoded.

    Returns:
      String; decoded string.
    """
    if self.URL_ESCAPE_CHAR not in token:
      return token
    new_token = []
    cur = prev = 0
    cur = token.find(self.URL_ESCAPE_CHAR, prev)
    while cur != -1:
      new_token.append(token[prev:cur])
      # casefold=True accepts lowercase hex digits (e.g. %2f).
      decoded_hex_as_bytes = base64.b16decode(
          token[cur+1:cur+self.URL_ESCAPE_SEQ_LEN], casefold=True)
      new_token.append(decoded_hex_as_bytes.decode())
      prev = cur + self.URL_ESCAPE_SEQ_LEN
      cur = token.find(self.URL_ESCAPE_CHAR, prev)
    new_token.append(token[prev:])
    return ''.join(new_token)

  def left_params(self):
    """Returns a deep copy of the left params dict."""
    return copy.deepcopy(self._left_params_dict)

  def right_params(self):
    """Returns a deep copy of the right params dict."""
    return copy.deepcopy(self._right_params_dict)

  def are_different(self):
    """Returns True if URLs differ, else False."""
    return len(self._diffs) != 0

  @property
  def diff(self):
    return copy.deepcopy(self._diffs)

  @staticmethod
  def net_addr(addr):
    """Get network address prefix and length from a given address."""
    if addr is None:
      return None, None
    nw_addr, nw_len = addr.split('/')
    nw_len = int(nw_len)
    return nw_addr, nw_len
def main():
  """Parses args, inits and prints differ, and exits with appropriate value."""
  # TODO(macpd): provide option for second diff delimiter. This would allow one
  # to diff multivalued param values.
  # TODO(macpd): provide verbosity option
  arg_parser = argparse.ArgumentParser(
      description='show the difference between 2 urls. Inspired by the unix utility diff',
      epilog='Currenty this tool discards everything after # if present. see https://github.com/google/url_diff for more information.')
  arg_parser.add_argument('--hostname', default=False, required=False,
                          help='also diff URL hostname', action='store_true', dest='diff_hostname')
  arg_parser.add_argument('--names', '-n', default=False, required=False,
                          help='only diff URL parameter names.', action='store_true', dest='names_only')
  arg_parser.add_argument('--decode', '-d', default=False, required=False,
                          help='URL decode parameter names and values (if applicable). Decoded params will be used for comparison and printing.',
                          action='store_true', dest='decode_params')
  arg_parser.add_argument('left_url', type=str, help='URL to diff against. Logically handled as the left argument of diff.', metavar='<left URL>')
  # right_url is optional and defaults to the empty string.
  arg_parser.add_argument('right_url', type=str, help='URL to diff against. Logically handled as the right argument of diff.', metavar='<right URL>', nargs='?', default='')
  arg_parser.add_argument('--quiet', '-q', action='store_true', help='suppress output and return non-zero if URLs differ.',
                          default=False, required=False)
  arg_parser.add_argument('--case_insensitive', '-i', action='store_true', help='Perform case insensitive diff. NOTE: this converts all input to lowercase.', default=False, required=False)
  args = arg_parser.parse_args()
  # The differ computes everything during construction.
  differ = UrlDiffer(args.left_url,
                     args.right_url,
                     names_only=args.names_only,
                     hostnames=args.diff_hostname,
                     url_decode_params=args.decode_params,
                     case_insensitive=args.case_insensitive)
  if not args.quiet:
    sys.stdout.write('%s\n' % differ)
  # diff-style exit status: 1 when the URLs differ, 0 when identical.
  sys.exit(1 if differ.are_different() else 0)
# Script entry point.
if __name__ == '__main__':
  main()
| {
"content_hash": "b454a01b89f3e28f54b9e9f0e65329ec",
"timestamp": "",
"source": "github",
"line_count": 337,
"max_line_length": 188,
"avg_line_length": 34.98813056379822,
"alnum_prop": 0.6577050292596048,
"repo_name": "google/url_diff",
"id": "fcb1bf7aecd688bf4d8725c595c8443603bdd356",
"size": "11813",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "url_diff.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "14788"
}
],
"symlink_target": ""
} |
import feedparser, sys, hashlib, csv, re, os
def read_in_recorded_hashes(f):
    """Return the first CSV column (the entry hash) of every row in *f*."""
    f.seek(0)
    return [row[0] for row in csv.reader(f)]
def process_summary_text(summary):
    """Normalize an RSS entry summary for CSV logging.

    Strips HTML tags and collapses any run of whitespace (including
    newlines and tabs) into a single space.
    """
    # Raw strings make the regex escapes explicit and avoid the
    # invalid-escape-sequence deprecation for "\s" on modern Pythons.
    summary = re.sub(r"<[^<]+?>", "", summary)  # strip HTML tags
    summary = re.sub(r"\s+", " ", summary)  # collapse consecutive whitespace
    return summary
# Python 2 script body: fetch a Bitbucket RSS feed and append any
# not-yet-recorded entries for <user_id> to work_log.csv next to this file.
if __name__ == "__main__":
    if len(sys.argv) != 3:
        print "Usage: {} <user_id> <token>".format(sys.argv[0])
        exit()
    user_id, token = sys.argv[1:3]
    feed_items = feedparser.parse("https://bitbucket.org/{}/rss/feed?token={}".format(user_id, token))
    # Keep the log beside this script regardless of the current directory.
    file_dir = os.path.dirname(os.path.realpath(__file__))
    # "a+" so existing hashes can be read before new rows are appended.
    with open(os.path.join(file_dir, "work_log.csv"), "a+") as f:
        hashes = read_in_recorded_hashes(f)
        writer = csv.writer(f)
        # Oldest first, so the log stays chronological.
        for entry in sorted(feed_items.entries, key=lambda k: k["published_parsed"]):
            if (user_id not in entry["title"]):
                continue
            # Hash of timestamp + summary deduplicates already-logged entries.
            hash = hashlib.sha1(entry["published"] + entry["summary"]).hexdigest()
            if hash not in hashes:
                writer.writerow([hash, entry.published, process_summary_text(entry.summary)])
| {
"content_hash": "75699116fb2b0f69fcce629b9763f949",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 99,
"avg_line_length": 33,
"alnum_prop": 0.660427807486631,
"repo_name": "tombusby/Log-Bitbucket-History",
"id": "850209b6f7036efef5c2b913cdfc0042b9fd6c64",
"size": "1144",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "logger_csv.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3213"
}
],
"symlink_target": ""
} |
"""Usage: python test_reproducible.py [init]
Used to test if the EMT potential still produces the same energies, forces
and stresses as it used to.
Call it the first time with the "init" argument to generate a data file.
Subsequent calls calculate the same quantities, but compare them with the
database.
"""
from numpy import *
from asap3 import *
import cPickle, os, sys, time
# PrintVersion(1)
def main():
    """Run the EMT regression check (or record a baseline with 'init')."""
    datafile = "test_reproducible2.dat"
    # NOTE(review): is_data is computed but never used below.
    is_data = os.access(datafile, os.R_OK)
    initialize = 0
    if len(sys.argv) == 2 and sys.argv[1] == "init":
        initialize = 1
        if os.access(datafile, os.R_OK):
            print "ERROR: Data file already exists. Delete it if you want to make a new one."
            print ""
            print __doc__
            sys.exit(1)
    elif len(sys.argv) != 1:
        print "ERROR: Wrong command line."
        print ""
        print __doc__
        sys.exit(1)
    elif not os.access(datafile, os.R_OK):
        print "ERROR: No data file."
        print ""
        print __doc__
        sys.exit(1)
    # Build the copper lattice from the pre-recorded input positions/cell.
    indata = cPickle.load(file("test_reproducible.in"))
    lattice = Atoms(positions=indata['positions'], cell=indata['cell'], symbols=["Cu"]*len(indata['positions']), pbc=True)
    #_v = Verbose(1)
    # Make a lattice
    #lattice = FCCOrtho(((1,-1,0),(1,1,-2),(1,1,1)), (15,8,14), Copper,
    #                   symmetry=(1,1,1), half=1)
    print "Number of atoms", len(lattice)
    print lattice.get_cell()
    # Perturb the positions
    # Deterministic sine-based displacement so results are reproducible.
    r = lattice.get_positions()
    dr = 0.05 * sin(arange(3*len(lattice)))
    dr.shape = (-1,3)
    r += dr
    lattice.set_positions(r)
    atoms1 = Atoms(lattice)
    atoms1.set_calculator(EMT())
    print "Total energy:", atoms1.get_potential_energy()
    print "Stress:", atoms1.get_stress()
    #CNA(atoms1)
    #plot = atoms1.GetPlot()
    #time.sleep(10)
    # Second system: two copper atoms replaced by silver (Z=47).
    z = lattice.get_atomic_numbers()
    z[0] = z[1] = 47
    lattice.set_atomic_numbers(z)
    atoms2 = Atoms(lattice)
    atoms2.set_calculator(EMT())
    if initialize:
        # Record baseline energies/forces/stresses for both systems.
        Cu = {}
        n = len(atoms1)
        Cu["energies"] = atoms1.get_potential_energies()
        Cu["forces"] = atoms1.get_forces()
        Cu["stresses"] = atoms1.GetStresses()
        assert Cu["energies"].shape == (n,)
        assert Cu["forces"].shape == (n, 3)
        assert Cu["stresses"].shape == (n, 6)
        AgCu = {}
        AgCu["energies"] = atoms2.GetPotentialEnergies()
        AgCu["forces"] = atoms2.GetCartesianForces()
        AgCu["stresses"] = atoms2.GetStresses()
        data = {"Cu": Cu, "AgCu": AgCu}
        cPickle.dump(data, open(datafile, "w"))
    else:
        # Compare current results against the recorded baseline.
        data = cPickle.load(open(datafile))
        Cu = data["Cu"]
        AgCu = data["AgCu"]
        print "*** Checking pure copper ***"
        e = 0
        e = e + evaluate("energies", Cu["energies"], atoms1.get_potential_energies())
        e = e + evaluate("forces", Cu["forces"], atoms1.get_forces())
        e = e + evaluate("stresses", Cu["stresses"], atoms1.get_stresses())
        print "*** Checking silver in copper ***"
        e = e + evaluate("energies", AgCu["energies"], atoms2.get_potential_energies())
        e = e + evaluate("forces", AgCu["forces"], atoms2.get_forces())
        e = e + evaluate("stresses", AgCu["stresses"], atoms2.get_stresses())
        # evaluate() returns truthy on pass; all six checks must succeed.
        if e == 6:
            print "*** All tests passed ***"
        else:
            print "*** THERE WERE ERRORS IN SOME TESTS! ***"
            sys.exit(2)
def evaluate(text, expected, actual):
    """Compare two arrays elementwise; print and return whether they match.

    Returns the boolean pass/fail, which the caller sums (True counts as 1).
    """
    # Maximum absolute elementwise deviation across the flattened arrays.
    diff = max(abs(expected.flat[:] - actual.flat[:]))
    passed = diff < 1e-10
    if passed:
        print "Checking %s: max error = %g  OK" % (text, diff)
    else:
        print "Checking %s: max error = %g  FAILED! <<<<<<<<<<" % (text, diff)
        #print "    average over all atoms:", sum(actual) / len(actual)
    return passed
main()
| {
"content_hash": "fed694250d0fc1526b9053747d1dad78",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 122,
"avg_line_length": 33.46551724137931,
"alnum_prop": 0.5718701700154559,
"repo_name": "auag92/n2dm",
"id": "51b80a4421224251836dc7f5e65a1307862b2e0c",
"size": "3882",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Asap-3.8.4/Test/test_reproducible.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "4529"
},
{
"name": "C++",
"bytes": "1472384"
},
{
"name": "CSS",
"bytes": "5059"
},
{
"name": "Jupyter Notebook",
"bytes": "7328"
},
{
"name": "Makefile",
"bytes": "86067"
},
{
"name": "Matlab",
"bytes": "87"
},
{
"name": "Python",
"bytes": "1232765"
},
{
"name": "Shell",
"bytes": "13226"
},
{
"name": "Smarty",
"bytes": "4212"
},
{
"name": "TeX",
"bytes": "5561"
}
],
"symlink_target": ""
} |
"""Implentation of Brocade SVI service Plugin."""
from oslo.config import cfg
from neutron.common import constants as l3_constants
from neutron.openstack.common import excutils
from neutron.openstack.common import log as logging
from neutron.plugins.ml2 import db
from neutron.plugins.ml2.drivers.brocade.db import models as brocade_db
from neutron.plugins.ml2.drivers.brocade.nos import nosdriver as driver
from neutron.services.l3_router import l3_router_plugin as router
# Aliases for the neutron device-owner constants used when filtering ports.
DEVICE_OWNER_ROUTER_INTF = l3_constants.DEVICE_OWNER_ROUTER_INTF
DEVICE_OWNER_ROUTER_GW = l3_constants.DEVICE_OWNER_ROUTER_GW
DEVICE_OWNER_FLOATINGIP = l3_constants.DEVICE_OWNER_FLOATINGIP
# Options for the [ml2_brocade] config section: SSH credentials and the
# rbridge id used to reach the VDX switch.
ML2_BROCADE = [cfg.StrOpt('address', default='',
                          help=_('The address of the host to SSH to')),
               cfg.StrOpt('username',
                          help=_('The SSH username to use')),
               cfg.StrOpt('password', secret=True,
                          help=_('The SSH password to use')),
               cfg.StrOpt('rbridge_id', default=1,
                          help=_('Rbridge id of provider edge router(s)')),
               ]
cfg.CONF.register_opts(ML2_BROCADE, "ml2_brocade")
LOG = logging.getLogger(__name__)
class BrocadeSVIPlugin(router.L3RouterPlugin):
    """Brocade SVI service plugin.

    Extends the standard L3 router plugin so that router/interface
    operations are mirrored onto a Brocade VDX switch via the NOS driver.
    """
    def __init__(self):
        """Initialize Brocade Plugin.

        Reads switch address/credentials from config and creates the driver.
        """
        super(BrocadeSVIPlugin, self).__init__()
        self._switch = None
        self._driver = None
        self.brocade_init()
    def brocade_init(self):
        """Brocade specific initialization: cache switch config, build driver."""
        LOG.debug("brocadeSVIPlugin::brocade_init()")
        self._switch = {'address': cfg.CONF.ml2_brocade.address,
                        'username': cfg.CONF.ml2_brocade.username,
                        'password': cfg.CONF.ml2_brocade.password,
                        'rbridge_id': cfg.CONF.ml2_brocade.rbridge_id
                        }
        self._driver = driver.NOSdriver()
        LOG.info(_("rbridge id %s"), self._switch['rbridge_id'])
    def create_router(self, context, router):
        """Create the router DB record and a matching VRF on the NOS device."""
        LOG.debug("BrocadeSVIPlugin.create_router called: ")
        with context.session.begin(subtransactions=True):
            new_router = super(BrocadeSVIPlugin, self).create_router(context,
                                                                    router)
            # Router on VDX
            try:
                switch = self._switch
                self._driver.create_router(switch['address'],
                                           switch['username'],
                                           switch['password'],
                                           switch['rbridge_id'],
                                           str(new_router['id']))
            except Exception:
                # Roll back the neutron DB record if the device call failed,
                # then re-raise the original exception.
                with excutils.save_and_reraise_exception():
                    with context.session.begin(subtransactions=True):
                        super(BrocadeSVIPlugin, self).delete_router(
                            context,
                            new_router['id'])
        LOG.debug("BrocadeSVIPlugin.create_router: "
                  "router created on VDX switch")
        return new_router
    def delete_router(self, context, router_id):
        """Delete the router and its VRF on the NOS device."""
        router = super(BrocadeSVIPlugin, self).get_router(context, router_id)
        super(BrocadeSVIPlugin, self).delete_router(context, router_id)
        try:
            switch = self._switch
            self._driver.delete_router(switch['address'],
                                       switch['username'],
                                       switch['password'],
                                       switch['rbridge_id'],
                                       str(router['id']))
        except Exception:
            # NOTE(review): save_and_reraise_exception() is a context
            # manager elsewhere in this class; calling it without `with`
            # here appears to leave the exception swallowed when the
            # except block ends -- confirm whether re-raising was intended.
            excutils.save_and_reraise_exception()
    def add_router_interface(self, context, router_id, interface_info):
        """Create an SVI on the NOS device and assign it the gateway IP."""
        LOG.debug("BrocadeSVIPlugin.add_router_interface on VDX: "
                  "router_id=%(router_id)s "
                  "interface_info=%(interface_info)r",
                  {'router_id': router_id, 'interface_info': interface_info})
        with context.session.begin(subtransactions=True):
            info = super(BrocadeSVIPlugin, self).add_router_interface(
                context, router_id, interface_info)
            port = db.get_port(context.session, info["port_id"])
            # shutting down neutron port to allow NOS to do Arp/Routing
            port['admin_state_up'] = False
            port['port'] = port
            self._core_plugin.update_port(context, info["port_id"], port)
            interface_info = info
            subnet = self._core_plugin._get_subnet(context,
                                                   interface_info["subnet_id"])
            cidr = subnet["cidr"]
            net_addr, net_len = self.net_addr(cidr)
            gateway_ip = subnet["gateway_ip"]
            network_id = subnet['network_id']
            bnet = brocade_db.get_network(context, network_id)
            vlan_id = bnet['vlan']
            # The SVI carries the subnet gateway address, e.g. 10.0.0.1/24.
            gateway_ip_cidr = gateway_ip + '/' + str(net_len)
            LOG.debug("Allocated cidr %(cidr)s from the pool, "
                      "network_id %(net_id)s "
                      "bnet %(bnet)s "
                      "vlan %(vlan_id)d " % ({'cidr': gateway_ip_cidr,
                                              'net_id': network_id,
                                              'bnet': bnet,
                                              'vlan_id': int(vlan_id)}))
            port_filters = {'network_id': [network_id],
                            'device_owner': [DEVICE_OWNER_ROUTER_INTF]}
            port_count = self._core_plugin.get_ports_count(context,
                                                           port_filters)
            LOG.info(_("BrocadeSVIPlugin.add_router_interface ports_count %d"),
                     port_count)
            # port count is checked against 2 since the current port is already
            # added to db
            if port_count == 2:
                # This subnet is already part of some router
                # (this is not supported in this version of brocade svi plugin)
                LOG.error(_("BrocadeSVIPlugin: adding redundant router "
                            "interface is not supported"))
                raise Exception(_("BrocadeSVIPlugin:adding redundant router "
                                  "interface is not supported"))
            try:
                switch = self._switch
                self._driver.create_svi(switch['address'],
                                        switch['username'],
                                        switch['password'],
                                        switch['rbridge_id'],
                                        vlan_id,
                                        gateway_ip_cidr,
                                        str(router_id))
            except Exception:
                LOG.error(_("Failed to create Brocade resources to add router "
                            "interface. info=%(info)s, router_id=%(router_id)s"),
                          {"info": info, "router_id": router_id})
                # Undo the neutron-side interface before re-raising.
                with excutils.save_and_reraise_exception():
                    with context.session.begin(subtransactions=True):
                        self.remove_router_interface(context, router_id,
                                                     interface_info)
        return info
    def remove_router_interface(self, context, router_id, interface_info):
        """Delete the SVI from the NOS device and the neutron interface."""
        LOG.debug("BrocadeSVIPlugin.remove_router_interface called: "
                  "router_id=%(router_id)s "
                  "interface_info=%(interface_info)r",
                  {'router_id': router_id, 'interface_info': interface_info})
        with context.session.begin(subtransactions=True):
            info = super(BrocadeSVIPlugin, self).remove_router_interface(
                context, router_id, interface_info)
            try:
                subnet = self._core_plugin._get_subnet(context,
                                                       info['subnet_id'])
                cidr = subnet['cidr']
                net_addr, net_len = self.net_addr(cidr)
                gateway_ip = subnet['gateway_ip']
                network_id = subnet['network_id']
                bnet = brocade_db.get_network(context, network_id)
                vlan_id = bnet['vlan']
                gateway_ip_cidr = gateway_ip + '/' + str(net_len)
                LOG.debug("remove_router_interface removed cidr %(cidr)s"
                          " from the pool,"
                          " network_id %(net_id)s bnet %(bnet)s"
                          " vlan %(vlan_id)d" %
                          ({'cidr': gateway_ip_cidr,
                            'net_id': network_id,
                            'bnet': bnet, 'vlan_id': int(vlan_id)}))
                switch = self._switch
                self._driver.delete_svi(switch['address'],
                                        switch['username'],
                                        switch['password'],
                                        switch['rbridge_id'],
                                        vlan_id,
                                        gateway_ip_cidr,
                                        str(router_id))
            except Exception:
                # Log and re-raise so the failure is not silently dropped.
                with excutils.save_and_reraise_exception():
                    LOG.error(_("Fail remove of interface from brocade router "
                                "interface. info=%(info)s, "
                                "router_id=%(router_id)s") %
                              ({"info": info, "router_id": router_id}))
        return True
    @staticmethod
    def net_addr(addr):
        """Get network address prefix and length from a given CIDR string."""
        if addr is None:
            return None, None
        nw_addr, nw_len = addr.split('/')
        nw_len = int(nw_len)
        return nw_addr, nw_len
| {
"content_hash": "22f89e5c1f0301a820c917f3793784d1",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 79,
"avg_line_length": 46.71689497716895,
"alnum_prop": 0.4949662789561138,
"repo_name": "nash-x/hws",
"id": "122d7734c7049c574419742c463d88db7d9f8e44",
"size": "10885",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "neutron/services/l3_router/brocade/l3_router_plugin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1043"
},
{
"name": "PLpgSQL",
"bytes": "12782"
},
{
"name": "Python",
"bytes": "20443623"
},
{
"name": "Shell",
"bytes": "4643"
}
],
"symlink_target": ""
} |
import numpy as np
from scipy import sparse as sp
def _get_sparse_save_kwargs(A):
return {
"data": A.data,
"indices": A.indices,
"indptr": A.indptr,
"shape": A.shape
}
def save_csr_matrix(filepath, A):
np.savez(filepath, **_get_sparse_save_kwargs(A))
def save_csc_matrix(filepath, A):
np.savez(filepath, **_get_sparse_save_kwargs(A))
def _get_sparse_load_args(obj):
    """Turn a loaded .npz mapping back into sparse-constructor arguments."""
    components = (obj["data"], obj["indices"], obj["indptr"])
    return [components, obj["shape"]]


def load_csr_matrix(filepath):
    """Reconstruct a CSR matrix written by save_csr_matrix."""
    return sp.csr_matrix(*_get_sparse_load_args(np.load(filepath)))


def load_csc_matrix(filepath):
    """Reconstruct a CSC matrix written by save_csc_matrix."""
    return sp.csc_matrix(*_get_sparse_load_args(np.load(filepath)))
class _SparseMatrixBuilder(object):
    """Accumulates data/indices/indptr lists in compressed sparse form."""
    def __init__(self):
        self.data = []
        self.indices = []
        self.indptr = [0]

    def _add_elements(self, data, indices):
        """Append one compressed row/column worth of values and indices."""
        self.data.extend(data)
        self.indices.extend(indices)
        # indptr records the cumulative element count after each row/column.
        self.indptr.append(len(self.data))


class CSRMatrixBuilder(_SparseMatrixBuilder):
    """Builds a scipy CSR matrix one row at a time."""
    def add_row(self, data, indices):
        """Append the next row's values and their column indices."""
        self._add_elements(data, indices)

    def get_matrix(self, shape=None, dtype=np.float32):
        """Assemble the csr_matrix; shape is inferred when omitted."""
        if shape is None:
            shape = (len(self.indptr) - 1, max(self.indices) + 1)
        values = np.array(self.data, dtype=dtype)
        return sp.csr_matrix((values, self.indices, self.indptr), shape)


class CSCMatrixBuilder(_SparseMatrixBuilder):
    """Builds a scipy CSC matrix one column at a time."""
    def add_column(self, data, indices):
        """Append the next column's values and their row indices."""
        self._add_elements(data, indices)

    def get_matrix(self, shape=None, dtype=np.float32):
        """Assemble the csc_matrix; shape is inferred when omitted."""
        if shape is None:
            shape = (max(self.indices) + 1, len(self.indptr) - 1)
        values = np.array(self.data, dtype=dtype)
        return sp.csc_matrix((values, self.indices, self.indptr), shape)
| {
"content_hash": "a096c0be5a5445ef71dc9e24c7cfcf5d",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 70,
"avg_line_length": 29.0327868852459,
"alnum_prop": 0.6606437041219649,
"repo_name": "tbjohns/python-util",
"id": "5845c0856f022d2559ff4c5b156a513cc14fd95e",
"size": "1771",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/matrix.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2272"
}
],
"symlink_target": ""
} |
import operator
from datetime import datetime
import pytest
from numpy import nan
import numpy as np
import pandas as pd
from pandas import (Series, DataFrame, bdate_range,
isna, compat, _np_version_under1p12)
from pandas.tseries.offsets import BDay
import pandas.util.testing as tm
import pandas.util._test_decorators as td
from pandas.compat import range, PY36
from pandas.core.reshape.util import cartesian_product
import pandas.core.sparse.frame as spf
from pandas._libs.sparse import BlockIndex, IntIndex
from pandas.core.sparse.api import SparseSeries
from pandas.tests.series.test_api import SharedWithSparse
def _test_data1():
# nan-based
arr = np.arange(20, dtype=float)
index = np.arange(20)
arr[:2] = nan
arr[5:10] = nan
arr[-3:] = nan
return arr, index
def _test_data2():
# nan-based
arr = np.arange(15, dtype=float)
index = np.arange(15)
arr[7:12] = nan
arr[-1:] = nan
return arr, index
def _test_data1_zero():
# zero-based
arr, index = _test_data1()
arr[np.isnan(arr)] = 0
return arr, index
def _test_data2_zero():
# zero-based
arr, index = _test_data2()
arr[np.isnan(arr)] = 0
return arr, index
class TestSparseSeries(SharedWithSparse):
    """Tests for SparseSeries: construction, indexing, ops and conversions.

    Reuses the generic Series API tests from ``SharedWithSparse`` by
    pointing ``series_klass`` at ``SparseSeries``; fixtures cover both
    NaN-based and zero-based (``fill_value=0``) sparse data in 'block'
    and 'integer' sparse-index kinds.
    """
    series_klass = SparseSeries
    # SharedWithSparse tests use generic, series_klass-agnostic assertion
    _assert_series_equal = staticmethod(tm.assert_sp_series_equal)
    def setup_method(self, method):
        # NaN-based fixtures (block and integer sparse kinds).
        arr, index = _test_data1()
        date_index = bdate_range('1/1/2011', periods=len(index))
        self.bseries = SparseSeries(arr, index=index, kind='block',
                                    name='bseries')
        self.ts = self.bseries
        self.btseries = SparseSeries(arr, index=date_index, kind='block')
        self.iseries = SparseSeries(arr, index=index, kind='integer',
                                    name='iseries')
        arr, index = _test_data2()
        self.bseries2 = SparseSeries(arr, index=index, kind='block')
        self.iseries2 = SparseSeries(arr, index=index, kind='integer')
        # Zero-based fixtures: same data but sparsified on 0, not NaN.
        arr, index = _test_data1_zero()
        self.zbseries = SparseSeries(arr, index=index, kind='block',
                                     fill_value=0, name='zbseries')
        self.ziseries = SparseSeries(arr, index=index, kind='integer',
                                     fill_value=0)
        arr, index = _test_data2_zero()
        self.zbseries2 = SparseSeries(arr, index=index, kind='block',
                                      fill_value=0)
        self.ziseries2 = SparseSeries(arr, index=index, kind='integer',
                                      fill_value=0)
    def test_constructor_dict_input(self):
        # gh-16905
        constructor_dict = {1: 1.}
        index = [0, 1, 2]
        # Series with index passed in
        series = pd.Series(constructor_dict)
        expected = SparseSeries(series, index=index)
        result = SparseSeries(constructor_dict, index=index)
        tm.assert_sp_series_equal(result, expected)
        # Series with index and dictionary with no index
        expected = SparseSeries(series)
        result = SparseSeries(constructor_dict)
        tm.assert_sp_series_equal(result, expected)
    def test_constructor_dict_order(self):
        # GH19018
        # initialization ordering: by insertion order if python>= 3.6, else
        # order by value
        d = {'b': 1, 'a': 0, 'c': 2}
        result = SparseSeries(d)
        if PY36:
            expected = SparseSeries([1, 0, 2], index=list('bac'))
        else:
            expected = SparseSeries([0, 1, 2], index=list('abc'))
        tm.assert_sp_series_equal(result, expected)
    def test_constructor_dtype(self):
        """dtype/fill_value interplay: float defaults to NaN fill, int to 0."""
        arr = SparseSeries([np.nan, 1, 2, np.nan])
        assert arr.dtype == np.float64
        assert np.isnan(arr.fill_value)
        arr = SparseSeries([np.nan, 1, 2, np.nan], fill_value=0)
        assert arr.dtype == np.float64
        assert arr.fill_value == 0
        arr = SparseSeries([0, 1, 2, 4], dtype=np.int64, fill_value=np.nan)
        assert arr.dtype == np.int64
        assert np.isnan(arr.fill_value)
        arr = SparseSeries([0, 1, 2, 4], dtype=np.int64)
        assert arr.dtype == np.int64
        assert arr.fill_value == 0
        arr = SparseSeries([0, 1, 2, 4], fill_value=0, dtype=np.int64)
        assert arr.dtype == np.int64
        assert arr.fill_value == 0
    def test_iteration_and_str(self):
        # smoke tests: iterating and repr must not raise
        [x for x in self.bseries]
        str(self.bseries)
    def test_construct_DataFrame_with_sp_series(self):
        # it works!
        df = DataFrame({'col': self.bseries})
        # printing & access
        df.iloc[:1]
        df['col']
        df.dtypes
        str(df)
        tm.assert_sp_series_equal(df['col'], self.bseries, check_names=False)
        result = df.iloc[:, 0]
        tm.assert_sp_series_equal(result, self.bseries, check_names=False)
        # blocking
        expected = Series({'col': 'float64:sparse'})
        result = df.ftypes
        tm.assert_series_equal(expected, result)
    def test_constructor_preserve_attr(self):
        # dtype and fill_value of a SparseArray survive wrapping in a series
        arr = pd.SparseArray([1, 0, 3, 0], dtype=np.int64, fill_value=0)
        assert arr.dtype == np.int64
        assert arr.fill_value == 0
        s = pd.SparseSeries(arr, name='x')
        assert s.dtype == np.int64
        assert s.fill_value == 0
    def test_series_density(self):
        # GH2803
        ts = Series(np.random.randn(10))
        ts[2:-2] = nan
        sts = ts.to_sparse()
        density = sts.density  # don't die
        assert density == 4 / 10.0
    def test_sparse_to_dense(self):
        arr, index = _test_data1()
        series = self.bseries.to_dense()
        tm.assert_series_equal(series, Series(arr, name='bseries'))
        # see gh-14647
        with tm.assert_produces_warning(FutureWarning,
                                        check_stacklevel=False):
            series = self.bseries.to_dense(sparse_only=True)
        indexer = np.isfinite(arr)
        exp = Series(arr[indexer], index=index[indexer], name='bseries')
        tm.assert_series_equal(series, exp)
        series = self.iseries.to_dense()
        tm.assert_series_equal(series, Series(arr, name='iseries'))
        arr, index = _test_data1_zero()
        series = self.zbseries.to_dense()
        tm.assert_series_equal(series, Series(arr, name='zbseries'))
        series = self.ziseries.to_dense()
        tm.assert_series_equal(series, Series(arr))
    def test_to_dense_fill_value(self):
        s = pd.Series([1, np.nan, np.nan, 3, np.nan])
        res = SparseSeries(s).to_dense()
        tm.assert_series_equal(res, s)
        res = SparseSeries(s, fill_value=0).to_dense()
        tm.assert_series_equal(res, s)
        s = pd.Series([1, np.nan, 0, 3, 0])
        res = SparseSeries(s, fill_value=0).to_dense()
        tm.assert_series_equal(res, s)
        res = SparseSeries(s, fill_value=0).to_dense()
        tm.assert_series_equal(res, s)
        s = pd.Series([np.nan, np.nan, np.nan, np.nan, np.nan])
        res = SparseSeries(s).to_dense()
        tm.assert_series_equal(res, s)
        s = pd.Series([np.nan, np.nan, np.nan, np.nan, np.nan])
        res = SparseSeries(s, fill_value=0).to_dense()
        tm.assert_series_equal(res, s)
    def test_dense_to_sparse(self):
        series = self.bseries.to_dense()
        bseries = series.to_sparse(kind='block')
        iseries = series.to_sparse(kind='integer')
        tm.assert_sp_series_equal(bseries, self.bseries)
        tm.assert_sp_series_equal(iseries, self.iseries, check_names=False)
        assert iseries.name == self.bseries.name
        assert len(series) == len(bseries)
        assert len(series) == len(iseries)
        assert series.shape == bseries.shape
        assert series.shape == iseries.shape
        # non-NaN fill value
        series = self.zbseries.to_dense()
        zbseries = series.to_sparse(kind='block', fill_value=0)
        ziseries = series.to_sparse(kind='integer', fill_value=0)
        tm.assert_sp_series_equal(zbseries, self.zbseries)
        tm.assert_sp_series_equal(ziseries, self.ziseries, check_names=False)
        assert ziseries.name == self.zbseries.name
        assert len(series) == len(zbseries)
        assert len(series) == len(ziseries)
        assert series.shape == zbseries.shape
        assert series.shape == ziseries.shape
    def test_to_dense_preserve_name(self):
        assert (self.bseries.name is not None)
        result = self.bseries.to_dense()
        assert result.name == self.bseries.name
    def test_constructor(self):
        # test setup guys
        assert np.isnan(self.bseries.fill_value)
        assert isinstance(self.bseries.sp_index, BlockIndex)
        assert np.isnan(self.iseries.fill_value)
        assert isinstance(self.iseries.sp_index, IntIndex)
        assert self.zbseries.fill_value == 0
        tm.assert_numpy_array_equal(self.zbseries.values.values,
                                    self.bseries.to_dense().fillna(0).values)
        # pass SparseSeries
        def _check_const(sparse, name):
            # use passed series name
            result = SparseSeries(sparse)
            tm.assert_sp_series_equal(result, sparse)
            assert sparse.name == name
            assert result.name == name
            # use passed name
            result = SparseSeries(sparse, name='x')
            tm.assert_sp_series_equal(result, sparse, check_names=False)
            assert result.name == 'x'
        _check_const(self.bseries, 'bseries')
        _check_const(self.iseries, 'iseries')
        _check_const(self.zbseries, 'zbseries')
        # Sparse time series works
        date_index = bdate_range('1/1/2000', periods=len(self.bseries))
        s5 = SparseSeries(self.bseries, index=date_index)
        assert isinstance(s5, SparseSeries)
        # pass Series
        bseries2 = SparseSeries(self.bseries.to_dense())
        tm.assert_numpy_array_equal(self.bseries.sp_values, bseries2.sp_values)
        # pass dict?
        # don't copy the data by default
        values = np.ones(self.bseries.npoints)
        sp = SparseSeries(values, sparse_index=self.bseries.sp_index)
        sp.sp_values[:5] = 97
        assert values[0] == 97
        assert len(sp) == 20
        assert sp.shape == (20, )
        # but can make it copy!
        sp = SparseSeries(values, sparse_index=self.bseries.sp_index,
                          copy=True)
        sp.sp_values[:5] = 100
        assert values[0] == 97
        assert len(sp) == 20
        assert sp.shape == (20, )
    def test_constructor_scalar(self):
        # a scalar broadcasts across the whole passed index
        data = 5
        sp = SparseSeries(data, np.arange(100))
        sp = sp.reindex(np.arange(200))
        assert (sp.loc[:99] == data).all()
        assert isna(sp.loc[100:]).all()
        data = np.nan
        sp = SparseSeries(data, np.arange(100))
        assert len(sp) == 100
        assert sp.shape == (100, )
    def test_constructor_ndarray(self):
        pass
    def test_constructor_nonnan(self):
        arr = [0, 0, 0, nan, nan]
        sp_series = SparseSeries(arr, fill_value=0)
        tm.assert_numpy_array_equal(sp_series.values.values, np.array(arr))
        assert len(sp_series) == 5
        assert sp_series.shape == (5, )
    def test_constructor_empty(self):
        # see gh-9272
        sp = SparseSeries()
        assert len(sp.index) == 0
        assert sp.shape == (0, )
    def test_copy_astype(self):
        cop = self.bseries.astype(np.float64)
        assert cop is not self.bseries
        assert cop.sp_index is self.bseries.sp_index
        assert cop.dtype == np.float64
        cop2 = self.iseries.copy()
        tm.assert_sp_series_equal(cop, self.bseries)
        tm.assert_sp_series_equal(cop2, self.iseries)
        # test that data is copied
        cop[:5] = 97
        assert cop.sp_values[0] == 97
        assert self.bseries.sp_values[0] != 97
        # correct fill value
        zbcop = self.zbseries.copy()
        zicop = self.ziseries.copy()
        tm.assert_sp_series_equal(zbcop, self.zbseries)
        tm.assert_sp_series_equal(zicop, self.ziseries)
        # no deep copy
        view = self.bseries.copy(deep=False)
        view.sp_values[:5] = 5
        assert (self.bseries.sp_values[:5] == 5).all()
    def test_shape(self):
        # see gh-10452
        assert self.bseries.shape == (20, )
        assert self.btseries.shape == (20, )
        assert self.iseries.shape == (20, )
        assert self.bseries2.shape == (15, )
        assert self.iseries2.shape == (15, )
        assert self.zbseries2.shape == (15, )
        assert self.ziseries2.shape == (15, )
    def test_astype(self):
        # casting NaN-filled data to int must raise
        with pytest.raises(ValueError):
            self.bseries.astype(np.int64)
    def test_astype_all(self):
        orig = pd.Series(np.array([1, 2, 3]))
        s = SparseSeries(orig)
        types = [np.float64, np.float32, np.int64,
                 np.int32, np.int16, np.int8]
        for typ in types:
            res = s.astype(typ)
            assert res.dtype == typ
            tm.assert_series_equal(res.to_dense(), orig.astype(typ))
    def test_kind(self):
        assert self.bseries.kind == 'block'
        assert self.iseries.kind == 'integer'
    def test_to_frame(self):
        # GH 9850
        s = pd.SparseSeries([1, 2, 0, nan, 4, nan, 0], name='x')
        exp = pd.SparseDataFrame({'x': [1, 2, 0, nan, 4, nan, 0]})
        tm.assert_sp_frame_equal(s.to_frame(), exp)
        exp = pd.SparseDataFrame({'y': [1, 2, 0, nan, 4, nan, 0]})
        tm.assert_sp_frame_equal(s.to_frame(name='y'), exp)
        s = pd.SparseSeries([1, 2, 0, nan, 4, nan, 0], name='x', fill_value=0)
        exp = pd.SparseDataFrame({'x': [1, 2, 0, nan, 4, nan, 0]},
                                 default_fill_value=0)
        tm.assert_sp_frame_equal(s.to_frame(), exp)
        exp = pd.DataFrame({'y': [1, 2, 0, nan, 4, nan, 0]})
        tm.assert_frame_equal(s.to_frame(name='y').to_dense(), exp)
    def test_pickle(self):
        def _test_roundtrip(series):
            unpickled = tm.round_trip_pickle(series)
            tm.assert_sp_series_equal(series, unpickled)
            tm.assert_series_equal(series.to_dense(), unpickled.to_dense())
        self._check_all(_test_roundtrip)
    def _check_all(self, check_func):
        # apply check_func to each of the four main fixtures
        check_func(self.bseries)
        check_func(self.iseries)
        check_func(self.zbseries)
        check_func(self.ziseries)
    def test_getitem(self):
        def _check_getitem(sp, dense):
            for idx, val in compat.iteritems(dense):
                tm.assert_almost_equal(val, sp[idx])
            for i in range(len(dense)):
                tm.assert_almost_equal(sp[i], dense[i])
                # j = np.float64(i)
                # assert_almost_equal(sp[j], dense[j])
                # API change 1/6/2012
                # negative getitem works
                # for i in xrange(len(dense)):
                #    assert_almost_equal(sp[-i], dense[-i])
        _check_getitem(self.bseries, self.bseries.to_dense())
        _check_getitem(self.btseries, self.btseries.to_dense())
        _check_getitem(self.zbseries, self.zbseries.to_dense())
        _check_getitem(self.iseries, self.iseries.to_dense())
        _check_getitem(self.ziseries, self.ziseries.to_dense())
        # exception handling
        pytest.raises(Exception, self.bseries.__getitem__,
                      len(self.bseries) + 1)
        # index not contained
        pytest.raises(Exception, self.btseries.__getitem__,
                      self.btseries.index[-1] + BDay())
    def test_get_get_value(self):
        tm.assert_almost_equal(self.bseries.get(10), self.bseries[10])
        assert self.bseries.get(len(self.bseries) + 1) is None
        dt = self.btseries.index[10]
        result = self.btseries.get(dt)
        expected = self.btseries.to_dense()[dt]
        tm.assert_almost_equal(result, expected)
        with tm.assert_produces_warning(FutureWarning,
                                        check_stacklevel=False):
            tm.assert_almost_equal(
                self.bseries.get_value(10), self.bseries[10])
    def test_set_value(self):
        idx = self.btseries.index[7]
        with tm.assert_produces_warning(FutureWarning,
                                        check_stacklevel=False):
            self.btseries.set_value(idx, 0)
        assert self.btseries[idx] == 0
        with tm.assert_produces_warning(FutureWarning,
                                        check_stacklevel=False):
            self.iseries.set_value('foobar', 0)
        assert self.iseries.index[-1] == 'foobar'
        assert self.iseries['foobar'] == 0
    def test_getitem_slice(self):
        idx = self.bseries.index
        res = self.bseries[::2]
        assert isinstance(res, SparseSeries)
        expected = self.bseries.reindex(idx[::2])
        tm.assert_sp_series_equal(res, expected)
        res = self.bseries[:5]
        assert isinstance(res, SparseSeries)
        tm.assert_sp_series_equal(res, self.bseries.reindex(idx[:5]))
        res = self.bseries[5:]
        tm.assert_sp_series_equal(res, self.bseries.reindex(idx[5:]))
        # negative indices
        res = self.bseries[:-3]
        tm.assert_sp_series_equal(res, self.bseries.reindex(idx[:-3]))
    def test_take(self):
        def _compare_with_dense(sp):
            dense = sp.to_dense()
            def _compare(idx):
                dense_result = dense.take(idx).values
                sparse_result = sp.take(idx)
                assert isinstance(sparse_result, SparseSeries)
                tm.assert_almost_equal(dense_result,
                                       sparse_result.values.values)
            _compare([1., 2., 3., 4., 5., 0.])
            _compare([7, 2, 9, 0, 4])
            _compare([3, 6, 3, 4, 7])
        self._check_all(_compare_with_dense)
        pytest.raises(Exception, self.bseries.take,
                      [0, len(self.bseries) + 1])
        # Corner case
        sp = SparseSeries(np.ones(10) * nan)
        exp = pd.Series(np.repeat(nan, 5))
        tm.assert_series_equal(sp.take([0, 1, 2, 3, 4]), exp)
        with tm.assert_produces_warning(FutureWarning):
            sp.take([1, 5], convert=True)
        with tm.assert_produces_warning(FutureWarning):
            sp.take([1, 5], convert=False)
    def test_numpy_take(self):
        sp = SparseSeries([1.0, 2.0, 3.0])
        indices = [1, 2]
        if not _np_version_under1p12:
            tm.assert_series_equal(np.take(sp, indices, axis=0).to_dense(),
                                   np.take(sp.to_dense(), indices, axis=0))
            msg = "the 'out' parameter is not supported"
            tm.assert_raises_regex(ValueError, msg, np.take,
                                   sp, indices, out=np.empty(sp.shape))
            msg = "the 'mode' parameter is not supported"
            tm.assert_raises_regex(ValueError, msg, np.take,
                                   sp, indices, out=None, mode='clip')
    def test_setitem(self):
        self.bseries[5] = 7.
        assert self.bseries[5] == 7.
    def test_setslice(self):
        self.bseries[5:10] = 7.
        tm.assert_series_equal(self.bseries[5:10].to_dense(),
                               Series(7., index=range(5, 10),
                                      name=self.bseries.name))
    def test_operators(self):
        # arithmetic on sparse must match the same op on dense
        def _check_op(a, b, op):
            sp_result = op(a, b)
            adense = a.to_dense() if isinstance(a, SparseSeries) else a
            bdense = b.to_dense() if isinstance(b, SparseSeries) else b
            dense_result = op(adense, bdense)
            tm.assert_almost_equal(sp_result.to_dense(), dense_result)
        def check(a, b):
            _check_op(a, b, operator.add)
            _check_op(a, b, operator.sub)
            _check_op(a, b, operator.truediv)
            _check_op(a, b, operator.floordiv)
            _check_op(a, b, operator.mul)
            _check_op(a, b, lambda x, y: operator.add(y, x))
            _check_op(a, b, lambda x, y: operator.sub(y, x))
            _check_op(a, b, lambda x, y: operator.truediv(y, x))
            _check_op(a, b, lambda x, y: operator.floordiv(y, x))
            _check_op(a, b, lambda x, y: operator.mul(y, x))
            # NaN ** 0 = 1 in C?
            # _check_op(a, b, operator.pow)
            # _check_op(a, b, lambda x, y: operator.pow(y, x))
        check(self.bseries, self.bseries)
        check(self.iseries, self.iseries)
        check(self.bseries, self.iseries)
        check(self.bseries, self.bseries2)
        check(self.bseries, self.iseries2)
        check(self.iseries, self.iseries2)
        # scalar value
        check(self.bseries, 5)
        # zero-based
        check(self.zbseries, self.zbseries * 2)
        check(self.zbseries, self.zbseries2)
        check(self.ziseries, self.ziseries2)
        # with dense
        result = self.bseries + self.bseries.to_dense()
        tm.assert_sp_series_equal(result, self.bseries + self.bseries)
    def test_binary_operators(self):
        # skipping for now #####
        import pytest
        pytest.skip("skipping sparse binary operators test")
        def _check_inplace_op(iop, op):
            tmp = self.bseries.copy()
            expected = op(tmp, self.bseries)
            iop(tmp, self.bseries)
            tm.assert_sp_series_equal(tmp, expected)
        inplace_ops = ['add', 'sub', 'mul', 'truediv', 'floordiv', 'pow']
        for op in inplace_ops:
            _check_inplace_op(getattr(operator, "i%s" % op),
                              getattr(operator, op))
    def test_abs(self):
        s = SparseSeries([1, 2, -3], name='x')
        expected = SparseSeries([1, 2, 3], name='x')
        result = s.abs()
        tm.assert_sp_series_equal(result, expected)
        assert result.name == 'x'
        result = abs(s)
        tm.assert_sp_series_equal(result, expected)
        assert result.name == 'x'
        result = np.abs(s)
        tm.assert_sp_series_equal(result, expected)
        assert result.name == 'x'
        # abs() must also be applied to the fill_value itself
        s = SparseSeries([1, -2, 2, -3], fill_value=-2, name='x')
        expected = SparseSeries([1, 2, 3], sparse_index=s.sp_index,
                                fill_value=2, name='x')
        result = s.abs()
        tm.assert_sp_series_equal(result, expected)
        assert result.name == 'x'
        result = abs(s)
        tm.assert_sp_series_equal(result, expected)
        assert result.name == 'x'
        result = np.abs(s)
        tm.assert_sp_series_equal(result, expected)
        assert result.name == 'x'
    def test_reindex(self):
        def _compare_with_series(sps, new_index):
            spsre = sps.reindex(new_index)
            series = sps.to_dense()
            seriesre = series.reindex(new_index)
            seriesre = seriesre.to_sparse(fill_value=sps.fill_value)
            tm.assert_sp_series_equal(spsre, seriesre)
            tm.assert_series_equal(spsre.to_dense(), seriesre.to_dense())
        _compare_with_series(self.bseries, self.bseries.index[::2])
        _compare_with_series(self.bseries, list(self.bseries.index[::2]))
        _compare_with_series(self.bseries, self.bseries.index[:10])
        _compare_with_series(self.bseries, self.bseries.index[5:])
        _compare_with_series(self.zbseries, self.zbseries.index[::2])
        _compare_with_series(self.zbseries, self.zbseries.index[:10])
        _compare_with_series(self.zbseries, self.zbseries.index[5:])
        # special cases
        same_index = self.bseries.reindex(self.bseries.index)
        tm.assert_sp_series_equal(self.bseries, same_index)
        assert same_index is not self.bseries
        # corner cases
        sp = SparseSeries([], index=[])
        # TODO: sp_zero is not used anywhere...remove?
        sp_zero = SparseSeries([], index=[], fill_value=0)  # noqa
        _compare_with_series(sp, np.arange(10))
        # with copy=False
        reindexed = self.bseries.reindex(self.bseries.index, copy=True)
        reindexed.sp_values[:] = 1.
        assert (self.bseries.sp_values != 1.).all()
        reindexed = self.bseries.reindex(self.bseries.index, copy=False)
        reindexed.sp_values[:] = 1.
        tm.assert_numpy_array_equal(self.bseries.sp_values, np.repeat(1., 10))
    def test_sparse_reindex(self):
        length = 10
        def _check(values, index1, index2, fill_value):
            first_series = SparseSeries(values, sparse_index=index1,
                                        fill_value=fill_value)
            reindexed = first_series.sparse_reindex(index2)
            assert reindexed.sp_index is index2
            int_indices1 = index1.to_int_index().indices
            int_indices2 = index2.to_int_index().indices
            expected = Series(values, index=int_indices1)
            expected = expected.reindex(int_indices2).fillna(fill_value)
            tm.assert_almost_equal(expected.values, reindexed.sp_values)
            # make sure level argument asserts
            # TODO: expected is not used anywhere...remove?
            expected = expected.reindex(int_indices2).fillna(fill_value)  # noqa
        def _check_with_fill_value(values, first, second, fill_value=nan):
            i_index1 = IntIndex(length, first)
            i_index2 = IntIndex(length, second)
            b_index1 = i_index1.to_block_index()
            b_index2 = i_index2.to_block_index()
            _check(values, i_index1, i_index2, fill_value)
            _check(values, b_index1, b_index2, fill_value)
        def _check_all(values, first, second):
            _check_with_fill_value(values, first, second, fill_value=nan)
            _check_with_fill_value(values, first, second, fill_value=0)
        index1 = [2, 4, 5, 6, 8, 9]
        values1 = np.arange(6.)
        _check_all(values1, index1, [2, 4, 5])
        _check_all(values1, index1, [2, 3, 4, 5, 6, 7, 8, 9])
        _check_all(values1, index1, [0, 1])
        _check_all(values1, index1, [0, 1, 7, 8, 9])
        _check_all(values1, index1, [])
        first_series = SparseSeries(values1,
                                    sparse_index=IntIndex(length, index1),
                                    fill_value=nan)
        with tm.assert_raises_regex(TypeError,
                                    'new index must be a SparseIndex'):
            reindexed = first_series.sparse_reindex(0)  # noqa
    def test_repr(self):
        # TODO: These aren't used
        bsrepr = repr(self.bseries)  # noqa
        isrepr = repr(self.iseries)  # noqa
    def test_iter(self):
        pass
    def test_truncate(self):
        pass
    def test_fillna(self):
        pass
    def test_groupby(self):
        pass
    def test_reductions(self):
        # reductions on sparse must agree with the same reduction on dense
        def _compare_with_dense(obj, op):
            sparse_result = getattr(obj, op)()
            series = obj.to_dense()
            dense_result = getattr(series, op)()
            assert sparse_result == dense_result
        to_compare = ['count', 'sum', 'mean', 'std', 'var', 'skew']
        def _compare_all(obj):
            for op in to_compare:
                _compare_with_dense(obj, op)
        _compare_all(self.bseries)
        self.bseries.sp_values[5:10] = np.NaN
        _compare_all(self.bseries)
        _compare_all(self.zbseries)
        self.zbseries.sp_values[5:10] = np.NaN
        _compare_all(self.zbseries)
        series = self.zbseries.copy()
        series.fill_value = 2
        _compare_all(series)
        nonna = Series(np.random.randn(20)).to_sparse()
        _compare_all(nonna)
        nonna2 = Series(np.random.randn(20)).to_sparse(fill_value=0)
        _compare_all(nonna2)
    def test_dropna(self):
        sp = SparseSeries([0, 0, 0, nan, nan, 5, 6], fill_value=0)
        sp_valid = sp.dropna()
        expected = sp.to_dense().dropna()
        expected = expected[expected != 0]
        exp_arr = pd.SparseArray(expected.values, fill_value=0, kind='block')
        tm.assert_sp_array_equal(sp_valid.values, exp_arr)
        tm.assert_index_equal(sp_valid.index, expected.index)
        assert len(sp_valid.sp_values) == 2
        result = self.bseries.dropna()
        expected = self.bseries.to_dense().dropna()
        assert not isinstance(result, SparseSeries)
        tm.assert_series_equal(result, expected)
    def test_homogenize(self):
        def _check_matches(indices, expected):
            data = {}
            for i, idx in enumerate(indices):
                data[i] = SparseSeries(idx.to_int_index().indices,
                                       sparse_index=idx, fill_value=np.nan)
            # homogenized is only valid with NaN fill values
            homogenized = spf.homogenize(data)
            for k, v in compat.iteritems(homogenized):
                assert (v.sp_index.equals(expected))
        indices1 = [BlockIndex(10, [2], [7]), BlockIndex(10, [1, 6], [3, 4]),
                    BlockIndex(10, [0], [10])]
        expected1 = BlockIndex(10, [2, 6], [2, 3])
        _check_matches(indices1, expected1)
        indices2 = [BlockIndex(10, [2], [7]), BlockIndex(10, [2], [7])]
        expected2 = indices2[0]
        _check_matches(indices2, expected2)
        # must have NaN fill value
        data = {'a': SparseSeries(np.arange(7), sparse_index=expected2,
                                  fill_value=0)}
        with tm.assert_raises_regex(TypeError, "NaN fill value"):
            spf.homogenize(data)
    def test_fill_value_corner(self):
        cop = self.zbseries.copy()
        cop.fill_value = 0
        result = self.bseries / cop
        assert np.isnan(result.fill_value)
        cop2 = self.zbseries.copy()
        cop2.fill_value = 1
        result = cop2 / cop
        # 1 / 0 is inf
        assert np.isinf(result.fill_value)
    def test_fill_value_when_combine_const(self):
        # GH12723
        s = SparseSeries([0, 1, np.nan, 3, 4, 5], index=np.arange(6))
        exp = s.fillna(0).add(2)
        res = s.add(2, fill_value=0)
        tm.assert_series_equal(res, exp)
    def test_shift(self):
        series = SparseSeries([nan, 1., 2., 3., nan, nan], index=np.arange(6))
        shifted = series.shift(0)
        # shift(0) must return a new object equal to the original
        assert shifted is not series
        tm.assert_sp_series_equal(shifted, series)
        f = lambda s: s.shift(1)
        _dense_series_compare(series, f)
        f = lambda s: s.shift(-2)
        _dense_series_compare(series, f)
        series = SparseSeries([nan, 1., 2., 3., nan, nan],
                              index=bdate_range('1/1/2000', periods=6))
        f = lambda s: s.shift(2, freq='B')
        _dense_series_compare(series, f)
        f = lambda s: s.shift(2, freq=BDay())
        _dense_series_compare(series, f)
    def test_shift_nan(self):
        # GH 12908
        orig = pd.Series([np.nan, 2, np.nan, 4, 0, np.nan, 0])
        sparse = orig.to_sparse()
        tm.assert_sp_series_equal(sparse.shift(0), orig.shift(0).to_sparse())
        tm.assert_sp_series_equal(sparse.shift(1), orig.shift(1).to_sparse())
        tm.assert_sp_series_equal(sparse.shift(2), orig.shift(2).to_sparse())
        tm.assert_sp_series_equal(sparse.shift(3), orig.shift(3).to_sparse())
        tm.assert_sp_series_equal(sparse.shift(-1), orig.shift(-1).to_sparse())
        tm.assert_sp_series_equal(sparse.shift(-2), orig.shift(-2).to_sparse())
        tm.assert_sp_series_equal(sparse.shift(-3), orig.shift(-3).to_sparse())
        tm.assert_sp_series_equal(sparse.shift(-4), orig.shift(-4).to_sparse())
        sparse = orig.to_sparse(fill_value=0)
        tm.assert_sp_series_equal(sparse.shift(0),
                                  orig.shift(0).to_sparse(fill_value=0))
        tm.assert_sp_series_equal(sparse.shift(1),
                                  orig.shift(1).to_sparse(fill_value=0))
        tm.assert_sp_series_equal(sparse.shift(2),
                                  orig.shift(2).to_sparse(fill_value=0))
        tm.assert_sp_series_equal(sparse.shift(3),
                                  orig.shift(3).to_sparse(fill_value=0))
        tm.assert_sp_series_equal(sparse.shift(-1),
                                  orig.shift(-1).to_sparse(fill_value=0))
        tm.assert_sp_series_equal(sparse.shift(-2),
                                  orig.shift(-2).to_sparse(fill_value=0))
        tm.assert_sp_series_equal(sparse.shift(-3),
                                  orig.shift(-3).to_sparse(fill_value=0))
        tm.assert_sp_series_equal(sparse.shift(-4),
                                  orig.shift(-4).to_sparse(fill_value=0))
    def test_shift_dtype(self):
        # GH 12908
        orig = pd.Series([1, 2, 3, 4], dtype=np.int64)
        sparse = orig.to_sparse()
        tm.assert_sp_series_equal(sparse.shift(0), orig.shift(0).to_sparse())
        sparse = orig.to_sparse(fill_value=np.nan)
        tm.assert_sp_series_equal(sparse.shift(0),
                                  orig.shift(0).to_sparse(fill_value=np.nan))
        # shift(1) or more span changes dtype to float64
        tm.assert_sp_series_equal(sparse.shift(1), orig.shift(1).to_sparse())
        tm.assert_sp_series_equal(sparse.shift(2), orig.shift(2).to_sparse())
        tm.assert_sp_series_equal(sparse.shift(3), orig.shift(3).to_sparse())
        tm.assert_sp_series_equal(sparse.shift(-1), orig.shift(-1).to_sparse())
        tm.assert_sp_series_equal(sparse.shift(-2), orig.shift(-2).to_sparse())
        tm.assert_sp_series_equal(sparse.shift(-3), orig.shift(-3).to_sparse())
        tm.assert_sp_series_equal(sparse.shift(-4), orig.shift(-4).to_sparse())
    def test_shift_dtype_fill_value(self):
        # GH 12908
        orig = pd.Series([1, 0, 0, 4], dtype=np.int64)
        for v in [0, 1, np.nan]:
            sparse = orig.to_sparse(fill_value=v)
            tm.assert_sp_series_equal(sparse.shift(0),
                                      orig.shift(0).to_sparse(fill_value=v))
            tm.assert_sp_series_equal(sparse.shift(1),
                                      orig.shift(1).to_sparse(fill_value=v))
            tm.assert_sp_series_equal(sparse.shift(2),
                                      orig.shift(2).to_sparse(fill_value=v))
            tm.assert_sp_series_equal(sparse.shift(3),
                                      orig.shift(3).to_sparse(fill_value=v))
            tm.assert_sp_series_equal(sparse.shift(-1),
                                      orig.shift(-1).to_sparse(fill_value=v))
            tm.assert_sp_series_equal(sparse.shift(-2),
                                      orig.shift(-2).to_sparse(fill_value=v))
            tm.assert_sp_series_equal(sparse.shift(-3),
                                      orig.shift(-3).to_sparse(fill_value=v))
            tm.assert_sp_series_equal(sparse.shift(-4),
                                      orig.shift(-4).to_sparse(fill_value=v))
    def test_combine_first(self):
        s = self.bseries
        result = s[::2].combine_first(s)
        result2 = s[::2].combine_first(s.to_dense())
        expected = s[::2].to_dense().combine_first(s.to_dense())
        expected = expected.to_sparse(fill_value=s.fill_value)
        tm.assert_sp_series_equal(result, result2)
        tm.assert_sp_series_equal(result, expected)
    @pytest.mark.parametrize('deep', [True, False])
    @pytest.mark.parametrize('fill_value', [0, 1, np.nan, None])
    def test_memory_usage_deep(self, deep, fill_value):
        # sparse storage should always be smaller than the dense equivalent
        values = [0, 1, np.nan, None]
        sparse_series = SparseSeries(values, fill_value=fill_value)
        dense_series = Series(values)
        sparse_usage = sparse_series.memory_usage(deep=deep)
        dense_usage = dense_series.memory_usage(deep=deep)
        assert sparse_usage < dense_usage
class TestSparseHandlingMultiIndexes(object):
    """Sparse conversion must keep MultiIndex names on both axes."""

    def setup_method(self, method):
        # Small dense frame with named MultiIndexes on rows and columns.
        row_index = pd.MultiIndex.from_product(
            [["x", "y"], ["10", "20"]], names=['row-foo', 'row-bar'])
        col_index = pd.MultiIndex.from_product(
            [['a', 'b', 'c'], ["1", "2"]], names=['col-foo', 'col-bar'])
        frame = pd.DataFrame(index=row_index, columns=col_index)
        frame = frame.sort_index().sort_index(axis=1)
        self.dense_multiindex_frame = frame.fillna(value=3.14)

    def test_to_sparse_preserve_multiindex_names_columns(self):
        # Converting to sparse (then copying) must not drop column names.
        sparse_frame = self.dense_multiindex_frame.to_sparse().copy()
        tm.assert_index_equal(sparse_frame.columns,
                              self.dense_multiindex_frame.columns)

    def test_round_trip_preserve_multiindex_names(self):
        # dense -> sparse -> dense should be a lossless round trip.
        round_tripped = self.dense_multiindex_frame.to_sparse().to_dense()
        tm.assert_frame_equal(self.dense_multiindex_frame,
                              round_tripped,
                              check_column_type=True,
                              check_names=True)
@td.skip_if_no_scipy
class TestSparseSeriesScipyInteraction(object):
# Issue 8048: add SparseSeries coo methods
def setup_method(self, method):
import scipy.sparse
# SparseSeries inputs used in tests, the tests rely on the order
self.sparse_series = []
s = pd.Series([3.0, nan, 1.0, 2.0, nan, nan])
s.index = pd.MultiIndex.from_tuples([(1, 2, 'a', 0),
(1, 2, 'a', 1),
(1, 1, 'b', 0),
(1, 1, 'b', 1),
(2, 1, 'b', 0),
(2, 1, 'b', 1)],
names=['A', 'B', 'C', 'D'])
self.sparse_series.append(s.to_sparse())
ss = self.sparse_series[0].copy()
ss.index.names = [3, 0, 1, 2]
self.sparse_series.append(ss)
ss = pd.Series([
nan
] * 12, index=cartesian_product((range(3), range(4)))).to_sparse()
for k, v in zip([(0, 0), (1, 2), (1, 3)], [3.0, 1.0, 2.0]):
ss[k] = v
self.sparse_series.append(ss)
# results used in tests
self.coo_matrices = []
self.coo_matrices.append(scipy.sparse.coo_matrix(
([3.0, 1.0, 2.0], ([0, 1, 1], [0, 2, 3])), shape=(3, 4)))
self.coo_matrices.append(scipy.sparse.coo_matrix(
([3.0, 1.0, 2.0], ([1, 0, 0], [0, 2, 3])), shape=(3, 4)))
self.coo_matrices.append(scipy.sparse.coo_matrix(
([3.0, 1.0, 2.0], ([0, 1, 1], [0, 0, 1])), shape=(3, 2)))
self.ils = [[(1, 2), (1, 1), (2, 1)], [(1, 1), (1, 2), (2, 1)],
[(1, 2, 'a'), (1, 1, 'b'), (2, 1, 'b')]]
self.jls = [[('a', 0), ('a', 1), ('b', 0), ('b', 1)], [0, 1]]
def test_to_coo_text_names_integer_row_levels_nosort(self):
ss = self.sparse_series[0]
kwargs = {'row_levels': [0, 1], 'column_levels': [2, 3]}
result = (self.coo_matrices[0], self.ils[0], self.jls[0])
self._run_test(ss, kwargs, result)
def test_to_coo_text_names_integer_row_levels_sort(self):
ss = self.sparse_series[0]
kwargs = {'row_levels': [0, 1],
'column_levels': [2, 3],
'sort_labels': True}
result = (self.coo_matrices[1], self.ils[1], self.jls[0])
self._run_test(ss, kwargs, result)
def test_to_coo_text_names_text_row_levels_nosort_col_level_single(self):
ss = self.sparse_series[0]
kwargs = {'row_levels': ['A', 'B', 'C'],
'column_levels': ['D'],
'sort_labels': False}
result = (self.coo_matrices[2], self.ils[2], self.jls[1])
self._run_test(ss, kwargs, result)
def test_to_coo_integer_names_integer_row_levels_nosort(self):
ss = self.sparse_series[1]
kwargs = {'row_levels': [3, 0], 'column_levels': [1, 2]}
result = (self.coo_matrices[0], self.ils[0], self.jls[0])
self._run_test(ss, kwargs, result)
def test_to_coo_text_names_text_row_levels_nosort(self):
ss = self.sparse_series[0]
kwargs = {'row_levels': ['A', 'B'], 'column_levels': ['C', 'D']}
result = (self.coo_matrices[0], self.ils[0], self.jls[0])
self._run_test(ss, kwargs, result)
def test_to_coo_bad_partition_nonnull_intersection(self):
ss = self.sparse_series[0]
pytest.raises(ValueError, ss.to_coo, ['A', 'B', 'C'], ['C', 'D'])
def test_to_coo_bad_partition_small_union(self):
ss = self.sparse_series[0]
pytest.raises(ValueError, ss.to_coo, ['A'], ['C', 'D'])
def test_to_coo_nlevels_less_than_two(self):
ss = self.sparse_series[0]
ss.index = np.arange(len(ss.index))
pytest.raises(ValueError, ss.to_coo)
def test_to_coo_bad_ilevel(self):
ss = self.sparse_series[0]
pytest.raises(KeyError, ss.to_coo, ['A', 'B'], ['C', 'D', 'E'])
def test_to_coo_duplicate_index_entries(self):
ss = pd.concat([self.sparse_series[0],
self.sparse_series[0]]).to_sparse()
pytest.raises(ValueError, ss.to_coo, ['A', 'B'], ['C', 'D'])
def test_from_coo_dense_index(self):
ss = SparseSeries.from_coo(self.coo_matrices[0], dense_index=True)
check = self.sparse_series[2]
tm.assert_sp_series_equal(ss, check)
def test_from_coo_nodense_index(self):
ss = SparseSeries.from_coo(self.coo_matrices[0], dense_index=False)
check = self.sparse_series[2]
check = check.dropna().to_sparse()
tm.assert_sp_series_equal(ss, check)
def test_from_coo_long_repr(self):
# GH 13114
# test it doesn't raise error. Formatting is tested in test_format
import scipy.sparse
sparse = SparseSeries.from_coo(scipy.sparse.rand(350, 18))
repr(sparse)
def _run_test(self, ss, kwargs, check):
results = ss.to_coo(**kwargs)
self._check_results_to_coo(results, check)
# for every test, also test symmetry property (transpose), switch
# row_levels and column_levels
d = kwargs.copy()
d['row_levels'] = kwargs['column_levels']
d['column_levels'] = kwargs['row_levels']
results = ss.to_coo(**d)
results = (results[0].T, results[2], results[1])
self._check_results_to_coo(results, check)
def _check_results_to_coo(self, results, check):
    # Compare a (coo_matrix, row_labels, col_labels) triple against the
    # expected one; the matrices are densified for a robust comparison.
    (A, il, jl) = results
    (A_result, il_result, jl_result) = check
    # convert to dense and compare
    tm.assert_numpy_array_equal(A.todense(), A_result.todense())
    # or compare directly as difference of sparse
    # assert(abs(A - A_result).max() < 1e-12) # max is failing in python
    # 2.6
    assert il == il_result
    assert jl == jl_result
def test_concat(self):
    # Concatenation preserves the sparse kind and, when set, the fill_value.
    val1 = np.array([1, 2, np.nan, np.nan, 0, np.nan])
    val2 = np.array([3, np.nan, 4, 0, 0])
    for kind in ['integer', 'block']:
        sparse1 = pd.SparseSeries(val1, name='x', kind=kind)
        sparse2 = pd.SparseSeries(val2, name='y', kind=kind)
        res = pd.concat([sparse1, sparse2])
        exp = pd.concat([pd.Series(val1), pd.Series(val2)])
        exp = pd.SparseSeries(exp, kind=kind)
        tm.assert_sp_series_equal(res, exp)
        sparse1 = pd.SparseSeries(val1, fill_value=0, name='x', kind=kind)
        sparse2 = pd.SparseSeries(val2, fill_value=0, name='y', kind=kind)
        res = pd.concat([sparse1, sparse2])
        exp = pd.concat([pd.Series(val1), pd.Series(val2)])
        exp = pd.SparseSeries(exp, fill_value=0, kind=kind)
        tm.assert_sp_series_equal(res, exp)
def test_concat_axis1(self):
    """Column-wise concat of two sparse series yields a SparseDataFrame."""
    left_values = np.array([1, 2, np.nan, np.nan, 0, np.nan])
    right_values = np.array([3, np.nan, 4, 0, 0])
    left = pd.SparseSeries(left_values, name='x')
    right = pd.SparseSeries(right_values, name='y')
    result = pd.concat([left, right], axis=1)
    dense = pd.concat([pd.Series(left_values, name='x'),
                       pd.Series(right_values, name='y')], axis=1)
    expected = pd.SparseDataFrame(dense)
    tm.assert_sp_frame_equal(result, expected)
def test_concat_different_fill(self):
    # When fill_values differ, the first operand's fill_value wins.
    val1 = np.array([1, 2, np.nan, np.nan, 0, np.nan])
    val2 = np.array([3, np.nan, 4, 0, 0])
    for kind in ['integer', 'block']:
        sparse1 = pd.SparseSeries(val1, name='x', kind=kind)
        sparse2 = pd.SparseSeries(val2, name='y', kind=kind, fill_value=0)
        res = pd.concat([sparse1, sparse2])
        exp = pd.concat([pd.Series(val1), pd.Series(val2)])
        exp = pd.SparseSeries(exp, kind=kind)
        tm.assert_sp_series_equal(res, exp)
        res = pd.concat([sparse2, sparse1])
        exp = pd.concat([pd.Series(val2), pd.Series(val1)])
        exp = pd.SparseSeries(exp, kind=kind, fill_value=0)
        tm.assert_sp_series_equal(res, exp)
def test_concat_axis1_different_fill(self):
    # Column-wise concat of sparse series with different fill_values still
    # produces a SparseDataFrame whose dense form matches the dense concat.
    val1 = np.array([1, 2, np.nan, np.nan, 0, np.nan])
    val2 = np.array([3, np.nan, 4, 0, 0])
    sparse1 = pd.SparseSeries(val1, name='x')
    sparse2 = pd.SparseSeries(val2, name='y', fill_value=0)
    res = pd.concat([sparse1, sparse2], axis=1)
    exp = pd.concat([pd.Series(val1, name='x'),
                     pd.Series(val2, name='y')], axis=1)
    assert isinstance(res, pd.SparseDataFrame)
    tm.assert_frame_equal(res.to_dense(), exp)
def test_concat_different_kind(self):
    # When kinds differ ('integer' vs 'block'), the first operand's kind
    # and fill_value win.
    val1 = np.array([1, 2, np.nan, np.nan, 0, np.nan])
    val2 = np.array([3, np.nan, 4, 0, 0])
    sparse1 = pd.SparseSeries(val1, name='x', kind='integer')
    sparse2 = pd.SparseSeries(val2, name='y', kind='block', fill_value=0)
    res = pd.concat([sparse1, sparse2])
    exp = pd.concat([pd.Series(val1), pd.Series(val2)])
    exp = pd.SparseSeries(exp, kind='integer')
    tm.assert_sp_series_equal(res, exp)
    res = pd.concat([sparse2, sparse1])
    exp = pd.concat([pd.Series(val2), pd.Series(val1)])
    exp = pd.SparseSeries(exp, kind='block', fill_value=0)
    tm.assert_sp_series_equal(res, exp)
def test_concat_sparse_dense(self):
    # use first input's fill_value
    val1 = np.array([1, 2, np.nan, np.nan, 0, np.nan])
    val2 = np.array([3, np.nan, 4, 0, 0])
    for kind in ['integer', 'block']:
        sparse = pd.SparseSeries(val1, name='x', kind=kind)
        dense = pd.Series(val2, name='y')
        res = pd.concat([sparse, dense])
        exp = pd.concat([pd.Series(val1), dense])
        exp = pd.SparseSeries(exp, kind=kind)
        tm.assert_sp_series_equal(res, exp)
        res = pd.concat([dense, sparse, dense])
        exp = pd.concat([dense, pd.Series(val1), dense])
        exp = pd.SparseSeries(exp, kind=kind)
        tm.assert_sp_series_equal(res, exp)
        # Same checks with an explicit zero fill_value on the sparse input.
        sparse = pd.SparseSeries(val1, name='x', kind=kind, fill_value=0)
        dense = pd.Series(val2, name='y')
        res = pd.concat([sparse, dense])
        exp = pd.concat([pd.Series(val1), dense])
        exp = pd.SparseSeries(exp, kind=kind, fill_value=0)
        tm.assert_sp_series_equal(res, exp)
        res = pd.concat([dense, sparse, dense])
        exp = pd.concat([dense, pd.Series(val1), dense])
        exp = pd.SparseSeries(exp, kind=kind, fill_value=0)
        tm.assert_sp_series_equal(res, exp)
def test_value_counts(self):
    # Sparse value_counts must match dense, with and without dropna,
    # regardless of fill_value.
    vals = [1, 2, nan, 0, nan, 1, 2, nan, nan, 1, 2, 0, 1, 1]
    dense = pd.Series(vals, name='xx')
    sparse = pd.SparseSeries(vals, name='xx')
    tm.assert_series_equal(sparse.value_counts(),
                           dense.value_counts())
    tm.assert_series_equal(sparse.value_counts(dropna=False),
                           dense.value_counts(dropna=False))
    sparse = pd.SparseSeries(vals, name='xx', fill_value=0)
    tm.assert_series_equal(sparse.value_counts(),
                           dense.value_counts())
    tm.assert_series_equal(sparse.value_counts(dropna=False),
                           dense.value_counts(dropna=False))
def test_value_counts_dup(self):
    vals = [1, 2, nan, 0, nan, 1, 2, nan, nan, 1, 2, 0, 1, 1]
    # numeric op may cause sp_values to include the same value as
    # fill_value
    dense = pd.Series(vals, name='xx') / 0.
    sparse = pd.SparseSeries(vals, name='xx') / 0.
    tm.assert_series_equal(sparse.value_counts(),
                           dense.value_counts())
    tm.assert_series_equal(sparse.value_counts(dropna=False),
                           dense.value_counts(dropna=False))
    vals = [1, 2, 0, 0, 0, 1, 2, 0, 0, 1, 2, 0, 1, 1]
    dense = pd.Series(vals, name='xx') * 0.
    sparse = pd.SparseSeries(vals, name='xx') * 0.
    tm.assert_series_equal(sparse.value_counts(),
                           dense.value_counts())
    tm.assert_series_equal(sparse.value_counts(dropna=False),
                           dense.value_counts(dropna=False))
def test_value_counts_int(self):
    vals = [1, 2, 0, 1, 2, 1, 2, 0, 1, 1]
    dense = pd.Series(vals, name='xx')
    # fill_value is np.nan, but should not be included in the result
    sparse = pd.SparseSeries(vals, name='xx')
    tm.assert_series_equal(sparse.value_counts(),
                           dense.value_counts())
    tm.assert_series_equal(sparse.value_counts(dropna=False),
                           dense.value_counts(dropna=False))
    sparse = pd.SparseSeries(vals, name='xx', fill_value=0)
    tm.assert_series_equal(sparse.value_counts(),
                           dense.value_counts())
    tm.assert_series_equal(sparse.value_counts(dropna=False),
                           dense.value_counts(dropna=False))
def test_isna(self):
    # GH 8276
    s = pd.SparseSeries([np.nan, np.nan, 1, 2, np.nan], name='xxx')
    res = s.isna()
    exp = pd.SparseSeries([True, True, False, False, True], name='xxx',
                          fill_value=True)
    tm.assert_sp_series_equal(res, exp)
    # if fill_value is not nan, True can be included in sp_values
    s = pd.SparseSeries([np.nan, 0., 1., 2., 0.], name='xxx',
                        fill_value=0.)
    res = s.isna()
    assert isinstance(res, pd.SparseSeries)
    exp = pd.Series([True, False, False, False, False], name='xxx')
    tm.assert_series_equal(res.to_dense(), exp)
def test_notna(self):
    # GH 8276
    s = pd.SparseSeries([np.nan, np.nan, 1, 2, np.nan], name='xxx')
    res = s.notna()
    exp = pd.SparseSeries([False, False, True, True, False], name='xxx',
                          fill_value=False)
    tm.assert_sp_series_equal(res, exp)
    # if fill_value is not nan, True can be included in sp_values
    s = pd.SparseSeries([np.nan, 0., 1., 2., 0.], name='xxx',
                        fill_value=0.)
    res = s.notna()
    assert isinstance(res, pd.SparseSeries)
    exp = pd.Series([False, True, True, True, True], name='xxx')
    tm.assert_series_equal(res.to_dense(), exp)
def _dense_series_compare(s, f):
    """Apply *f* to a sparse series and check it matches the dense result."""
    sparse_result = f(s)
    assert isinstance(sparse_result, SparseSeries)
    expected = f(s.to_dense())
    tm.assert_series_equal(sparse_result.to_dense(), expected)
class TestSparseSeriesAnalytics(object):
    # Analytics tests (cumsum and numpy ufunc dispatch) over block-kind
    # sparse series with NaN and zero fill values.
    def setup_method(self, method):
        arr, index = _test_data1()
        self.bseries = SparseSeries(arr, index=index, kind='block',
                                    name='bseries')
        arr, index = _test_data1_zero()
        self.zbseries = SparseSeries(arr, index=index, kind='block',
                                     fill_value=0, name='zbseries')
    def test_cumsum(self):
        # cumsum on a NaN-filled series stays sparse; with fill_value=0 the
        # result densifies (running sums are generally non-zero).
        result = self.bseries.cumsum()
        expected = SparseSeries(self.bseries.to_dense().cumsum())
        tm.assert_sp_series_equal(result, expected)
        result = self.zbseries.cumsum()
        expected = self.zbseries.to_dense().cumsum()
        tm.assert_series_equal(result, expected)
        axis = 1  # Series is 1-D, so only axis = 0 is valid.
        msg = "No axis named {axis}".format(axis=axis)
        with tm.assert_raises_regex(ValueError, msg):
            self.bseries.cumsum(axis=axis)
    def test_numpy_cumsum(self):
        # np.cumsum dispatches to the pandas implementation; the numpy-only
        # 'dtype' and 'out' arguments are rejected.
        result = np.cumsum(self.bseries)
        expected = SparseSeries(self.bseries.to_dense().cumsum())
        tm.assert_sp_series_equal(result, expected)
        result = np.cumsum(self.zbseries)
        expected = self.zbseries.to_dense().cumsum()
        tm.assert_series_equal(result, expected)
        msg = "the 'dtype' parameter is not supported"
        tm.assert_raises_regex(ValueError, msg, np.cumsum,
                               self.bseries, dtype=np.int64)
        msg = "the 'out' parameter is not supported"
        tm.assert_raises_regex(ValueError, msg, np.cumsum,
                               self.zbseries, out=result)
    def test_numpy_func_call(self):
        # no exception should be raised even though
        # numpy passes in 'axis=None' or `axis=-1'
        funcs = ['sum', 'cumsum', 'var', 'mean',
                 'prod', 'cumprod', 'std', 'argsort',
                 'min', 'max']
        for func in funcs:
            for series in ('bseries', 'zbseries'):
                getattr(np, func)(getattr(self, series))
    def test_deprecated_numpy_func_call(self):
        # NOTE: These should be added to the 'test_numpy_func_call' test above
        # once the behavior of argmin/argmax is corrected.
        funcs = ['argmin', 'argmax']
        for func in funcs:
            for series in ('bseries', 'zbseries'):
                with tm.assert_produces_warning(FutureWarning,
                                                check_stacklevel=False):
                    getattr(np, func)(getattr(self, series))
                with tm.assert_produces_warning(FutureWarning,
                                                check_stacklevel=False):
                    getattr(getattr(self, series), func)()
    def test_deprecated_reindex_axis(self):
        # https://github.com/pandas-dev/pandas/issues/17833
        with tm.assert_produces_warning(FutureWarning) as m:
            self.bseries.reindex_axis([0, 1, 2])
        assert 'reindex' in str(m[0].message)
@pytest.mark.parametrize(
    'datetime_type', (np.datetime64,
                      pd.Timestamp,
                      lambda x: datetime.strptime(x, '%Y-%m-%d')))
def test_constructor_dict_datetime64_index(datetime_type):
    # GH 9456
    # Dict keys of any datetime-like flavor become a Timestamp index.
    dates = ['1984-02-19', '1988-11-06', '1989-12-03', '1990-03-15']
    values = [42544017.198965244, 1234565, 40512335.181958228, -1]
    result = SparseSeries(dict(zip(map(datetime_type, dates), values)))
    expected = SparseSeries(values, map(pd.Timestamp, dates))
    tm.assert_sp_series_equal(result, expected)
| {
"content_hash": "69ffe367360a36d99dade534a54a12ea",
"timestamp": "",
"source": "github",
"line_count": 1456,
"max_line_length": 80,
"avg_line_length": 38.005494505494504,
"alnum_prop": 0.5600513228278156,
"repo_name": "kdebrab/pandas",
"id": "921c30234660f180c08252025d37e296e7b52bc3",
"size": "55371",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pandas/tests/sparse/series/test_series.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3847"
},
{
"name": "C",
"bytes": "431689"
},
{
"name": "C++",
"bytes": "17193"
},
{
"name": "HTML",
"bytes": "551714"
},
{
"name": "Makefile",
"bytes": "563"
},
{
"name": "PowerShell",
"bytes": "2970"
},
{
"name": "Python",
"bytes": "13653350"
},
{
"name": "Shell",
"bytes": "25368"
},
{
"name": "Smarty",
"bytes": "2045"
}
],
"symlink_target": ""
} |
"""The RPC-service-side bridge between RPC Framework and GRPC-on-the-wire."""
import abc
import enum
import logging
import threading
import time
from grpc._adapter import _intermediary_low
from grpc._links import _constants
from grpc.framework.foundation import logging_pool
from grpc.framework.foundation import relay
from grpc.framework.interfaces.links import links
# Pass-through used when no serializer/deserializer is registered for a method.
_IDENTITY = lambda x: x
# Maps high-level ticket termination kinds to low-level wire status codes.
_TERMINATION_KIND_TO_CODE = {
    links.Ticket.Termination.COMPLETION: _intermediary_low.Code.OK,
    links.Ticket.Termination.CANCELLATION: _intermediary_low.Code.CANCELLED,
    links.Ticket.Termination.EXPIRATION:
        _intermediary_low.Code.DEADLINE_EXCEEDED,
    links.Ticket.Termination.SHUTDOWN: _intermediary_low.Code.UNAVAILABLE,
    links.Ticket.Termination.RECEPTION_FAILURE: _intermediary_low.Code.INTERNAL,
    links.Ticket.Termination.TRANSMISSION_FAILURE:
        _intermediary_low.Code.INTERNAL,
    links.Ticket.Termination.LOCAL_FAILURE: _intermediary_low.Code.UNKNOWN,
    links.Ticket.Termination.REMOTE_FAILURE: _intermediary_low.Code.UNKNOWN,
}
# Short aliases for the low-level completion-queue event kinds.
_STOP = _intermediary_low.Event.Kind.STOP
_WRITE = _intermediary_low.Event.Kind.WRITE_ACCEPTED
_COMPLETE = _intermediary_low.Event.Kind.COMPLETE_ACCEPTED
_SERVICE = _intermediary_low.Event.Kind.SERVICE_ACCEPTED
_READ = _intermediary_low.Event.Kind.READ_ACCEPTED
_FINISH = _intermediary_low.Event.Kind.FINISH
@enum.unique
class _Read(enum.Enum):
  """The possible categories of low-level read state for an RPC."""
  READING = 'reading'
  # TODO(issue 2916): This state will again be necessary after eliminating the
  # "early_read" field of _RPCState and going back to only reading when granted
  # allowance to read.
  # AWAITING_ALLOWANCE = 'awaiting allowance'
  CLOSED = 'closed'
@enum.unique
class _HighWrite(enum.Enum):
  """Whether the high-level (ticket) side of the write stream is still open."""
  OPEN = 'open'
  CLOSED = 'closed'
@enum.unique
class _LowWrite(enum.Enum):
  """The possible categories of low-level write state."""
  OPEN = 'OPEN'
  # ACTIVE: a write has been handed to the low layer and is not yet accepted.
  ACTIVE = 'ACTIVE'
  CLOSED = 'CLOSED'
class _RPCState(object):
  """Mutable per-RPC bookkeeping shared between the kernel's event handlers."""
  def __init__(
      self, request_deserializer, response_serializer, sequence_number, read,
      early_read, allowance, high_write, low_write, premetadataed,
      terminal_metadata, code, message, due):
    self.request_deserializer = request_deserializer
    self.response_serializer = response_serializer
    # Sequence number for the next outbound ticket on this RPC.
    self.sequence_number = sequence_number
    self.read = read
    # TODO(issue 2916): Eliminate this by eliminating the necessity of calling
    # call.read just to advance the RPC.
    self.early_read = early_read  # A raw (not deserialized) read.
    # Number of reads the high level has authorized but not yet consumed.
    self.allowance = allowance
    self.high_write = high_write
    self.low_write = low_write
    # Whether initial metadata has already been sent for this call.
    self.premetadataed = premetadataed
    self.terminal_metadata = terminal_metadata
    self.code = code
    self.message = message
    # Low-level event kinds still outstanding on the completion queue.
    self.due = due
def _no_longer_due(kind, rpc_state, key, rpc_states):
rpc_state.due.remove(kind)
if not rpc_state.due:
del rpc_states[key]
def _metadatafy(call, metadata):
for metadata_key, metadata_value in metadata:
call.add_metadata(metadata_key, metadata_value)
def _status(termination_kind, high_code, details):
  """Builds a low-level Status.
  The high-level code takes precedence; otherwise the code is derived from
  the termination kind. Missing details become an empty byte string.
  """
  low_details = b'' if details is None else details
  if high_code is None:
    low_code = _TERMINATION_KIND_TO_CODE[termination_kind]
  else:
    low_code = _constants.HIGH_STATUS_CODE_TO_LOW_STATUS_CODE[high_code]
  return _intermediary_low.Status(low_code, low_details)
class _Kernel(object):
  """Bridges low-level completion-queue events and high-level tickets.
  All mutable state is guarded by self._lock; _spin runs on a single pool
  thread consuming completion-queue events until nothing remains due.
  """
  def __init__(self, request_deserializers, response_serializers, ticket_relay):
    self._lock = threading.Lock()
    self._request_deserializers = request_deserializers
    self._response_serializers = response_serializers
    self._relay = ticket_relay
    self._completion_queue = None
    # Server-wide event kinds (STOP/SERVICE) still outstanding.
    self._due = set()
    self._server = None
    # Maps low-level call object -> _RPCState.
    self._rpc_states = {}
    self._pool = None
  def _on_service_acceptance_event(self, event, server):
    """Accepts a newly arrived RPC and emits its initial ticket."""
    # Re-arm the server to accept the next RPC.
    server.service(None)
    service_acceptance = event.service_acceptance
    call = service_acceptance.call
    call.accept(self._completion_queue, call)
    try:
      # Paths look like '/group/method'; anything else is rejected.
      group, method = service_acceptance.method.split('/')[1:3]
    except ValueError:
      logging.info('Illegal path "%s"!', service_acceptance.method)
      return
    request_deserializer = self._request_deserializers.get(
        (group, method), _IDENTITY)
    response_serializer = self._response_serializers.get(
        (group, method), _IDENTITY)
    # Start the first read immediately (see TODO on _RPCState.early_read).
    call.read(call)
    self._rpc_states[call] = _RPCState(
        request_deserializer, response_serializer, 1, _Read.READING, None, 1,
        _HighWrite.OPEN, _LowWrite.OPEN, False, None, None, None,
        set((_READ, _FINISH,)))
    ticket = links.Ticket(
        call, 0, group, method, links.Ticket.Subscription.FULL,
        service_acceptance.deadline - time.time(), None, event.metadata, None,
        None, None, None, None, 'TODO: Service Context Object!')
    self._relay.add_value(ticket)
  def _on_read_event(self, event):
    """Handles a completed read: deserialize and relay, buffer, or close."""
    call = event.tag
    rpc_state = self._rpc_states[call]
    if event.bytes is None:
      # End of the request stream.
      rpc_state.read = _Read.CLOSED
      payload = None
      termination = links.Ticket.Termination.COMPLETION
      _no_longer_due(_READ, rpc_state, call, self._rpc_states)
    else:
      if 0 < rpc_state.allowance:
        payload = rpc_state.request_deserializer(event.bytes)
        termination = None
        rpc_state.allowance -= 1
        call.read(call)
      else:
        # No allowance yet: stash the raw bytes until one arrives.
        rpc_state.early_read = event.bytes
        _no_longer_due(_READ, rpc_state, call, self._rpc_states)
        return
        # TODO(issue 2916): Instead of returning:
        # rpc_state.read = _Read.AWAITING_ALLOWANCE
    ticket = links.Ticket(
        call, rpc_state.sequence_number, None, None, None, None, None, None,
        payload, None, None, None, termination, None)
    rpc_state.sequence_number += 1
    self._relay.add_value(ticket)
  def _on_write_event(self, event):
    """Handles write acceptance: finish the RPC or grant another allowance."""
    call = event.tag
    rpc_state = self._rpc_states[call]
    if rpc_state.high_write is _HighWrite.CLOSED:
      # The high level already finished; emit terminal status now.
      if rpc_state.terminal_metadata is not None:
        _metadatafy(call, rpc_state.terminal_metadata)
      status = _status(
          links.Ticket.Termination.COMPLETION, rpc_state.code,
          rpc_state.message)
      call.status(status, call)
      rpc_state.low_write = _LowWrite.CLOSED
      rpc_state.due.add(_COMPLETE)
      rpc_state.due.remove(_WRITE)
    else:
      # Grant the high level allowance for one more payload.
      ticket = links.Ticket(
          call, rpc_state.sequence_number, None, None, None, None, 1, None,
          None, None, None, None, None, None)
      rpc_state.sequence_number += 1
      self._relay.add_value(ticket)
      rpc_state.low_write = _LowWrite.OPEN
      _no_longer_due(_WRITE, rpc_state, call, self._rpc_states)
  def _on_finish_event(self, event):
    """Handles RPC completion; relays a termination ticket on failure."""
    call = event.tag
    rpc_state = self._rpc_states[call]
    _no_longer_due(_FINISH, rpc_state, call, self._rpc_states)
    code = event.status.code
    if code is _intermediary_low.Code.OK:
      return
    if code is _intermediary_low.Code.CANCELLED:
      termination = links.Ticket.Termination.CANCELLATION
    elif code is _intermediary_low.Code.DEADLINE_EXCEEDED:
      termination = links.Ticket.Termination.EXPIRATION
    else:
      termination = links.Ticket.Termination.TRANSMISSION_FAILURE
    ticket = links.Ticket(
        call, rpc_state.sequence_number, None, None, None, None, None, None,
        None, None, None, None, termination, None)
    rpc_state.sequence_number += 1
    self._relay.add_value(ticket)
  def _spin(self, completion_queue, server):
    """Event loop: dispatches completion-queue events until all work is done."""
    while True:
      event = completion_queue.get(None)
      with self._lock:
        if event.kind is _STOP:
          self._due.remove(_STOP)
        elif event.kind is _READ:
          self._on_read_event(event)
        elif event.kind is _WRITE:
          self._on_write_event(event)
        elif event.kind is _COMPLETE:
          _no_longer_due(
              _COMPLETE, self._rpc_states.get(event.tag), event.tag,
              self._rpc_states)
        elif event.kind is _intermediary_low.Event.Kind.FINISH:
          self._on_finish_event(event)
        elif event.kind is _SERVICE:
          if self._server is None:
            # Shutting down: stop re-arming service acceptance.
            self._due.remove(_SERVICE)
          else:
            self._on_service_acceptance_event(event, server)
        else:
          logging.error('Illegal event! %s', (event,))
        # Exit once neither server-wide nor per-RPC events remain outstanding.
        if not self._due and not self._rpc_states:
          completion_queue.stop()
          return
  def add_ticket(self, ticket):
    """Applies a high-level ticket to the corresponding low-level call."""
    with self._lock:
      call = ticket.operation_id
      rpc_state = self._rpc_states.get(call)
      if rpc_state is None:
        return
      if ticket.initial_metadata is not None:
        _metadatafy(call, ticket.initial_metadata)
        call.premetadata()
        rpc_state.premetadataed = True
      elif not rpc_state.premetadataed:
        # Anything terminal or payload-bearing forces initial metadata out.
        if (ticket.terminal_metadata is not None or
            ticket.payload is not None or
            ticket.termination is not None or
            ticket.code is not None or
            ticket.message is not None):
          call.premetadata()
          rpc_state.premetadataed = True
      if ticket.allowance is not None:
        if rpc_state.early_read is None:
          rpc_state.allowance += ticket.allowance
        else:
          # Consume the buffered early read against the new allowance.
          payload = rpc_state.request_deserializer(rpc_state.early_read)
          rpc_state.allowance += ticket.allowance - 1
          rpc_state.early_read = None
          if rpc_state.read is _Read.READING:
            call.read(call)
            rpc_state.due.add(_READ)
            termination = None
          else:
            termination = links.Ticket.Termination.COMPLETION
          early_read_ticket = links.Ticket(
              call, rpc_state.sequence_number, None, None, None, None, None,
              None, payload, None, None, None, termination, None)
          rpc_state.sequence_number += 1
          self._relay.add_value(early_read_ticket)
      if ticket.payload is not None:
        call.write(rpc_state.response_serializer(ticket.payload), call)
        rpc_state.due.add(_WRITE)
        rpc_state.low_write = _LowWrite.ACTIVE
      if ticket.terminal_metadata is not None:
        rpc_state.terminal_metadata = ticket.terminal_metadata
      if ticket.code is not None:
        rpc_state.code = ticket.code
      if ticket.message is not None:
        rpc_state.message = ticket.message
      if ticket.termination is links.Ticket.Termination.COMPLETION:
        rpc_state.high_write = _HighWrite.CLOSED
        if rpc_state.low_write is _LowWrite.OPEN:
          # No write in flight: emit terminal status immediately.
          if rpc_state.terminal_metadata is not None:
            _metadatafy(call, rpc_state.terminal_metadata)
          status = _status(
              links.Ticket.Termination.COMPLETION, rpc_state.code,
              rpc_state.message)
          call.status(status, call)
          rpc_state.due.add(_COMPLETE)
          rpc_state.low_write = _LowWrite.CLOSED
      elif ticket.termination is not None:
        # Abortive termination: emit the mapped status right away.
        if rpc_state.terminal_metadata is not None:
          _metadatafy(call, rpc_state.terminal_metadata)
        status = _status(
            ticket.termination, rpc_state.code, rpc_state.message)
        call.status(status, call)
        rpc_state.due.add(_COMPLETE)
  def add_port(self, address, server_credentials):
    """Binds an address (lazily creating the server); returns the bound port."""
    with self._lock:
      if self._server is None:
        self._completion_queue = _intermediary_low.CompletionQueue()
        self._server = _intermediary_low.Server(self._completion_queue)
      if server_credentials is None:
        return self._server.add_http2_addr(address)
      else:
        return self._server.add_secure_http2_addr(address, server_credentials)
  def start(self):
    """Starts the server and the single-threaded event-dispatch loop."""
    with self._lock:
      if self._server is None:
        self._completion_queue = _intermediary_low.CompletionQueue()
        self._server = _intermediary_low.Server(self._completion_queue)
      self._pool = logging_pool.pool(1)
      self._pool.submit(self._spin, self._completion_queue, self._server)
      self._server.start()
      # Arm acceptance of the first RPC.
      self._server.service(None)
      self._due.add(_SERVICE)
  def begin_stop(self):
    """Rejects new RPCs; in-flight RPCs continue until they terminate."""
    with self._lock:
      self._server.stop()
      self._due.add(_STOP)
      self._server = None
  def end_stop(self):
    """Blocks until the event-dispatch thread has drained and exited."""
    with self._lock:
      pool = self._pool
    pool.shutdown(wait=True)
class ServiceLink(links.Link):
  """A links.Link for use on the service-side of a gRPC connection.
  Implementations of this interface are only valid for use between calls to
  their start method and one of their stop methods.
  """
  # NOTE(review): @abstractmethod is only enforced if links.Link supplies an
  # ABC metaclass -- confirm.
  @abc.abstractmethod
  def add_port(self, address, server_credentials):
    """Adds a port on which to service RPCs after this link has been started.
    Args:
      address: The address on which to service RPCs with a port number of zero
        requesting that a port number be automatically selected and used.
      server_credentials: An _intermediary_low.ServerCredentials object, or
        None for insecure service.
    Returns:
      An integer port on which RPCs will be serviced after this link has been
        started. This is typically the same number as the port number contained
        in the passed address, but will likely be different if the port number
        contained in the passed address was zero.
    """
    raise NotImplementedError()
  @abc.abstractmethod
  def start(self):
    """Starts this object.
    This method must be called before attempting to use this Link in ticket
    exchange.
    """
    raise NotImplementedError()
  @abc.abstractmethod
  def begin_stop(self):
    """Indicate imminent link stop and immediate rejection of new RPCs.
    New RPCs will be rejected as soon as this method is called, but ongoing RPCs
    will be allowed to continue until they terminate. This method does not
    block.
    """
    raise NotImplementedError()
  @abc.abstractmethod
  def end_stop(self):
    """Finishes stopping this link.
    begin_stop must have been called exactly once before calling this method.
    All in-progress RPCs will be terminated immediately.
    """
    raise NotImplementedError()
class _ServiceLink(ServiceLink):
  """Concrete ServiceLink delegating to a _Kernel and a ticket relay."""
  def __init__(self, request_deserializers, response_serializers):
    self._relay = relay.relay(None)
    self._kernel = _Kernel(
        {} if request_deserializers is None else request_deserializers,
        {} if response_serializers is None else response_serializers,
        self._relay)
  def accept_ticket(self, ticket):
    # Inbound tickets from the joined link are fed to the kernel.
    self._kernel.add_ticket(ticket)
  def join_link(self, link):
    # Outbound tickets are relayed to the joined link's accept_ticket.
    self._relay.set_behavior(link.accept_ticket)
  def add_port(self, address, server_credentials):
    return self._kernel.add_port(address, server_credentials)
  def start(self):
    self._relay.start()
    return self._kernel.start()
  def begin_stop(self):
    self._kernel.begin_stop()
  def end_stop(self):
    self._kernel.end_stop()
    self._relay.stop()
def service_link(request_deserializers, response_serializers):
  """Creates a ServiceLink.
  Args:
    request_deserializers: A dict from group-method pair to request object
      deserialization behavior.
    response_serializers: A dict from group-method pair to response object
      serialization behavior.
  Returns:
    A ServiceLink.
  """
  return _ServiceLink(request_deserializers, response_serializers)
| {
"content_hash": "881104993fd2d406cc3c36da7d57f708",
"timestamp": "",
"source": "github",
"line_count": 445,
"max_line_length": 80,
"avg_line_length": 34.26516853932584,
"alnum_prop": 0.6701862539349422,
"repo_name": "yinsu/grpc",
"id": "34d3b262c985d87ee96560f1042465f7a781bae5",
"size": "16777",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/python/grpcio/grpc/_links/service.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4084"
},
{
"name": "C",
"bytes": "3678910"
},
{
"name": "C#",
"bytes": "660363"
},
{
"name": "C++",
"bytes": "1045916"
},
{
"name": "DTrace",
"bytes": "147"
},
{
"name": "JavaScript",
"bytes": "199121"
},
{
"name": "Makefile",
"bytes": "1214793"
},
{
"name": "Objective-C",
"bytes": "254993"
},
{
"name": "PHP",
"bytes": "71664"
},
{
"name": "Protocol Buffer",
"bytes": "110494"
},
{
"name": "Python",
"bytes": "1471363"
},
{
"name": "Ruby",
"bytes": "351557"
},
{
"name": "Shell",
"bytes": "25857"
},
{
"name": "Swift",
"bytes": "5275"
}
],
"symlink_target": ""
} |
from ircb.storeclient.base import BaseStore
from ircb.lib.constants.signals import (STORE_CHANNEL_CREATE,
STORE_CHANNEL_CREATED,
STORE_CHANNEL_GET,
STORE_CHANNEL_GOT,
STORE_CHANNEL_UPDATE,
STORE_CHANNEL_UPDATED,
STORE_CHANNEL_DELETE,
STORE_CHANNEL_DELETED,
STORE_CHANNEL_CREATE_OR_UPDATE)
class ChannelStore(BaseStore):
    # Binds the generic BaseStore CRUD hooks to the channel-specific
    # dispatcher signals; all behavior lives in BaseStore.
    CREATE_SIGNAL = STORE_CHANNEL_CREATE
    CREATED_SIGNAL = STORE_CHANNEL_CREATED
    GET_SIGNAL = STORE_CHANNEL_GET
    GOT_SIGNAL = STORE_CHANNEL_GOT
    UPDATE_SIGNAL = STORE_CHANNEL_UPDATE
    UPDATED_SIGNAL = STORE_CHANNEL_UPDATED
    DELETE_SIGNAL = STORE_CHANNEL_DELETE
    DELETED_SIGNAL = STORE_CHANNEL_DELETED
    CREATE_OR_UPDATE_SIGNAL = STORE_CHANNEL_CREATE_OR_UPDATE
| {
"content_hash": "7af982a6f2da9b905c9665bdc91f7519",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 71,
"avg_line_length": 46.59090909090909,
"alnum_prop": 0.5307317073170732,
"repo_name": "Ghost-script/ircb",
"id": "1f125f3bb71dda46bee91616d41f3a72e27c233a",
"size": "1049",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "ircb/storeclient/channel.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "45337"
}
],
"symlink_target": ""
} |
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """
    def setUp(self):
        # Compare the generated file against the Excel-produced reference
        # in xlsx_files/; no files or XML elements are excluded.
        self.maxDiff = None
        filename = 'chart_pie01.xlsx'
        test_dir = 'xlsxwriter/test/comparison/'
        self.got_filename = test_dir + '_test_' + filename
        self.exp_filename = test_dir + 'xlsx_files/' + filename
        self.ignore_files = []
        self.ignore_elements = {}
    def test_create_file(self):
        """Test the creation of a simple XlsxWriter file."""
        workbook = Workbook(self.got_filename)
        worksheet = workbook.add_worksheet()
        chart = workbook.add_chart({'type': 'pie'})
        # Column A holds categories, column B holds the pie values.
        data = [
            [2, 4, 6],
            [60, 30, 10],
        ]
        worksheet.write_column('A1', data[0])
        worksheet.write_column('B1', data[1])
        chart.add_series({'categories': '=Sheet1!$A$1:$A$3',
                          'values': '=Sheet1!$B$1:$B$3',
                          })
        worksheet.insert_chart('E9', chart)
        workbook.close()
        self.assertExcelEqual()
| {
"content_hash": "7452029cf149568d33e2d18c57614edc",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 68,
"avg_line_length": 25.872340425531913,
"alnum_prop": 0.5600328947368421,
"repo_name": "jkyeung/XlsxWriter",
"id": "24183f7bdd3afaa9c6c348dc5542547dc4f82374",
"size": "1389",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xlsxwriter/test/comparison/test_chart_pie01.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5113"
},
{
"name": "CSS",
"bytes": "16544"
},
{
"name": "HTML",
"bytes": "13100"
},
{
"name": "Makefile",
"bytes": "7819"
},
{
"name": "Perl",
"bytes": "3504"
},
{
"name": "Python",
"bytes": "2430294"
},
{
"name": "Shell",
"bytes": "6064"
}
],
"symlink_target": ""
} |
from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class Record(Base):
    # Minimal table: an auto-increment surrogate key plus one string column.
    __tablename__ = 'records'
    id = Column(Integer, primary_key=True)
    data = Column(String(255))  # free-form payload, max 255 characters
if __name__ == "__main__":
from sqlalchemy import create_engine
from settings import DB_URI
engine = create_engine(DB_URI)
Base.metadata.drop_all(engine)
Base.metadata.create_all(engine)
| {
"content_hash": "eb8bb001450ebf2ee1434e6ecfcd63b6",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 55,
"avg_line_length": 26.736842105263158,
"alnum_prop": 0.7204724409448819,
"repo_name": "RunnerPro/ProApi",
"id": "6ed95cfd8fc00d75b1fd714ed6073c22c61cf02e",
"size": "531",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "167726"
},
{
"name": "HTML",
"bytes": "4371"
},
{
"name": "JavaScript",
"bytes": "48122"
},
{
"name": "Makefile",
"bytes": "1504"
},
{
"name": "Python",
"bytes": "5297"
}
],
"symlink_target": ""
} |
from canvas_sdk import client, utils
def retrieve_assignment_overridden_dates_for_quizzes(request_ctx, course_id, quiz_assignment_overrides_0_quiz_ids=None, per_page=None, **request_kwargs):
    """
    Retrieve the actual due-at, unlock-at, and available-at dates for quizzes
    based on the assignment overrides active for the current API user.
    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param course_id: (required) ID
    :type course_id: string
    :param quiz_assignment_overrides_0_quiz_ids: (optional) An array of quiz IDs. If omitted, overrides for all quizzes available to the operating user will be returned.
    :type quiz_assignment_overrides_0_quiz_ids: integer or None
    :param per_page: (optional) Set how many results canvas should return, defaults to config.LIMIT_PER_PAGE
    :type per_page: integer or None
    :return: Retrieve assignment-overridden dates for quizzes
    :rtype: requests.Response (with array data)
    """
    # Fall back to the context-wide page size when the caller gives none.
    effective_per_page = request_ctx.per_page if per_page is None else per_page
    path = '/v1/courses/{course_id}/quizzes/assignment_overrides'
    payload = {
        'quiz_assignment_overrides[0][quiz_ids]' : quiz_assignment_overrides_0_quiz_ids,
        'per_page' : effective_per_page,
    }
    url = request_ctx.base_api_url + path.format(course_id=course_id)
    return client.get(request_ctx, url, payload=payload, **request_kwargs)
| {
"content_hash": "3996c1a57c88868ce01ce7fcb6a19f6b",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 173,
"avg_line_length": 45.06060606060606,
"alnum_prop": 0.6940147948890383,
"repo_name": "penzance/canvas_python_sdk",
"id": "f2b5717567b10b7fdc1a28bf8229930986602fec",
"size": "1487",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "canvas_sdk/methods/quiz_assignment_overrides.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1066725"
}
],
"symlink_target": ""
} |
import copy
import datetime
import simplejson
from types import NoneType
from django.db import models
from django.forms.widgets import Textarea
from django.utils.timezone import is_aware
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import CharField
from django.forms.utils import ValidationError
JSON_INVALID = ValidationError(_('Enter valid JSON.'))
class JSONFormField(CharField):
    """Form field that round-trips its value through simplejson.
    Python 2 codebase: string inputs are detected via ``basestring``.
    """
    def __init__(self, *args, **kwargs):
        # Per-field simplejson options for loads()/dumps() respectively.
        self.load_kwargs = kwargs.pop('load_kwargs', {})
        self.dump_kwargs = kwargs.pop('dump_kwargs', {})
        super(JSONFormField, self).__init__(*args, **kwargs)
    def to_python(self, value):
        # Empty optional values become None; strings are parsed as JSON.
        if not value and not self.required:
            return None
        if isinstance(value, basestring):
            try:
                value = simplejson.loads(value, **self.load_kwargs)
            except ValueError:
                raise JSON_INVALID
        return value
    def prepare_value(self, value):
        # Render the value back to a JSON string for display; Textarea
        # widgets get a pretty-printed, sorted, non-ASCII-escaped form.
        if isinstance(value, basestring):
            value = simplejson.loads(value, **self.load_kwargs)
        kwargs = self.dump_kwargs.copy()
        kwargs['sort_keys'] = True
        if isinstance(self.widget, Textarea):
            kwargs['indent'] = 4
            kwargs['separators'] = (',', ': ')
            kwargs['ensure_ascii'] = False
        return simplejson.dumps(value, **kwargs)
class JSONEncoder(simplejson.JSONEncoder):
    """
    JSONEncoder subclass that knows how to encode date, time, datetime.

    Values are emitted as ISO 8601 strings (ECMAScript-compatible:
    fractional seconds truncated to milliseconds, UTC offset as "Z").
    """
    def default(self, o):
        if isinstance(o, datetime.datetime):
            r = o.isoformat()
            if o.microsecond:
                # isoformat() gives 6 fractional digits; keep only the
                # first 3 (milliseconds).
                r = r[:23] + r[26:]
            if r.endswith('+00:00'):
                # Use the compact "Z" suffix for UTC offsets.
                r = r[:-6] + 'Z'
            return r
        elif isinstance(o, datetime.date):
            # Must be checked after datetime: datetime subclasses date.
            return o.isoformat()
        elif isinstance(o, datetime.time):
            if is_aware(o):
                raise ValueError(_("JSON can't represent timezone-aware times."))
            r = o.isoformat()
            if o.microsecond:
                # Truncate "HH:MM:SS.ffffff" to milliseconds.
                r = r[:12]
            return r
        else:
            # Let simplejson raise TypeError for unsupported types.
            return super(JSONEncoder, self).default(o)
class JSONFieldBase(models.Field):
    """Common behaviour for JSON model fields.

    Python values (dict/list/None) are stored as compact JSON text in the
    database column and decoded back to Python objects on load.
    """
    # Compact separators so stored JSON carries no extra whitespace.
    DEFAULT_SEPARATORS = (',', ':')
    def __init__(self, *args, **kwargs):
        if 'default' not in kwargs:
            kwargs['default'] = None
        elif not isinstance(kwargs['default'], (list, dict, NoneType)) and not callable(kwargs['default']):
            raise AssertionError('Default can be None, list or dict.')
        if 'blank' not in kwargs:
            kwargs['blank'] = True
        # 'use_decimal' asks simplejson to handle floats as Decimal.
        use_decimal = kwargs.pop('use_decimal', False)
        self.dump_kwargs = {'cls': JSONEncoder,
                            'separators': self.DEFAULT_SEPARATORS,
                            'use_decimal': use_decimal}
        self.dump_kwargs.update(kwargs.pop('dump_kwargs', {}))
        self.load_kwargs = {'use_decimal': use_decimal}
        self.load_kwargs.update(kwargs.pop('load_kwargs', {}))
        super(JSONFieldBase, self).__init__(*args, **kwargs)
    def to_python(self, value):
        # Deserialize strings (forms, fixtures); pass through values that
        # are already Python objects.
        if isinstance(value, basestring):
            if value == '':
                return None
            try:
                return simplejson.loads(value, **self.load_kwargs)
            except ValueError:
                raise JSON_INVALID
        return value
    def from_db_value(self, value, expression, connection, context):
        # Decode the raw database string when loading from the database.
        if value in (None, ''):
            return None
        return simplejson.loads(value, **self.load_kwargs)
    def get_prep_value(self, value):
        # Serialize for storage; keep SQL NULL when the column allows it.
        if value is None and self.null:
            return None
        return simplejson.dumps(value, **self.dump_kwargs)
    def get_default(self):
        # Deep-copy non-callable defaults so model instances never share
        # the same mutable dict/list object.
        return self.default() if callable(self.default) else copy.deepcopy(self.default)
    def formfield(self, **kwargs):
        if 'form_class' not in kwargs:
            kwargs['form_class'] = JSONFormField
        field = super(JSONFieldBase, self).formfield(**kwargs)
        if not field.help_text:
            field.help_text = _('JSON data')
        if isinstance(field, JSONFormField):
            # Propagate the (de)serialization options to the form field.
            field.load_kwargs = self.load_kwargs
            field.dump_kwargs = self.dump_kwargs
        return field
    def deconstruct(self):
        # Expose dump/load kwargs so migrations can reconstruct the field.
        name, path, args, kwargs = super(JSONFieldBase, self).deconstruct()
        kwargs['dump_kwargs'] = self.dump_kwargs
        kwargs['load_kwargs'] = self.load_kwargs
        return name, path, args, kwargs
class JSONField(JSONFieldBase, models.TextField):
    """JSON field backed by an unbounded text column."""
    pass
class JSONCharField(JSONFieldBase, models.CharField):
    """JSON field backed by a bounded char column."""
    pass
| {
"content_hash": "f641b92d555f86c4c7fda8bdf4b435c8",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 107,
"avg_line_length": 34.73134328358209,
"alnum_prop": 0.5898152127202406,
"repo_name": "liminspace/dju-common",
"id": "3de1037b6a475d441bdea2c55e9458d99a47768d",
"size": "4654",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dju_common/fields/json.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "86777"
},
{
"name": "CoffeeScript",
"bytes": "3455"
},
{
"name": "HTML",
"bytes": "6934"
},
{
"name": "JavaScript",
"bytes": "46773"
},
{
"name": "Python",
"bytes": "117388"
}
],
"symlink_target": ""
} |
""" Contains the base classes for rollover targets and rollover target banks."""
# rollover.py
# Mission Pinball Framework
# Written by Brian Madden & Gabe Knuth
# Released under the MIT License. (See license info at the end of this file.)
# Documentation and more info at http://missionpinball.com/mpf
import logging
from mpf.devices.target import Target, TargetGroup
class Rollover(Target):
    """Represents a single rollover target in a pinball machine."""
    # NOTE(review): the original docstring said "drop target"; this class
    # models a rollover target (cf. the module docstring and device_str).
    config_section = 'Rollovers'
    collection = 'rollovers'
    def __init__(self, machine, name, config, collection=None):
        # Per-device logger, e.g. "Rollover.<name>".
        self.log = logging.getLogger('Rollover.' + name)
        super(Rollover, self).__init__(machine, name, config, collection)
        # Identifier string used for this device type.
        self.device_str = 'rollover'
class RolloverGroup(TargetGroup):
    """Represents a group of rollover targets in a pinball machine by grouping
    together multiple Rollover class devices.
    """
    config_section = 'RolloverGroups'
    collection = 'rollover_groups'
    def __init__(self, machine, name, config, collection=None):
        self.log = logging.getLogger('RolloverGroup.' + name)
        # Set before calling the base __init__ — presumably consumed
        # there; confirm against TargetGroup.
        self.device_str = 'rollovers'
        self.member_collection = machine.rollovers
        super(RolloverGroup, self).__init__(machine, name, config, collection)
# The MIT License (MIT)
# Copyright (c) 2013-2015 Brian Madden and Gabe Knuth
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
| {
"content_hash": "cc78c70be51ac2290be9ddfa278d6002",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 80,
"avg_line_length": 39.225806451612904,
"alnum_prop": 0.7360197368421053,
"repo_name": "jabdoa2/mpf",
"id": "53ec5f5529d4551d6a5715c4f708b232122bd57d",
"size": "2432",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mpf/devices/rollover.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1839"
},
{
"name": "Python",
"bytes": "1109877"
}
],
"symlink_target": ""
} |
"""This module contains the general information for LsbootUsb ManagedObject."""
from ...imcmo import ManagedObject
from ...imccoremeta import MoPropertyMeta, MoMeta
from ...imcmeta import VersionMeta
class LsbootUsbConsts:
    """Allowed constant values for LsbootUsb properties (state/subtype/type)."""
    STATE_DISABLED = "Disabled"
    STATE_ENABLED = "Enabled"
    SUBTYPE_ = ""
    SUBTYPE_USB_CD = "usb-cd"
    SUBTYPE_USB_FDD = "usb-fdd"
    SUBTYPE_USB_HDD = "usb-hdd"
    TYPE_USB = "USB"
class LsbootUsb(ManagedObject):
    """This is LsbootUsb class."""
    # Managed object for a USB boot device ("usb-[name]") under
    # lsbootDevPrecision.  The "classic"/"modular" keys below select
    # platform-specific metadata for the two IMC platform families.
    consts = LsbootUsbConsts()
    naming_props = set([u'name'])
    mo_meta = {
        "classic": MoMeta("LsbootUsb", "lsbootUsb", "usb-[name]", VersionMeta.Version201a, "InputOutput", 0x1ff, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
        "modular": MoMeta("LsbootUsb", "lsbootUsb", "usb-[name]", VersionMeta.Version2013e, "InputOutput", 0x1ff, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"])
    }
    prop_meta = {
        "classic": {
            "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version201a, MoPropertyMeta.READ_WRITE, 0x2, 0, 255, None, [], []),
            "name": MoPropertyMeta("name", "name", "string", VersionMeta.Version201a, MoPropertyMeta.NAMING, 0x4, None, None, r"""(([a-zA-Z0-9]{1})|([a-zA-Z0-9]{1}[a-zA-Z0-9_\-]{0,28}[a-zA-Z0-9]{1})|([a-zA-Z0-9]{2}))""", [], []),
            "order": MoPropertyMeta("order", "order", "uint", VersionMeta.Version201a, MoPropertyMeta.READ_WRITE, 0x8, None, None, None, [], ["1-255"]),
            "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version201a, MoPropertyMeta.READ_WRITE, 0x10, 0, 255, None, [], []),
            "state": MoPropertyMeta("state", "state", "string", VersionMeta.Version201a, MoPropertyMeta.READ_WRITE, 0x20, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
            "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version201a, MoPropertyMeta.READ_WRITE, 0x40, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
            "subtype": MoPropertyMeta("subtype", "subtype", "string", VersionMeta.Version201a, MoPropertyMeta.READ_WRITE, 0x80, None, None, None, ["", "usb-cd", "usb-fdd", "usb-hdd"], []),
            "type": MoPropertyMeta("type", "type", "string", VersionMeta.Version201a, MoPropertyMeta.READ_WRITE, 0x100, None, None, None, ["USB"], []),
            "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version201a, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
        },
        "modular": {
            "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x2, 0, 255, None, [], []),
            "name": MoPropertyMeta("name", "name", "string", VersionMeta.Version2013e, MoPropertyMeta.NAMING, 0x4, None, None, r"""(([a-zA-Z0-9]{1})|([a-zA-Z0-9]{1}[a-zA-Z0-9_\-]{0,28}[a-zA-Z0-9]{1})|([a-zA-Z0-9]{2}))""", [], []),
            "order": MoPropertyMeta("order", "order", "uint", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x8, None, None, None, [], ["1-255"]),
            "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x10, 0, 255, None, [], []),
            "state": MoPropertyMeta("state", "state", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x20, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
            "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x40, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
            "subtype": MoPropertyMeta("subtype", "subtype", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x80, None, None, None, ["", "usb-cd", "usb-fdd", "usb-hdd"], []),
            "type": MoPropertyMeta("type", "type", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x100, None, None, None, ["USB"], []),
            "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version2013e, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
        },
    }
    # Maps XML attribute names to Python attribute names.
    prop_map = {
        "classic": {
            "dn": "dn",
            "name": "name",
            "order": "order",
            "rn": "rn",
            "state": "state",
            "status": "status",
            "subtype": "subtype",
            "type": "type",
            "childAction": "child_action",
        },
        "modular": {
            "dn": "dn",
            "name": "name",
            "order": "order",
            "rn": "rn",
            "state": "state",
            "status": "status",
            "subtype": "subtype",
            "type": "type",
            "childAction": "child_action",
        },
    }
    def __init__(self, parent_mo_or_dn, name, **kwargs):
        """Create the MO; *name* is the naming property (rn = "usb-[name]")."""
        self._dirty_mask = 0
        self.name = name
        self.order = None
        self.state = None
        self.status = None
        self.subtype = None
        self.type = None
        self.child_action = None
        ManagedObject.__init__(self, "LsbootUsb", parent_mo_or_dn, **kwargs)
| {
"content_hash": "ed98902256454addb4640788663c4528",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 231,
"avg_line_length": 54.04123711340206,
"alnum_prop": 0.5734452499046165,
"repo_name": "ragupta-git/ImcSdk",
"id": "90513b6ea108d56a33fac2521368f9b5b959254e",
"size": "5242",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "imcsdk/mometa/lsboot/LsbootUsb.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1042023"
}
],
"symlink_target": ""
} |
from django import forms
from django.forms.models import modelform_factory
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin import widgets
from wagtail.wagtailadmin.forms import (
BaseCollectionMemberForm, collection_member_permission_formset_factory
)
from wagtail.wagtaildocs.models import Document
from wagtail.wagtaildocs.permissions import permission_policy as documents_permission_policy
class BaseDocumentForm(BaseCollectionMemberForm):
    # Binds the documents permission policy so collection choices are
    # restricted to collections the user may act on.
    permission_policy = documents_permission_policy
def get_document_form(model):
    """Build the ModelForm class used to edit a single document.

    The form covers the model's ``admin_form_fields`` and always includes
    'collection': omitting it can cause dubious results when multiple
    collections exist (e.g. adding the document to the root collection
    where the user may not have permission), and with only one collection
    the field gets hidden anyway.
    """
    form_fields = list(model.admin_form_fields)
    if 'collection' not in form_fields:
        form_fields.append('collection')
    return modelform_factory(
        model,
        form=BaseDocumentForm,
        fields=form_fields,
        widgets={
            'tags': widgets.AdminTagWidget,
            'file': forms.FileInput(),
        })
def get_document_multi_form(model):
    """Build the cut-down ModelForm used when bulk-editing documents."""
    multi_edit_fields = ['title', 'collection', 'tags']
    form_widgets = {
        'tags': widgets.AdminTagWidget,
        'file': forms.FileInput(),
    }
    return modelform_factory(
        model,
        form=BaseDocumentForm,
        fields=multi_edit_fields,
        widgets=form_widgets)
# Formset for editing a group's document permissions in the admin:
# "add" covers documents the user owns, "change" covers any document.
GroupDocumentPermissionFormSet = collection_member_permission_formset_factory(
    Document,
    [
        ('add_document', _("Add"), _("Add/edit documents you own")),
        ('change_document', _("Edit"), _("Edit any document")),
    ],
    'wagtaildocs/permissions/includes/document_permissions_formset.html'
)
| {
"content_hash": "b62b2dcbaa0ba44f281818e8288eb3f4",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 92,
"avg_line_length": 33.407407407407405,
"alnum_prop": 0.6879157427937915,
"repo_name": "davecranwell/wagtail",
"id": "a443116d907a58fc68b52c09075341b0b2599c38",
"size": "1804",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "wagtail/wagtaildocs/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "159671"
},
{
"name": "HTML",
"bytes": "267389"
},
{
"name": "JavaScript",
"bytes": "109257"
},
{
"name": "Makefile",
"bytes": "548"
},
{
"name": "Python",
"bytes": "2063070"
},
{
"name": "Shell",
"bytes": "7388"
}
],
"symlink_target": ""
} |
import flask
from digits.utils.routing import request_wants_json, job_from_request
from digits.webapp import app, scheduler, autodoc
from digits.dataset import tasks
from forms import GenericImageDatasetForm
from job import GenericImageDatasetJob
NAMESPACE = '/datasets/images/generic'
@app.route(NAMESPACE + '/new', methods=['GET'])
@autodoc('datasets')
def generic_image_dataset_new():
    """
    Returns a form for a new GenericImageDatasetJob
    """
    # Render an empty (unbound) form; submission is handled by
    # generic_image_dataset_create().
    form = GenericImageDatasetForm()
    return flask.render_template('datasets/images/generic/new.html', form=form)
@app.route(NAMESPACE + '.json', methods=['POST'])
@app.route(NAMESPACE, methods=['POST'])
@autodoc(['datasets', 'api'])
def generic_image_dataset_create():
    """
    Creates a new GenericImageDatasetJob

    Returns JSON when requested: {job_id,name,status} or {errors:[]}
    """
    form = GenericImageDatasetForm()

    if not form.validate_on_submit():
        if request_wants_json():
            return flask.jsonify({'errors': form.errors}), 400
        else:
            return flask.render_template('datasets/images/generic/new.html', form=form), 400

    job = None
    try:
        job = GenericImageDatasetJob(
                name=form.dataset_name.data,
                mean_file=form.prebuilt_mean_file.data.strip(),
                )

        if form.method.data != 'prebuilt':
            raise ValueError('method not supported')

        force_same_shape = form.force_same_shape.data

        def add_analyze_task(field):
            """Queue an AnalyzeDbTask for one prebuilt database form field."""
            job.tasks.append(
                    tasks.AnalyzeDbTask(
                        job_dir=job.dir(),
                        database=field.data,
                        purpose=field.label.text,
                        force_same_shape=force_same_shape,
                        )
                    )

        # The training image database is required; the label and
        # validation databases are analyzed only when provided.
        add_analyze_task(form.prebuilt_train_images)
        for optional_field in (form.prebuilt_train_labels,
                               form.prebuilt_val_images,
                               form.prebuilt_val_labels):
            if optional_field.data:
                add_analyze_task(optional_field)

        scheduler.add_job(job)
        if request_wants_json():
            return flask.jsonify(job.json_dict())
        else:
            return flask.redirect(flask.url_for('datasets_show', job_id=job.id()))

    except:
        # Bare except is deliberate: clean up the partially-created job on
        # *any* failure (including KeyboardInterrupt), then re-raise.
        if job:
            scheduler.delete_job(job)
        raise
def show(job):
    """
    Called from digits.dataset.views.datasets_show()

    Renders the detail page for a GenericImageDatasetJob.
    """
    return flask.render_template('datasets/images/generic/show.html', job=job)
@app.route(NAMESPACE + '/summary', methods=['GET'])
@autodoc('datasets')
def generic_image_dataset_summary():
    """
    Return a short HTML summary of a DatasetJob
    """
    # job_from_request() resolves the job from the request's job_id.
    job = job_from_request()
    return flask.render_template('datasets/images/generic/summary.html', dataset=job)
| {
"content_hash": "a0df0db99b3fbafd197bcc4745bdffd2",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 92,
"avg_line_length": 33.91304347826087,
"alnum_prop": 0.5441025641025641,
"repo_name": "andreydung/DIGITS",
"id": "b86519b0b00b4f45af429ae09e781de3f8347945",
"size": "3965",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "digits/dataset/images/generic/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "711"
},
{
"name": "HTML",
"bytes": "155804"
},
{
"name": "JavaScript",
"bytes": "107641"
},
{
"name": "Python",
"bytes": "470489"
},
{
"name": "Shell",
"bytes": "1383"
}
],
"symlink_target": ""
} |
from Quartz import *
import Quartz
import Utilities
import sys
def drawQuartzRomanText(context):
    """Draw the word "Quartz" repeatedly in Times-Roman, alternating black
    and red fills and rotating the coordinate system 90 degrees clockwise
    between the later repetitions."""
    text = "Quartz"
    textlen = len(text)
    fontSize = 60
    opaqueBlack = [0.0, 0.0, 0.0, 1.0]
    opaqueRed = [0.663, 0.0, 0.031, 1.0]
    # Set the fill color space. This sets the
    # fill painting color to opaque black.
    CGContextSetFillColorSpace(context,
        Utilities.getTheCalibratedRGBColorSpace())
    # The Cocoa framework calls the draw method with an undefined
    # value of the text matrix. It's best to set it to what is needed by
    # this code: the identity transform.
    CGContextSetTextMatrix(context, CGAffineTransformIdentity)
    # Set the font with the PostScript name "Times-Roman", at
    # fontSize points, with the MacRoman encoding.
    CGContextSelectFont(context, "Times-Roman", fontSize, kCGEncodingMacRoman)
    # The default text drawing mode is fill. Draw the text at (70, 400).
    CGContextShowTextAtPoint(context, 70, 400, text, textlen)
    # Set the fill color to red.
    CGContextSetFillColor(context, opaqueRed)
    # Draw the next piece of text where the previous one left off.
    CGContextShowText(context, text, textlen)
    for i in range(3):
        # Get the current text pen position.
        p = CGContextGetTextPosition(context)
        # Translate to the current text pen position.
        CGContextTranslateCTM(context, p.x, p.y)
        # Rotate clockwise by 90 degrees for the next
        # piece of text.
        CGContextRotateCTM(context, Utilities.DEGREES_TO_RADIANS(-90))
        # Draw the next piece of text in blac at the origin.
        CGContextSetFillColor(context, opaqueBlack)
        CGContextShowTextAtPoint(context, 0, 0, text, textlen)
        # Draw the next piece of text where the previous piece
        # left off and paint it with red.
        CGContextSetFillColor(context, opaqueRed)
        CGContextShowText(context, text, textlen)
def myCGContextStrokeLineSegments(context, s, count):
    """Stroke *count* points of *s* as independent (start, end) segment
    pairs.

    Prefers the native CGContextStrokeLineSegments (available on Tiger
    and later), which has significant performance optimizations on some
    hardware; otherwise emulates it by building a single path of
    disjoint segments and stroking it.
    """
    if not hasattr(Quartz, 'CGContextStrokeLineSegments'):
        CGContextBeginPath(context)
        for start in xrange(0, count, 2):
            begin, end = s[start], s[start + 1]
            CGContextMoveToPoint(context, begin.x, begin.y)
            CGContextAddLineToPoint(context, end.x, end.y)
        CGContextStrokePath(context)
    else:
        CGContextStrokeLineSegments(context, s, count)
# Module-level cache of grid segment endpoints; filled on first call to
# drawGridLines() and reused afterwards.
_gridLines = []
def drawGridLines(context):
    """Stroke a grid of vertical and horizontal line segments, used to
    show text acting as a clipping path."""
    numlines = 60
    if not _gridLines:
        # First call: build 60 vertical segments (x = 0, 4, 8, ..., each
        # from y=-60 to y=200) and 60 horizontal segments (y = -20, -16,
        # ..., each from x=0 to x=400), two endpoints per segment.
        stepsize = 4.0
        val = 0
        for i in xrange(0, 2*numlines, 2):
            _gridLines.append(CGPointMake(val, -60))
            _gridLines.append(CGPointMake(val, 200))
            val += stepsize
        val = -20
        for i in xrange(2*numlines, 4*numlines, 2):
            _gridLines.append(CGPointMake(0, val))
            _gridLines.append(CGPointMake(400, val))
            val += stepsize
    myCGContextStrokeLineSegments(context, _gridLines, len(_gridLines))
def drawQuartzTextWithTextModes(context):
    """Demonstrate the Quartz text drawing modes, one line of text per
    mode: fill, stroke, fill+stroke, invisible, clip, and
    fill+stroke+clip (the clip modes draw grid lines clipped by the
    text)."""
    fillText = "Fill "
    strokeText = "Stroke "
    fillAndStrokeText = "FillStroke "
    invisibleText = "Invisible "
    clipText = "ClipText "
    fillStrokeClipText = "FillStrokeClip "
    fontSize = 40.0
    extraLeading = 5.0
    dash = (1,1)
    opaqueRed = (1.0, 0.0, 0.0, 1.0)
    # Set the fill and stroke color space. This sets the
    # fill and stroke painting color to opaque black.
    CGContextSetFillColorSpace(context,
        Utilities.getTheCalibratedRGBColorSpace())
    CGContextSetStrokeColorSpace(context,
        Utilities.getTheCalibratedRGBColorSpace())
    # The Cocoa framework calls the draw method with an undefined
    # value of the text matrix. It's best to set it to what is needed by
    # this code: the identity transform.
    CGContextSetTextMatrix(context, CGAffineTransformIdentity)
    # Set the font with the PostScript name "Times-Roman", at
    # fontSize points, with the MacRoman encoding.
    CGContextSelectFont(context, "Times-Roman", fontSize, kCGEncodingMacRoman)
    # ---- Text Line 1 ----
    # Default text drawing mode is fill. Draw the text at (10, 400).
    CGContextShowTextAtPoint(context, 10, 400, fillText, len(fillText))
    # Set the fill color to red.
    CGContextSetFillColor(context, opaqueRed)
    CGContextSetTextPosition(context, 180, 400)
    CGContextShowText(context, fillText, len(fillText))
    # Translate down for the next line of text.
    CGContextTranslateCTM(context, 0, -(fontSize + extraLeading))
    # ---- Text Line 2 ----
    # Now stroke the text by setting the text drawing mode
    # to kCGTextStroke. When stroking text, Quartz uses the stroke
    # color in the graphics state.
    CGContextSetTextDrawingMode(context, kCGTextStroke)
    CGContextShowTextAtPoint(context, 10, 400, strokeText, len(strokeText))
    # When stroking text, the line width and other gstate parameters
    # that affect stroking affect text stroking as well.
    CGContextSetLineWidth(context, 2)
    CGContextSetLineDash(context, 0, dash, 2)
    CGContextSetTextPosition(context, 180, 400)
    CGContextShowText(context, strokeText, len(strokeText))
    # Reset the line dash and line width to their defaults.
    CGContextSetLineDash(context, 0, None, 0)
    CGContextSetLineWidth(context, 1)
    # Translate down for the next line of text.
    CGContextTranslateCTM(context, 0, -(fontSize + extraLeading))
    # ---- Text Line 3 ----
    # Set the text drawing mode so that text is both filled and
    # stroked. This produces text that is filled with the fill
    # color and stroked with the stroke color.
    CGContextSetTextDrawingMode(context, kCGTextFillStroke)
    CGContextShowTextAtPoint(context, 10, 400,
        fillAndStrokeText, len(fillAndStrokeText))
    # Now draw again with a thicker stroke width.
    CGContextSetLineWidth(context, 2)
    CGContextSetTextPosition(context, 180, 400)
    CGContextShowText(context, fillAndStrokeText, len(fillAndStrokeText))
    CGContextSetLineWidth(context, 1)
    CGContextTranslateCTM(context, 0, -(fontSize + extraLeading))
    # ---- Text Line 4 ----
    # Set the text drawing mode to invisible so that the next piece of
    # text does not appear. Quartz updates the text position as
    # if it had been drawn.
    CGContextSetTextDrawingMode(context, kCGTextInvisible)
    CGContextShowTextAtPoint(context, 10, 400,
        invisibleText, len(invisibleText))
    CGContextSetTextDrawingMode(context, kCGTextFill)
    CGContextSetTextPosition(context, 180, 400)
    CGContextShowText(context, fillText, len(fillText))
    CGContextTranslateCTM(context, 0, -(fontSize + extraLeading))
    # ---- Text Line 5 ----
    CGContextSaveGState(context)
    if 1:
        # Use the text as a clipping path.
        CGContextSetTextDrawingMode(context, kCGTextClip)
        CGContextShowTextAtPoint(context, 10, 400, clipText, len(clipText))
        # Position and draw a grid of lines.
        CGContextTranslateCTM(context, 10, 400)
        drawGridLines(context)
    CGContextRestoreGState(context)
    CGContextSaveGState(context)
    if 1:
        # The current text position is that after the last piece
        # of text has been drawn. Since CGContextSaveGState/
        # CGContextRestoreGState do not affect the text position or
        # the text matrix, the text position is that after the last
        # text was "drawn", that drawn with the kCGTextClip mode
        # above. This is where the next text drawn will go if it
        # isn't explicitly positioned.
        nextTextPosition = CGContextGetTextPosition(context)
        # Draw so that the text is filled, stroked, and then used
        # the clip subsequent drawing.
        CGContextSetTextDrawingMode(context, kCGTextFillStrokeClip)
        # Explicitly set the text position.
        CGContextSetTextPosition(context, 180, 400)
        nextTextPosition = CGContextGetTextPosition(context)
        CGContextShowText(context, fillStrokeClipText, len(fillStrokeClipText))
        # Adjust the location of the grid lines so that they overlap the
        # text just drawn.
        CGContextTranslateCTM(context, nextTextPosition.x, nextTextPosition.y)
        # Draw the grid lines clipped by the text.
        drawGridLines(context)
    CGContextRestoreGState(context)
# showFlippedTextAtPoint is a cover routine for CGContextShowText
# that is useful for drawing text in a coordinate system where the y axis
# is flipped relative to the default Quartz coordinate system.
#
# This code assumes that the text matrix is only used to
# flip the text, not to perform scaling or any other
# possible use of the text matrix.
#
# This function preserves the a, b, c, and d components of
# the text matrix across its execution but updates the
# tx, ty components (the text position) to reflect the
# text just drawn. If all the text you draw is flipped, it
# isn't necessary to continually set the text matrix. Instead
# you could simply call CGContextSetTextMatrix once with
# the flipped matrix each time your drawing
# code is called.
def showFlippedTextAtPoint(c, x, y, text, textLen):
    """Draw *text* at (x, y) flipped in y, preserving the caller's text
    matrix apart from the updated text position.

    Useful when drawing in a coordinate system whose y axis is flipped
    relative to the default Quartz coordinate system.  Assumes the text
    matrix is used only to flip the text, not for scaling or any other
    transform.
    """
    savedMatrix = CGContextGetTextMatrix(c)
    # Install a matrix that flips text in y, then draw at the point.
    flipMatrix = CGAffineTransform(1.0, 0.0, 0.0, -1.0, 0.0, 0.0)
    CGContextSetTextMatrix(c, flipMatrix)
    CGContextShowTextAtPoint(c, x, y, text, textLen)
    # Fold the advanced text position back into the caller's matrix so
    # subsequent text continues where this text left off, then restore.
    endPosition = CGContextGetTextPosition(c)
    savedMatrix.tx = endPosition.x
    savedMatrix.ty = endPosition.y
    CGContextSetTextMatrix(c, savedMatrix)
def drawQuartzTextWithTextMatrix(context):
    """Demonstrate how the text matrix interacts with text drawing: a
    y-flipping matrix, a y-scaling matrix, the fact that the text matrix
    survives CGContextSaveGState/RestoreGState, and drawing in a flipped
    CTM."""
    fontSize = 60.0
    extraLeading = 10.0
    text = "Quartz "
    textlen = len(text)
    # The Cocoa framework calls the draw method with an undefined
    # value of the text matrix. It's best to set it to what is needed by
    # this code. Initially that is the identity transform.
    CGContextSetTextMatrix(context, CGAffineTransformIdentity)
    # Set the font with the PostScript name "Times-Roman", at
    # fontSize points, with the MacRoman encoding.
    CGContextSelectFont(context, "Times-Roman", fontSize, kCGEncodingMacRoman)
    # ---- Text Line 1 ----
    # Draw the text at (10, 600).
    CGContextShowTextAtPoint(context, 10, 600, text, textlen)
    # Get the current text position. The text pen is at the trailing
    # point from the text just drawn.
    textPosition = CGContextGetTextPosition(context)
    # Set the text matrix to one that flips text in y and sets
    # the text position to the user space coordinate (0,0).
    t = CGAffineTransformMake(1, 0, 0, -1, 0, 0)
    CGContextSetTextMatrix(context, t)
    # Set the text position to the point where the previous text ended.
    CGContextSetTextPosition(context, textPosition.x, textPosition.y)
    # Draw the text at the current text position. It will be drawn
    # flipped in y, relative to the text drawn previously.
    CGContextShowText(context, text, textlen)
    # ---- Text Line 2 ----
    # Translate down for the next piece of text.
    CGContextTranslateCTM(context, 0, -(3*fontSize + extraLeading))
    CGContextSaveGState(context)
    if 1:
        # Change the text matrix to {1, 0, 0, 3, 0, 0}, which
        # scales text by a factor of 1 in x and 3 in y.
        # This scaling doesn't affect any drawing other than text
        # drawing since only text drawing is transformed by
        # the text matrix.
        t = CGAffineTransformMake(1, 0, 0, 3, 0, 0)
        CGContextSetTextMatrix(context, t)
        # This text is scaled relative to the previous text
        # because of the text matrix scaling.
        CGContextShowTextAtPoint(context, 10, 600, text, textlen)
    # This restores the graphics state to what it was at the time
    # of the last CGContextSaveGState, but since the text matrix
    # isn't part of the Quartz graphics state, it isn't affected.
    CGContextRestoreGState(context)
    # The text matrix isn't affected by CGContextSaveGState and
    # CGContextRestoreGState. You can see this by observing that
    # the next text piece appears immediately after the first piece
    # and with the same text scaling as that text drawn with the
    # text matrix established before we did CGContextRestoreGState.
    CGContextShowText(context, text, textlen)
    # ---- Text Line 3 ----
    # Translate down for the next piece of text.
    CGContextTranslateCTM(context, 0, -(fontSize + extraLeading))
    # Reset the text matrix to the identity matrix.
    CGContextSetTextMatrix(context, CGAffineTransformIdentity)
    # Now draw text in a flipped coordinate system.
    CGContextSaveGState(context)
    if 1:
        # Flip the coordinate system to mimic a coordinate system with the origin
        # at the top-left corner of a window. The new origin is at 600 units in
        # +y from the old origin and the y axis now increases with positive y
        # going down the window.
        CGContextConcatCTM(context, CGAffineTransformMake(1, 0, 0, -1, 0, 600))
        # This text will be flipped along with the CTM.
        CGContextShowTextAtPoint(context, 10, 10, text, textlen)
        # Obtain the user space coordinates of the current text position.
        textPosition = CGContextGetTextPosition(context)
        # Draw text at that point but flipped in y.
        showFlippedTextAtPoint(context, textPosition.x, textPosition.y, text, textlen)
    CGContextRestoreGState(context)
| {
"content_hash": "16573642f089c68a851130fbc8655c9b",
"timestamp": "",
"source": "github",
"line_count": 351,
"max_line_length": 86,
"avg_line_length": 39.655270655270655,
"alnum_prop": 0.6999066024858107,
"repo_name": "albertz/music-player",
"id": "2adf0fae702488265b0c573594bfa045e5b0515c",
"size": "13919",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mac/pyobjc-framework-Quartz/Examples/Programming with Quartz/BasicDrawing/QuartzTextDrawing.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "47481"
},
{
"name": "C",
"bytes": "435926"
},
{
"name": "C++",
"bytes": "149133"
},
{
"name": "CSS",
"bytes": "16435"
},
{
"name": "HTML",
"bytes": "914432"
},
{
"name": "JavaScript",
"bytes": "52869"
},
{
"name": "M",
"bytes": "10808"
},
{
"name": "Makefile",
"bytes": "13304"
},
{
"name": "Mathematica",
"bytes": "61418"
},
{
"name": "Objective-C",
"bytes": "2082720"
},
{
"name": "Objective-C++",
"bytes": "62427"
},
{
"name": "PostScript",
"bytes": "2783"
},
{
"name": "Prolog",
"bytes": "217"
},
{
"name": "Python",
"bytes": "7789845"
},
{
"name": "QMake",
"bytes": "9667"
},
{
"name": "Roff",
"bytes": "8329"
},
{
"name": "Shell",
"bytes": "3521"
}
],
"symlink_target": ""
} |
"""Support for the Netatmo cameras."""
import logging
import pyatmo
import requests
import voluptuous as vol
from homeassistant.components.camera import SUPPORT_STREAM, Camera
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
ATTR_CAMERA_LIGHT_MODE,
ATTR_PERSON,
ATTR_PERSONS,
ATTR_PSEUDO,
CAMERA_LIGHT_MODES,
DATA_CAMERAS,
DATA_EVENTS,
DATA_HANDLER,
DATA_PERSONS,
DOMAIN,
EVENT_TYPE_OFF,
EVENT_TYPE_ON,
MANUFACTURER,
MODELS,
SERVICE_SET_CAMERA_LIGHT,
SERVICE_SET_PERSON_AWAY,
SERVICE_SET_PERSONS_HOME,
SIGNAL_NAME,
)
from .data_handler import CAMERA_DATA_CLASS_NAME
from .netatmo_entity_base import NetatmoBase
_LOGGER = logging.getLogger(__name__)
DEFAULT_QUALITY = "high"
async def async_setup_entry(hass, entry, async_add_entities):
    """Set up the Netatmo camera platform."""
    if "access_camera" not in entry.data["token"]["scope"]:
        _LOGGER.info(
            "Cameras are currently not supported with this authentication method"
        )
        return

    data_handler = hass.data[DOMAIN][entry.entry_id][DATA_HANDLER]

    async def get_entities():
        """Retrieve Netatmo entities."""
        await data_handler.register_data_class(
            CAMERA_DATA_CLASS_NAME, CAMERA_DATA_CLASS_NAME, None
        )

        if not data_handler.data.get(CAMERA_DATA_CLASS_NAME):
            return []

        data_class = data_handler.data[CAMERA_DATA_CLASS_NAME]

        entities = []
        try:
            # Flatten the per-home camera mapping into one list of cameras.
            flat_cameras = [
                camera
                for home in data_class.cameras.values()
                for camera in home.values()
            ]
            for camera in flat_cameras:
                _LOGGER.debug("Adding camera %s %s", camera["id"], camera["name"])
                entities.append(
                    NetatmoCamera(
                        data_handler,
                        camera["id"],
                        camera["type"],
                        camera["home_id"],
                        DEFAULT_QUALITY,
                    )
                )

            # Cache known persons (id -> pseudo) for the camera services.
            persons = data_handler.data[CAMERA_DATA_CLASS_NAME].persons
            for person_id, person_data in persons.items():
                hass.data[DOMAIN][DATA_PERSONS][person_id] = person_data.get(
                    ATTR_PSEUDO
                )
        except pyatmo.NoDevice:
            _LOGGER.debug("No cameras found")

        return entities

    async_add_entities(await get_entities(), True)

    platform = entity_platform.current_platform.get()

    if data_handler.data[CAMERA_DATA_CLASS_NAME] is not None:
        # Register the camera-specific services on this platform.
        platform.async_register_entity_service(
            SERVICE_SET_PERSONS_HOME,
            {vol.Required(ATTR_PERSONS): vol.All(cv.ensure_list, [cv.string])},
            "_service_set_persons_home",
        )
        platform.async_register_entity_service(
            SERVICE_SET_PERSON_AWAY,
            {vol.Optional(ATTR_PERSON): cv.string},
            "_service_set_person_away",
        )
        platform.async_register_entity_service(
            SERVICE_SET_CAMERA_LIGHT,
            {vol.Required(ATTR_CAMERA_LIGHT_MODE): vol.In(CAMERA_LIGHT_MODES)},
            "_service_set_camera_light",
        )
class NetatmoCamera(NetatmoBase, Camera):
    """Representation of a Netatmo camera."""
    def __init__(
        self,
        data_handler,
        camera_id,
        camera_type,
        home_id,
        quality,
    ):
        """Set up for access to the Netatmo camera images."""
        Camera.__init__(self)
        super().__init__(data_handler)
        # Subscribe this entity to camera data-class updates.
        self._data_classes.append(
            {"name": CAMERA_DATA_CLASS_NAME, SIGNAL_NAME: CAMERA_DATA_CLASS_NAME}
        )
        self._id = camera_id
        self._home_id = home_id
        self._device_name = self._data.get_camera(camera_id=camera_id).get("name")
        self._name = f"{MANUFACTURER} {self._device_name}"
        self._model = camera_type
        self._unique_id = f"{self._id}-{self._model}"
        # Quality level used in snapshot/stream URL paths.
        self._quality = quality
        # All of the following are filled in by async_update_callback().
        self._vpnurl = None
        self._localurl = None
        self._status = None
        self._sd_status = None
        self._alim_status = None
        self._is_local = None
    async def async_added_to_hass(self) -> None:
        """Entity created."""
        await super().async_added_to_hass()
        # Listen for webhook on/off events so state updates are pushed.
        for event_type in (EVENT_TYPE_OFF, EVENT_TYPE_ON):
            self._listeners.append(
                async_dispatcher_connect(
                    self.hass,
                    f"signal-{DOMAIN}-webhook-{event_type}",
                    self.handle_event,
                )
            )
        self.hass.data[DOMAIN][DATA_CAMERAS][self._id] = self._device_name
    @callback
    def handle_event(self, event):
        """Handle webhook events."""
        data = event["data"]
        if not data.get("camera_id"):
            return
        # Only react to events addressed to this camera in this home.
        if data["home_id"] == self._home_id and data["camera_id"] == self._id:
            if data["push_type"] in ["NACamera-off", "NACamera-disconnection"]:
                self.is_streaming = False
                self._status = "off"
            elif data["push_type"] in ["NACamera-on", "NACamera-connection"]:
                self.is_streaming = True
                self._status = "on"
            self.async_write_ha_state()
            return
    def camera_image(self):
        """Return a still image response from the camera."""
        try:
            # Prefer the LAN URL; fall back to the VPN URL.
            if self._localurl:
                response = requests.get(
                    f"{self._localurl}/live/snapshot_720.jpg", timeout=10
                )
            elif self._vpnurl:
                response = requests.get(
                    f"{self._vpnurl}/live/snapshot_720.jpg",
                    timeout=10,
                    verify=True,
                )
            else:
                # No URL known yet - refresh URLs for the next attempt.
                _LOGGER.error("Welcome/Presence VPN URL is None")
                (self._vpnurl, self._localurl) = self._data.camera_urls(
                    camera_id=self._id
                )
                return None
        except requests.exceptions.RequestException as error:
            # The camera URLs may have changed - refresh and retry next poll.
            _LOGGER.info("Welcome/Presence URL changed: %s", error)
            self._data.update_camera_urls(camera_id=self._id)
            (self._vpnurl, self._localurl) = self._data.camera_urls(camera_id=self._id)
            return None
        return response.content
    @property
    def device_state_attributes(self):
        """Return the Netatmo-specific camera state attributes."""
        return {
            "id": self._id,
            "status": self._status,
            "sd_status": self._sd_status,
            "alim_status": self._alim_status,
            "is_local": self._is_local,
            "vpn_url": self._vpnurl,
            "local_url": self._localurl,
        }
    @property
    def available(self):
        """Return True if entity is available."""
        return bool(self._alim_status == "on" or self._status == "disconnected")
    @property
    def supported_features(self):
        """Return supported features."""
        return SUPPORT_STREAM
    @property
    def brand(self):
        """Return the camera brand."""
        return MANUFACTURER
    @property
    def motion_detection_enabled(self):
        """Return the camera motion detection status."""
        return bool(self._status == "on")
    @property
    def is_on(self):
        """Return true if on."""
        return self.is_streaming
    def turn_off(self):
        """Turn off camera."""
        self._data.set_state(
            home_id=self._home_id, camera_id=self._id, monitoring="off"
        )
    def turn_on(self):
        """Turn on camera."""
        self._data.set_state(home_id=self._home_id, camera_id=self._id, monitoring="on")
    async def stream_source(self):
        """Return the stream source."""
        url = "{0}/live/files/{1}/index.m3u8"
        if self._localurl:
            return url.format(self._localurl, self._quality)
        return url.format(self._vpnurl, self._quality)
    @property
    def model(self):
        """Return the camera model."""
        return MODELS[self._model]
    @callback
    def async_update_callback(self):
        """Update the entity's state."""
        camera = self._data.get_camera(self._id)
        self._vpnurl, self._localurl = self._data.camera_urls(self._id)
        self._status = camera.get("status")
        self._sd_status = camera.get("sd_status")
        self._alim_status = camera.get("alim_status")
        self._is_local = camera.get("is_local")
        self.is_streaming = bool(self._status == "on")
        # Publish the camera's event history, keyed by camera id.
        if self._model == "NACamera":  # Smart Indoor Camera
            self.hass.data[DOMAIN][DATA_EVENTS][self._id] = self.process_events(
                self._data.events.get(self._id, {})
            )
        elif self._model == "NOC":  # Smart Outdoor Camera
            self.hass.data[DOMAIN][DATA_EVENTS][self._id] = self.process_events(
                self._data.outdoor_events.get(self._id, {})
            )
    def process_events(self, events):
        """Add meta data to events."""
        for event in events.values():
            if "video_id" not in event:
                continue
            # Build a playable VOD URL for each recorded event.
            if self._is_local:
                event[
                    "media_url"
                ] = f"{self._localurl}/vod/{event['video_id']}/files/{self._quality}/index.m3u8"
            else:
                event[
                    "media_url"
                ] = f"{self._vpnurl}/vod/{event['video_id']}/files/{self._quality}/index.m3u8"
        return events
    def _service_set_persons_home(self, **kwargs):
        """Service to change current home schedule."""
        persons = kwargs.get(ATTR_PERSONS)
        person_ids = []
        # Map person display names (pseudo) to their internal ids.
        for person in persons:
            for pid, data in self._data.persons.items():
                if data.get("pseudo") == person:
                    person_ids.append(pid)
        self._data.set_persons_home(person_ids=person_ids, home_id=self._home_id)
        _LOGGER.debug("Set %s as at home", persons)
    def _service_set_person_away(self, **kwargs):
        """Service to mark a person as away or set the home as empty."""
        person = kwargs.get(ATTR_PERSON)
        person_id = None
        if person:
            for pid, data in self._data.persons.items():
                if data.get("pseudo") == person:
                    person_id = pid
        # A None person_id marks the whole home as empty.
        if person_id is not None:
            self._data.set_persons_away(
                person_id=person_id,
                home_id=self._home_id,
            )
            _LOGGER.debug("Set %s as away", person)
        else:
            self._data.set_persons_away(
                person_id=person_id,
                home_id=self._home_id,
            )
            _LOGGER.debug("Set home as empty")
    def _service_set_camera_light(self, **kwargs):
        """Service to set light mode."""
        mode = kwargs.get(ATTR_CAMERA_LIGHT_MODE)
        _LOGGER.debug("Turn camera '%s' %s", self._name, mode)
        self._data.set_state(
            home_id=self._home_id,
            camera_id=self._id,
            floodlight=mode,
        )
| {
"content_hash": "f24f54e0e1a3650fc8fd2d8bcc4351bc",
"timestamp": "",
"source": "github",
"line_count": 349,
"max_line_length": 96,
"avg_line_length": 32.67908309455587,
"alnum_prop": 0.5458132398071022,
"repo_name": "tchellomello/home-assistant",
"id": "dff6013c7c69892aef38ccfd6a6f3ead173accbd",
"size": "11405",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/netatmo/camera.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1488"
},
{
"name": "Python",
"bytes": "26713364"
},
{
"name": "Shell",
"bytes": "4528"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from copy import copy
import numpy as nm
from sfepy.base.base import output, get_default, OneTypeList, Struct, basestr
from sfepy.discrete import Equations, Variables, Region, Integral, Integrals
from sfepy.discrete.common.fields import setup_extra_data
import six
def apply_ebc_to_matrix(mtx, ebc_rows, epbc_rows=None):
    """
    Apply E(P)BC to matrix rows: put 1 to the diagonal for EBC DOFs, 1 to the
    diagonal for master EPBC DOFs, -1 to the [master, slave] entries. It is
    assumed, that the matrix contains zeros in EBC and master EPBC DOFs rows
    and columns.
    """
    data, prows, cols = mtx.data, mtx.indptr, mtx.indices

    # Overwrite the existing diagonal entries only - the sparsity pattern of
    # the CSR matrix stays unchanged.
    for row in ebc_rows:
        for ii in range(prows[row], prows[row + 1]):
            if cols[ii] == row:
                data[ii] = 1.0

    if epbc_rows is not None:
        import warnings
        from scipy.sparse import SparseEfficiencyWarning

        master, slave = epbc_rows
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', SparseEfficiencyWarning)
            # These assignments allocate new entries, i.e. change the sparsity
            # pattern in-place - the master DOFs are not allocated by
            # Equations.create_matrix_graph(), see create_adof_conns().
            mtx[master, master] = 1.0
            mtx[master, slave] = -1.0
##
# 02.10.2007, c
class Evaluator(Struct):
    """
    This class provides the functions required by a nonlinear solver for a
    given problem.
    """

    def __init__(self, problem, matrix_hook=None):
        """
        Parameters
        ----------
        problem : Problem instance
            The problem the residual/tangent matrix evaluations refer to.
        matrix_hook : callable, optional
            If given, it is applied to the assembled vectors/matrices.
        """
        Struct.__init__(self, problem=problem, matrix_hook=matrix_hook)

    @staticmethod
    def new_ulf_iteration(problem, nls, vec, it, err, err0):
        """
        Nonlinear solver iteration hook for the updated Lagrangian
        formulation: move the mesh nodes by the values of the variables
        listed in the ``'mesh_update_variables'`` problem option.
        """
        vec = problem.equations.make_full_vec(vec)
        problem.equations.set_state(vec)

        upd_vars = problem.conf.options.get('mesh_update_variables', None)
        # Robustness fix: without the option there is nothing to update -
        # iterating over None would raise TypeError.
        if upd_vars is None:
            return
        for varname in upd_vars:
            try:
                state = problem.equations.variables[varname]
            except IndexError:
                msg = 'variable "%s" does not exist!' % varname
                raise KeyError(msg)

            nods = state.field.get_dofs_in_region(state.field.region,
                                                  merge=True)
            coors = problem.domain.get_mesh_coors().copy()
            # Shift the mesh nodes by the current variable values.
            coors[nods, :] += state().reshape(len(nods), state.n_components)

            if len(state.field.mappings0) == 0:
                state.field.save_mappings()
            state.field.clear_mappings()
            problem.set_mesh_coors(coors, update_fields=False, actual=True,
                                   clear_all=False)

    def eval_residual(self, vec, is_full=False):
        """
        Evaluate the residual for the state `vec`. When the problem stores
        active DOFs only and `is_full` is False, `vec` is first extended by
        the boundary condition values.
        """
        if not is_full and self.problem.active_only:
            vec = self.make_full_vec(vec)
        vec_r = self.problem.equations.eval_residuals(vec)

        if self.matrix_hook is not None:
            vec_r = self.matrix_hook(vec_r, self.problem, call_mode='residual')

        if self.problem.equations.variables.has_lcbc:
            # Reduce the residual by the linear combination BC operator.
            mtx_lcbc = self.problem.equations.get_lcbc_operator()
            vec_rr = mtx_lcbc.T * vec_r
            if self.matrix_hook is not None:
                vec_rr = self.matrix_hook(vec_rr, self.problem,
                                          call_mode='lcbc_residual')
            vec_r = vec_rr

        return vec_r

    def eval_tangent_matrix(self, vec, mtx=None, is_full=False):
        """
        Evaluate the tangent matrix for the state `vec`. For linear problems
        (``vec == 'linear'``) the cached matrix is returned as is.
        """
        if isinstance(vec, basestr) and vec == 'linear':
            return get_default(mtx, self.problem.mtx_a)

        if not is_full and self.problem.active_only:
            vec = self.make_full_vec(vec)

        pb = self.problem
        if mtx is None:
            mtx = pb.mtx_a
        mtx = pb.equations.eval_tangent_matrices(vec, mtx)

        if not pb.active_only:
            # Regularize the E(P)BC rows so the full matrix is invertible.
            apply_ebc_to_matrix(mtx, *pb.get_ebc_indices())

        if self.matrix_hook is not None:
            mtx = self.matrix_hook(mtx, pb, call_mode='basic')

        if self.problem.equations.variables.has_lcbc:
            # Reduce the matrix by the linear combination BC operator.
            mtx_lcbc = self.problem.equations.get_lcbc_operator()
            mtx_r = mtx_lcbc.T * mtx * mtx_lcbc
            mtx_r = mtx_r.tocsr()
            mtx_r.sort_indices()
            if self.matrix_hook is not None:
                mtx_r = self.matrix_hook(mtx_r, self.problem, call_mode='lcbc')
            mtx = mtx_r

        return mtx

    def make_full_vec(self, vec):
        """Extend the reduced state vector by the E(P)BC values."""
        return self.problem.equations.make_full_vec(vec)
def create_evaluable(expression, fields, materials, variables, integrals,
                     regions=None,
                     ebcs=None, epbcs=None, lcbcs=None,
                     ts=None, functions=None,
                     auto_init=False, mode='eval', extra_args=None,
                     active_only=True, eterm_options=None, verbose=True,
                     kwargs=None):
    """
    Create evaluable object (equations and corresponding variables)
    from the `expression` string.

    Parameters
    ----------
    expression : str
        The expression to evaluate.
    fields : dict
        The dictionary of fields used in `variables`.
    materials : Materials instance
        The materials used in the expression.
    variables : Variables instance
        The variables used in the expression.
    integrals : Integrals instance
        The integrals to be used.
    regions : Region instance or list of Region instances
        The region(s) to be used. If not given, the regions defined
        within the fields domain are used.
    ebcs : Conditions instance, optional
        The essential (Dirichlet) boundary conditions for 'weak'
        mode.
    epbcs : Conditions instance, optional
        The periodic boundary conditions for 'weak'
        mode.
    lcbcs : Conditions instance, optional
        The linear combination boundary conditions for 'weak'
        mode.
    ts : TimeStepper instance, optional
        The time stepper.
    functions : Functions instance, optional
        The user functions for boundary conditions, materials
        etc.
    auto_init : bool
        Set values of all variables to all zeros.
    mode : one of 'eval', 'el_avg', 'qp', 'weak'
        The evaluation mode - 'weak' means the finite element
        assembling, 'qp' requests the values in quadrature points,
        'el_avg' element averages and 'eval' means integration over
        each term region.
    extra_args : dict, optional
        Extra arguments to be passed to terms in the expression.
    active_only : bool
        If True, in 'weak' mode, the (tangent) matrices and residual
        vectors (right-hand sides) contain only active DOFs.
    eterm_options : dict, optional
        The einsum-based terms evaluation options.
    verbose : bool
        If False, reduce verbosity.
    kwargs : dict, optional
        The variables (dictionary of (variable name) : (Variable
        instance)) to be used in the expression.

    Returns
    -------
    equation : Equation instance
        The equation that is ready to be evaluated.
    variables : Variables instance
        The variables used in the equation.
    """
    if kwargs is None:
        kwargs = {}
    # Normalize `regions` to a OneTypeList; default to the domain regions.
    if regions is not None:
        if isinstance(regions, Region):
            regions = [regions]
        regions = OneTypeList(Region, regions)
    else:
        regions = fields[list(fields.keys())[0]].domain.regions
    # Create temporary variables.
    aux_vars = Variables(variables)
    # Merge the user keyword arguments into the extra term arguments.
    if extra_args is None:
        extra_args = kwargs
    else:
        extra_args = copy(extra_args)
        extra_args.update(kwargs)
    if ts is not None:
        extra_args.update({'ts' : ts})
    # Parse the expression string into a one-equation Equations object.
    equations = Equations.from_conf({'tmp' : expression},
                                    aux_vars, regions, materials, integrals,
                                    user=extra_args,
                                    eterm_options=eterm_options,
                                    verbose=verbose)
    equations.collect_conn_info()
    # The true variables used in the expression.
    variables = equations.variables
    if auto_init:
        for var in variables:
            var.init_data(step=0)
    # 'weak' mode needs the full time update incl. boundary conditions; the
    # other modes only update the terms themselves.
    if mode == 'weak':
        equations.time_update(ts, ebcs, epbcs, lcbcs, functions,
                              active_only=active_only, verbose=verbose)
    else:
        for eq in equations:
            for term in eq.terms:
                term.time_update(ts)
        setup_extra_data(equations.conn_info)
    return equations, variables
def eval_equations(equations, variables, names=None, preserve_caches=False,
                   mode='eval', dw_mode='vector', term_mode=None,
                   active_only=True, verbose=True):
    """
    Evaluate the equations.

    Parameters
    ----------
    equations : Equations instance
        The equations returned by :func:`create_evaluable()`.
    variables : Variables instance
        The variables returned by :func:`create_evaluable()`.
    names : str or sequence of str, optional
        Evaluate only equations of the given name(s).
    preserve_caches : bool
        If True, do not invalidate evaluate caches of variables.
    mode : one of 'eval', 'el_avg', 'qp', 'weak'
        The evaluation mode - 'weak' means the finite element
        assembling, 'qp' requests the values in quadrature points,
        'el_avg' element averages and 'eval' means integration over
        each term region.
    dw_mode : 'vector' or 'matrix'
        The assembling mode for 'weak' evaluation mode.
    term_mode : str
        The term call mode - some terms support different call modes
        and depending on the call mode different values are
        returned.
    active_only : bool
        If True, in 'weak' mode, the (tangent) matrices and residual
        vectors (right-hand sides) contain only active DOFs.
    verbose : bool
        If False, reduce verbosity.

    Returns
    -------
    out : dict or result
        The evaluation result. In 'weak' mode it is the vector or sparse
        matrix, depending on `dw_mode`. Otherwise, it is a dict of results with
        equation names as keys or a single result for a single equation.
    """
    # In 'weak' mode an assembling object (vector or matrix graph) is needed.
    asm_obj = None
    if mode == 'weak':
        if dw_mode == 'vector':
            asm_obj = equations.create_reduced_vec()
        else:
            asm_obj = equations.create_matrix_graph(active_only=active_only,
                                                    verbose=verbose)
    if not preserve_caches:
        equations.invalidate_term_caches()
    out = equations.evaluate(names=names, mode=mode, dw_mode=dw_mode,
                             term_mode=term_mode, asm_obj=asm_obj)
    # Reduce the assembled result by the linear combination BC operator.
    if variables.has_lcbc and mode == 'weak':
        mtx_lcbc = variables.mtx_lcbc
        if dw_mode == 'vector':
            out = mtx_lcbc.T * out
        elif dw_mode == 'matrix':
            out = mtx_lcbc.T * out * mtx_lcbc
            out = out.tocsr()
            out.sort_indices()
    return out
def eval_in_els_and_qp(expression, iels, coors,
                       fields, materials, variables,
                       functions=None, mode='eval', term_mode=None,
                       extra_args=None, active_only=True, verbose=True,
                       kwargs=None):
    """
    Evaluate an expression in given elements and points.

    Parameters
    ----------
    expression : str
        The expression to evaluate.
    fields : dict
        The dictionary of fields used in `variables`.
    materials : Materials instance
        The materials used in the expression.
    variables : Variables instance
        The variables used in the expression.
    functions : Functions instance, optional
        The user functions for materials etc.
    mode : one of 'eval', 'el_avg', 'qp'
        The evaluation mode - 'qp' requests the values in quadrature points,
        'el_avg' element averages and 'eval' means integration over
        each term region.
    term_mode : str
        The term call mode - some terms support different call modes
        and depending on the call mode different values are
        returned.
    extra_args : dict, optional
        Extra arguments to be passed to terms in the expression.
    active_only : bool
        If True, in 'weak' mode, the (tangent) matrices and residual
        vectors (right-hand sides) contain only active DOFs.
    verbose : bool
        If False, reduce verbosity.
    kwargs : dict, optional
        The variables (dictionary of (variable name) : (Variable
        instance)) to be used in the expression.

    Returns
    -------
    out : array
        The result of the evaluation.
    """
    # Build a custom integral with the given evaluation points (unit weights).
    weights = nm.ones_like(coors[:, 0])
    integral = Integral('ie', coors=coors, weights=weights)
    # Temporarily register a region holding just the requested elements.
    domain = list(fields.values())[0].domain
    region = Region('Elements', 'given elements', domain, '')
    region.cells = iels
    region.update_shape()
    domain.regions.append(region)
    # Force re-evaluation of mappings/bases for the custom integral.
    for field in six.itervalues(fields):
        field.clear_mappings(clear_all=True)
        field.clear_qp_base()
    aux = create_evaluable(expression, fields, materials,
                           variables.itervalues(), Integrals([integral]),
                           functions=functions, mode=mode,
                           extra_args=extra_args, active_only=active_only,
                           verbose=verbose, kwargs=kwargs)
    equations, variables = aux
    out = eval_equations(equations, variables,
                         preserve_caches=False,
                         mode=mode, term_mode=term_mode,
                         active_only=active_only)
    # Remove the temporary region registered above.
    domain.regions.pop()
    return out
def assemble_by_blocks(conf_equations, problem, ebcs=None, epbcs=None,
                       dw_mode='matrix', active_only=True):
    """Instead of a global matrix, return its building blocks as defined in
    `conf_equations`. The name and row/column variables of each block have to
    be encoded in the equation's name, as in::

        conf_equations = {
          'A,v,u' : "dw_lin_elastic.i1.Y2( inclusion.D, v, u )",
        }

    Notes
    -----
    `ebcs`, `epbcs` must be either lists of BC names, or BC configuration
    dictionaries.
    """
    # Decide how the boundary conditions were passed in.
    if isinstance(ebcs, list) and isinstance(epbcs, list):
        bc_mode = 0
    elif isinstance(ebcs, dict) and isinstance(epbcs, dict):
        bc_mode = 1
    else:
        raise TypeError('bad BC!')

    matrices = {}
    for key, mtx_term in six.iteritems(conf_equations):
        # 'name,row_var,col_var' -> block name + variable names.
        parts = key.split(',')
        mtx_name, var_names = parts[0], parts[1:]
        output(mtx_name, var_names)

        problem.set_equations({'eq': mtx_term})
        variables = problem.get_variables()
        indx = variables.get_indx

        if bc_mode == 0:
            problem.select_bcs(ebc_names=ebcs, epbc_names=epbcs)
        else:
            problem.time_update(ebcs=ebcs, epbcs=epbcs)

        ir = indx(var_names[0], reduced=True, allow_dual=True)
        ic = indx(var_names[1], reduced=True, allow_dual=True)

        problem.update_materials()
        mtx = problem.evaluate(mtx_term, auto_init=True,
                               mode='weak', dw_mode='matrix',
                               copy_materials=False, active_only=active_only)
        # Keep only the (row variable, column variable) sub-block.
        matrices[mtx_name] = mtx[ir, ic]

    return matrices
| {
"content_hash": "ffafedac29c5bfcf09ca506db6c061a4",
"timestamp": "",
"source": "github",
"line_count": 434,
"max_line_length": 83,
"avg_line_length": 35.345622119815665,
"alnum_prop": 0.5990873533246415,
"repo_name": "sfepy/sfepy",
"id": "54591c78535091f8ac77098644ae8adf4e74efbe",
"size": "15340",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "sfepy/discrete/evaluate.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "471175"
},
{
"name": "GLSL",
"bytes": "8269"
},
{
"name": "MATLAB",
"bytes": "1918"
},
{
"name": "Makefile",
"bytes": "489"
},
{
"name": "PowerShell",
"bytes": "3121"
},
{
"name": "Python",
"bytes": "3553188"
}
],
"symlink_target": ""
} |
import flask
import auth
import config
import model
import util
from main import app
# OAuth2 endpoints and credentials for the Bitbucket provider.
bitbucket_config = {
    'access_token_method': 'POST',
    'access_token_url': 'https://bitbucket.org/site/oauth2/access_token',
    'authorize_url': 'https://bitbucket.org/site/oauth2/authorize',
    'base_url': 'https://api.bitbucket.org/2.0/',
    'consumer_key': config.CONFIG_DB.bitbucket_key,
    'consumer_secret': config.CONFIG_DB.bitbucket_secret,
}

bitbucket = auth.create_oauth_app(bitbucket_config, 'bitbucket')
@app.route('/api/auth/callback/bitbucket/')
def bitbucket_authorized():
    """Handle the OAuth callback from Bitbucket and sign the user in."""
    oauth_response = bitbucket.authorized_response()
    if oauth_response is None:
        # The user rejected the authorization request.
        flask.flash('You denied the request to sign in.')
        return flask.redirect(util.get_next_url())

    flask.session['oauth_token'] = (oauth_response['access_token'], '')
    profile = bitbucket.get('user')
    return auth.signin_user_db(retrieve_user_from_bitbucket(profile.data))
@bitbucket.tokengetter
def get_bitbucket_oauth_token():
    # Used by the OAuth client to retrieve the token stored at sign-in time.
    return flask.session.get('oauth_token')
@app.route('/signin/bitbucket/')
def signin_bitbucket():
    # Redirect the user to Bitbucket's OAuth authorization page.
    return auth.signin_oauth(bitbucket)
def retrieve_user_from_bitbucket(response):
    """Return the user matching the Bitbucket profile, creating one if needed."""
    auth_id = 'bitbucket_%s' % response['username']
    existing = model.User.get_by('auth_ids', auth_id)
    if existing:
        return existing

    # Pick the primary address; empty string when none is marked primary.
    addresses = bitbucket.get('user/emails').data['values']
    primary = [entry['email'] for entry in addresses if entry['is_primary']]
    email = primary[0] if primary else ''

    return auth.create_user_db(
        auth_id=auth_id,
        name=response['display_name'],
        username=response['username'],
        email=email,
        verified=bool(email),
    )
| {
"content_hash": "29406f75cdc631c34446127fcb9ecf5e",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 71,
"avg_line_length": 27.32758620689655,
"alnum_prop": 0.7097791798107256,
"repo_name": "jakedotio/gae-init",
"id": "3b25019bf5bbc04203bd2518aa289c2719a62d57",
"size": "1602",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "main/auth/bitbucket.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5385"
},
{
"name": "CoffeeScript",
"bytes": "16318"
},
{
"name": "HTML",
"bytes": "68768"
},
{
"name": "JavaScript",
"bytes": "65"
},
{
"name": "Python",
"bytes": "119296"
},
{
"name": "Shell",
"bytes": "1082"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
    # Auto-generated initial migration creating the custom User model with a
    # UUID primary key and unique username/email fields.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=30, unique=True, verbose_name='username')),
                ('email', models.EmailField(max_length=254, unique=True, verbose_name='Email address')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| {
"content_hash": "de769935be4819c0797326b7c71197c8",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 255,
"avg_line_length": 37.57142857142857,
"alnum_prop": 0.5950570342205324,
"repo_name": "hchen1202/django-react",
"id": "caef2772b17e8b2155b83c2d4d82b0fa9e439ea3",
"size": "1125",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/accounts/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "86034"
},
{
"name": "HTML",
"bytes": "139252"
},
{
"name": "JavaScript",
"bytes": "183314"
},
{
"name": "Python",
"bytes": "7188635"
},
{
"name": "Shell",
"bytes": "3250"
}
],
"symlink_target": ""
} |
from django.apps import AppConfig
class DonationsConfig(AppConfig):
    """Django application configuration for the donations app."""
    name = 'donations'
    verbose_name = "Django Donations"
| {
"content_hash": "03ba6a60656a569bf615e78bfb02374a",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 37,
"avg_line_length": 21.833333333333332,
"alnum_prop": 0.7404580152671756,
"repo_name": "founders4schools/django-donations",
"id": "9f9ec074a2e6ab44ee0e48507c27221fb0b2fde6",
"size": "131",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "donations/apps.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "317"
},
{
"name": "Python",
"bytes": "52244"
}
],
"symlink_target": ""
} |
__author__ = 'efouh'
from docutils import nodes
from docutils.parsers.rst import directives
from docutils.parsers.rst import Directive
import random
import os, sys
import re
sys.path.append(os.path.abspath('./source'))
import conf
def setup(app):
    # Sphinx extension entry point: register the odsalink directive.
    app.add_directive('odsalink',odsalink)
CODE = """\
<link href="%(address)s" rel="stylesheet" type="text/css" />
"""
class odsalink(Directive):
    # Directive configuration: exactly one argument (the CSS file path),
    # whitespace allowed in the final argument, no options.
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = True
    option_spec = {}
    def run(self):
        """ Restructured text extension for including CSS and other libraries """
        # Resolve the stylesheet path relative to the ebook output directory,
        # normalizing Windows path separators for the HTML link.
        self.options['address'] = os.path.relpath(conf.odsa_path,conf.ebook_path).replace('\\', '/') + '/' + self.arguments[0]
        res = CODE % self.options
        return [nodes.raw('', res, format='html')]
source = """\
This is some text.
.. odsalink:: address
This is some more text.
"""
if __name__ == '__main__':
from docutils.core import publish_parts
directives.register_directive('odsalink',odsalink)
doc_parts = publish_parts(source,
settings_overrides={'output_encoding': 'utf8',
'initial_header_level': 2},
writer_name="html")
print doc_parts['html_body']
| {
"content_hash": "bfe9315f99456fe5a8a5e0545cc98548",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 126,
"avg_line_length": 23.09259259259259,
"alnum_prop": 0.6471531676022454,
"repo_name": "RJFreund/OpenDSA",
"id": "5da27bcdf0da9b45a67000decfcfc94104cdf718",
"size": "1762",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "RST/ODSAextensions/odsa/odsalink/odsalink.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1946"
},
{
"name": "C++",
"bytes": "47021"
},
{
"name": "CSS",
"bytes": "269359"
},
{
"name": "Erlang",
"bytes": "2168"
},
{
"name": "Groff",
"bytes": "3834"
},
{
"name": "HTML",
"bytes": "2780057"
},
{
"name": "Java",
"bytes": "167818"
},
{
"name": "JavaScript",
"bytes": "6545823"
},
{
"name": "Makefile",
"bytes": "69209"
},
{
"name": "PHP",
"bytes": "309"
},
{
"name": "Processing",
"bytes": "158385"
},
{
"name": "Python",
"bytes": "89397"
},
{
"name": "Shell",
"bytes": "2725"
},
{
"name": "TeX",
"bytes": "80510"
}
],
"symlink_target": ""
} |
from PyQt4 import QtCore, QtGui
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # PyQt API v2 / Python 3: QString is gone, str is already unicode.
    def _fromUtf8(s):
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer PyQt: translate() no longer takes an encoding argument.
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_Form(object):
    """UI definition (pyuic4 setupUi/retranslateUi convention) for a
    3-axis stage control panel.

    The form contains:
      * a "Position" group with X/Y/Z readout labels, an Update button,
        a joystick button and fine/coarse step radio buttons;
      * a "Limits" group with per-axis min/max spin boxes (mm suffix),
        enable checkboxes, "get Current" buttons and a max-speed spin box.

    NOTE(review): this looks machine-generated -- prefer regenerating from
    the .ui file over hand edits.
    """
    def setupUi(self, Form):
        """Create all child widgets and layouts on *Form*; signal wiring is
        limited to QMetaObject.connectSlotsByName at the end."""
        Form.setObjectName(_fromUtf8("Form"))
        Form.resize(384, 221)
        self.gridLayout_4 = QtGui.QGridLayout(Form)
        self.gridLayout_4.setMargin(3)
        self.gridLayout_4.setSpacing(3)
        self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
        # --- "Position" group: X/Y/Z readout grid + joystick + step radios ---
        self.groupBox_2 = QtGui.QGroupBox(Form)
        self.groupBox_2.setAlignment(QtCore.Qt.AlignCenter)
        self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
        self.gridLayout_3 = QtGui.QGridLayout(self.groupBox_2)
        self.gridLayout_3.setMargin(5)
        self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
        self.widget = QtGui.QWidget(self.groupBox_2)
        self.widget.setMinimumSize(QtCore.QSize(100, 0))
        self.widget.setObjectName(_fromUtf8("widget"))
        self.gridLayout = QtGui.QGridLayout(self.widget)
        self.gridLayout.setMargin(0)
        self.gridLayout.setHorizontalSpacing(10)
        self.gridLayout.setVerticalSpacing(0)
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        # Bold "X" caption + live position label.
        self.label_5 = QtGui.QLabel(self.widget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label_5.sizePolicy().hasHeightForWidth())
        self.label_5.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.label_5.setFont(font)
        self.label_5.setObjectName(_fromUtf8("label_5"))
        self.gridLayout.addWidget(self.label_5, 0, 0, 1, 1)
        self.xPosLabel = QtGui.QLabel(self.widget)
        self.xPosLabel.setObjectName(_fromUtf8("xPosLabel"))
        self.gridLayout.addWidget(self.xPosLabel, 0, 1, 1, 1)
        # Bold "Y" caption + live position label.
        self.label_7 = QtGui.QLabel(self.widget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label_7.sizePolicy().hasHeightForWidth())
        self.label_7.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.label_7.setFont(font)
        self.label_7.setObjectName(_fromUtf8("label_7"))
        self.gridLayout.addWidget(self.label_7, 1, 0, 1, 1)
        self.yPosLabel = QtGui.QLabel(self.widget)
        self.yPosLabel.setObjectName(_fromUtf8("yPosLabel"))
        self.gridLayout.addWidget(self.yPosLabel, 1, 1, 1, 1)
        # Bold "Z" caption + live position label.
        self.label_8 = QtGui.QLabel(self.widget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label_8.sizePolicy().hasHeightForWidth())
        self.label_8.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.label_8.setFont(font)
        self.label_8.setObjectName(_fromUtf8("label_8"))
        self.gridLayout.addWidget(self.label_8, 2, 0, 1, 1)
        self.zPosLabel = QtGui.QLabel(self.widget)
        self.zPosLabel.setObjectName(_fromUtf8("zPosLabel"))
        self.gridLayout.addWidget(self.zPosLabel, 2, 1, 1, 1)
        self.updatePosBtn = QtGui.QPushButton(self.widget)
        self.updatePosBtn.setObjectName(_fromUtf8("updatePosBtn"))
        self.gridLayout.addWidget(self.updatePosBtn, 3, 0, 1, 2)
        self.gridLayout_3.addWidget(self.widget, 0, 0, 1, 1)
        # JoystickButton is provided by the module-bottom acq4.pyqtgraph
        # import (pyuic's custom-widget convention).
        self.joyBtn = JoystickButton(self.groupBox_2)
        self.joyBtn.setMinimumSize(QtCore.QSize(50, 50))
        self.joyBtn.setText(_fromUtf8(""))
        self.joyBtn.setObjectName(_fromUtf8("joyBtn"))
        self.gridLayout_3.addWidget(self.joyBtn, 0, 2, 1, 1)
        # Fine/coarse step radios (fine is the default).
        self.horizontalLayout_2 = QtGui.QHBoxLayout()
        self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
        self.fineStepRadio = QtGui.QRadioButton(self.groupBox_2)
        self.fineStepRadio.setChecked(True)
        self.fineStepRadio.setObjectName(_fromUtf8("fineStepRadio"))
        self.horizontalLayout_2.addWidget(self.fineStepRadio)
        self.coarseStepRadio = QtGui.QRadioButton(self.groupBox_2)
        self.coarseStepRadio.setObjectName(_fromUtf8("coarseStepRadio"))
        self.horizontalLayout_2.addWidget(self.coarseStepRadio)
        self.gridLayout_3.addLayout(self.horizontalLayout_2, 1, 0, 1, 3)
        spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_3.addItem(spacerItem, 0, 1, 1, 1)
        self.gridLayout_4.addWidget(self.groupBox_2, 0, 0, 1, 1)
        # --- "Limits" group: per-axis min/max controls + max speed ---
        self.limitsGroup = QtGui.QGroupBox(Form)
        self.limitsGroup.setAlignment(QtCore.Qt.AlignCenter)
        self.limitsGroup.setCheckable(False)
        self.limitsGroup.setObjectName(_fromUtf8("limitsGroup"))
        self.gridLayout_2 = QtGui.QGridLayout(self.limitsGroup)
        self.gridLayout_2.setSpacing(1)
        self.gridLayout_2.setContentsMargins(3, 0, 3, 0)
        self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
        # Row 0: X axis (min button/label, max label/button).
        self.xMinBtn = QtGui.QPushButton(self.limitsGroup)
        self.xMinBtn.setMaximumSize(QtCore.QSize(70, 20))
        self.xMinBtn.setObjectName(_fromUtf8("xMinBtn"))
        self.gridLayout_2.addWidget(self.xMinBtn, 0, 3, 1, 1)
        self.xMinLabel = QtGui.QLabel(self.limitsGroup)
        self.xMinLabel.setObjectName(_fromUtf8("xMinLabel"))
        self.gridLayout_2.addWidget(self.xMinLabel, 0, 4, 1, 1)
        self.xMaxLabel = QtGui.QLabel(self.limitsGroup)
        self.xMaxLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.xMaxLabel.setObjectName(_fromUtf8("xMaxLabel"))
        self.gridLayout_2.addWidget(self.xMaxLabel, 0, 6, 1, 1)
        self.xMaxBtn = QtGui.QPushButton(self.limitsGroup)
        self.xMaxBtn.setMaximumSize(QtCore.QSize(70, 20))
        self.xMaxBtn.setObjectName(_fromUtf8("xMaxBtn"))
        self.gridLayout_2.addWidget(self.xMaxBtn, 0, 7, 1, 1)
        # Row 1: Y axis.
        self.yMinBtn = QtGui.QPushButton(self.limitsGroup)
        self.yMinBtn.setMaximumSize(QtCore.QSize(70, 20))
        self.yMinBtn.setObjectName(_fromUtf8("yMinBtn"))
        self.gridLayout_2.addWidget(self.yMinBtn, 1, 3, 1, 1)
        self.yMinLabel = QtGui.QLabel(self.limitsGroup)
        self.yMinLabel.setObjectName(_fromUtf8("yMinLabel"))
        self.gridLayout_2.addWidget(self.yMinLabel, 1, 4, 1, 1)
        self.label_3 = QtGui.QLabel(self.limitsGroup)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label_3.sizePolicy().hasHeightForWidth())
        self.label_3.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.label_3.setFont(font)
        self.label_3.setObjectName(_fromUtf8("label_3"))
        self.gridLayout_2.addWidget(self.label_3, 1, 5, 1, 1)
        self.yMaxLabel = QtGui.QLabel(self.limitsGroup)
        self.yMaxLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.yMaxLabel.setObjectName(_fromUtf8("yMaxLabel"))
        self.gridLayout_2.addWidget(self.yMaxLabel, 1, 6, 1, 1)
        self.yMaxBtn = QtGui.QPushButton(self.limitsGroup)
        self.yMaxBtn.setMaximumSize(QtCore.QSize(70, 20))
        self.yMaxBtn.setObjectName(_fromUtf8("yMaxBtn"))
        self.gridLayout_2.addWidget(self.yMaxBtn, 1, 7, 1, 1)
        # Row 2: Z axis.
        self.zMinBtn = QtGui.QPushButton(self.limitsGroup)
        self.zMinBtn.setMaximumSize(QtCore.QSize(70, 20))
        self.zMinBtn.setObjectName(_fromUtf8("zMinBtn"))
        self.gridLayout_2.addWidget(self.zMinBtn, 2, 3, 1, 1)
        self.zMinLabel = QtGui.QLabel(self.limitsGroup)
        self.zMinLabel.setObjectName(_fromUtf8("zMinLabel"))
        self.gridLayout_2.addWidget(self.zMinLabel, 2, 4, 1, 1)
        self.label = QtGui.QLabel(self.limitsGroup)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label.sizePolicy().hasHeightForWidth())
        self.label.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.label.setFont(font)
        self.label.setObjectName(_fromUtf8("label"))
        self.gridLayout_2.addWidget(self.label, 2, 5, 1, 1)
        self.zMaxLabel = QtGui.QLabel(self.limitsGroup)
        self.zMaxLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.zMaxLabel.setObjectName(_fromUtf8("zMaxLabel"))
        self.gridLayout_2.addWidget(self.zMaxLabel, 2, 6, 1, 1)
        self.zMaxBtn = QtGui.QPushButton(self.limitsGroup)
        self.zMaxBtn.setMaximumSize(QtCore.QSize(70, 20))
        self.zMaxBtn.setObjectName(_fromUtf8("zMaxBtn"))
        self.gridLayout_2.addWidget(self.zMaxBtn, 2, 7, 1, 1)
        # Row 3: max-speed spin box (SpinBox from acq4.pyqtgraph).
        self.horizontalLayout_3 = QtGui.QHBoxLayout()
        self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
        self.label_4 = QtGui.QLabel(self.limitsGroup)
        self.label_4.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_4.setObjectName(_fromUtf8("label_4"))
        self.horizontalLayout_3.addWidget(self.label_4)
        self.maxSpeedSpin = SpinBox(self.limitsGroup)
        self.maxSpeedSpin.setObjectName(_fromUtf8("maxSpeedSpin"))
        self.horizontalLayout_3.addWidget(self.maxSpeedSpin)
        self.gridLayout_2.addLayout(self.horizontalLayout_3, 3, 3, 1, 5)
        # Per-axis limit-enable checkboxes (min in column 0, max in column 9).
        self.xMinCheck = QtGui.QCheckBox(self.limitsGroup)
        self.xMinCheck.setText(_fromUtf8(""))
        self.xMinCheck.setObjectName(_fromUtf8("xMinCheck"))
        self.gridLayout_2.addWidget(self.xMinCheck, 0, 0, 1, 1)
        self.yMinCheck = QtGui.QCheckBox(self.limitsGroup)
        self.yMinCheck.setText(_fromUtf8(""))
        self.yMinCheck.setObjectName(_fromUtf8("yMinCheck"))
        self.gridLayout_2.addWidget(self.yMinCheck, 1, 0, 1, 1)
        self.zMinCheck = QtGui.QCheckBox(self.limitsGroup)
        self.zMinCheck.setText(_fromUtf8(""))
        self.zMinCheck.setObjectName(_fromUtf8("zMinCheck"))
        self.gridLayout_2.addWidget(self.zMinCheck, 2, 0, 1, 1)
        self.xMaxCheck = QtGui.QCheckBox(self.limitsGroup)
        self.xMaxCheck.setText(_fromUtf8(""))
        self.xMaxCheck.setObjectName(_fromUtf8("xMaxCheck"))
        self.gridLayout_2.addWidget(self.xMaxCheck, 0, 9, 1, 1)
        self.yMaxCheck = QtGui.QCheckBox(self.limitsGroup)
        self.yMaxCheck.setText(_fromUtf8(""))
        self.yMaxCheck.setObjectName(_fromUtf8("yMaxCheck"))
        self.gridLayout_2.addWidget(self.yMaxCheck, 1, 9, 1, 1)
        self.zMaxCheck = QtGui.QCheckBox(self.limitsGroup)
        self.zMaxCheck.setText(_fromUtf8(""))
        self.zMaxCheck.setObjectName(_fromUtf8("zMaxCheck"))
        self.gridLayout_2.addWidget(self.zMaxCheck, 2, 9, 1, 1)
        self.label_2 = QtGui.QLabel(self.limitsGroup)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label_2.sizePolicy().hasHeightForWidth())
        self.label_2.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.label_2.setFont(font)
        self.label_2.setObjectName(_fromUtf8("label_2"))
        self.gridLayout_2.addWidget(self.label_2, 0, 5, 1, 1)
        # Min/max spin boxes: 3 decimals, +/-25000 range, 0.1 step, mm suffix.
        self.xMinSpin = SpinBox(self.limitsGroup)
        self.xMinSpin.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.xMinSpin.setDecimals(3)
        self.xMinSpin.setMinimum(-25000.0)
        self.xMinSpin.setMaximum(25000.0)
        self.xMinSpin.setSingleStep(0.1)
        self.xMinSpin.setObjectName(_fromUtf8("xMinSpin"))
        self.gridLayout_2.addWidget(self.xMinSpin, 0, 2, 1, 1)
        self.yMinSpin = SpinBox(self.limitsGroup)
        self.yMinSpin.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.yMinSpin.setDecimals(3)
        self.yMinSpin.setMinimum(-25000.0)
        self.yMinSpin.setMaximum(25000.0)
        self.yMinSpin.setSingleStep(0.1)
        self.yMinSpin.setObjectName(_fromUtf8("yMinSpin"))
        self.gridLayout_2.addWidget(self.yMinSpin, 1, 2, 1, 1)
        self.zMinSpin = SpinBox(self.limitsGroup)
        self.zMinSpin.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.zMinSpin.setDecimals(3)
        self.zMinSpin.setMinimum(-25000.0)
        self.zMinSpin.setMaximum(25000.0)
        self.zMinSpin.setSingleStep(0.1)
        self.zMinSpin.setObjectName(_fromUtf8("zMinSpin"))
        self.gridLayout_2.addWidget(self.zMinSpin, 2, 2, 1, 1)
        self.xMaxSpin = SpinBox(self.limitsGroup)
        self.xMaxSpin.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.xMaxSpin.setDecimals(3)
        self.xMaxSpin.setMinimum(-25000.0)
        self.xMaxSpin.setMaximum(25000.0)
        self.xMaxSpin.setSingleStep(0.1)
        self.xMaxSpin.setObjectName(_fromUtf8("xMaxSpin"))
        self.gridLayout_2.addWidget(self.xMaxSpin, 0, 8, 1, 1)
        self.yMaxSpin = SpinBox(self.limitsGroup)
        self.yMaxSpin.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.yMaxSpin.setDecimals(3)
        self.yMaxSpin.setMinimum(-25000.0)
        self.yMaxSpin.setMaximum(25000.0)
        self.yMaxSpin.setSingleStep(0.1)
        self.yMaxSpin.setObjectName(_fromUtf8("yMaxSpin"))
        self.gridLayout_2.addWidget(self.yMaxSpin, 1, 8, 1, 1)
        self.zMaxSpin = SpinBox(self.limitsGroup)
        self.zMaxSpin.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.zMaxSpin.setDecimals(3)
        self.zMaxSpin.setMinimum(-25000.0)
        self.zMaxSpin.setMaximum(25000.0)
        self.zMaxSpin.setSingleStep(0.1)
        self.zMaxSpin.setObjectName(_fromUtf8("zMaxSpin"))
        self.gridLayout_2.addWidget(self.zMaxSpin, 2, 8, 1, 1)
        self.gridLayout_4.addWidget(self.limitsGroup, 1, 0, 1, 1)
        spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_4.addItem(spacerItem1, 1, 1, 1, 1)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)
    def retranslateUi(self, Form):
        """Assign all user-visible strings, routed through Qt translation."""
        Form.setWindowTitle(_translate("Form", "Form", None))
        self.groupBox_2.setTitle(_translate("Form", "Position", None))
        self.label_5.setText(_translate("Form", "X", None))
        self.xPosLabel.setText(_translate("Form", "0", None))
        self.label_7.setText(_translate("Form", "Y", None))
        self.yPosLabel.setText(_translate("Form", "0", None))
        self.label_8.setText(_translate("Form", "Z", None))
        self.zPosLabel.setText(_translate("Form", "0", None))
        self.updatePosBtn.setText(_translate("Form", "Update", None))
        self.fineStepRadio.setText(_translate("Form", "Fine step", None))
        self.coarseStepRadio.setText(_translate("Form", "Coarse step", None))
        self.limitsGroup.setTitle(_translate("Form", "Limits", None))
        self.xMinBtn.setText(_translate("Form", "get Current", None))
        self.xMinLabel.setText(_translate("Form", "<--", None))
        self.xMaxLabel.setText(_translate("Form", "-->", None))
        self.xMaxBtn.setText(_translate("Form", "get Current", None))
        self.yMinBtn.setText(_translate("Form", "get Current", None))
        self.yMinLabel.setText(_translate("Form", "<--", None))
        self.label_3.setText(_translate("Form", "Y", None))
        self.yMaxLabel.setText(_translate("Form", "-->", None))
        self.yMaxBtn.setText(_translate("Form", "get Current", None))
        self.zMinBtn.setText(_translate("Form", "get Current", None))
        self.zMinLabel.setText(_translate("Form", "<--", None))
        self.label.setText(_translate("Form", "Z", None))
        self.zMaxLabel.setText(_translate("Form", "-->", None))
        self.zMaxBtn.setText(_translate("Form", "get Current", None))
        self.label_4.setText(_translate("Form", "Max Speed", None))
        self.label_2.setText(_translate("Form", "X", None))
        self.xMinSpin.setSuffix(_translate("Form", " mm", None))
        self.yMinSpin.setSuffix(_translate("Form", " mm", None))
        self.zMinSpin.setSuffix(_translate("Form", " mm", None))
        self.xMaxSpin.setSuffix(_translate("Form", " mm", None))
        self.yMaxSpin.setSuffix(_translate("Form", " mm", None))
        self.zMaxSpin.setSuffix(_translate("Form", " mm", None))
from acq4.pyqtgraph import SpinBox, JoystickButton
| {
"content_hash": "af5e28940c8b50b72f77d1c5740c52dc",
"timestamp": "",
"source": "github",
"line_count": 320,
"max_line_length": 104,
"avg_line_length": 55.003125,
"alnum_prop": 0.679620476109312,
"repo_name": "hiuwo/acq4",
"id": "c6a0b2b5f897d2587cdac239750dec6bf508306a",
"size": "17859",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "acq4/devices/SutterMP285/devTemplate.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "18652"
},
{
"name": "C",
"bytes": "1051646"
},
{
"name": "C++",
"bytes": "636100"
},
{
"name": "CSS",
"bytes": "716"
},
{
"name": "Matlab",
"bytes": "1752"
},
{
"name": "Processing",
"bytes": "13403"
},
{
"name": "Python",
"bytes": "4925976"
},
{
"name": "Shell",
"bytes": "64"
}
],
"symlink_target": ""
} |
from . import AWSObject, AWSProperty, PropsDictType, Tags
from .validators import boolean, double, integer
from .validators.iot import policytypes, validate_json_checker
class AuditCheckConfiguration(AWSProperty):
    """
    `AuditCheckConfiguration <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-accountauditconfiguration-auditcheckconfiguration.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "Enabled": (boolean, False),
    }
class AuditCheckConfigurations(AWSProperty):
    """
    `AuditCheckConfigurations <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-accountauditconfiguration-auditcheckconfigurations.html>`__
    """
    # One optional AuditCheckConfiguration per supported Device Defender
    # audit check; property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "AuthenticatedCognitoRoleOverlyPermissiveCheck": (
            AuditCheckConfiguration,
            False,
        ),
        "CaCertificateExpiringCheck": (AuditCheckConfiguration, False),
        "CaCertificateKeyQualityCheck": (AuditCheckConfiguration, False),
        "ConflictingClientIdsCheck": (AuditCheckConfiguration, False),
        "DeviceCertificateExpiringCheck": (AuditCheckConfiguration, False),
        "DeviceCertificateKeyQualityCheck": (AuditCheckConfiguration, False),
        "DeviceCertificateSharedCheck": (AuditCheckConfiguration, False),
        "IntermediateCaRevokedForActiveDeviceCertificatesCheck": (
            AuditCheckConfiguration,
            False,
        ),
        "IotPolicyOverlyPermissiveCheck": (AuditCheckConfiguration, False),
        "IotRoleAliasAllowsAccessToUnusedServicesCheck": (
            AuditCheckConfiguration,
            False,
        ),
        "IotRoleAliasOverlyPermissiveCheck": (AuditCheckConfiguration, False),
        "LoggingDisabledCheck": (AuditCheckConfiguration, False),
        "RevokedCaCertificateStillActiveCheck": (AuditCheckConfiguration, False),
        "RevokedDeviceCertificateStillActiveCheck": (AuditCheckConfiguration, False),
        "UnauthenticatedCognitoRoleOverlyPermissiveCheck": (
            AuditCheckConfiguration,
            False,
        ),
    }
class AuditNotificationTarget(AWSProperty):
    """
    `AuditNotificationTarget <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-accountauditconfiguration-auditnotificationtarget.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "Enabled": (boolean, False),
        "RoleArn": (str, False),
        "TargetArn": (str, False),
    }
class AuditNotificationTargetConfigurations(AWSProperty):
    """
    `AuditNotificationTargetConfigurations <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-accountauditconfiguration-auditnotificationtargetconfigurations.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "Sns": (AuditNotificationTarget, False),
    }
class AccountAuditConfiguration(AWSObject):
    """
    `AccountAuditConfiguration <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-accountauditconfiguration.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::AccountAuditConfiguration"
    # Property name -> (validator/type, required-flag); AccountId,
    # AuditCheckConfigurations and RoleArn are mandatory.
    props: PropsDictType = {
        "AccountId": (str, True),
        "AuditCheckConfigurations": (AuditCheckConfigurations, True),
        "AuditNotificationTargetConfigurations": (
            AuditNotificationTargetConfigurations,
            False,
        ),
        "RoleArn": (str, True),
    }
class Authorizer(AWSObject):
    """
    `Authorizer <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-authorizer.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::Authorizer"
    # Property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "AuthorizerFunctionArn": (str, True),
        "AuthorizerName": (str, False),
        "EnableCachingForHttp": (boolean, False),
        "SigningDisabled": (boolean, False),
        "Status": (str, False),
        "Tags": (Tags, False),
        "TokenKeyName": (str, False),
        "TokenSigningPublicKeys": (dict, False),
    }
class RegistrationConfig(AWSProperty):
    """
    `RegistrationConfig <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-cacertificate-registrationconfig.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "RoleArn": (str, False),
        "TemplateBody": (str, False),
        "TemplateName": (str, False),
    }
class CACertificate(AWSObject):
    """
    `CACertificate <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-cacertificate.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::CACertificate"
    # Property name -> (validator/type, required-flag); the certificate PEM
    # and Status are mandatory.
    props: PropsDictType = {
        "AutoRegistrationStatus": (str, False),
        "CACertificatePem": (str, True),
        "CertificateMode": (str, False),
        "RegistrationConfig": (RegistrationConfig, False),
        "RemoveAutoRegistration": (boolean, False),
        "Status": (str, True),
        "Tags": (Tags, False),
        "VerificationCertificatePem": (str, False),
    }
class Certificate(AWSObject):
    """
    `Certificate <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-certificate.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::Certificate"
    # Property name -> (validator/type, required-flag); only Status is
    # mandatory.
    props: PropsDictType = {
        "CACertificatePem": (str, False),
        "CertificateMode": (str, False),
        "CertificatePem": (str, False),
        "CertificateSigningRequest": (str, False),
        "Status": (str, True),
    }
class CustomMetric(AWSObject):
    """
    `CustomMetric <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-custommetric.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::CustomMetric"
    # Property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "DisplayName": (str, False),
        "MetricName": (str, False),
        "MetricType": (str, True),
        "Tags": (Tags, False),
    }
class Dimension(AWSObject):
    """
    `Dimension <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-dimension.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::Dimension"
    # Property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "Name": (str, False),
        "StringValues": ([str], True),
        "Tags": (Tags, False),
        "Type": (str, True),
    }
class AuthorizerConfig(AWSProperty):
    """
    `AuthorizerConfig <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-domainconfiguration-authorizerconfig.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "AllowAuthorizerOverride": (boolean, False),
        "DefaultAuthorizerName": (str, False),
    }
class DomainConfiguration(AWSObject):
    """
    `DomainConfiguration <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-domainconfiguration.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::DomainConfiguration"
    # Property name -> (validator/type, required-flag); all optional.
    props: PropsDictType = {
        "AuthorizerConfig": (AuthorizerConfig, False),
        "DomainConfigurationName": (str, False),
        "DomainConfigurationStatus": (str, False),
        "DomainName": (str, False),
        "ServerCertificateArns": ([str], False),
        "ServiceType": (str, False),
        "Tags": (Tags, False),
        "ValidationCertificateArn": (str, False),
    }
class AggregationType(AWSProperty):
    """
    `AggregationType <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-fleetmetric-aggregationtype.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "Name": (str, True),
        "Values": ([str], True),
    }
class FleetMetric(AWSObject):
    """
    `FleetMetric <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-fleetmetric.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::FleetMetric"
    # Property name -> (validator/type, required-flag); only MetricName is
    # mandatory.
    props: PropsDictType = {
        "AggregationField": (str, False),
        "AggregationType": (AggregationType, False),
        "Description": (str, False),
        "IndexName": (str, False),
        "MetricName": (str, True),
        "Period": (integer, False),
        "QueryString": (str, False),
        "QueryVersion": (str, False),
        "Tags": (Tags, False),
        "Unit": (str, False),
    }
class JobTemplate(AWSObject):
    """
    `JobTemplate <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-jobtemplate.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::JobTemplate"
    # Property name -> (validator/type, required-flag); several fields hold
    # free-form JSON validated by validate_json_checker.
    props: PropsDictType = {
        "AbortConfig": (validate_json_checker, False),
        "Description": (str, True),
        "Document": (str, False),
        "DocumentSource": (str, False),
        "JobArn": (str, False),
        "JobExecutionsRolloutConfig": (validate_json_checker, False),
        "JobTemplateId": (str, True),
        "PresignedUrlConfig": (dict, False),
        "Tags": (Tags, False),
        "TimeoutConfig": (validate_json_checker, False),
    }
class Logging(AWSObject):
    """
    `Logging <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-logging.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::Logging"
    # Property name -> (validator/type, required-flag); all three mandatory.
    props: PropsDictType = {
        "AccountId": (str, True),
        "DefaultLogLevel": (str, True),
        "RoleArn": (str, True),
    }
class AddThingsToThingGroupParams(AWSProperty):
    """
    `AddThingsToThingGroupParams <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-mitigationaction-addthingstothinggroupparams.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "OverrideDynamicGroups": (boolean, False),
        "ThingGroupNames": ([str], True),
    }
class EnableIoTLoggingParams(AWSProperty):
    """
    `EnableIoTLoggingParams <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-mitigationaction-enableiotloggingparams.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "LogLevel": (str, True),
        "RoleArnForLogging": (str, True),
    }
class PublishFindingToSnsParams(AWSProperty):
    """
    `PublishFindingToSnsParams <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-mitigationaction-publishfindingtosnsparams.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "TopicArn": (str, True),
    }
class ReplaceDefaultPolicyVersionParams(AWSProperty):
    """
    `ReplaceDefaultPolicyVersionParams <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-mitigationaction-replacedefaultpolicyversionparams.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "TemplateName": (str, True),
    }
class UpdateCACertificateParams(AWSProperty):
    """
    `UpdateCACertificateParams <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-mitigationaction-updatecacertificateparams.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "Action": (str, True),
    }
class UpdateDeviceCertificateParams(AWSProperty):
    """
    `UpdateDeviceCertificateParams <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-mitigationaction-updatedevicecertificateparams.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "Action": (str, True),
    }
class ActionParams(AWSProperty):
    """
    `ActionParams <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-mitigationaction-actionparams.html>`__
    """
    # One optional params block per mitigation action kind; property
    # name -> (validator/type, required-flag).
    props: PropsDictType = {
        "AddThingsToThingGroupParams": (AddThingsToThingGroupParams, False),
        "EnableIoTLoggingParams": (EnableIoTLoggingParams, False),
        "PublishFindingToSnsParams": (PublishFindingToSnsParams, False),
        "ReplaceDefaultPolicyVersionParams": (ReplaceDefaultPolicyVersionParams, False),
        "UpdateCACertificateParams": (UpdateCACertificateParams, False),
        "UpdateDeviceCertificateParams": (UpdateDeviceCertificateParams, False),
    }
class MitigationAction(AWSObject):
    """
    `MitigationAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-mitigationaction.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::MitigationAction"
    # Property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "ActionName": (str, False),
        "ActionParams": (ActionParams, True),
        "RoleArn": (str, True),
        "Tags": (Tags, False),
    }
class Policy(AWSObject):
    """
    `Policy <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-policy.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::Policy"
    # Property name -> (validator/type, required-flag); PolicyDocument is
    # validated by the policytypes validator.
    props: PropsDictType = {
        "PolicyDocument": (policytypes, True),
        "PolicyName": (str, False),
    }
class PolicyPrincipalAttachment(AWSObject):
    """
    `PolicyPrincipalAttachment <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-policyprincipalattachment.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::PolicyPrincipalAttachment"
    # Property name -> (validator/type, required-flag); both mandatory.
    props: PropsDictType = {
        "PolicyName": (str, True),
        "Principal": (str, True),
    }
class ProvisioningHook(AWSProperty):
    """
    `ProvisioningHook <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-provisioningtemplate-provisioninghook.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "PayloadVersion": (str, False),
        "TargetArn": (str, False),
    }
class ProvisioningTemplate(AWSObject):
    """
    `ProvisioningTemplate <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-provisioningtemplate.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::ProvisioningTemplate"
    # Property name -> (validator/type, required-flag); ProvisioningRoleArn
    # and TemplateBody are mandatory.
    props: PropsDictType = {
        "Description": (str, False),
        "Enabled": (boolean, False),
        "PreProvisioningHook": (ProvisioningHook, False),
        "ProvisioningRoleArn": (str, True),
        "Tags": (Tags, False),
        "TemplateBody": (str, True),
        "TemplateName": (str, False),
        "TemplateType": (str, False),
    }
class ResourceSpecificLogging(AWSObject):
    """
    `ResourceSpecificLogging <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-resourcespecificlogging.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::ResourceSpecificLogging"
    # Property name -> (validator/type, required-flag); all three mandatory.
    props: PropsDictType = {
        "LogLevel": (str, True),
        "TargetName": (str, True),
        "TargetType": (str, True),
    }
class RoleAlias(AWSObject):
    """
    `RoleAlias <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-rolealias.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::RoleAlias"
    # Property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "CredentialDurationSeconds": (integer, False),
        "RoleAlias": (str, False),
        "RoleArn": (str, True),
        "Tags": (Tags, False),
    }
class ScheduledAudit(AWSObject):
    """
    `ScheduledAudit <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-scheduledaudit.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::ScheduledAudit"
    # Property name -> (validator/type, required-flag); Frequency and
    # TargetCheckNames are mandatory.
    props: PropsDictType = {
        "DayOfMonth": (str, False),
        "DayOfWeek": (str, False),
        "Frequency": (str, True),
        "ScheduledAuditName": (str, False),
        "Tags": (Tags, False),
        "TargetCheckNames": ([str], True),
    }
class AlertTarget(AWSProperty):
    """
    `AlertTarget <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-securityprofile-alerttarget.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "AlertTargetArn": (str, True),
        "RoleArn": (str, True),
    }
class MachineLearningDetectionConfig(AWSProperty):
    """
    `MachineLearningDetectionConfig <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-securityprofile-machinelearningdetectionconfig.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "ConfidenceLevel": (str, False),
    }
class MetricValue(AWSProperty):
    """
    `MetricValue <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-securityprofile-metricvalue.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "Cidrs": ([str], False),
        "Count": (str, False),
        "Number": (double, False),
        "Numbers": ([double], False),
        "Ports": ([integer], False),
        "Strings": ([str], False),
    }
class StatisticalThreshold(AWSProperty):
    """
    `StatisticalThreshold <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-securityprofile-statisticalthreshold.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "Statistic": (str, False),
    }
class BehaviorCriteria(AWSProperty):
    """
    `BehaviorCriteria <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-securityprofile-behaviorcriteria.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "ComparisonOperator": (str, False),
        "ConsecutiveDatapointsToAlarm": (integer, False),
        "ConsecutiveDatapointsToClear": (integer, False),
        "DurationSeconds": (integer, False),
        "MlDetectionConfig": (MachineLearningDetectionConfig, False),
        "StatisticalThreshold": (StatisticalThreshold, False),
        "Value": (MetricValue, False),
    }
class MetricDimension(AWSProperty):
    """
    `MetricDimension <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-securityprofile-metricdimension.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "DimensionName": (str, True),
        "Operator": (str, False),
    }
class Behavior(AWSProperty):
    """
    `Behavior <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-securityprofile-behavior.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "Criteria": (BehaviorCriteria, False),
        "Metric": (str, False),
        "MetricDimension": (MetricDimension, False),
        "Name": (str, True),
        "SuppressAlerts": (boolean, False),
    }
class MetricToRetain(AWSProperty):
    """
    `MetricToRetain <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-securityprofile-metrictoretain.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "Metric": (str, True),
        "MetricDimension": (MetricDimension, False),
    }
class SecurityProfile(AWSObject):
    """
    `SecurityProfile <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-securityprofile.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::SecurityProfile"
    # Property name -> (validator/type, required-flag); all optional.
    props: PropsDictType = {
        "AdditionalMetricsToRetainV2": ([MetricToRetain], False),
        "AlertTargets": (dict, False),
        "Behaviors": ([Behavior], False),
        "SecurityProfileDescription": (str, False),
        "SecurityProfileName": (str, False),
        "Tags": (Tags, False),
        "TargetArns": ([str], False),
    }
class AttributePayload(AWSProperty):
    """
    `AttributePayload <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-thing-attributepayload.html>`__
    """
    # CloudFormation property name -> (validator/type, required-flag).
    props: PropsDictType = {
        "Attributes": (dict, False),
    }
class Thing(AWSObject):
    """
    `Thing <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-thing.html>`__
    """
    # CloudFormation resource type this class serializes to.
    resource_type = "AWS::IoT::Thing"
    # Property name -> (validator/type, required-flag); all optional.
    props: PropsDictType = {
        "AttributePayload": (AttributePayload, False),
        "ThingName": (str, False),
    }
class ThingPrincipalAttachment(AWSObject):
    """
    `ThingPrincipalAttachment <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-thingprincipalattachment.html>`__

    CloudFormation resource ``AWS::IoT::ThingPrincipalAttachment``.
    """
    resource_type = "AWS::IoT::ThingPrincipalAttachment"
    props: PropsDictType = {
        "Principal": (str, True),
        "ThingName": (str, True),
    }
class CloudwatchAlarmAction(AWSProperty):
    """
    `CloudwatchAlarmAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-cloudwatchalarmaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "AlarmName": (str, True),
        "RoleArn": (str, True),
        "StateReason": (str, True),
        "StateValue": (str, True),
    }
class CloudwatchLogsAction(AWSProperty):
    """
    `CloudwatchLogsAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-cloudwatchlogsaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "LogGroupName": (str, True),
        "RoleArn": (str, True),
    }
class CloudwatchMetricAction(AWSProperty):
    """
    `CloudwatchMetricAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-cloudwatchmetricaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "MetricName": (str, True),
        "MetricNamespace": (str, True),
        "MetricTimestamp": (str, False),
        "MetricUnit": (str, True),
        "MetricValue": (str, True),
        "RoleArn": (str, True),
    }
class DynamoDBAction(AWSProperty):
    """
    `DynamoDBAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-dynamodbaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "HashKeyField": (str, True),
        "HashKeyType": (str, False),
        "HashKeyValue": (str, True),
        "PayloadField": (str, False),
        "RangeKeyField": (str, False),
        "RangeKeyType": (str, False),
        "RangeKeyValue": (str, False),
        "RoleArn": (str, True),
        "TableName": (str, True),
    }
class PutItemInput(AWSProperty):
    """
    `PutItemInput <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-putiteminput.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "TableName": (str, True),
    }
class DynamoDBv2Action(AWSProperty):
    """
    `DynamoDBv2Action <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-dynamodbv2action.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "PutItem": (PutItemInput, False),
        "RoleArn": (str, False),
    }
class ElasticsearchAction(AWSProperty):
    """
    `ElasticsearchAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-elasticsearchaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "Endpoint": (str, True),
        "Id": (str, True),
        "Index": (str, True),
        "RoleArn": (str, True),
        "Type": (str, True),
    }
class FirehoseAction(AWSProperty):
    """
    `FirehoseAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-firehoseaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "BatchMode": (boolean, False),
        "DeliveryStreamName": (str, True),
        "RoleArn": (str, True),
        "Separator": (str, False),
    }
class HttpActionHeader(AWSProperty):
    """
    `HttpActionHeader <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-httpactionheader.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "Key": (str, True),
        "Value": (str, True),
    }
class SigV4Authorization(AWSProperty):
    """
    `SigV4Authorization <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-sigv4authorization.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "RoleArn": (str, True),
        "ServiceName": (str, True),
        "SigningRegion": (str, True),
    }
class HttpAuthorization(AWSProperty):
    """
    `HttpAuthorization <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-httpauthorization.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "Sigv4": (SigV4Authorization, False),
    }
class HttpAction(AWSProperty):
    """
    `HttpAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-httpaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "Auth": (HttpAuthorization, False),
        "ConfirmationUrl": (str, False),
        "Headers": ([HttpActionHeader], False),
        "Url": (str, True),
    }
class IotAnalyticsAction(AWSProperty):
    """
    `IotAnalyticsAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-iotanalyticsaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "BatchMode": (boolean, False),
        "ChannelName": (str, True),
        "RoleArn": (str, True),
    }
class IotEventsAction(AWSProperty):
    """
    `IotEventsAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-ioteventsaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "BatchMode": (boolean, False),
        "InputName": (str, True),
        "MessageId": (str, False),
        "RoleArn": (str, True),
    }
class AssetPropertyTimestamp(AWSProperty):
    """
    `AssetPropertyTimestamp <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-assetpropertytimestamp.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "OffsetInNanos": (str, False),
        "TimeInSeconds": (str, True),
    }
class AssetPropertyVariant(AWSProperty):
    """
    `AssetPropertyVariant <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-assetpropertyvariant.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "BooleanValue": (str, False),
        "DoubleValue": (str, False),
        "IntegerValue": (str, False),
        "StringValue": (str, False),
    }
class AssetPropertyValue(AWSProperty):
    """
    `AssetPropertyValue <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-assetpropertyvalue.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "Quality": (str, False),
        "Timestamp": (AssetPropertyTimestamp, True),
        "Value": (AssetPropertyVariant, True),
    }
class PutAssetPropertyValueEntry(AWSProperty):
    """
    `PutAssetPropertyValueEntry <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-putassetpropertyvalueentry.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "AssetId": (str, False),
        "EntryId": (str, False),
        "PropertyAlias": (str, False),
        "PropertyId": (str, False),
        "PropertyValues": ([AssetPropertyValue], True),
    }
class IotSiteWiseAction(AWSProperty):
    """
    `IotSiteWiseAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-iotsitewiseaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "PutAssetPropertyValueEntries": ([PutAssetPropertyValueEntry], True),
        "RoleArn": (str, True),
    }
class KafkaAction(AWSProperty):
    """
    `KafkaAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-kafkaaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "ClientProperties": (dict, True),
        "DestinationArn": (str, True),
        "Key": (str, False),
        "Partition": (str, False),
        "Topic": (str, True),
    }
class KinesisAction(AWSProperty):
    """
    `KinesisAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-kinesisaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "PartitionKey": (str, False),
        "RoleArn": (str, True),
        "StreamName": (str, True),
    }
class LambdaAction(AWSProperty):
    """
    `LambdaAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-lambdaaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "FunctionArn": (str, False),
    }
class Timestamp(AWSProperty):
    """
    `Timestamp <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-timestamp.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "Unit": (str, False),
        "Value": (str, True),
    }
class LocationAction(AWSProperty):
    """
    `LocationAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-locationaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "DeviceId": (str, True),
        "Latitude": (str, True),
        "Longitude": (str, True),
        "RoleArn": (str, True),
        "Timestamp": (Timestamp, False),
        "TrackerName": (str, True),
    }
class OpenSearchAction(AWSProperty):
    """
    `OpenSearchAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-opensearchaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "Endpoint": (str, True),
        "Id": (str, True),
        "Index": (str, True),
        "RoleArn": (str, True),
        "Type": (str, True),
    }
class UserProperty(AWSProperty):
    """
    `UserProperty <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-userproperty.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "Key": (str, True),
        "Value": (str, True),
    }
class RepublishActionHeaders(AWSProperty):
    """
    `RepublishActionHeaders <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-republishactionheaders.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "ContentType": (str, False),
        "CorrelationData": (str, False),
        "MessageExpiry": (str, False),
        "PayloadFormatIndicator": (str, False),
        "ResponseTopic": (str, False),
        "UserProperties": ([UserProperty], False),
    }
class RepublishAction(AWSProperty):
    """
    `RepublishAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-republishaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "Headers": (RepublishActionHeaders, False),
        "Qos": (integer, False),
        "RoleArn": (str, True),
        "Topic": (str, True),
    }
class S3Action(AWSProperty):
    """
    `S3Action <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-s3action.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "BucketName": (str, True),
        "CannedAcl": (str, False),
        "Key": (str, True),
        "RoleArn": (str, True),
    }
class SnsAction(AWSProperty):
    """
    `SnsAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-snsaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "MessageFormat": (str, False),
        "RoleArn": (str, True),
        "TargetArn": (str, True),
    }
class SqsAction(AWSProperty):
    """
    `SqsAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-sqsaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "QueueUrl": (str, True),
        "RoleArn": (str, True),
        "UseBase64": (boolean, False),
    }
class StepFunctionsAction(AWSProperty):
    """
    `StepFunctionsAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-stepfunctionsaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "ExecutionNamePrefix": (str, False),
        "RoleArn": (str, True),
        "StateMachineName": (str, True),
    }
class TimestreamDimension(AWSProperty):
    """
    `TimestreamDimension <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-timestreamdimension.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "Name": (str, True),
        "Value": (str, True),
    }
class TimestreamTimestamp(AWSProperty):
    """
    `TimestreamTimestamp <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-timestreamtimestamp.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "Unit": (str, True),
        "Value": (str, True),
    }
class TimestreamAction(AWSProperty):
    """
    `TimestreamAction <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-timestreamaction.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "DatabaseName": (str, True),
        "Dimensions": ([TimestreamDimension], True),
        "RoleArn": (str, True),
        "TableName": (str, True),
        "Timestamp": (TimestreamTimestamp, False),
    }
class Action(AWSProperty):
    """
    `Action <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-action.html>`__

    Property type for ``AWS::IoT::TopicRule``: exactly one of the
    destination-specific sub-properties below is expected to be set.
    """
    props: PropsDictType = {
        "CloudwatchAlarm": (CloudwatchAlarmAction, False),
        "CloudwatchLogs": (CloudwatchLogsAction, False),
        "CloudwatchMetric": (CloudwatchMetricAction, False),
        "DynamoDB": (DynamoDBAction, False),
        "DynamoDBv2": (DynamoDBv2Action, False),
        "Elasticsearch": (ElasticsearchAction, False),
        "Firehose": (FirehoseAction, False),
        "Http": (HttpAction, False),
        "IotAnalytics": (IotAnalyticsAction, False),
        "IotEvents": (IotEventsAction, False),
        "IotSiteWise": (IotSiteWiseAction, False),
        "Kafka": (KafkaAction, False),
        "Kinesis": (KinesisAction, False),
        "Lambda": (LambdaAction, False),
        "Location": (LocationAction, False),
        "OpenSearch": (OpenSearchAction, False),
        "Republish": (RepublishAction, False),
        "S3": (S3Action, False),
        "Sns": (SnsAction, False),
        "Sqs": (SqsAction, False),
        "StepFunctions": (StepFunctionsAction, False),
        "Timestream": (TimestreamAction, False),
    }
class TopicRulePayload(AWSProperty):
    """
    `TopicRulePayload <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-topicrulepayload.html>`__

    Property type for ``AWS::IoT::TopicRule``.
    """
    props: PropsDictType = {
        "Actions": ([Action], True),
        "AwsIotSqlVersion": (str, False),
        "Description": (str, False),
        "ErrorAction": (Action, False),
        "RuleDisabled": (boolean, False),
        "Sql": (str, True),
    }
class TopicRule(AWSObject):
    """
    `TopicRule <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-topicrule.html>`__

    CloudFormation resource ``AWS::IoT::TopicRule``.
    """
    resource_type = "AWS::IoT::TopicRule"
    props: PropsDictType = {
        "RuleName": (str, False),
        "Tags": (Tags, False),
        "TopicRulePayload": (TopicRulePayload, True),
    }
class HttpUrlDestinationSummary(AWSProperty):
    """
    `HttpUrlDestinationSummary <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicruledestination-httpurldestinationsummary.html>`__

    Property type for ``AWS::IoT::TopicRuleDestination``.
    """
    props: PropsDictType = {
        "ConfirmationUrl": (str, False),
    }
class VpcDestinationProperties(AWSProperty):
    """
    `VpcDestinationProperties <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicruledestination-vpcdestinationproperties.html>`__

    Property type for ``AWS::IoT::TopicRuleDestination``.
    """
    props: PropsDictType = {
        "RoleArn": (str, False),
        "SecurityGroups": ([str], False),
        "SubnetIds": ([str], False),
        "VpcId": (str, False),
    }
class TopicRuleDestination(AWSObject):
    """
    `TopicRuleDestination <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iot-topicruledestination.html>`__

    CloudFormation resource ``AWS::IoT::TopicRuleDestination``.
    """
    resource_type = "AWS::IoT::TopicRuleDestination"
    props: PropsDictType = {
        "HttpUrlProperties": (HttpUrlDestinationSummary, False),
        "Status": (str, False),
        "VpcProperties": (VpcDestinationProperties, False),
    }
class ServerCertificateSummary(AWSProperty):
    """
    `ServerCertificateSummary <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-domainconfiguration-servercertificatesummary.html>`__

    Property type for ``AWS::IoT::DomainConfiguration``.
    """
    props: PropsDictType = {
        "ServerCertificateArn": (str, False),
        "ServerCertificateStatus": (str, False),
        "ServerCertificateStatusDetail": (str, False),
    }
| {
"content_hash": "c1865bc3949a2aa6c8ae9df46332cf7f",
"timestamp": "",
"source": "github",
"line_count": 1174,
"max_line_length": 197,
"avg_line_length": 30.30494037478705,
"alnum_prop": 0.6482938894822643,
"repo_name": "cloudtools/troposphere",
"id": "21fec73e29d29aad0a6d53bb1e4ed2dce7151457",
"size": "35750",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "troposphere/iot.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "2754"
},
{
"name": "Python",
"bytes": "2305574"
},
{
"name": "Shell",
"bytes": "625"
}
],
"symlink_target": ""
} |
"""
Views for managing Neutron Routers.
"""
from django.core.urlresolvers import reverse_lazy
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import tables
from horizon import tabs
from horizon.utils import memoized
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.routers\
import forms as project_forms
from openstack_dashboard.dashboards.project.routers import tables as rtables
from openstack_dashboard.dashboards.project.routers import tabs as rdtabs
class IndexView(tables.DataTableView):
    """Table view listing the current tenant's Neutron routers."""
    table_class = rtables.RoutersTable
    template_name = 'project/routers/index.html'

    def _get_routers(self, search_opts=None):
        # Fetch the tenant's routers; on any API failure fall back to an
        # empty list and report the error through horizon.
        try:
            routers = api.neutron.router_list(
                self.request,
                tenant_id=self.request.user.tenant_id,
                search_opts=search_opts)
        except Exception:
            routers = []
            exceptions.handle(self.request,
                              _('Unable to retrieve router list.'))
        ext_nets_by_id = self._list_external_networks()
        for router in routers:
            router.set_id_as_name_if_empty()
            self._set_external_network(router, ext_nets_by_id)
        return routers

    def get_data(self):
        """Return the rows for the routers table."""
        return self._get_routers()

    def _list_external_networks(self):
        # Build an id -> display-name mapping of all external networks;
        # empty on failure (the error is reported through horizon).
        try:
            nets = api.neutron.network_list(
                self.request, **{'router:external': True})
            for net in nets:
                net.set_id_as_name_if_empty()
            return SortedDict((net['id'], net.name) for net in nets)
        except Exception as e:
            msg = _('Unable to retrieve a list of external networks "%s".') % e
            exceptions.handle(self.request, msg)
            return {}

    def _set_external_network(self, router, ext_net_dict):
        # Annotate the router's gateway info with the network's display
        # name when known, otherwise report the missing network.
        gateway_info = router.external_gateway_info
        if not gateway_info:
            return
        ext_net_id = gateway_info['network_id']
        if ext_net_id in ext_net_dict:
            gateway_info['network'] = ext_net_dict[ext_net_id]
        else:
            msg = _('External network "%s" not found.') % (ext_net_id)
            exceptions.handle(self.request, msg)
class DetailView(tabs.TabbedTableView):
    """Tabbed detail view for a single router.

    The router (and, when present, the display name of its external
    gateway network) is fetched once per request via memoization.
    """
    tab_group_class = rdtabs.RouterDetailTabs
    template_name = 'project/routers/detail.html'
    failure_url = reverse_lazy('horizon:project:routers:index')

    @memoized.memoized_method
    def _get_data(self):
        # Bug fix: read router_id before the try block.  Previously it was
        # assigned inside the try, so a failure during the kwargs lookup
        # raised a NameError while formatting the error message below.
        router_id = self.kwargs['router_id']
        try:
            router = api.neutron.router_get(self.request, router_id)
            router.set_id_as_name_if_empty(length=0)
        except Exception:
            msg = _('Unable to retrieve details for router "%s".') \
                % (router_id)
            # Redirects back to the index view on failure.
            exceptions.handle(self.request, msg, redirect=self.failure_url)
        if router.external_gateway_info:
            ext_net_id = router.external_gateway_info['network_id']
            try:
                ext_net = api.neutron.network_get(self.request, ext_net_id,
                                                  expand_subnet=False)
                ext_net.set_id_as_name_if_empty(length=0)
                router.external_gateway_info['network'] = ext_net.name
            except Exception:
                msg = _('Unable to retrieve an external network "%s".') \
                    % (ext_net_id)
                exceptions.handle(self.request, msg)
                # Fall back to displaying the raw network ID.
                router.external_gateway_info['network'] = ext_net_id
        return router

    def get_context_data(self, **kwargs):
        """Expose the router object to the template context."""
        context = super(DetailView, self).get_context_data(**kwargs)
        context["router"] = self._get_data()
        return context

    def get(self, request, *args, **kwargs):
        # Make the router available to the tab group before rendering.
        router = self._get_data()
        self.kwargs['router'] = router
        return super(DetailView, self).get(request, *args, **kwargs)
class CreateView(forms.ModalFormView):
    # Modal form for creating a router; on success return to the router
    # index table.
    form_class = project_forms.CreateForm
    template_name = 'project/routers/create.html'
    success_url = reverse_lazy("horizon:project:routers:index")
| {
"content_hash": "bbf1466ba65b90a3f3b7788a3a17cc97",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 79,
"avg_line_length": 38.76521739130435,
"alnum_prop": 0.5975773889636609,
"repo_name": "rd37/horizon",
"id": "779020c9b4cd896e61052319d2fcc3e16ec4525f",
"size": "5170",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "openstack_dashboard/dashboards/project/routers/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "334034"
},
{
"name": "JavaScript",
"bytes": "707335"
},
{
"name": "Python",
"bytes": "3254186"
},
{
"name": "Shell",
"bytes": "15924"
}
],
"symlink_target": ""
} |
from __future__ import division
import unittest2 as unittest
from mock.tests.support import inPy3k
# Compatibility shim: on Python 3 the names `unicode` and `long` do not
# exist, so alias them to `str` and `int` for the tests below.
try:
    unicode
except NameError:
    # Python 3
    unicode = str
    long = int
import inspect
import sys
import textwrap
from mock import Mock, MagicMock
from mock.mock import _magics
class TestMockingMagicMethods(unittest.TestCase):
    """Tests for attaching, configuring and deleting magic methods on
    Mock and MagicMock (and for MagicMock's preconfigured defaults)."""
    def test_deleting_magic_methods(self):
        mock = Mock()
        self.assertFalse(hasattr(mock, '__getitem__'))
        mock.__getitem__ = Mock()
        self.assertTrue(hasattr(mock, '__getitem__'))
        del mock.__getitem__
        self.assertFalse(hasattr(mock, '__getitem__'))
    def test_magicmock_del(self):
        mock = MagicMock()
        # before using getitem
        del mock.__getitem__
        self.assertRaises(TypeError, lambda: mock['foo'])
        mock = MagicMock()
        # this time use it first
        mock['foo']
        del mock.__getitem__
        self.assertRaises(TypeError, lambda: mock['foo'])
    def test_magic_method_wrapping(self):
        mock = Mock()
        def f(self, name):
            return self, 'fish'
        mock.__getitem__ = f
        # the plain function is wrapped into a method-like object
        self.assertIsNot(mock.__getitem__, f)
        self.assertEqual(mock['foo'], (mock, 'fish'))
        self.assertEqual(mock.__getitem__('foo'), (mock, 'fish'))
        mock.__getitem__ = mock
        self.assertIs(mock.__getitem__, mock)
    def test_magic_methods_isolated_between_mocks(self):
        mock1 = Mock()
        mock2 = Mock()
        mock1.__iter__ = Mock(return_value=iter([]))
        self.assertEqual(list(mock1), [])
        self.assertRaises(TypeError, lambda: list(mock2))
    def test_repr(self):
        mock = Mock()
        self.assertEqual(repr(mock), "<Mock id='%s'>" % id(mock))
        mock.__repr__ = lambda s: 'foo'
        self.assertEqual(repr(mock), 'foo')
    def test_str(self):
        mock = Mock()
        self.assertEqual(str(mock), object.__str__(mock))
        mock.__str__ = lambda s: 'foo'
        self.assertEqual(str(mock), 'foo')
    @unittest.skipIf(inPy3k, "no unicode in Python 3")
    def test_unicode(self):
        mock = Mock()
        self.assertEqual(unicode(mock), unicode(str(mock)))
        mock.__unicode__ = lambda s: unicode('foo')
        self.assertEqual(unicode(mock), unicode('foo'))
    def test_dict_methods(self):
        mock = Mock()
        self.assertRaises(TypeError, lambda: mock['foo'])
        def _del():
            del mock['foo']
        def _set():
            mock['foo'] = 3
        self.assertRaises(TypeError, _del)
        self.assertRaises(TypeError, _set)
        _dict = {}
        def getitem(s, name):
            return _dict[name]
        def setitem(s, name, value):
            _dict[name] = value
        def delitem(s, name):
            del _dict[name]
        mock.__setitem__ = setitem
        mock.__getitem__ = getitem
        mock.__delitem__ = delitem
        self.assertRaises(KeyError, lambda: mock['foo'])
        mock['foo'] = 'bar'
        self.assertEqual(_dict, {'foo': 'bar'})
        self.assertEqual(mock['foo'], 'bar')
        del mock['foo']
        self.assertEqual(_dict, {})
    def test_numeric(self):
        original = mock = Mock()
        mock.value = 0
        self.assertRaises(TypeError, lambda: mock + 3)
        def add(self, other):
            mock.value += other
            return self
        mock.__add__ = add
        self.assertEqual(mock + 3, mock)
        self.assertEqual(mock.value, 3)
        del mock.__add__
        def iadd(mock):
            mock += 3
        self.assertRaises(TypeError, iadd, mock)
        mock.__iadd__ = add
        mock += 6
        self.assertEqual(mock, original)
        self.assertEqual(mock.value, 9)
        self.assertRaises(TypeError, lambda: 3 + mock)
        mock.__radd__ = add
        self.assertEqual(7 + mock, mock)
        self.assertEqual(mock.value, 16)
    def test_division(self):
        original = mock = Mock()
        mock.value = 32
        self.assertRaises(TypeError, lambda: mock / 2)
        def truediv(self, other):
            mock.value /= other
            return self
        mock.__truediv__ = truediv
        self.assertEqual(mock / 2, mock)
        self.assertEqual(mock.value, 16)
        del mock.__truediv__
        if inPy3k:
            def itruediv(mock):
                mock /= 4
            self.assertRaises(TypeError, itruediv, mock)
            mock.__itruediv__ = truediv
            mock /= 8
            self.assertEqual(mock, original)
            self.assertEqual(mock.value, 2)
        else:
            mock.value = 2
        self.assertRaises(TypeError, lambda: 8 / mock)
        mock.__rtruediv__ = truediv
        self.assertEqual(0.5 / mock, mock)
        self.assertEqual(mock.value, 4)
    def test_hash(self):
        mock = Mock()
        # test delegation
        self.assertEqual(hash(mock), Mock.__hash__(mock))
        def _hash(s):
            return 3
        mock.__hash__ = _hash
        self.assertEqual(hash(mock), 3)
    def test_nonzero(self):
        m = Mock()
        self.assertTrue(bool(m))
        nonzero = lambda s: False
        if not inPy3k:
            m.__nonzero__ = nonzero
        else:
            m.__bool__ = nonzero
        self.assertFalse(bool(m))
    def test_comparison(self):
        mock = Mock()
        def comp(s, o):
            return True
        mock.__lt__ = mock.__gt__ = mock.__le__ = mock.__ge__ = comp
        self. assertTrue(mock < 3)
        self. assertTrue(mock > 3)
        self. assertTrue(mock <= 3)
        self. assertTrue(mock >= 3)
        if not inPy3k:
            # incomparable in Python 3
            self.assertEqual(Mock() < 3, object() < 3)
            self.assertEqual(Mock() > 3, object() > 3)
            self.assertEqual(Mock() <= 3, object() <= 3)
            self.assertEqual(Mock() >= 3, object() >= 3)
        else:
            self.assertRaises(TypeError, lambda: MagicMock() < object())
            self.assertRaises(TypeError, lambda: object() < MagicMock())
            self.assertRaises(TypeError, lambda: MagicMock() < MagicMock())
            self.assertRaises(TypeError, lambda: MagicMock() > object())
            self.assertRaises(TypeError, lambda: object() > MagicMock())
            self.assertRaises(TypeError, lambda: MagicMock() > MagicMock())
            self.assertRaises(TypeError, lambda: MagicMock() <= object())
            self.assertRaises(TypeError, lambda: object() <= MagicMock())
            self.assertRaises(TypeError, lambda: MagicMock() <= MagicMock())
            self.assertRaises(TypeError, lambda: MagicMock() >= object())
            self.assertRaises(TypeError, lambda: object() >= MagicMock())
            self.assertRaises(TypeError, lambda: MagicMock() >= MagicMock())
    def test_equality(self):
        for mock in Mock(), MagicMock():
            self.assertEqual(mock == mock, True)
            self.assertIsInstance(mock == mock, bool)
            self.assertEqual(mock != mock, False)
            self.assertIsInstance(mock != mock, bool)
            self.assertEqual(mock == object(), False)
            self.assertEqual(mock != object(), True)
            def eq(self, other):
                return other == 3
            mock.__eq__ = eq
            self.assertTrue(mock == 3)
            self.assertFalse(mock == 4)
            def ne(self, other):
                return other == 3
            mock.__ne__ = ne
            self.assertTrue(mock != 3)
            self.assertFalse(mock != 4)
        mock = MagicMock()
        mock.__eq__.return_value = True
        self.assertIsInstance(mock == 3, bool)
        self.assertEqual(mock == 3, True)
        mock.__ne__.return_value = False
        self.assertIsInstance(mock != 3, bool)
        self.assertEqual(mock != 3, False)
    def test_len_contains_iter(self):
        mock = Mock()
        self.assertRaises(TypeError, len, mock)
        self.assertRaises(TypeError, iter, mock)
        self.assertRaises(TypeError, lambda: 'foo' in mock)
        mock.__len__ = lambda s: 6
        self.assertEqual(len(mock), 6)
        mock.__contains__ = lambda s, o: o == 3
        self.assertIn(3, mock)
        self.assertNotIn(6, mock)
        mock.__iter__ = lambda s: iter('foobarbaz')
        self.assertEqual(list(mock), list('foobarbaz'))
    def test_magicmock(self):
        mock = MagicMock()
        mock.__iter__.return_value = iter([1, 2, 3])
        self.assertEqual(list(mock), [1, 2, 3])
        name = '__nonzero__'
        other = '__bool__'
        if inPy3k:
            name, other = other, name
        getattr(mock, name).return_value = False
        self.assertFalse(hasattr(mock, other))
        self.assertFalse(bool(mock))
        for entry in _magics:
            self.assertTrue(hasattr(mock, entry))
        self.assertFalse(hasattr(mock, '__imaginery__'))
    def test_magic_mock_equality(self):
        mock = MagicMock()
        self.assertIsInstance(mock == object(), bool)
        self.assertIsInstance(mock != object(), bool)
        self.assertEqual(mock == object(), False)
        self.assertEqual(mock != object(), True)
        self.assertEqual(mock == mock, True)
        self.assertEqual(mock != mock, False)
    def test_magicmock_defaults(self):
        mock = MagicMock()
        self.assertEqual(int(mock), 1)
        self.assertEqual(complex(mock), 1j)
        self.assertEqual(float(mock), 1.0)
        self.assertEqual(long(mock), long(1))
        self.assertNotIn(object(), mock)
        self.assertEqual(len(mock), 0)
        self.assertEqual(list(mock), [])
        self.assertEqual(hash(mock), object.__hash__(mock))
        self.assertEqual(str(mock), object.__str__(mock))
        self.assertEqual(unicode(mock), object.__str__(mock))
        self.assertIsInstance(unicode(mock), unicode)
        self.assertTrue(bool(mock))
        if not inPy3k:
            self.assertEqual(oct(mock), '1')
        else:
            # in Python 3 oct and hex use __index__
            # so these tests are for __index__ in py3k
            self.assertEqual(oct(mock), '0o1')
        self.assertEqual(hex(mock), '0x1')
        # how to test __sizeof__ ?
    @unittest.skipIf(inPy3k, "no __cmp__ in Python 3")
    def test_non_default_magic_methods(self):
        mock = MagicMock()
        self.assertRaises(AttributeError, lambda: mock.__cmp__)
        mock = Mock()
        mock.__cmp__ = lambda s, o: 0
        self.assertEqual(mock, object())
    def test_magic_methods_and_spec(self):
        class Iterable(object):
            def __iter__(self):
                pass
        mock = Mock(spec=Iterable)
        self.assertRaises(AttributeError, lambda: mock.__iter__)
        mock.__iter__ = Mock(return_value=iter([]))
        self.assertEqual(list(mock), [])
        class NonIterable(object):
            pass
        mock = Mock(spec=NonIterable)
        self.assertRaises(AttributeError, lambda: mock.__iter__)
        def set_int():
            mock.__int__ = Mock(return_value=iter([]))
        self.assertRaises(AttributeError, set_int)
        mock = MagicMock(spec=Iterable)
        self.assertEqual(list(mock), [])
        self.assertRaises(AttributeError, set_int)
    def test_magic_methods_and_spec_set(self):
        class Iterable(object):
            def __iter__(self):
                pass
        mock = Mock(spec_set=Iterable)
        self.assertRaises(AttributeError, lambda: mock.__iter__)
        mock.__iter__ = Mock(return_value=iter([]))
        self.assertEqual(list(mock), [])
        class NonIterable(object):
            pass
        mock = Mock(spec_set=NonIterable)
        self.assertRaises(AttributeError, lambda: mock.__iter__)
        def set_int():
            mock.__int__ = Mock(return_value=iter([]))
        self.assertRaises(AttributeError, set_int)
        mock = MagicMock(spec_set=Iterable)
        self.assertEqual(list(mock), [])
        self.assertRaises(AttributeError, set_int)
    def test_setting_unsupported_magic_method(self):
        mock = MagicMock()
        def set_setattr():
            mock.__setattr__ = lambda self, name: None
        self.assertRaisesRegex(AttributeError,
            "Attempting to set unsupported magic method '__setattr__'.",
            set_setattr
        )
    def test_attributes_and_return_value(self):
        mock = MagicMock()
        attr = mock.foo
        def _get_type(obj):
            # the type of every mock (or magicmock) is a custom subclass
            # so the real type is the second in the mro
            return type(obj).__mro__[1]
        self.assertEqual(_get_type(attr), MagicMock)
        returned = mock()
        self.assertEqual(_get_type(returned), MagicMock)
    def test_magic_methods_are_magic_mocks(self):
        mock = MagicMock()
        self.assertIsInstance(mock.__getitem__, MagicMock)
        mock[1][2].__getitem__.return_value = 3
        self.assertEqual(mock[1][2][3], 3)
    def test_magic_method_reset_mock(self):
        mock = MagicMock()
        str(mock)
        self.assertTrue(mock.__str__.called)
        mock.reset_mock()
        self.assertFalse(mock.__str__.called)
    def test_dir(self):
        # overriding the default implementation
        for mock in Mock(), MagicMock():
            def _dir(self):
                return ['foo']
            mock.__dir__ = _dir
            self.assertEqual(dir(mock), ['foo'])
    @unittest.skipIf('PyPy' in sys.version, "This fails differently on pypy")
    def test_bound_methods(self):
        m = Mock()
        # XXXX should this be an expected failure instead?
        # this seems like it should work, but is hard to do without introducing
        # other api inconsistencies. Failure message could be better though.
        m.__iter__ = [3].__iter__
        self.assertRaises(TypeError, iter, m)
    def test_magic_method_type(self):
        class Foo(MagicMock):
            pass
        foo = Foo()
        self.assertIsInstance(foo.__int__, Foo)
    def test_descriptor_from_class(self):
        m = MagicMock()
        type(m).__str__.return_value = 'foo'
        self.assertEqual(str(m), 'foo')
    def test_iterable_as_iter_return_value(self):
        m = MagicMock()
        m.__iter__.return_value = [1, 2, 3]
        self.assertEqual(list(m), [1, 2, 3])
        self.assertEqual(list(m), [1, 2, 3])
        m.__iter__.return_value = iter([4, 5, 6])
        self.assertEqual(list(m), [4, 5, 6])
        self.assertEqual(list(m), [])
    @unittest.skipIf(sys.version_info < (3, 5), "@ added in Python 3.5")
    def test_matmul(self):
        # exec'd from a string so the module still parses on Python < 3.5
        src = textwrap.dedent("""\
m = MagicMock()
self.assertIsInstance(m @ 1, MagicMock)
m.__matmul__.return_value = 42
m.__rmatmul__.return_value = 666
m.__imatmul__.return_value = 24
self.assertEqual(m @ 1, 42)
self.assertEqual(1 @ m, 666)
m @= 24
self.assertEqual(m, 24)
""")
        exec(src)
    def test_divmod_and_rdivmod(self):
        m = MagicMock()
        self.assertIsInstance(divmod(5, m), MagicMock)
        m.__divmod__.return_value = (2, 1)
        self.assertEqual(divmod(m, 2), (2, 1))
        m = MagicMock()
        foo = divmod(2, m)
        self.assertIsInstance(foo, MagicMock)
        foo_direct = m.__divmod__(2)
        self.assertIsInstance(foo_direct, MagicMock)
        bar = divmod(m, 2)
        self.assertIsInstance(bar, MagicMock)
        bar_direct = m.__rdivmod__(2)
        self.assertIsInstance(bar_direct, MagicMock)
    # http://bugs.python.org/issue23310
    # Check if you can change behaviour of magic methods in MagicMock init
    def test_magic_in_initialization(self):
        m = MagicMock(**{'__str__.return_value': "12"})
        self.assertEqual(str(m), "12")
    def test_changing_magic_set_in_initialization(self):
        m = MagicMock(**{'__str__.return_value': "12"})
        m.__str__.return_value = "13"
        self.assertEqual(str(m), "13")
        m = MagicMock(**{'__str__.return_value': "12"})
        m.configure_mock(**{'__str__.return_value': "14"})
        self.assertEqual(str(m), "14")
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| {
"content_hash": "01f7c6c1dc0396e5dfe902a3c2d1ae9e",
"timestamp": "",
"source": "github",
"line_count": 529,
"max_line_length": 79,
"avg_line_length": 30.744801512287335,
"alnum_prop": 0.5576119035907526,
"repo_name": "pypingou/mock",
"id": "5e39770a008a6e06921c3e38049160380f4d065e",
"size": "16411",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "mock/tests/testmagicmethods.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "249450"
}
],
"symlink_target": ""
} |
from tempest.services.network import resources as n_resources
# Template used to generate trivial Deletable* subclasses at import time.
DELETABLE_CLASS_DEF = """class %(cls_name)s(n_resources.%(cls_name)s):
pass
"""
# Classes with hand-written overrides below; excluded from generation.
IGNORE_LIST = ['DeletableSubnet', 'DeletableRouter']
# Inherit every other Deletable<Class> from the parent module verbatim.
# NOTE: `exec class_def` is Python 2 statement syntax.
for cls_name in [x for x in dir(n_resources)
                 if x.startswith('Deletable') and x not in IGNORE_LIST]:
    class_def = DELETABLE_CLASS_DEF % dict(cls_name=cls_name)
    exec class_def
# Add/modify methods below while keeping the original functionality.
MSG_BLOCK_BY_ADMIN = "Block %s as router might be owned by ADMIN. " \
                     "Use DeletableRouter instead."
class DeletableSubnet(n_resources.DeletableSubnet):
    """Subnet resource with router-attachment helpers disabled.

    The router may be owned by the admin tenant, so interface wiring has
    to be driven from DeletableRouter rather than from the subnet side.
    """

    def __init__(self, *args, **kwargs):
        super(DeletableSubnet, self).__init__(*args, **kwargs)

    def add_to_router(self, router_id):
        # Deliberately blocked; attach via DeletableRouter instead.
        message = MSG_BLOCK_BY_ADMIN % "add_to_router()"
        raise Exception(message)

    def delete_from_router(self, router_id):
        # Deliberately blocked; detach via DeletableRouter instead.
        message = MSG_BLOCK_BY_ADMIN % "delete_from_router()"
        raise Exception(message)
# DeletableSubnet should not deal with router which when owned by ADMIN
# will raise privilege issue. Always let the router deals with interfaces.
class DeletableRouter(n_resources.DeletableRouter):
    """Router resource that owns its interface wiring.

    Interfaces are added/removed through x_method() on the client so the
    calls also work when the router is owned by the admin tenant; the set
    of attached subnets is tracked locally for teardown in delete().
    """
    def __init__(self, *args, **kwargs):
        super(DeletableRouter, self).__init__(*args, **kwargs)
        # Subnets attached via add_interface(); consumed by delete().
        self._subnets = set()
    def set_gateway(self, network_id):
        # Point the router's external gateway at the given network.
        return self.client.update_router(
            self.id,
            external_gateway_info=dict(network_id=network_id))
    def unset_gateway(self):
        # Clear the external gateway (empty gateway info).
        return self.client.update_router(
            self.id,
            external_gateway_info=dict())
    def add_subnet(self, subnet):
        # Alias kept for API symmetry with DeletableSubnet callers.
        return self.add_interface(subnet)
    def add_interface(self, subnet):
        # should not let subnet add interface to router as
        # the router might be crated by admin.
        """
        self.client.add_router_interface_with_subnbet_id(
            self.id, subnet_id=subnet.id)
        """
        # Call by name via x_method() (client method availability varies).
        x_method(self.client, 'add_router_interface_with_subnet_id',
                 self.id, subnet_id=subnet.id)
        self._subnets.add(subnet)
    def delete_subnet(self, subnet):
        # Alias kept for API symmetry with DeletableSubnet callers.
        return self.delete_interface(subnet)
    def delete_interface(self, subnet):
        """
        self.client.remove_router_interface_with_subnet_id(
            self.id, subnet_id=subnet.id)
        """
        x_method(self.client, 'remove_router_interface_with_subnet_id',
                 self.id, subnet_id=subnet.id)
        # Best-effort bookkeeping: the subnet may not have been tracked.
        try:
            self._subnets.remove(subnet)
        except Exception:
            pass
    def update_extra_routes(self, nexthop, destination):
        return self.client.update_extra_routes(self.id, nexthop, destination)
    # to-be-fixed by https://bugs.launchpad.net/tempest/+bug/1468600
    def update_extra_routes_future(self, routes):
        return self.client.update_extra_routes(self.id, routes)
    def delete_extra_routes(self):
        return self.client.delete_extra_routes(self.id)
    def delete(self):
        # Teardown order matters: routes, gateway, interfaces, then the
        # router itself.
        try:
            self.delete_extra_routes()
        except Exception:
            # Best-effort: routes may not exist or may already be gone.
            pass
        self.unset_gateway()
        for subnet in self._subnets.copy():
            self.delete_interface(subnet)
        super(DeletableRouter, self).delete()
# Workaround solution
def x_method(target_obj, method_name, *args, **kwargs):
    """Call ``target_obj.<method_name>(*args, **kwargs)`` by name.

    Workaround helper: client implementations differ in which interface
    methods they expose, so the call is resolved dynamically.

    :raises Exception: if ``target_obj`` has no attribute ``method_name``.
    """
    _method = getattr(target_obj, method_name, None)
    if _method is None:
        # BUG FIX: the format arguments must be a tuple. The original
        # passed them as two separate Exception arguments, so the "%"
        # formatting itself raised TypeError ("not enough arguments")
        # instead of producing the intended error message.
        raise Exception("Method[%s] is not defined at instance[%s]" %
                        (method_name, str(target_obj)))
    results = _method(*args, **kwargs)
    return results
| {
"content_hash": "6ac8ebc4798d9880740f58a4625abd8c",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 77,
"avg_line_length": 33.75925925925926,
"alnum_prop": 0.6341195831047723,
"repo_name": "gravity-tak/vmware-nsx-tempest",
"id": "1bf6c46d8dfb5841de88db1400b31079019dc67f",
"size": "4414",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vmware_nsx_tempest/tests/nsxv/scenario/net_resources.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "232235"
}
],
"symlink_target": ""
} |
from __future__ import division
import numpy as np
import field_info as field
import pickle
# from evaluation.potentialfield_generated_plot import cleanup_nan_values
from action import Category
import Simulation as Sim
import action as a
from naoth.math import *
import copy
""" General Functions """
# Module-level storage for a pre-generated potential-field lookup table
# and its coordinate index maps (only filled by the disabled bootstrap
# code in the __main__ section at the bottom of this file).
gen_field = []
nx = []
ny = []
def gaussian(x, y, mu_x, mu_y, sigma_x, sigma_y):
    """Unnormalized 2-D Gaussian centered at (mu_x, mu_y); peak value 1."""
    dx = ((x - mu_x) / sigma_x) ** 2
    dy = ((y - mu_y) / sigma_y) ** 2
    return np.exp(-0.5 * (dx + dy))
def slope(x, y, slope_x, slope_y):
    """Linear ramp: weighted sum of the two coordinates."""
    return x * slope_x + y * slope_y
""" Current Potentialfield"""
def evaluate_action(results, state):
    """Mean potential-field value over all usable simulated positions.

    Only positions landing in the field or in the opponent goal count;
    at least one such position must exist.
    """
    values = [evaluate_single_pos(state.pose * p.pos())
              for p in results.positions()
              if p.cat() in (Category.INFIELD, Category.OPPGOAL)]
    assert len(values) > 0
    return sum(values) / len(values)
def evaluate_single_pos(ball_pos):
    """Potential-field value at a single global ball position.

    Linear ramp toward the opponent goal, an attracting Gaussian well at
    the opponent goal and a repelling one at the own goal.
    """
    sigma_x = field.x_opponent_goal / 2.0
    sigma_y = field.y_left_sideline / 2.5
    value = slope(ball_pos.x, ball_pos.y, -1.0 / field.x_opponent_goal, 0.0)
    value -= gaussian(ball_pos.x, ball_pos.y, field.x_opponent_goal, 0.0, sigma_x, sigma_y)
    value += gaussian(ball_pos.x, ball_pos.y, field.x_own_goal, 0.0, 1.5 * sigma_x, sigma_y)
    return value
""" Potentialfield with other robots """
def evaluate_action_with_robots(results, state):
    """Mean potential over valid positions, including robot influence.

    Falls back to the plain potential field when the teammates/opponents
    barely change the score; otherwise continues with an experimental,
    unfinished pass simulation from the expected ball position.
    """
    pf_normal_value = evaluate_action(results, state)
    sum_potential = 0.0
    number_of_actions = 0.0
    for p in results.positions():
        if p.cat() == Category.INFIELD or p.cat() == Category.OPPGOAL:
            sum_potential += evaluate_single_pos_with_robots(state.pose * p.pos(), state.opp_robots, state.own_robots)
            number_of_actions += 1
    assert number_of_actions > 0
    sum_potential /= number_of_actions
    # NOTE(review): despite the name this is an absolute (not squared)
    # difference; it decides whether considering other robots matters.
    squared_difference = np.abs(pf_normal_value - sum_potential)
    if squared_difference < 0.05:  # upper bound - how to choose?
        return pf_normal_value
    # --- experimental from here on ---
    # New ball position assuming a pass is played.
    new_ball_pos = results.expected_ball_pos_mean
    # Update a shallow copy of the state: the robot is placed at the ball.
    new_state = copy.copy(state)
    new_state.ball_position = Vector2(0.0, 0.0)  # Ball = Robot
    new_state.translation = new_ball_pos
    # new_state.rotation: maybe set a rotation for the shortest path to the ball
    new_state.potential_field_function = "normal"
    # Simulation as in Simulation.py (decide_smart)
    actions_consequences = []
    # Simulate the consequences of every possible kick in the new state.
    for action in new_state.actions:
        single_consequence = a.ActionResults([])
        actions_consequences.append(Sim.simulate_consequences(action, single_consequence, new_state, 30))
    # NOTE(review): the chosen action is currently unused; see TODO below.
    best_action_with_team = Sim.decide_smart(actions_consequences, state)
    # compare new best action
    # TODO: Add simulation of a second kick without other robots to have better comparison
    # needed: action which would have been done without other robots
    # what should get returned?? to compare with other actions
    """
    if best_value > pf_normal_value:
        return sum_potential
    """
    return sum_potential
def evaluate_single_pos_with_robots(ball_pos, opp_robots, own_robots):
    """Potential at ``ball_pos`` including opponent/teammate influence.

    Starts from the same base field as evaluate_single_pos(), then adds
    a penalty per nearby opponent and a bonus per nearby teammate.
    """
    sigma_x = field.x_opponent_goal / 2.0
    sigma_y = field.y_left_sideline / 2.5
    value = slope(ball_pos.x, ball_pos.y, -1.0 / field.x_opponent_goal, 0.0)
    value -= gaussian(ball_pos.x, ball_pos.y, field.x_opponent_goal, 0.0, sigma_x, sigma_y)
    value += gaussian(ball_pos.x, ball_pos.y, field.x_own_goal, 0.0, 1.5 * sigma_x, sigma_y)
    # Opponents that can reach the ball quickly make the position worse.
    # (A previously tried variant that made the influence region uniform
    # was removed here; see version history.)
    for opponent in opp_robots:
        value += robot_field_opp(opponent, ball_pos)
    # Teammates that can reach the ball quickly make the position better.
    for teammate in own_robots:
        value -= robot_field_own(teammate, ball_pos)
    return value
def robot_field_own(robot_pos, ball_pos):
    """Reachability score in [0, 1] of the ball for one of our robots.

    1.0 means the robot is at the ball; 0.0 means it would need 5 s or
    more (turn time plus straight-line walk time) to get there.
    NOTE(review): only the translation is removed here — the original
    rotation-aware transform is commented out upstream.
    """
    turn_speed = 60  # degrees per second
    walk_speed = 200  # mm per second
    # Ball position relative to the robot (translation only).
    rel = ball_pos - robot_pos
    heading_deg = np.degrees(np.arctan2(rel.y, rel.x))
    turn_time = np.abs(heading_deg / turn_speed)
    walk_time = np.hypot(rel.x, rel.y) / walk_speed
    total = walk_time + turn_time
    # Clamp to 5 s and map linearly onto [0, 1].
    if total >= 5:
        total = 5
    return 1 - total / 5.
def robot_field_opp(robot_pos, ball_pos):
    """Reachability score in [0, 1] of the ball for an opponent robot.

    NOTE(review): currently an exact duplicate of robot_field_own(); the
    caller adds this score instead of subtracting it.
    """
    turn_speed = 60  # degrees per second
    walk_speed = 200  # mm per second
    # Ball position relative to the opponent (translation only; the
    # rotation-aware transform is commented out upstream).
    rel = ball_pos - robot_pos
    heading_deg = np.degrees(np.arctan2(rel.y, rel.x))
    turn_time = np.abs(heading_deg / turn_speed)
    walk_time = np.hypot(rel.x, rel.y) / walk_speed
    total = walk_time + turn_time
    # Clamp to 5 s and map linearly onto [0, 1].
    if total >= 5:
        total = 5
    return 1 - total / 5.
""" Generated Potentialfield """
"""
def evaluate_action_gen_field(results, state):
sum_potential = 0.0
number_of_actions = 0.0
for p in results.positions():
if p.cat() == Category.INFIELD or p.cat() == Category.OPPGOAL:
sum_potential += evaluate_single_pos(state.pose * p.pos())
number_of_actions += 1
assert number_of_actions > 0
sum_potential /= number_of_actions
return sum_potential
def evaluate_single_pos_gen_field(ball_pos):
# TODO round ball_pos.x and ball_pos.y to the nearest position int the lookup table
ball_pos.x = int(round(x / 200.0) * 200.0)
ball_pos.y = int(round(x / 200.0) * 200.0)
f = gen_field[ny[ball_pos.y], nx[ball_pos.x]]
return f
"""
if __name__ == "__main__":
"""
data_prefix = "D:/RoboCup/Paper-Repos/Bachelor-Schlotter/data/"
gen_field = pickle.load(open(str(data_prefix) + "potential_field_generation/potential_field_gen_own1.pickle", "rb"))
gen_field = cleanup_nan_values(gen_field)
# create a structure for the scalar field
nx = {}
ny = {}
for pos in gen_field:
x, y, time, angle = pos
nx[x] = x
ny[y] = y
nxi = np.array(sorted(nx.keys()))
nyi = np.array(sorted(ny.keys()))
for i, v in enumerate(nxi):
nx[v] = i
for i, v in enumerate(nyi):
ny[v] = i
f = np.zeros((len(ny), len(nx)))
g = np.zeros((len(ny), len(nx)))
# create the scalar fields
for position in gen_field:
x, y, time, _ = position
f[ny[y], nx[x]] = time
g[ny[y], nx[-x]] = time
gen_field = f-g
"""
| {
"content_hash": "097885075c5ab6a9811a46f04789a9ee",
"timestamp": "",
"source": "github",
"line_count": 263,
"max_line_length": 121,
"avg_line_length": 30.011406844106464,
"alnum_prop": 0.6219434942353984,
"repo_name": "BerlinUnited/NaoTH",
"id": "ebc74762ced8c5073c43f0b704f28b44f033e44c",
"size": "7893",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "Utils/py/ActionSelection/experimental/QLearning/Linear_function_approximation/tools/potential_field.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "319"
},
{
"name": "C",
"bytes": "16295"
},
{
"name": "C++",
"bytes": "3831321"
},
{
"name": "CSS",
"bytes": "8839"
},
{
"name": "HTML",
"bytes": "21148"
},
{
"name": "Java",
"bytes": "1816793"
},
{
"name": "Jupyter Notebook",
"bytes": "8092"
},
{
"name": "Lua",
"bytes": "73794"
},
{
"name": "MATLAB",
"bytes": "141780"
},
{
"name": "Python",
"bytes": "1337382"
},
{
"name": "Shell",
"bytes": "60599"
}
],
"symlink_target": ""
} |
# TF1-style input-pipeline demo: a FIFOQueue filled by a QueueRunner on a
# background thread, coordinated with tf.train.Coordinator.
# NOTE(review): these queue-runner APIs are deprecated in favor of
# tf.data in TensorFlow 2.
import tensorflow as tf
# Create input data with three samples from a normal distribution with mean 0 and stddev 1
input_data = tf.random_normal([3], mean=0, stddev=1)
# Create a queue that holds three elements
q = tf.FIFOQueue(3, tf.float32)
# Fill the queue with the data (tf.Print logs the raw values as a side
# effect whenever the tensor is evaluated)
input_data = tf.Print(input_data, data=[input_data], message="Raw inputs data generated:", summarize=3)
init = q.enqueue_many(input_data)
# To leverage multi-threading we create a "QueueRunner"
# that will handle the "enqueue_op" outside of the main thread
# We don't need much parallelism here, so we will use only 1 thread
numberOfThreads = 1
qr = tf.train.QueueRunner(q, [init] * numberOfThreads)
# Don't forget to add your "QueueRunner" to the QUEUE_RUNNERS collection
tf.train.add_queue_runner(qr)
# Dequeue op is used to get the next elements in the queue
x = q.dequeue()
# Each time we use the input tensor, we print the number of elements left in the queue
x = tf.Print(x, data=[q.size(), x], message="Nb elements left:")
# Iterate through the queue
y = x + 1
with tf.Session() as sess:
    # But now we build our coordinator to coordinate our child threads with
    # the main thread
    coord = tf.train.Coordinator()
    # Beware, if you don't start all your queues before runnig anything
    # The main threads will wait for them to start and you will hang again
    # This helper start all queues in tf.GraphKeys.QUEUE_RUNNERS
    threads = tf.train.start_queue_runners(coord=coord)
    # Start the queue until it's full
    sess.run(y)
    sess.run(y)
    sess.run(y)
    # Queue is full
    sess.run(y)
    # We request our child threads to stop ...
    coord.request_stop()
    # ... and we wait for them to do so before releasing the main thread
    coord.join(threads)
| {
"content_hash": "2126967ed4cd487f41f1fa93cf578779",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 103,
"avg_line_length": 36.142857142857146,
"alnum_prop": 0.7148503670242801,
"repo_name": "datitran/Krimskrams",
"id": "f4ef9ed90b4269ceb1aea6da1a1315124748f786",
"size": "1959",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "TensorFlow/threading_queues.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "2764166"
},
{
"name": "Python",
"bytes": "36601"
}
],
"symlink_target": ""
} |
from argparse import ArgumentParser
from logging import getLogger
from os import getenv
from crosscompute.exceptions import (
CrossComputeError)
from crosscompute.routines.automation import (
DiskAutomation)
from crosscompute.routines.log import (
configure_argument_parser_for_logging,
configure_logging_from)
from crosscompute.scripts.configure import (
configure_argument_parser_for_configuring)
def do(arguments=None):
    """CLI entry point: parse arguments, load the automation, run it.

    :param arguments: optional argv list; argparse falls back to
        ``sys.argv[1:]`` when None.
    """
    a = ArgumentParser()
    configure_argument_parser_for_logging(a)
    configure_argument_parser_for_configuring(a)
    configure_argument_parser_for_running(a)
    args = a.parse_args(arguments)
    try:
        configure_logging_from(args)
        configure_running_from(args)
        automation = DiskAutomation.load(args.path_or_folder)
    except CrossComputeError as e:
        # Configuration/load failures are logged, not raised, at the CLI.
        L.error(e)
        return
    run_with(automation, args)
def configure_argument_parser_for_running(a):
    """Register run-specific CLI flags on argument parser ``a``."""
    # --no-rebuild clears args.with_rebuild (which defaults to True).
    rebuild_flag = {
        'dest': 'with_rebuild',
        'action': 'store_false',
        'help': 'do not rebuild container images'}
    a.add_argument('--no-rebuild', **rebuild_flag)
def configure_running_from(args):
    """Attach the run environment (origin URI) to parsed ``args``.

    The CROSSCOMPUTE_ORIGIN_URI environment variable wins; otherwise a
    localhost URI is derived from ``args.port`` when one is present.
    """
    port = getattr(args, 'port', None)
    default_uri = f'http://localhost:{port}' if port else 'http://localhost'
    origin_uri = getenv('CROSSCOMPUTE_ORIGIN_URI') or default_uri
    args.environment = {'CROSSCOMPUTE_ORIGIN_URI': origin_uri}
def run_with(automation, args):
    """Thin adapter: unpack parsed CLI ``args`` and delegate to run()."""
    return run(automation, args.environment, args.with_rebuild)
def run(
        automation,
        environment,
        with_rebuild=True):
    """Run *automation* with *environment* variables.

    CrossComputeError is logged rather than raised; Ctrl-C (KeyboardInterrupt)
    exits quietly.
    """
    try:
        automation.run(environment, with_rebuild)
    except CrossComputeError as e:
        L.error(e)
    except KeyboardInterrupt:
        pass
# Module-level logger used by do() and run() above (resolved at call time).
L = getLogger(__name__)
# Allow direct execution of this script module.
if __name__ == '__main__':
    do()
| {
"content_hash": "f52cfb5956444ccdc0c63353c304c141",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 67,
"avg_line_length": 26.08955223880597,
"alnum_prop": 0.6807780320366132,
"repo_name": "crosscompute/crosscompute",
"id": "83d87f38bed9acea9204090cdea9e786c6f09fc3",
"size": "1748",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "crosscompute/scripts/run.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "227"
},
{
"name": "HTML",
"bytes": "1017"
},
{
"name": "JavaScript",
"bytes": "5264"
},
{
"name": "Jinja",
"bytes": "4487"
},
{
"name": "Python",
"bytes": "270455"
},
{
"name": "Shell",
"bytes": "67"
}
],
"symlink_target": ""
} |
import discord
import asyncio
import permissions
import commands
from datetime import datetime, date, time
import tzlocal
import random
# Discord client — presumably injected by the plugin loader; TODO confirm.
client = None
# One-shot random delay (seconds) chosen once at import time.
nextDelay = random.randrange(10, 60)
# Local timezone, used to make naive message timestamps offset-aware.
tz=tzlocal.get_localzone()
@commands.registerEventHandler(triggerType="\\timeTick", name="dayget")
async def checkDay():
if time(minute = 1, second = 10) < datetime.now().time():
return
if datetime.now().time() < time(second = nextDelay):
return
midnight = datetime.combine(date.today(), time.min.replace(tzinfo=tz)) #Get an offset aware datetime representing the previous midnight
channel = client.get_channel(102981131074297856) #IAA genearl
async for m in channel.history(limit=2):
if tz.localize(m.created_at) > midnight:
return
await channel.send( "New Day Get")
#if discord.utils.find(lambda m: m.timestamp > midnight, list(channel.history(limit=2)) is None:
#await channel.send( "New Day Get") | {
"content_hash": "aa6d6b601f44484d6ea7acd748502619",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 139,
"avg_line_length": 31.9375,
"alnum_prop": 0.6673189823874756,
"repo_name": "Azimath/discordbot",
"id": "3a4143ea9ca838a21a0b9a3fddb3cb330b2f5b6a",
"size": "1022",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugins/daygetter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "12992"
},
{
"name": "Python",
"bytes": "102175"
},
{
"name": "Shell",
"bytes": "75"
}
],
"symlink_target": ""
} |
"""Methods for gathering statistics on the unit test coverage of a library.
"""
def testability(parser, module):
    """Returns a dictionary of testability scores, indexed by executable
    name with value {"score", "dscore", "pscore"}.
    """
    scores = {}
    for xinst in _list_executables(parser, module).values():
        pscore, dscore, total = _testability(xinst)
        if pscore is None:
            # Executable already has unit tests (or is marked skip).
            continue
        scores[xinst.full_name] = {
            "score": total, "pscore": pscore, "dscore": dscore}
    return scores
def _testability(xinst, xall=None):
    """Returns a testability score for the specified executable instance.
    This takes into account the depth of the dependency chains, number of
    parameters and the test coverage of the dependency calls.

    Returns a tuple (pscore, dscore, pscore + dscore), or (None, None,
    None) when the executable is skipped because it is already tested.

    :arg xall: when True, scores are even returned for executables that have
    unit tests defined; otherwise, only non-tested ones are scored.
    """
    analysis = _analyze(xinst)
    if (xall is None and ((analysis["ntests"] > 0 and analysis["ncases"] > 0)
                          or analysis["skip"])):
        return (None, None, None)
    # Only the input parameters are relevant for the testability score.
    inpars = [n for n, p in analysis["params"].items() if "in" in p["direction"]]
    # For the dependencies, we are interested in how many there are and
    # which of those have good coverage with unit tests already.
    covered = []
    uncovered = []
    for kdep, vdepl in xinst.dependencies.items():
        for vdep in vdepl:
            if isinstance(vdep, str):
                continue
            if vdep.target is not None:
                danal = _analyze(vdep.target)
                if (danal["ntests"] > 0 and danal["ncases"] > 0):
                    # This dependency has unit tests defined for it already, so
                    # the hierarchy is preserved if the calling method is tested.
                    covered.append(vdep)
                else:
                    # Collect the dependency's "in" argument names that are
                    # not already counted (either globally or as our own
                    # input parameters).
                    iparams = [a for (a, p) in zip(vdep.argnames, vdep.target.ordered_parameters)
                               if "in" in p.direction]
                    uncovered.extend([p for p in iparams if p not in uncovered and p not in inpars])
    # Now we assign scores to the number of input parameters and dependencies.
    from math import exp
    # For dependencies, the covered ones offer no *penalty* to running the tests
    # whereas the uncovered ones do. Also, if the covered ones use any of the input
    # parameters that the calling method uses, there is a high likelihood that the
    # data for those parameters already exists. For uncovered dependencies, we only
    # care about them if they have input parameters.
    dscore = -(1. - exp(-len(uncovered)/5.))
    overlap = []
    for cdep in covered:
        overlap.extend([a for a in cdep.argnames if a in inpars and a not in overlap])
    # If there are no input parameters, then this score will be 1.0; as more
    # input parameters are added, it becomes more difficult to test.
    pscore = exp(-(len(inpars) - len(overlap))/5.)
    return pscore, dscore, pscore + dscore
def _list_executables(parser, module):
"""Returns the dictionary of executables for the specified module *name*
in the code parser.
"""
mkey = module.lower()
if mkey not in parser.modules:
parser.load_dependency(mkey, True, True)
if mkey in parser.modules:
return parser.modules[mkey].executables
else:
raise ValueError("The code parser cannot locate module '{}'".format(mkey) +
" for analyzing unit testing statistics.")
def summary(parser, module):
    """Returns a dictionary and text summarizing the coverage of the
    specified module.

    :arg parser: the fortpy.code.CodeParser instance.
    :arg module: the name of the module to return a summary for.
    """
    coverage = {}
    for xinst in _list_executables(parser, module).values():
        stats = _analyze(xinst)
        coverage[xinst.full_name] = (stats, _describe(xinst, stats))
    return coverage
def _analyze(anexec):
"""Analyzes the specified executable instance to get unit testing
coverage statistics.
"""
#Examine the test_group instance to see if we have any testable
#instances. If we do, take a look at the cases to determine coverage.
#Setup a defaults dictionary with the stats fields we may find
#information about.
result = {
"ntests": 0,
"tests": {},
"ncases": 0,
"summary": False,
"params": {},
"nparams": 0,
"skip": False
}
"""Dictionary of results for analysis of the unit test coverage for
this executable. Keys:
ntests: number of unit tests defined for the executable.
tests: dictionary keyed by unit test identifier; describes the details
of coverage for each unit test (cases, etc.).
ncases: the total number of test cases across all tests.
summary: true if the docstring summary exists.
params: dictionary of summary, regularity and constraint data for each
parameter in the executable.
nparams: the number of parameters with summary tags.
"""
if anexec.test_group is not None:
tg = anexec.test_group
for tname, tinst in tg.tests.items():
result["tests"][tname] = {
#The 1 for no cases listed is for the default case that exists whenever
#a test is set to run checks on model outputs
"cases": 1 if tinst.cases is None else len(tinst.cases),
"inputs": len(tinst.inputs),
"outputs": len(tinst.outputs),
"assignments": len(tinst.methods),
"globals": len(tinst.variables),
"targets": len(tinst.targets),
"enabled": tinst.runchecks and tinst.execute,
"executes": tinst.execute,
"timed": tinst.timed
}
result["ncases"] += result["tests"][tname]["cases"] if tinst.runchecks else 0
result["ntests"] += 1 if result["tests"][tname]["enabled"] else 0
#<skip enabled="true">
result["skip"] = any([d.xml.tag == "skip" and "enabled" in d.xml.attrib
and d.xml.attrib["enabled"].lower() == "true"
for d in anexec.docstring])
result["summary"] = anexec.summary != "No summary for element."
result["nparams"] = 0
result["nparams.in"] = 0
for pinst in anexec.ordered_parameters:
hassum = pinst.summary != "No summary for element.",
result["params"][pinst.name] = {
"summary": hassum,
"regular": any(["regular" in d.xml.attrib and d.xml.attrib["regular"].lower() == "true"
for d in pinst.docstring]),
"bounded": any(["range" in d.xml.attrib for d in pinst.docstring]),
"direction": pinst.direction
}
if hassum:
result["nparams"] += 1
if "in" in pinst.direction:
result["nparams.in"] += 1
pgood = result["nparams"] == len(anexec.parameters)
if result["summary"]:
if pgood:
docs = "GOOD"
else:
docs = "OK"
else:
docs = "NO"
result["docsum"] = docs
return result
fmtstr = "{0:<40s} | {1:^5d} | {2:^9.2f} | {3:^4s} |"
headstr = "{0:<40s} | {1:^5s} | {2:^9s} | {3:^4s} |"
dheader = headstr.format(*("Executable Identifier", "Tests", "Case/Test", "Docs"))
def _describe(anexec, analysis):
    """Return one formatted table row summarizing *analysis*.

    :arg analysis: the result from calling _analyze(anexec).
    """
    # Average cases per test, guarding against a zero test count.
    test_count = analysis["ntests"]
    cases_per_test = float(analysis["ncases"]) / (test_count if test_count > 0 else 1)
    return fmtstr.format(anexec.full_name, test_count, cases_per_test,
                         analysis["docsum"])
| {
"content_hash": "acdc2d81595f0251fee39f9e2d21ad11",
"timestamp": "",
"source": "github",
"line_count": 191,
"max_line_length": 100,
"avg_line_length": 42.78534031413613,
"alnum_prop": 0.613558492413118,
"repo_name": "rosenbrockc/fortpy",
"id": "7396221cd170b89e2bec294f417f5319e57f3cfb",
"size": "8172",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fortpy/stats/testing.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Fortran",
"bytes": "1511880"
},
{
"name": "Python",
"bytes": "1108723"
},
{
"name": "Roff",
"bytes": "4249"
},
{
"name": "TeX",
"bytes": "35594"
}
],
"symlink_target": ""
} |
from datetime import datetime
from dajaxice.core import dajaxice_functions
from dajax.core import Dajax
from treeio.projects.models import Task, Milestone
from django.contrib import messages
from django.utils.translation import ugettext as _
def gantt(request, task, start, end):
    """Ajax handler: update the start/end dates of a task or milestone.

    :param task: primary key used for the Task, then Milestone, lookup.
    :param start: new start date string in '%Y-%m-%d' format.
    :param end: new end date string in '%Y-%m-%d' format.
    """
    dajax = Dajax()
    try:
        t = Task.objects.get(pk=task)
        ot = _("Task")
    except Task.DoesNotExist:
        # BUG FIX: previously a bare `except:` hid every other error
        # (e.g. a malformed pk) behind the milestone lookup; only the
        # missing-task case should fall through to Milestone.
        t = Milestone.objects.get(pk=task)
        ot = _("Milestone")
    # Hour 12 — presumably noon is used to avoid timezone/DST shifts
    # moving the stored date; TODO confirm.
    s = datetime.strptime(start, '%Y-%m-%d').replace(hour=12)
    e = datetime.strptime(end, '%Y-%m-%d').replace(hour=12)
    t.start_date = s
    t.end_date = e
    t.save()
    messages.add_message(request, messages.INFO, _(
        "%(ot)s \"%(t)s\" dates have been updated.") % {'ot': ot, 't': unicode(t)})
    return dajax.json()
# Expose the handler to the dajaxice Ajax dispatcher.
dajaxice_functions.register(gantt)
| {
"content_hash": "70ae7ae5a82d34f6cb509c9391a6ebae",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 83,
"avg_line_length": 31.76923076923077,
"alnum_prop": 0.6464891041162227,
"repo_name": "hellfish2/treeio",
"id": "5c94d3f94e02c671aae673a165f67f7448109ecb",
"size": "939",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "projects/ajax.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
"""
Class for VM tasks like spawn, snapshot, suspend, resume etc.
"""
import collections
import os
import time
import decorator
from oslo.config import cfg
from oslo.utils import excutils
from oslo.utils import units
from oslo.vmware import exceptions as vexc
from nova.api.metadata import base as instance_metadata
from nova import compute
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import vm_states
from nova.console import type as ctype
from nova import context as nova_context
from nova import exception
from nova.i18n import _, _LE, _LW
from nova import objects
from nova.openstack.common import lockutils
from nova.openstack.common import log as logging
from nova.openstack.common import uuidutils
from nova import utils
from nova.virt import configdrive
from nova.virt import diagnostics
from nova.virt import driver
from nova.virt.vmwareapi import constants
from nova.virt.vmwareapi import ds_util
from nova.virt.vmwareapi import error_util
from nova.virt.vmwareapi import imagecache
from nova.virt.vmwareapi import vif as vmwarevif
from nova.virt.vmwareapi import vim_util
from nova.virt.vmwareapi import vm_util
from nova.virt.vmwareapi import vmware_images
CONF = cfg.CONF
CONF.import_opt('image_cache_subdirectory_name', 'nova.virt.imagecache')
CONF.import_opt('remove_unused_base_images', 'nova.virt.imagecache')
CONF.import_opt('vnc_enabled', 'nova.vnc')
CONF.import_opt('my_ip', 'nova.netconf')
LOG = logging.getLogger(__name__)
# Map vSphere VM power-state strings onto nova power states.
VMWARE_POWER_STATES = {
    'poweredOff': power_state.SHUTDOWN,
    'poweredOn': power_state.RUNNING,
    'suspended': power_state.SUSPENDED}
# Number of progress steps reported during a resize/migration.
RESIZE_TOTAL_STEPS = 4
# Lightweight datacenter record: managed-object ref, name and VM folder.
DcInfo = collections.namedtuple('DcInfo',
                                ['ref', 'name', 'vmFolder'])
class VirtualMachineInstanceConfigInfo(object):
    """Parameters needed to create and configure a new instance."""

    def __init__(self, instance, instance_name, image_info,
                 datastore, dc_info, image_cache):
        # The instance object itself is kept mostly so helper methods
        # called during spawn can log against it.
        self.instance = instance
        # A rescue spawn may use a name that differs from the uuid.
        self.instance_name = instance_name or instance.uuid
        self.ii = image_info
        self.root_gb = instance.root_gb
        self.datastore = datastore
        self.dc_info = dc_info
        self._image_cache = image_cache

    @property
    def cache_image_folder(self):
        """Datastore folder caching this image, or None when imageless."""
        if self.ii.image_id is None:
            return
        return self._image_cache.get_image_cache_folder(
            self.datastore, self.ii.image_id)

    @property
    def cache_image_path(self):
        """Full datastore path of the cached image file, or None."""
        if self.ii.image_id is None:
            return
        file_name = "%s.%s" % (self.ii.image_id, self.ii.file_type)
        return self.cache_image_folder.join(file_name)
# Note(vui): See https://bugs.launchpad.net/nova/+bug/1363349
# for cases where mocking time.sleep() can have unintended effects on code
# not under test. For now, unblock the affected test cases by providing
# a wrapper function to work around needing to mock time.sleep()
def _time_sleep_wrapper(delay):
    """Sleep *delay* seconds; exists only so tests can mock this symbol
    instead of time.sleep directly (see note above)."""
    time.sleep(delay)
@decorator.decorator
def retry_if_task_in_progress(f, *args, **kwargs):
    """Retry *f* with capped exponential backoff (1s doubling up to 60s)
    while the backend reports TaskInProgress.

    NOTE(review): if every attempt raises TaskInProgress the loop ends
    and None is returned — the final exception is swallowed; confirm
    this is intended.
    """
    retries = max(CONF.vmware.api_retry_count, 1)
    delay = 1
    for attempt in range(1, retries + 1):
        # Sleep before every attempt except the first.
        if attempt != 1:
            _time_sleep_wrapper(delay)
            delay = min(2 * delay, 60)
        try:
            f(*args, **kwargs)
            return
        except error_util.TaskInProgress:
            pass
class VMwareVMOps(object):
"""Management class for VM-related tasks."""
    def __init__(self, session, virtapi, volumeops, cluster=None,
                 datastore_regex=None):
        """Initializer.

        :param session: VMware API session used for all backend calls.
        :param virtapi: nova virt API handle.
        :param volumeops: volume-operations helper.
        :param cluster: optional cluster reference to scope operations to.
        :param datastore_regex: optional regex restricting datastore choice.
        """
        self.compute_api = compute.API()
        self._session = session
        self._virtapi = virtapi
        self._volumeops = volumeops
        self._cluster = cluster
        self._datastore_regex = datastore_regex
        # Ensure that the base folder is unique per compute node
        if CONF.remove_unused_base_images:
            self._base_folder = '%s%s' % (CONF.my_ip,
                                          CONF.image_cache_subdirectory_name)
        else:
            # Aging disable ensures backward compatibility
            self._base_folder = CONF.image_cache_subdirectory_name
        self._tmp_folder = 'vmware_temp'
        self._default_root_device = 'vda'
        self._rescue_suffix = '-rescue'
        self._migrate_suffix = '-orig'
        # Caches for datacenter / datastore-browser lookups.
        self._datastore_dc_mapping = {}
        self._datastore_browser_mapping = {}
        self._imagecache = imagecache.ImageCacheManager(self._session,
                                                        self._base_folder)
    def _extend_virtual_disk(self, instance, requested_size, name, dc_ref):
        """Grow virtual disk *name* to *requested_size* (KB) via a
        vCenter ExtendVirtualDisk task.

        On failure the partially-extended files are cleaned up and the
        exception is re-raised.
        """
        service_content = self._session.vim.service_content
        LOG.debug("Extending root virtual disk to %s", requested_size)
        vmdk_extend_task = self._session._call_method(
            self._session.vim,
            "ExtendVirtualDisk_Task",
            service_content.virtualDiskManager,
            name=name,
            datacenter=dc_ref,
            newCapacityKb=requested_size,
            eagerZero=False)
        try:
            self._session._wait_for_task(vmdk_extend_task)
        except Exception as e:
            with excutils.save_and_reraise_exception():
                LOG.error(_('Extending virtual disk failed with error: %s'),
                          e, instance=instance)
                # Clean up files created during the extend operation
                files = [name.replace(".vmdk", "-flat.vmdk"), name]
                for file in files:
                    ds_path = ds_util.DatastorePath.parse(file)
                    self._delete_datastore_file(ds_path, dc_ref)
        LOG.debug("Extended root virtual disk")
def _delete_datastore_file(self, datastore_path, dc_ref):
try:
ds_util.file_delete(self._session, datastore_path, dc_ref)
except (vexc.CannotDeleteFileException,
vexc.FileFaultException,
vexc.FileLockedException,
vexc.FileNotFoundException):
LOG.debug("Unable to delete %(ds)s. There may be more than "
"one process or thread trying to delete the file",
{'ds': datastore_path},
exc_info=True)
def _extend_if_required(self, dc_info, image_info, instance,
root_vmdk_path):
"""Increase the size of the root vmdk if necessary."""
if instance.root_gb * units.Gi > image_info.file_size:
size_in_kb = instance.root_gb * units.Mi
self._extend_virtual_disk(instance, size_in_kb,
root_vmdk_path, dc_info.ref)
    def _configure_config_drive(self, instance, vm_ref, dc_info, datastore,
                                injected_files, admin_password):
        """Build a config drive ISO, upload it and attach it to the VM.

        The ISO is created locally, uploaded to the instance's datastore
        and attached to the VM as a CD-ROM device.
        """
        session_vim = self._session.vim
        cookies = session_vim.client.options.transport.cookiejar

        uploaded_iso_path = self._create_config_drive(instance,
                                                      injected_files,
                                                      admin_password,
                                                      datastore.name,
                                                      dc_info.name,
                                                      instance['uuid'],
                                                      cookies)
        uploaded_iso_path = datastore.build_path(uploaded_iso_path)
        self._attach_cdrom_to_vm(
            vm_ref, instance,
            datastore.ref,
            str(uploaded_iso_path))
    def build_virtual_machine(self, instance, instance_name, image_info,
                              dc_info, datastore, network_info):
        """Create the VM (shell only, no disks) and return its reference.

        Resolves the resource pool from the instance's node, builds VIF
        info for the requested network model, applies cpu allocation
        quotas from the flavor, and issues the CreateVM call.

        :returns: managed object reference of the created VM
        """
        node_mo_id = vm_util.get_mo_id_from_instance(instance)
        res_pool_ref = vm_util.get_res_pool_ref(self._session,
                                                self._cluster, node_mo_id)
        vif_infos = vmwarevif.get_vif_info(self._session,
                                           self._cluster,
                                           utils.is_neutron(),
                                           image_info.vif_model,
                                           network_info)

        allocations = self._get_cpu_allocations(instance.instance_type_id)

        # Get the create vm config spec
        client_factory = self._session.vim.client.factory
        config_spec = vm_util.get_vm_create_spec(client_factory,
                                                 instance,
                                                 instance_name,
                                                 datastore.name,
                                                 vif_infos,
                                                 image_info.os_type,
                                                 allocations=allocations)
        # Create the VM
        vm_ref = vm_util.create_vm(self._session, instance, dc_info.vmFolder,
                                   config_spec, res_pool_ref)
        return vm_ref
def _get_cpu_allocations(self, instance_type_id):
# Read flavors for allocations
flavor = objects.Flavor.get_by_id(
nova_context.get_admin_context(read_deleted='yes'),
instance_type_id)
allocations = {}
for (key, type) in (('cpu_limit', int),
('cpu_reservation', int),
('cpu_shares_level', str),
('cpu_shares_share', int)):
value = flavor.extra_specs.get('quota:' + key)
if value:
allocations[key] = type(value)
return allocations
    def _fetch_image_as_file(self, context, vi, image_ds_loc):
        """Download image as an individual file to host via HTTP PUT.

        :param context: request context for the glance download
        :param vi: VirtualMachineInstanceConfigInfo for the spawn
        :param image_ds_loc: datastore path the image is written to
        """
        session = self._session
        session_vim = session.vim
        # Reuse the authenticated vSphere session cookies for the transfer.
        cookies = session_vim.client.options.transport.cookiejar

        LOG.debug("Downloading image file data %(image_id)s to "
                  "%(file_path)s on the data store "
                  "%(datastore_name)s",
                  {'image_id': vi.ii.image_id,
                   'file_path': image_ds_loc,
                   'datastore_name': vi.datastore.name},
                  instance=vi.instance)
        vmware_images.fetch_image(
            context,
            vi.instance,
            session._host,
            vi.dc_info.name,
            vi.datastore.name,
            image_ds_loc.rel_path,
            cookies=cookies)
def _prepare_sparse_image(self, vi):
tmp_dir_loc = vi.datastore.build_path(
self._tmp_folder, uuidutils.generate_uuid())
tmp_image_ds_loc = tmp_dir_loc.join(
vi.ii.image_id, "tmp-sparse.vmdk")
return tmp_dir_loc, tmp_image_ds_loc
    def _prepare_flat_image(self, vi):
        """Prepare a temp location for fetching a flat (preallocated) image.

        Creates the vmdk descriptor up front via CreateVirtualDisk, then
        deletes the -flat.vmdk data file it produced: the subsequent fetch
        writes the real image data into that -flat.vmdk path.

        :returns: (temp dir path, -flat.vmdk path the fetch writes to)
        """
        tmp_dir_loc = vi.datastore.build_path(
            self._tmp_folder, uuidutils.generate_uuid())
        tmp_image_ds_loc = tmp_dir_loc.join(
            vi.ii.image_id, vi.cache_image_path.basename)
        ds_util.mkdir(self._session, tmp_image_ds_loc.parent, vi.dc_info.ref)
        # Creates both the .vmdk descriptor and an empty -flat.vmdk.
        vm_util.create_virtual_disk(
            self._session, vi.dc_info.ref,
            vi.ii.adapter_type,
            vi.ii.disk_type,
            str(tmp_image_ds_loc),
            vi.ii.file_size_in_kb)
        flat_vmdk_name = vi.cache_image_path.basename.replace('.vmdk',
                                                              '-flat.vmdk')
        flat_vmdk_ds_loc = tmp_dir_loc.join(vi.ii.image_id, flat_vmdk_name)
        # Remove the empty data file; image bytes will be fetched into it.
        self._delete_datastore_file(str(flat_vmdk_ds_loc), vi.dc_info.ref)
        return tmp_dir_loc, flat_vmdk_ds_loc
def _prepare_iso_image(self, vi):
tmp_dir_loc = vi.datastore.build_path(
self._tmp_folder, uuidutils.generate_uuid())
tmp_image_ds_loc = tmp_dir_loc.join(
vi.ii.image_id, vi.cache_image_path.basename)
return tmp_dir_loc, tmp_image_ds_loc
    def _move_to_cache(self, dc_ref, src_folder_ds_path, dst_folder_ds_path):
        """Move a fetched image folder into the image cache.

        A FileAlreadyExists fault is tolerated: another actor may have
        already populated the cache entry. Any other fault propagates.
        """
        try:
            ds_util.file_move(self._session, dc_ref,
                              src_folder_ds_path, dst_folder_ds_path)
        except vexc.FileAlreadyExistsException:
            # Folder move has failed. This may be due to the fact that a
            # process or thread has already completed the operation.
            # Since image caching is synchronized, this can only happen
            # due to action external to the process.
            # In the event of a FileAlreadyExists we continue,
            # all other exceptions will be raised.
            LOG.warning(_LW("Destination %s already exists! Concurrent moves "
                            "can lead to unexpected results."),
                        dst_folder_ds_path)
    def _cache_sparse_image(self, vi, tmp_image_ds_loc):
        """Convert a fetched sparse image and move it into the cache.

        The sparse vmdk is copied (which converts it to a preallocated
        disk) alongside itself, the sparse original is deleted, and the
        containing folder is moved into the image cache.
        """
        tmp_dir_loc = tmp_image_ds_loc.parent.parent
        converted_image_ds_loc = tmp_dir_loc.join(
            vi.ii.image_id, vi.cache_image_path.basename)
        # converts fetched image to preallocated disk
        vm_util.copy_virtual_disk(
            self._session,
            vi.dc_info.ref,
            str(tmp_image_ds_loc),
            str(converted_image_ds_loc))

        self._delete_datastore_file(str(tmp_image_ds_loc), vi.dc_info.ref)

        self._move_to_cache(vi.dc_info.ref,
                            tmp_image_ds_loc.parent,
                            vi.cache_image_folder)
def _cache_flat_image(self, vi, tmp_image_ds_loc):
self._move_to_cache(vi.dc_info.ref,
tmp_image_ds_loc.parent,
vi.cache_image_folder)
def _cache_iso_image(self, vi, tmp_image_ds_loc):
self._move_to_cache(vi.dc_info.ref,
tmp_image_ds_loc.parent,
vi.cache_image_folder)
    def _get_vm_config_info(self, instance, image_info, instance_name=None):
        """Captures all relevant information from the spawn parameters.

        :raises: InstanceUnacceptable if the image is larger than the
                 requested (non-zero) root disk.
        :returns: a VirtualMachineInstanceConfigInfo bundling instance,
                  image, datastore and datacenter details.
        """
        # root_gb == 0 means "no size requested"; skip the size check then.
        if (instance.root_gb != 0 and
                image_info.file_size > instance.root_gb * units.Gi):
            reason = _("Image disk size greater than requested disk size")
            raise exception.InstanceUnacceptable(instance_id=instance.uuid,
                                                 reason=reason)
        datastore = ds_util.get_datastore(
            self._session, self._cluster, self._datastore_regex)
        dc_info = self.get_datacenter_ref_and_name(datastore.ref)

        return VirtualMachineInstanceConfigInfo(instance,
                                                instance_name,
                                                image_info,
                                                datastore,
                                                dc_info,
                                                self._imagecache)
def _get_image_callbacks(self, vi):
disk_type = vi.ii.disk_type
image_fetch = self._fetch_image_as_file
if vi.ii.is_iso:
image_prepare = self._prepare_iso_image
image_cache = self._cache_iso_image
elif disk_type == constants.DISK_TYPE_SPARSE:
image_prepare = self._prepare_sparse_image
image_cache = self._cache_sparse_image
elif disk_type in constants.SUPPORTED_FLAT_VARIANTS:
image_prepare = self._prepare_flat_image
image_cache = self._cache_flat_image
else:
reason = _("disk type '%s' not supported") % disk_type
raise exception.InvalidDiskInfo(reason=reason)
return image_prepare, image_fetch, image_cache
    def _fetch_image_if_missing(self, context, vi):
        """Fetch the image into the datastore cache unless already cached.

        The whole check-fetch-cache sequence is serialized per cache path
        with a local lock; see _move_to_cache for how cross-process races
        are tolerated.
        """
        image_prepare, image_fetch, image_cache = self._get_image_callbacks(vi)
        LOG.debug("Processing image %s", vi.ii.image_id)

        with lockutils.lock(str(vi.cache_image_path),
                            lock_file_prefix='nova-vmware-fetch_image'):
            self.check_cache_folder(vi.datastore.name, vi.datastore.ref)
            ds_browser = self._get_ds_browser(vi.datastore.ref)
            if not ds_util.file_exists(self._session, ds_browser,
                                       vi.cache_image_folder,
                                       vi.cache_image_path.basename):
                LOG.debug("Preparing fetch location")
                tmp_dir_loc, tmp_image_ds_loc = image_prepare(vi)
                LOG.debug("Fetch image to %s", tmp_image_ds_loc)
                image_fetch(context, vi, tmp_image_ds_loc)
                LOG.debug("Caching image")
                image_cache(vi, tmp_image_ds_loc)
                LOG.debug("Cleaning up location %s", str(tmp_dir_loc))
                self._delete_datastore_file(str(tmp_dir_loc), vi.dc_info.ref)
    def spawn(self, context, instance, image_meta, injected_files,
              admin_password, network_info, block_device_info=None,
              instance_name=None, power_on=True):
        """Create a new VM for the instance and optionally power it on.

        Creates the VM shell, then either attaches a root volume (when
        block device mappings are present) or fetches/caches the image and
        attaches it (as ISO, linked clone, or full clone). Optionally
        attaches a config drive and powers the VM on.
        """
        client_factory = self._session.vim.client.factory
        image_info = vmware_images.VMwareImage.from_image(instance.image_ref,
                                                          image_meta)
        vi = self._get_vm_config_info(instance, image_info, instance_name)

        # Creates the virtual machine. The virtual machine reference returned
        # is unique within Virtual Center.
        vm_ref = self.build_virtual_machine(instance,
                                            vi.instance_name,
                                            image_info,
                                            vi.dc_info,
                                            vi.datastore,
                                            network_info)

        # Cache the vm_ref. This saves a remote call to the VC. This uses the
        # instance_name. This covers all use cases including rescue and resize.
        vm_util.vm_ref_cache_update(vi.instance_name, vm_ref)

        # Set the machine.id parameter of the instance to inject
        # the NIC configuration inside the VM
        if CONF.flat_injected:
            self._set_machine_id(client_factory, instance, network_info)

        # Set the vnc configuration of the instance, vnc port starts from 5900
        if CONF.vnc_enabled:
            self._get_and_set_vnc_config(client_factory, instance)

        block_device_mapping = []
        if block_device_info is not None:
            block_device_mapping = driver.block_device_info_get_mapping(
                block_device_info)

        # NOTE(mdbooth): the logic here is that we ignore the image if there
        # are block device mappings. This behaviour is incorrect, and a bug in
        # the driver. We should be able to accept an image and block device
        # mappings.
        if len(block_device_mapping) > 0:
            msg = "Block device information present: %s" % block_device_info
            # NOTE(mriedem): block_device_info can contain an auth_password
            # so we have to scrub the message before logging it.
            LOG.debug(logging.mask_password(msg), instance=instance)

            for root_disk in block_device_mapping:
                connection_info = root_disk['connection_info']
                # TODO(hartsocks): instance is unnecessary, remove it
                # we still use instance in many locations for no other purpose
                # than logging, can we simplify this?
                self._volumeops.attach_root_volume(connection_info, instance,
                                                   self._default_root_device,
                                                   vi.datastore.ref)
        else:
            self._imagecache.enlist_image(
                image_info.image_id, vi.datastore, vi.dc_info.ref)
            self._fetch_image_if_missing(context, vi)

            if image_info.is_iso:
                self._use_iso_image(vm_ref, vi)
            elif image_info.linked_clone:
                self._use_disk_image_as_linked_clone(vm_ref, vi)
            else:
                self._use_disk_image_as_full_clone(vm_ref, vi)

        if configdrive.required_by(instance):
            self._configure_config_drive(
                instance, vm_ref, vi.dc_info, vi.datastore,
                injected_files, admin_password)

        if power_on:
            vm_util.power_on_instance(self._session, instance, vm_ref=vm_ref)
    def _create_config_drive(self, instance, injected_files, admin_password,
                             data_store_name, dc_name, upload_folder, cookies):
        """Build a config drive ISO locally and upload it to the datastore.

        :returns: the datastore-relative path the ISO was uploaded to
        :raises: InstancePowerOnFailure if config_drive_format is not
                 'iso9660' (the only format this driver supports)
        """
        if CONF.config_drive_format != 'iso9660':
            reason = (_('Invalid config_drive_format "%s"') %
                      CONF.config_drive_format)
            raise exception.InstancePowerOnFailure(reason=reason)

        LOG.info(_('Using config drive for instance'), instance=instance)
        extra_md = {}
        if admin_password:
            extra_md['admin_pass'] = admin_password

        inst_md = instance_metadata.InstanceMetadata(instance,
                                                     content=injected_files,
                                                     extra_md=extra_md)
        try:
            with configdrive.ConfigDriveBuilder(instance_md=inst_md) as cdb:
                # Build the ISO in a temp dir, then upload it next to the
                # instance's other files on the datastore.
                with utils.tempdir() as tmp_path:
                    tmp_file = os.path.join(tmp_path, 'configdrive.iso')
                    cdb.make_drive(tmp_file)
                    upload_iso_path = "%s/configdrive.iso" % (
                        upload_folder)
                    vmware_images.upload_iso_to_datastore(
                        tmp_file, instance,
                        host=self._session._host,
                        data_center_name=dc_name,
                        datastore_name=data_store_name,
                        cookies=cookies,
                        file_path=upload_iso_path)
                    return upload_iso_path
        except Exception as e:
            with excutils.save_and_reraise_exception():
                LOG.error(_('Creating config drive failed with error: %s'),
                          e, instance=instance)
    def _attach_cdrom_to_vm(self, vm_ref, instance,
                            datastore, file_path):
        """Attach cdrom to VM by reconfiguration.

        Allocates (or reuses) an IDE controller slot, builds the cdrom
        attach config spec, and reconfigures the VM.
        """
        client_factory = self._session.vim.client.factory
        devices = self._session._call_method(vim_util,
                                             "get_dynamic_property", vm_ref,
                                             "VirtualMachine",
                                             "config.hardware.device")
        (controller_key, unit_number,
         controller_spec) = vm_util.allocate_controller_key_and_unit_number(
             client_factory,
             devices,
             'ide')
        cdrom_attach_config_spec = vm_util.get_cdrom_attach_config_spec(
            client_factory, datastore, file_path,
            controller_key, unit_number)
        # controller_spec is only set when a new controller must be added.
        if controller_spec:
            cdrom_attach_config_spec.deviceChange.append(controller_spec)

        LOG.debug("Reconfiguring VM instance to attach cdrom %s",
                  file_path, instance=instance)
        vm_util.reconfigure_vm(self._session, vm_ref, cdrom_attach_config_spec)
        LOG.debug("Reconfigured VM instance to attach cdrom %s",
                  file_path, instance=instance)
    def _create_vm_snapshot(self, instance, vm_ref):
        """Take a quiesced, memory-less snapshot of the VM.

        :returns: the managed object reference of the created snapshot,
                  read back from the completed task's info.result
        """
        LOG.debug("Creating Snapshot of the VM instance", instance=instance)
        snapshot_task = self._session._call_method(
            self._session.vim,
            "CreateSnapshot_Task", vm_ref,
            name="%s-snapshot" % instance.uuid,
            description="Taking Snapshot of the VM",
            memory=False,
            quiesce=True)
        self._session._wait_for_task(snapshot_task)
        LOG.debug("Created Snapshot of the VM instance", instance=instance)
        task_info = self._session._call_method(vim_util,
                                               "get_dynamic_property",
                                               snapshot_task, "Task", "info")
        snapshot = task_info.result
        return snapshot
    @retry_if_task_in_progress
    def _delete_vm_snapshot(self, instance, vm_ref, snapshot):
        """Remove a VM snapshot, consolidating its disks.

        Retried by the decorator when vCenter reports a task already in
        progress on the VM.
        """
        LOG.debug("Deleting Snapshot of the VM instance", instance=instance)
        delete_snapshot_task = self._session._call_method(
            self._session.vim,
            "RemoveSnapshot_Task", snapshot,
            removeChildren=False, consolidate=True)
        self._session._wait_for_task(delete_snapshot_task)
        LOG.debug("Deleted Snapshot of the VM instance", instance=instance)
    def snapshot(self, context, instance, image_id, update_task_state):
        """Create snapshot from a running VM instance.

        Steps followed are:

        1. Get the name of the vmdk file which the VM points to right now.
           Can be a chain of snapshots, so we need to know the last in the
           chain.
        2. Create the snapshot. A new vmdk is created which the VM points to
           now. The earlier vmdk becomes read-only.
        3. Call CopyVirtualDisk which coalesces the disk chain to form a single
           vmdk, rather a .vmdk metadata file and a -flat.vmdk disk data file.
        4. Now upload the -flat.vmdk file to the image store.
        5. Delete the coalesced .vmdk and -flat.vmdk created.
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        service_content = self._session.vim.service_content

        def _get_vm_and_vmdk_attribs():
            # Get the vmdk file name that the VM is pointing to
            hw_devices = self._session._call_method(vim_util,
                                                    "get_dynamic_property",
                                                    vm_ref, "VirtualMachine",
                                                    "config.hardware.device")
            (vmdk_file_path_before_snapshot, adapter_type,
             disk_type) = vm_util.get_vmdk_path_and_adapter_type(
                 hw_devices, uuid=instance.uuid)
            if not vmdk_file_path_before_snapshot:
                LOG.debug("No root disk defined. Unable to snapshot.")
                raise error_util.NoRootDiskDefined()

            datastore_name = ds_util.DatastorePath.parse(
                vmdk_file_path_before_snapshot).datastore
            os_type = self._session._call_method(vim_util,
                                                 "get_dynamic_property",
                                                 vm_ref, "VirtualMachine",
                                                 "summary.config.guestId")
            return (vmdk_file_path_before_snapshot, adapter_type, disk_type,
                    datastore_name, os_type)

        (vmdk_file_path_before_snapshot, adapter_type, disk_type,
         datastore_name, os_type) = _get_vm_and_vmdk_attribs()

        # Step 2: snapshot; the pre-snapshot vmdk becomes read-only.
        snapshot = self._create_vm_snapshot(instance, vm_ref)
        update_task_state(task_state=task_states.IMAGE_PENDING_UPLOAD)

        def _check_if_tmp_folder_exists():
            # Copy the contents of the VM that were there just before the
            # snapshot was taken
            ds_ref_ret = self._session._call_method(
                vim_util, "get_dynamic_property", vm_ref, "VirtualMachine",
                "datastore")
            if ds_ref_ret is None:
                raise exception.DatastoreNotFound()
            ds_ref = ds_ref_ret.ManagedObjectReference[0]
            self.check_temp_folder(datastore_name, ds_ref)
            return ds_ref

        ds_ref = _check_if_tmp_folder_exists()

        # Generate a random vmdk file name to which the coalesced vmdk content
        # will be copied to. A random name is chosen so that we don't have
        # name clashes.
        random_name = uuidutils.generate_uuid()
        dest_vmdk_file_path = ds_util.DatastorePath(
            datastore_name, self._tmp_folder, "%s.vmdk" % random_name)
        dest_vmdk_data_file_path = ds_util.DatastorePath(
            datastore_name, self._tmp_folder, "%s-flat.vmdk" % random_name)
        dc_info = self.get_datacenter_ref_and_name(ds_ref)

        def _copy_vmdk_content():
            # Consolidate the snapshotted disk to a temporary vmdk.
            LOG.debug('Copying snapshotted disk %s.',
                      vmdk_file_path_before_snapshot,
                      instance=instance)
            copy_disk_task = self._session._call_method(
                self._session.vim,
                "CopyVirtualDisk_Task",
                service_content.virtualDiskManager,
                sourceName=vmdk_file_path_before_snapshot,
                sourceDatacenter=dc_info.ref,
                destName=str(dest_vmdk_file_path),
                destDatacenter=dc_info.ref,
                force=False)
            self._session._wait_for_task(copy_disk_task)
            LOG.debug('Copied snapshotted disk %s.',
                      vmdk_file_path_before_snapshot,
                      instance=instance)

        _copy_vmdk_content()
        # Step 3 done: the snapshot is no longer needed once coalesced.
        self._delete_vm_snapshot(instance, vm_ref, snapshot)

        cookies = self._session.vim.client.options.transport.cookiejar

        def _upload_vmdk_to_image_repository():
            # Upload the contents of -flat.vmdk file which has the disk data.
            LOG.debug("Uploading image %s", image_id,
                      instance=instance)
            vmware_images.upload_image(
                context,
                image_id,
                instance,
                os_type=os_type,
                disk_type=constants.DEFAULT_DISK_TYPE,
                adapter_type=adapter_type,
                image_version=1,
                host=self._session._host,
                data_center_name=dc_info.name,
                datastore_name=datastore_name,
                cookies=cookies,
                file_path="%s/%s-flat.vmdk" % (self._tmp_folder, random_name))
            LOG.debug("Uploaded image %s", image_id,
                      instance=instance)

        update_task_state(task_state=task_states.IMAGE_UPLOADING,
                          expected_state=task_states.IMAGE_PENDING_UPLOAD)
        _upload_vmdk_to_image_repository()

        def _clean_temp_data():
            """Delete temporary vmdk files generated in image handling
            operations.
            """
            # The data file is the one occupying space, and likelier to see
            # deletion problems, so prioritize its deletion first. In the
            # unlikely event that its deletion fails, the small descriptor file
            # is retained too by design since it makes little sense to remove
            # it when the data disk it refers to still lingers.
            for f in dest_vmdk_data_file_path, dest_vmdk_file_path:
                self._delete_datastore_file(f, dc_info.ref)

        _clean_temp_data()
    def reboot(self, instance, network_info):
        """Reboot a VM instance.

        Performs a guest-OS reboot when VMware tools are up and running,
        otherwise a hard reset.

        :raises: InstanceRebootFailure if the VM is not powered on
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        lst_properties = ["summary.guest.toolsStatus", "runtime.powerState",
                          "summary.guest.toolsRunningStatus"]
        props = self._session._call_method(vim_util, "get_object_properties",
                                           None, vm_ref, "VirtualMachine",
                                           lst_properties)
        query = vm_util.get_values_from_object_properties(self._session, props)
        pwr_state = query['runtime.powerState']
        tools_status = query['summary.guest.toolsStatus']
        tools_running_status = query['summary.guest.toolsRunningStatus']

        # Raise an exception if the VM is not powered On.
        if pwr_state not in ["poweredOn"]:
            reason = _("instance is not powered on")
            raise exception.InstanceRebootFailure(reason=reason)

        # If latest vmware tools are installed in the VM, and that the tools
        # are running, then only do a guest reboot. Otherwise do a hard reset.
        if (tools_status == "toolsOk" and
                tools_running_status == "guestToolsRunning"):
            LOG.debug("Rebooting guest OS of VM", instance=instance)
            self._session._call_method(self._session.vim, "RebootGuest",
                                       vm_ref)
            LOG.debug("Rebooted guest OS of VM", instance=instance)
        else:
            LOG.debug("Doing hard reboot of VM", instance=instance)
            reset_task = self._session._call_method(self._session.vim,
                                                    "ResetVM_Task", vm_ref)
            self._session._wait_for_task(reset_task)
            LOG.debug("Did hard reboot of VM", instance=instance)
    def _destroy_instance(self, instance, destroy_disks=True,
                          instance_name=None):
        """Destroy a VM instance: power off, unregister, delete its files.

        Best-effort: individual failures are logged, never raised, and the
        vm_ref cache entry is always removed in the finally clause.

        :param destroy_disks: also delete the VM's datastore folder
        :param instance_name: override for cases (e.g. rescue) where the
                              backend VM name differs from instance uuid
        """
        # Get the instance name. In some cases this may differ from the 'uuid',
        # for example when the spawn of a rescue instance takes place.
        if instance_name is None:
            instance_name = instance['uuid']
        try:
            vm_ref = vm_util.get_vm_ref_from_name(self._session, instance_name)
            if vm_ref is None:
                LOG.warning(_('Instance does not exist on backend'),
                            instance=instance)
                return
            lst_properties = ["config.files.vmPathName", "runtime.powerState",
                              "datastore"]
            props = self._session._call_method(vim_util,
                                               "get_object_properties",
                                               None, vm_ref, "VirtualMachine",
                                               lst_properties)
            query = vm_util.get_values_from_object_properties(
                self._session, props)
            pwr_state = query['runtime.powerState']

            vm_config_pathname = query.get('config.files.vmPathName')
            vm_ds_path = None
            if vm_config_pathname is not None:
                vm_ds_path = ds_util.DatastorePath.parse(
                    vm_config_pathname)

            # Power off the VM if it is in PoweredOn state.
            if pwr_state == "poweredOn":
                vm_util.power_off_instance(self._session, instance, vm_ref)

            # Un-register the VM
            try:
                LOG.debug("Unregistering the VM", instance=instance)
                self._session._call_method(self._session.vim,
                                           "UnregisterVM", vm_ref)
                LOG.debug("Unregistered the VM", instance=instance)
            except Exception as excep:
                LOG.warn(_("In vmwareapi:vmops:_destroy_instance, got this "
                           "exception while un-registering the VM: %s"),
                         excep)
            # Delete the folder holding the VM related content on
            # the datastore.
            if destroy_disks and vm_ds_path:
                try:
                    dir_ds_compliant_path = vm_ds_path.parent
                    LOG.debug("Deleting contents of the VM from "
                              "datastore %(datastore_name)s",
                              {'datastore_name': vm_ds_path.datastore},
                              instance=instance)
                    ds_ref_ret = query['datastore']
                    ds_ref = ds_ref_ret.ManagedObjectReference[0]
                    dc_info = self.get_datacenter_ref_and_name(ds_ref)
                    ds_util.file_delete(self._session,
                                        dir_ds_compliant_path,
                                        dc_info.ref)
                    LOG.debug("Deleted contents of the VM from "
                              "datastore %(datastore_name)s",
                              {'datastore_name': vm_ds_path.datastore},
                              instance=instance)
                except Exception:
                    LOG.warn(_("In vmwareapi:vmops:_destroy_instance, "
                               "exception while deleting the VM contents from "
                               "the disk"), exc_info=True)
        except Exception as exc:
            LOG.exception(exc, instance=instance)
        finally:
            vm_util.vm_ref_cache_delete(instance_name)
    def destroy(self, instance, destroy_disks=True):
        """Destroy a VM instance.

        Steps followed for each VM are:
        1. Power off, if it is in poweredOn state.
        2. Un-register.
        3. Delete the contents of the folder holding the VM related data.

        Also cleans up a rescue VM (via unrescue, falling back to direct
        destruction) and any leftover '-orig' VM from an interrupted
        resize.
        """
        # If there is a rescue VM then we need to destroy that one too.
        LOG.debug("Destroying instance", instance=instance)
        if instance['vm_state'] == vm_states.RESCUED:
            LOG.debug("Rescue VM configured", instance=instance)
            try:
                self.unrescue(instance, power_on=False)
                LOG.debug("Rescue VM destroyed", instance=instance)
            except Exception:
                # unrescue failed; destroy the rescue VM directly.
                rescue_name = instance['uuid'] + self._rescue_suffix
                self._destroy_instance(instance,
                                       destroy_disks=destroy_disks,
                                       instance_name=rescue_name)
        # NOTE(arnaud): Destroy uuid-orig and uuid VMs iff it is not
        # triggered by the revert resize api call. This prevents
        # the uuid-orig VM to be deleted to be able to associate it later.
        if instance.task_state != task_states.RESIZE_REVERTING:
            # When VM deletion is triggered in middle of VM resize before VM
            # arrive RESIZED state, uuid-orig VM need to deleted to avoid
            # VM leak. Within method _destroy_instance it will check vmref
            # exist or not before attempt deletion.
            resize_orig_vmname = instance['uuid'] + self._migrate_suffix
            vm_orig_ref = vm_util.get_vm_ref_from_name(self._session,
                                                       resize_orig_vmname)
            if vm_orig_ref:
                self._destroy_instance(instance,
                                       destroy_disks=destroy_disks,
                                       instance_name=resize_orig_vmname)
        self._destroy_instance(instance, destroy_disks=destroy_disks)
        LOG.debug("Instance destroyed", instance=instance)
def pause(self, instance):
msg = _("pause not supported for vmwareapi")
raise NotImplementedError(msg)
def unpause(self, instance):
msg = _("unpause not supported for vmwareapi")
raise NotImplementedError(msg)
    def suspend(self, instance):
        """Suspend the specified instance.

        Only a poweredOn VM can be suspended; a poweredOff VM raises
        InstanceSuspendFailure, and an already-suspended VM is a no-op.
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        pwr_state = self._session._call_method(vim_util,
                                               "get_dynamic_property", vm_ref,
                                               "VirtualMachine",
                                               "runtime.powerState")
        # Only PoweredOn VMs can be suspended.
        if pwr_state == "poweredOn":
            LOG.debug("Suspending the VM", instance=instance)
            suspend_task = self._session._call_method(self._session.vim,
                                                      "SuspendVM_Task", vm_ref)
            self._session._wait_for_task(suspend_task)
            LOG.debug("Suspended the VM", instance=instance)
        # Raise Exception if VM is poweredOff
        elif pwr_state == "poweredOff":
            reason = _("instance is powered off and cannot be suspended.")
            raise exception.InstanceSuspendFailure(reason=reason)
        else:
            LOG.debug("VM was already in suspended state. So returning "
                      "without doing anything", instance=instance)
    def resume(self, instance):
        """Resume the specified instance.

        :raises: InstanceResumeFailure when the VM is not suspended
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        pwr_state = self._session._call_method(vim_util,
                                               "get_dynamic_property", vm_ref,
                                               "VirtualMachine",
                                               "runtime.powerState")
        if pwr_state.lower() == "suspended":
            LOG.debug("Resuming the VM", instance=instance)
            # PowerOnVM resumes a suspended VM.
            suspend_task = self._session._call_method(
                self._session.vim,
                "PowerOnVM_Task", vm_ref)
            self._session._wait_for_task(suspend_task)
            LOG.debug("Resumed the VM", instance=instance)
        else:
            reason = _("instance is not in a suspended state")
            raise exception.InstanceResumeFailure(reason=reason)
    def rescue(self, context, instance, network_info, image_meta):
        """Rescue the specified instance.

        - shutdown the instance VM.
        - spawn a rescue VM (the vm name-label will be instance-N-rescue).

        The original instance's root vmdk is attached to the rescue VM so
        its data can be inspected, then the rescue VM is powered on.
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)

        self.power_off(instance)
        instance_name = instance.uuid + self._rescue_suffix
        # Spawn powered-off so the disk can be attached before boot.
        self.spawn(context, instance, image_meta,
                   None, None, network_info,
                   instance_name=instance_name,
                   power_on=False)

        # Attach vmdk to the rescue VM
        hardware_devices = self._session._call_method(vim_util,
                                                      "get_dynamic_property",
                                                      vm_ref, "VirtualMachine",
                                                      "config.hardware.device")
        (vmdk_path, adapter_type,
         disk_type) = vm_util.get_vmdk_path_and_adapter_type(
             hardware_devices, uuid=instance.uuid)
        rescue_vm_ref = vm_util.get_vm_ref_from_name(self._session,
                                                     instance_name)
        self._volumeops.attach_disk_to_vm(
            rescue_vm_ref, instance,
            adapter_type, disk_type, vmdk_path)
        vm_util.power_on_instance(self._session, instance,
                                  vm_ref=rescue_vm_ref)
    def unrescue(self, instance, power_on=True):
        """Unrescue the specified instance.

        Detaches the original instance's disk from the rescue VM, destroys
        the rescue VM, and optionally powers the original VM back on.
        """
        # Get the original vmdk_path
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        hardware_devices = self._session._call_method(vim_util,
                                                      "get_dynamic_property",
                                                      vm_ref, "VirtualMachine",
                                                      "config.hardware.device")
        (vmdk_path, adapter_type,
         disk_type) = vm_util.get_vmdk_path_and_adapter_type(
             hardware_devices, uuid=instance.uuid)

        instance_name = instance.uuid + self._rescue_suffix
        # detach the original instance disk from the rescue disk
        vm_rescue_ref = vm_util.get_vm_ref_from_name(self._session,
                                                     instance_name)
        hardware_devices = self._session._call_method(vim_util,
                                                      "get_dynamic_property",
                                                      vm_rescue_ref,
                                                      "VirtualMachine",
                                                      "config.hardware.device")
        device = vm_util.get_vmdk_volume_disk(hardware_devices, path=vmdk_path)
        # Must power off the rescue VM before detaching the shared disk.
        vm_util.power_off_instance(self._session, instance, vm_rescue_ref)
        self._volumeops.detach_disk_from_vm(vm_rescue_ref, instance, device)
        self._destroy_instance(instance, instance_name=instance_name)
        if power_on:
            vm_util.power_on_instance(self._session, instance, vm_ref=vm_ref)
def power_off(self, instance):
"""Power off the specified instance.
:param instance: nova.objects.instance.Instance
"""
vm_util.power_off_instance(self._session, instance)
def power_on(self, instance):
vm_util.power_on_instance(self._session, instance)
def _get_orig_vm_name_label(self, instance):
return instance.uuid + '-orig'
def _update_instance_progress(self, context, instance, step, total_steps):
"""Update instance progress percent to reflect current step number
"""
# Divide the action's workflow into discrete steps and "bump" the
# instance's progress field as each step is completed.
#
# For a first cut this should be fine, however, for large VM images,
# the clone disk step begins to dominate the equation. A
# better approximation would use the percentage of the VM image that
# has been streamed to the destination host.
progress = round(float(step) / total_steps * 100)
instance_uuid = instance.uuid
LOG.debug("Updating instance '%(instance_uuid)s' progress to"
" %(progress)d",
{'instance_uuid': instance_uuid, 'progress': progress},
instance=instance)
instance.progress = progress
instance.save()
    def migrate_disk_and_power_off(self, context, instance, dest,
                                   flavor):
        """Transfers the disk of a running instance in multiple phases, turning
        off the instance before the end.

        :raises: InstanceFaultRollback (wrapping ResizeError) when the new
                 flavor's root disk is smaller than the current one.
        """
        # Checks if the migration needs a disk resize down.
        if flavor['root_gb'] < instance['root_gb']:
            reason = _("Unable to shrink disk.")
            raise exception.InstanceFaultRollback(
                exception.ResizeError(reason=reason))

        # 0. Zero out the progress to begin
        self._update_instance_progress(context, instance,
                                       step=0,
                                       total_steps=RESIZE_TOTAL_STEPS)

        vm_ref = vm_util.get_vm_ref(self._session, instance)
        # Read the host_ref for the destination. If this is None then the
        # VC will decide on placement
        host_ref = self._get_host_ref_from_name(dest)

        # 1. Power off the instance
        self.power_off(instance)
        self._update_instance_progress(context, instance,
                                       step=1,
                                       total_steps=RESIZE_TOTAL_STEPS)

        # 2. Disassociate the linked vsphere VM from the instance
        vm_util.disassociate_vmref_from_instance(self._session, instance,
                                                 vm_ref,
                                                 suffix=self._migrate_suffix)
        self._update_instance_progress(context, instance,
                                       step=2,
                                       total_steps=RESIZE_TOTAL_STEPS)

        ds_ref = ds_util.get_datastore(
            self._session, self._cluster,
            datastore_regex=self._datastore_regex).ref
        dc_info = self.get_datacenter_ref_and_name(ds_ref)
        # 3. Clone the VM for instance
        vm_util.clone_vmref_for_instance(self._session, instance, vm_ref,
                                         host_ref, ds_ref, dc_info.vmFolder)
        self._update_instance_progress(context, instance,
                                       step=3,
                                       total_steps=RESIZE_TOTAL_STEPS)
    def confirm_migration(self, migration, instance, network_info):
        """Confirms a resize, destroying the source VM.

        Destruction failures are logged and swallowed (best-effort).
        """
        # Destroy the original VM. The vm_ref needs to be searched using the
        # instance.uuid + self._migrate_suffix as the identifier. We will
        # not get the vm when searched using the instanceUuid but rather will
        # be found using the uuid buried in the extraConfig
        vm_ref = vm_util.search_vm_ref_by_identifier(self._session,
                                    instance.uuid + self._migrate_suffix)
        if vm_ref is None:
            LOG.debug("instance not present", instance=instance)
            return

        try:
            LOG.debug("Destroying the VM", instance=instance)
            destroy_task = self._session._call_method(
                self._session.vim,
                "Destroy_Task", vm_ref)
            self._session._wait_for_task(destroy_task)
            LOG.debug("Destroyed the VM", instance=instance)
        except Exception as excep:
            LOG.warn(_("In vmwareapi:vmops:confirm_migration, got this "
                       "exception while destroying the VM: %s"), excep)
def finish_revert_migration(self, context, instance, network_info,
block_device_info, power_on=True):
"""Finish reverting a resize."""
vm_util.associate_vmref_for_instance(self._session, instance,
suffix=self._migrate_suffix)
if power_on:
vm_util.power_on_instance(self._session, instance)
    def finish_migration(self, context, migration, instance, disk_info,
                         network_info, image_meta, resize_instance=False,
                         block_device_info=None, power_on=True):
        """Completes a resize, turning on the migrated instance.

        When resize_instance is True, reconfigures CPU/memory via a resize
        spec and grows the root disk if the new flavor is larger.
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        if resize_instance:
            client_factory = self._session.vim.client.factory
            vm_resize_spec = vm_util.get_vm_resize_spec(client_factory,
                                                        instance)
            vm_util.reconfigure_vm(self._session, vm_ref, vm_resize_spec)

            # Resize the disk (if larger)
            old_root_gb = instance.system_metadata['old_instance_type_root_gb']
            if instance['root_gb'] > int(old_root_gb):
                root_disk_in_kb = instance['root_gb'] * units.Mi
                vmdk_path = vm_util.get_vmdk_path(self._session, vm_ref,
                                                  instance)
                data_store_ref = ds_util.get_datastore(self._session,
                    self._cluster, datastore_regex=self._datastore_regex).ref
                dc_info = self.get_datacenter_ref_and_name(data_store_ref)
                self._extend_virtual_disk(instance, root_disk_in_kb, vmdk_path,
                                          dc_info.ref)

            # TODO(ericwb): add extend for ephemeral disk
        # 4. Start VM
        if power_on:
            vm_util.power_on_instance(self._session, instance, vm_ref=vm_ref)
        self._update_instance_progress(context, instance,
                                       step=4,
                                       total_steps=RESIZE_TOTAL_STEPS)
    def live_migration(self, context, instance_ref, dest,
                       post_method, recover_method, block_migration=False):
        """Spawning live_migration operation for distributing high-load.

        Runs MigrateVM_Task to the destination host; invokes
        recover_method on failure (then re-raises) and post_method on
        success.

        :raises: HostNotFound if ``dest`` cannot be resolved to a host ref
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance_ref)

        host_ref = self._get_host_ref_from_name(dest)
        if host_ref is None:
            raise exception.HostNotFound(host=dest)

        LOG.debug("Migrating VM to host %s", dest, instance=instance_ref)
        try:
            vm_migrate_task = self._session._call_method(
                self._session.vim,
                "MigrateVM_Task", vm_ref,
                host=host_ref,
                priority="defaultPriority")
            self._session._wait_for_task(vm_migrate_task)
        except Exception:
            with excutils.save_and_reraise_exception():
                recover_method(context, instance_ref, dest, block_migration)
        post_method(context, instance_ref, dest, block_migration)
        LOG.debug("Migrated VM to host %s", dest, instance=instance_ref)
def poll_rebooting_instances(self, timeout, instances):
"""Poll for rebooting instances."""
ctxt = nova_context.get_admin_context()
instances_info = dict(instance_count=len(instances),
timeout=timeout)
if instances_info["instance_count"] > 0:
LOG.info(_("Found %(instance_count)d hung reboots "
"older than %(timeout)d seconds") % instances_info)
for instance in instances:
LOG.info(_("Automatically hard rebooting"), instance=instance)
self.compute_api.reboot(ctxt, instance, "HARD")
def get_info(self, instance):
"""Return data about the VM instance."""
vm_ref = vm_util.get_vm_ref(self._session, instance)
lst_properties = ["summary.config.numCpu",
"summary.config.memorySizeMB",
"runtime.powerState"]
vm_props = self._session._call_method(vim_util,
"get_object_properties", None, vm_ref, "VirtualMachine",
lst_properties)
query = vm_util.get_values_from_object_properties(
self._session, vm_props)
max_mem = int(query.get('summary.config.memorySizeMB', 0)) * 1024
num_cpu = int(query.get('summary.config.numCpu', 0))
return {'state': VMWARE_POWER_STATES[query['runtime.powerState']],
'max_mem': max_mem,
'mem': max_mem,
'num_cpu': num_cpu,
'cpu_time': 0}
def _get_diagnostics(self, instance):
"""Return data about VM diagnostics."""
vm_ref = vm_util.get_vm_ref(self._session, instance)
lst_properties = ["summary.config",
"summary.quickStats",
"summary.runtime"]
vm_props = self._session._call_method(vim_util,
"get_object_properties", None, vm_ref, "VirtualMachine",
lst_properties)
query = vm_util.get_values_from_object_properties(self._session,
vm_props)
data = {}
# All of values received are objects. Convert them to dictionaries
for value in query.values():
prop_dict = vim_util.object_to_dict(value, list_depth=1)
data.update(prop_dict)
return data
def get_diagnostics(self, instance):
"""Return data about VM diagnostics."""
data = self._get_diagnostics(instance)
# Add a namespace to all of the diagnostsics
return dict([('vmware:' + k, v) for k, v in data.items()])
    def get_instance_diagnostics(self, instance):
        """Return data about VM diagnostics."""
        data = self._get_diagnostics(instance)
        state = data.get('powerState')
        if state:
            # Map the vSphere power state onto Nova's power_state values.
            state = power_state.STATE_MAP[VMWARE_POWER_STATES[state]]
        uptime = data.get('uptimeSeconds', 0)
        config_drive = configdrive.required_by(instance)
        diags = diagnostics.Diagnostics(state=state,
                                        driver='vmwareapi',
                                        config_drive=config_drive,
                                        hypervisor_os='esxi',
                                        uptime=uptime)
        # Memory figures come straight from the vSphere summary (MB keys).
        diags.memory_details.maximum = data.get('memorySizeMB', 0)
        diags.memory_details.used = data.get('guestMemoryUsage', 0)
        # TODO(garyk): add in cpu, nic and disk stats
        return diags
def _get_vnc_console_connection(self, instance):
"""Return connection info for a vnc console."""
vm_ref = vm_util.get_vm_ref(self._session, instance)
opt_value = self._session._call_method(vim_util,
'get_dynamic_property',
vm_ref, 'VirtualMachine',
vm_util.VNC_CONFIG_KEY)
if opt_value:
port = int(opt_value.value)
else:
raise exception.ConsoleTypeUnavailable(console_type='vnc')
return {'port': port,
'internal_access_path': None}
@staticmethod
def _get_machine_id_str(network_info):
machine_id_str = ''
for vif in network_info:
# TODO(vish): add support for dns2
# TODO(sateesh): add support for injection of ipv6 configuration
network = vif['network']
ip_v4 = netmask_v4 = gateway_v4 = broadcast_v4 = dns = None
subnets_v4 = [s for s in network['subnets'] if s['version'] == 4]
if len(subnets_v4) > 0:
if len(subnets_v4[0]['ips']) > 0:
ip_v4 = subnets_v4[0]['ips'][0]
if len(subnets_v4[0]['dns']) > 0:
dns = subnets_v4[0]['dns'][0]['address']
netmask_v4 = str(subnets_v4[0].as_netaddr().netmask)
gateway_v4 = subnets_v4[0]['gateway']['address']
broadcast_v4 = str(subnets_v4[0].as_netaddr().broadcast)
interface_str = ";".join([vif['address'],
ip_v4 and ip_v4['address'] or '',
netmask_v4 or '',
gateway_v4 or '',
broadcast_v4 or '',
dns or ''])
machine_id_str = machine_id_str + interface_str + '#'
return machine_id_str
def _set_machine_id(self, client_factory, instance, network_info):
"""Set the machine id of the VM for guest tools to pick up
and reconfigure the network interfaces.
"""
vm_ref = vm_util.get_vm_ref(self._session, instance)
machine_id_change_spec = vm_util.get_machine_id_change_spec(
client_factory,
self._get_machine_id_str(network_info))
LOG.debug("Reconfiguring VM instance to set the machine id",
instance=instance)
vm_util.reconfigure_vm(self._session, vm_ref, machine_id_change_spec)
LOG.debug("Reconfigured VM instance to set the machine id",
instance=instance)
    @utils.synchronized('vmware.get_and_set_vnc_port')
    def _get_and_set_vnc_config(self, client_factory, instance):
        """Set the vnc configuration of the VM.

        Guarded by the ``vmware.get_and_set_vnc_port`` lock so port
        allocation and the reconfigure happen atomically with respect to
        other callers of this method.
        """
        port = vm_util.get_vnc_port(self._session)
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        vnc_config_spec = vm_util.get_vnc_config_spec(
            client_factory, port)
        LOG.debug("Reconfiguring VM instance to enable vnc on "
                  "port - %(port)s", {'port': port},
                  instance=instance)
        vm_util.reconfigure_vm(self._session, vm_ref, vnc_config_spec)
        LOG.debug("Reconfigured VM instance to enable vnc on "
                  "port - %(port)s", {'port': port},
                  instance=instance)
def _get_ds_browser(self, ds_ref):
ds_browser = self._datastore_browser_mapping.get(ds_ref.value)
if not ds_browser:
ds_browser = self._session._call_method(
vim_util, "get_dynamic_property", ds_ref, "Datastore",
"browser")
self._datastore_browser_mapping[ds_ref.value] = ds_browser
return ds_browser
def _get_host_ref_from_name(self, host_name):
"""Get reference to the host with the name specified."""
host_objs = self._session._call_method(vim_util, "get_objects",
"HostSystem", ["name"])
vm_util._cancel_retrieve_if_necessary(self._session, host_objs)
for host in host_objs:
if hasattr(host, 'propSet'):
if host.propSet[0].val == host_name:
return host.obj
return None
def _get_vmfolder_ref(self):
"""Get the Vm folder ref from the datacenter."""
dc_objs = self._session._call_method(vim_util, "get_objects",
"Datacenter", ["vmFolder"])
vm_util._cancel_retrieve_if_necessary(self._session, dc_objs)
# There is only one default datacenter in a standalone ESX host
vm_folder_ref = dc_objs.objects[0].propSet[0].val
return vm_folder_ref
    def _create_folder_if_missing(self, ds_name, ds_ref, folder):
        """Create a folder on the datastore if it does not exist.

        Currently there are two folders that are required on the datastore:

        - base folder - the folder to store cached images
        - temp folder - the folder used for snapshot management and
          image uploading

        This method is aimed to be used for the management of those
        folders to ensure that they are created if they are missing.
        The ds_util method mkdir is used to create the folder; if it
        raises 'FileAlreadyExistsException' then the folder already
        exists on the datastore.
        """
        path = ds_util.DatastorePath(ds_name, folder)
        dc_info = self.get_datacenter_ref_and_name(ds_ref)
        try:
            ds_util.mkdir(self._session, path, dc_info.ref)
            LOG.debug("Folder %s created.", path)
        except vexc.FileAlreadyExistsException:
            # NOTE(hartsocks): if the folder already exists, that
            # just means the folder was prepped by another process.
            pass
    def check_cache_folder(self, ds_name, ds_ref):
        """Check that the image cache folder exists; create it if missing."""
        self._create_folder_if_missing(ds_name, ds_ref, self._base_folder)
    def check_temp_folder(self, ds_name, ds_ref):
        """Check that the temp folder exists; create it if missing."""
        self._create_folder_if_missing(ds_name, ds_ref, self._tmp_folder)
def _check_if_folder_file_exists(self, ds_browser, ds_ref, ds_name,
folder_name, file_name):
# Ensure that the cache folder exists
self.check_cache_folder(ds_name, ds_ref)
# Check if the file exists or not.
folder_ds_path = ds_util.DatastorePath(ds_name, folder_name)
return ds_util.file_exists(
self._session, ds_browser, folder_ds_path, file_name)
def inject_network_info(self, instance, network_info):
"""inject network info for specified instance."""
# Set the machine.id parameter of the instance to inject
# the NIC configuration inside the VM
client_factory = self._session.vim.client.factory
self._set_machine_id(client_factory, instance, network_info)
    def manage_image_cache(self, context, instances):
        """Run image-cache aging over every available datastore.

        No-op when CONF.remove_unused_base_images is disabled.
        """
        if not CONF.remove_unused_base_images:
            LOG.debug("Image aging disabled. Aging will not be done.")
            return
        datastores = ds_util.get_available_datastores(self._session,
                                                      self._cluster,
                                                      self._datastore_regex)
        datastores_info = []
        for ds in datastores:
            dc_info = self.get_datacenter_ref_and_name(ds.ref)
            datastores_info.append((ds, dc_info))
        self._imagecache.update(context, instances, datastores_info)
    def _get_valid_vms_from_retrieve_result(self, retrieve_result):
        """Returns list of valid vms from RetrieveResult object."""
        lst_vm_names = []
        while retrieve_result:
            # A token is present when there are more pages of results.
            token = vm_util._get_token(retrieve_result)
            for vm in retrieve_result.objects:
                vm_name = None
                conn_state = None
                for prop in vm.propSet:
                    if prop.name == "name":
                        vm_name = prop.val
                    elif prop.name == "runtime.connectionState":
                        conn_state = prop.val
                # Ignoring the orphaned or inaccessible VMs
                if conn_state not in ["orphaned", "inaccessible"]:
                    lst_vm_names.append(vm_name)
            if token:
                # Fetch the next page and keep collecting.
                retrieve_result = self._session._call_method(vim_util,
                                                 "continue_to_get_objects",
                                                 token)
            else:
                break
        return lst_vm_names
def instance_exists(self, instance):
try:
vm_util.get_vm_ref(self._session, instance)
return True
except exception.InstanceNotFound:
return False
    def attach_interface(self, instance, image_meta, vif):
        """Attach an interface to the instance.

        The VIF model comes from the image metadata (``hw_vif_model``),
        falling back to the driver default.  Raises InterfaceAttachFailed
        when the reconfigure task fails.
        """
        vif_model = image_meta.get("hw_vif_model",
                                   constants.DEFAULT_VIF_MODEL)
        vif_model = vm_util.convert_vif_model(vif_model)
        vif_info = vmwarevif.get_vif_dict(self._session, self._cluster,
                                          vif_model, utils.is_neutron(), vif)
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        # Ensure that there is not a race with the port index management
        with lockutils.lock(instance.uuid,
                            lock_file_prefix='nova-vmware-hot-plug'):
            port_index = vm_util.get_attach_port_index(self._session, vm_ref)
            client_factory = self._session.vim.client.factory
            attach_config_spec = vm_util.get_network_attach_config_spec(
                                        client_factory, vif_info, port_index)
            LOG.debug("Reconfiguring VM to attach interface",
                      instance=instance)
            try:
                vm_util.reconfigure_vm(self._session, vm_ref,
                                       attach_config_spec)
            except Exception as e:
                LOG.error(_LE('Attaching network adapter failed. Exception: '
                              ' %s'),
                          e, instance=instance)
                raise exception.InterfaceAttachFailed(
                        instance_uuid=instance['uuid'])
        LOG.debug("Reconfigured VM to attach interface", instance=instance)
    def detach_interface(self, instance, vif):
        """Detach an interface from the instance.

        Looks the device up first by the neutron port id and then by MAC
        address; raises NotFound when either lookup fails and
        InterfaceDetachFailed when the reconfigure task fails.
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        # Ensure that there is not a race with the port index management
        with lockutils.lock(instance.uuid,
                            lock_file_prefix='nova-vmware-hot-plug'):
            port_index = vm_util.get_vm_detach_port_index(self._session,
                                                          vm_ref,
                                                          vif['id'])
            if port_index is None:
                msg = _("No device with interface-id %s exists on "
                        "VM") % vif['id']
                raise exception.NotFound(msg)
            hardware_devices = self._session._call_method(vim_util,
                            "get_dynamic_property", vm_ref,
                            "VirtualMachine", "config.hardware.device")
            device = vmwarevif.get_network_device(hardware_devices,
                                                  vif['address'])
            if device is None:
                msg = _("No device with MAC address %s exists on the "
                        "VM") % vif['address']
                raise exception.NotFound(msg)
            client_factory = self._session.vim.client.factory
            detach_config_spec = vm_util.get_network_detach_config_spec(
                                        client_factory, device, port_index)
            LOG.debug("Reconfiguring VM to detach interface",
                      instance=instance)
            try:
                vm_util.reconfigure_vm(self._session, vm_ref,
                                       detach_config_spec)
            except Exception as e:
                LOG.error(_LE('Detaching network adapter failed. Exception: '
                              '%s'),
                          e, instance=instance)
                raise exception.InterfaceDetachFailed(
                        instance_uuid=instance['uuid'])
        LOG.debug("Reconfigured VM to detach interface", instance=instance)
    def _use_disk_image_as_full_clone(self, vm_ref, vi):
        """Uses cached image disk by copying it into the VM directory."""
        instance_folder = vi.instance_name
        root_disk_name = "%s.vmdk" % vi.instance_name
        root_disk_ds_loc = vi.datastore.build_path(instance_folder,
                                                   root_disk_name)
        # Give the instance its own private copy of the cached image.
        vm_util.copy_virtual_disk(
            self._session,
            vi.dc_info.ref,
            str(vi.cache_image_path),
            str(root_disk_ds_loc))
        # Grow the copy if the flavor requires a bigger root disk.
        self._extend_if_required(
            vi.dc_info, vi.ii, vi.instance, str(root_disk_ds_loc))
        self._volumeops.attach_disk_to_vm(
            vm_ref, vi.instance,
            vi.ii.adapter_type, vi.ii.disk_type,
            str(root_disk_ds_loc),
            vi.root_gb * units.Mi, False)
def _sized_image_exists(self, sized_disk_ds_loc, ds_ref):
ds_browser = self._get_ds_browser(ds_ref)
return ds_util.file_exists(
self._session, ds_browser, sized_disk_ds_loc.parent,
sized_disk_ds_loc.basename)
def _use_disk_image_as_linked_clone(self, vm_ref, vi):
"""Uses cached image as parent of a COW child in the VM directory."""
sized_image_disk_name = "%s.vmdk" % vi.ii.image_id
if vi.root_gb > 0:
sized_image_disk_name = "%s.%s.vmdk" % (vi.ii.image_id, vi.root_gb)
sized_disk_ds_loc = vi.cache_image_folder.join(sized_image_disk_name)
# Ensure only a single thread extends the image at once.
# We do this by taking a lock on the name of the extended
# image. This allows multiple threads to create resized
# copies simultaneously, as long as they are different
# sizes. Threads attempting to create the same resized copy
# will be serialized, with only the first actually creating
# the copy.
#
# Note that the object is in a per-nova cache directory,
# so inter-nova locking is not a concern. Consequently we
# can safely use simple thread locks.
with lockutils.lock(str(sized_disk_ds_loc),
lock_file_prefix='nova-vmware-image'):
if not self._sized_image_exists(sized_disk_ds_loc,
vi.datastore.ref):
LOG.debug("Copying root disk of size %sGb", vi.root_gb)
try:
vm_util.copy_virtual_disk(
self._session,
vi.dc_info.ref,
str(vi.cache_image_path),
str(sized_disk_ds_loc))
except Exception as e:
LOG.warning(_("Root disk file creation "
"failed - %s"), e)
with excutils.save_and_reraise_exception():
LOG.error(_LE('Failed to copy cached '
'image %(source)s to '
'%(dest)s for resize: '
'%(error)s'),
{'source': vi.cache_image_path,
'dest': sized_disk_ds_loc,
'error': e.message})
try:
ds_util.file_delete(self._session,
sized_disk_ds_loc,
vi.dc_info.ref)
except vexc.FileNotFoundException:
# File was never created: cleanup not
# required
pass
# Resize the copy to the appropriate size. No need
# for cleanup up here, as _extend_virtual_disk
# already does it
self._extend_if_required(
vi.dc_info, vi.ii, vi.instance, str(sized_disk_ds_loc))
# Associate the sized image disk to the VM by attaching to the VM a
# COW child of said disk.
self._volumeops.attach_disk_to_vm(
vm_ref, vi.instance,
vi.ii.adapter_type, vi.ii.disk_type,
str(sized_disk_ds_loc),
vi.root_gb * units.Mi, vi.ii.linked_clone)
    def _use_iso_image(self, vm_ref, vi):
        """Uses cached image as a bootable virtual cdrom.

        When the flavor specifies a non-zero root size a blank root disk
        of that size is also created and attached.
        """
        self._attach_cdrom_to_vm(
            vm_ref, vi.instance, vi.datastore.ref,
            str(vi.cache_image_path))
        # Optionally create and attach blank disk
        if vi.root_gb > 0:
            instance_folder = vi.instance_name
            root_disk_name = "%s.vmdk" % vi.instance_name
            root_disk_ds_loc = vi.datastore.build_path(instance_folder,
                                                       root_disk_name)
            # It is pointless to COW a blank disk
            linked_clone = False
            vm_util.create_virtual_disk(
                self._session, vi.dc_info.ref,
                vi.ii.adapter_type,
                vi.ii.disk_type,
                str(root_disk_ds_loc),
                vi.root_gb * units.Mi)
            self._volumeops.attach_disk_to_vm(
                vm_ref, vi.instance,
                vi.ii.adapter_type, vi.ii.disk_type,
                str(root_disk_ds_loc),
                vi.root_gb * units.Mi, linked_clone)
    def _update_datacenter_cache_from_objects(self, dcs):
        """Updates the datastore/datacenter cache.

        Walks the (possibly paginated) Datacenter results and records a
        DcInfo entry for every datastore attached to each datacenter.
        """
        while dcs:
            # Token signals another page of results is available.
            token = vm_util._get_token(dcs)
            for dco in dcs.objects:
                dc_ref = dco.obj
                ds_refs = []
                prop_dict = vm_util.propset_dict(dco.propSet)
                name = prop_dict.get('name')
                vmFolder = prop_dict.get('vmFolder')
                datastore_refs = prop_dict.get('datastore')
                if datastore_refs:
                    datastore_refs = datastore_refs.ManagedObjectReference
                    for ds in datastore_refs:
                        ds_refs.append(ds.value)
                else:
                    LOG.debug("Datacenter %s doesn't have any datastore "
                              "associated with it, ignoring it", name)
                for ds_ref in ds_refs:
                    self._datastore_dc_mapping[ds_ref] = DcInfo(ref=dc_ref,
                            name=name, vmFolder=vmFolder)
            if token:
                dcs = self._session._call_method(vim_util,
                                                 "continue_to_get_objects",
                                                 token)
            else:
                break
def get_datacenter_ref_and_name(self, ds_ref):
"""Get the datacenter name and the reference."""
dc_info = self._datastore_dc_mapping.get(ds_ref.value)
if not dc_info:
dcs = self._session._call_method(vim_util, "get_objects",
"Datacenter", ["name", "datastore", "vmFolder"])
self._update_datacenter_cache_from_objects(dcs)
dc_info = self._datastore_dc_mapping.get(ds_ref.value)
return dc_info
    def list_instances(self):
        """Lists the VM instances that are registered with vCenter cluster."""
        properties = ['name', 'runtime.connectionState']
        LOG.debug("Getting list of instances from cluster %s",
                  self._cluster)
        vms = []
        root_res_pool = self._session._call_method(
            vim_util, "get_dynamic_property", self._cluster,
            'ClusterComputeResource', 'resourcePool')
        if root_res_pool:
            # All cluster VMs hang off the cluster's root resource pool.
            vms = self._session._call_method(
                vim_util, 'get_inner_objects', root_res_pool, 'vm',
                'VirtualMachine', properties)
        lst_vm_names = self._get_valid_vms_from_retrieve_result(vms)
        LOG.debug("Got total of %s instances", str(len(lst_vm_names)))
        return lst_vm_names
    def get_vnc_console(self, instance):
        """Return connection info for a vnc console using vCenter logic."""
        # vCenter does not run virtual machines and does not run
        # a VNC proxy. Instead, you need to tell OpenStack to talk
        # directly to the ESX host running the VM you are attempting
        # to connect to via VNC.
        vnc_console = self._get_vnc_console_connection(instance)
        host_name = vm_util.get_host_name_for_vm(
            self._session,
            instance)
        vnc_console['host'] = host_name
        # NOTE: VM can move hosts in some situations. Debug for admins.
        LOG.debug("VM %(uuid)s is currently on host %(host_name)s",
                  {'uuid': instance.name, 'host_name': host_name},
                  instance=instance)
        return ctype.ConsoleVNC(**vnc_console)
| {
"content_hash": "30c32b4358d05936ffee6ebb68f88ccd",
"timestamp": "",
"source": "github",
"line_count": 1652,
"max_line_length": 79,
"avg_line_length": 46.84866828087167,
"alnum_prop": 0.5445512572034008,
"repo_name": "vmthunder/nova",
"id": "cd5869e26aceea35f10cd2cfdda1bb8f7f04ff1d",
"size": "78146",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/virt/vmwareapi/vmops.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
# Public API of the package: re-export the redis helpers, the task-queue
# primitives, the models and the cron/async utilities so callers can import
# everything from the package root.
# NOTE(review): these are implicit relative imports (Python 2 style), and
# the `async` module name is a reserved keyword on Python 3.7+ — this
# module cannot be imported unchanged on modern Python; verify the
# supported interpreter version.
from redis_wrap import (
    get_redis,
    get_list,
    get_hash,
    get_set,
    setup_redis,
    get_key,
    set_key,
    get_queue,
    get_dict,
    ConnectionError,
    ResponseError
    )
from task import (
    task_registry,
    register,
    push_task,
    has_task,
    pop_task,
    pop_error,
    push_runtime_error,
    gen_task,
    push_runtime_task
    )
from model import *
from cron import add_cron, has_cron, remove_cron, start_cron
from async import async, enable_transaction, ping_task, prepare_task
| {
"content_hash": "83c6806b3d983f6437842c9b5e8fd437",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 68,
"avg_line_length": 19.71875,
"alnum_prop": 0.526148969889065,
"repo_name": "audoe/ztq",
"id": "ef7769d5167395eaa69df127569d1fdd0dc4b25a",
"size": "645",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "ztq_core/ztq_core/__init__.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6911"
},
{
"name": "JavaScript",
"bytes": "367"
},
{
"name": "Python",
"bytes": "109781"
},
{
"name": "Visual Basic",
"bytes": "646"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages
# Package name and version are defined once, inside the package itself.
from orc import PACKAGE_NAME, VERSION
if __name__ == '__main__':
    # Standard setuptools entry point; packages are discovered
    # automatically and tornado is pinned to the tested release.
    setup(
        name=PACKAGE_NAME,
        version=VERSION,
        license='MIT',
        packages=find_packages(),
        install_requires=['tornado==3.1.1']
    )
| {
"content_hash": "39c68c1625e0188d45194406a3b0c997",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 43,
"avg_line_length": 20.214285714285715,
"alnum_prop": 0.5901060070671378,
"repo_name": "eugeniy/orc",
"id": "c1a8414a0831ca90a68f846d19b8385f05fde89d",
"size": "283",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1205"
}
],
"symlink_target": ""
} |
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# The project root (two levels up) is prepended so autodoc can import it.
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'RxPython'
copyright = '2013, Adrian Kündig'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'RxPythondoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'RxPython.tex', 'RxPython Documentation',
'Adrian Kündig', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'rxpython', 'RxPython Documentation',
['Adrian Kündig'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'RxPython', 'RxPython Documentation',
'Adrian Kündig', 'RxPython', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| {
"content_hash": "b9c5482cc584615a364dd17c337b93ee",
"timestamp": "",
"source": "github",
"line_count": 229,
"max_line_length": 80,
"avg_line_length": 32.16593886462882,
"alnum_prop": 0.7011946782514255,
"repo_name": "akuendig/RxPython",
"id": "1916ad94e02b1cdb8b76d00f17155f2787ca265c",
"size": "7812",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/source/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "382753"
},
{
"name": "Shell",
"bytes": "5109"
}
],
"symlink_target": ""
} |
"""Create the asset."""
import common # fixes python import path
import argparse
def create_asset(target_dir):
    """Create the asset.

    Placeholder — each asset script fills in its own download/build logic.
    """
    raise NotImplementedError('Implement me!')
def main():
    """Parse command-line options and create the asset."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--target_dir', '-t', required=True)
    options = arg_parser.parse_args()
    create_asset(options.target_dir)
if __name__ == '__main__':
main()
| {
"content_hash": "39bee7c31382230639236a465e04b03c",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 58,
"avg_line_length": 19.7,
"alnum_prop": 0.6700507614213198,
"repo_name": "aosp-mirror/platform_external_skia",
"id": "708ec7b0aeee94dd658a5728b74541fc95a3e9f7",
"size": "553",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "infra/bots/assets/scripts/create.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "12716940"
},
{
"name": "Batchfile",
"bytes": "904"
},
{
"name": "C",
"bytes": "620774"
},
{
"name": "C#",
"bytes": "4683"
},
{
"name": "C++",
"bytes": "27394853"
},
{
"name": "GLSL",
"bytes": "67013"
},
{
"name": "Go",
"bytes": "80137"
},
{
"name": "HTML",
"bytes": "1002516"
},
{
"name": "Java",
"bytes": "32794"
},
{
"name": "JavaScript",
"bytes": "51666"
},
{
"name": "Lex",
"bytes": "4372"
},
{
"name": "Lua",
"bytes": "70974"
},
{
"name": "Makefile",
"bytes": "2295"
},
{
"name": "Objective-C",
"bytes": "35223"
},
{
"name": "Objective-C++",
"bytes": "34410"
},
{
"name": "PHP",
"bytes": "120845"
},
{
"name": "Python",
"bytes": "1002226"
},
{
"name": "Shell",
"bytes": "49974"
}
],
"symlink_target": ""
} |
"""
WSGI config for tastegood project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys
# Make the project importable when served from this deployment path.
sys.path.append('/data/www.taste-good.com')
# NOTE(review): this unconditionally overwrites any DJANGO_SETTINGS_MODULE
# already set in the environment; os.environ.setdefault would preserve it.
os.environ['DJANGO_SETTINGS_MODULE'] = 'tastegood.settings'
import django.core.handlers.wsgi
# Module-level WSGI callable discovered by the server.
application = django.core.handlers.wsgi.WSGIHandler()
| {
"content_hash": "41e660bcfa4720d67feb8e281c4492e8",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 79,
"avg_line_length": 38.130434782608695,
"alnum_prop": 0.7936145952109465,
"repo_name": "blueooh/storehome",
"id": "7849971b577b249084b092c46b092dc871d045e5",
"size": "877",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tastegood/wsgi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "56417"
},
{
"name": "JavaScript",
"bytes": "75416"
},
{
"name": "PHP",
"bytes": "1742"
},
{
"name": "Python",
"bytes": "19280"
},
{
"name": "Shell",
"bytes": "694"
}
],
"symlink_target": ""
} |
import json
from django.contrib import admin
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from .models import Container, ContainerImage, Mirror
from .models import ContainerBox, ContainerBoxContainers, ContainerRelated
from .forms import ContainerBoxContainersInlineForm
from opps.core.admin import PublishableAdmin, apply_opps_rules, BaseBoxAdmin
from opps.core.permissions.admin import AdminViewPermission
from opps.core.filters import ChannelListFilter, HasQuerySet
from opps.images.generate import image_url
from opps.fields.models import Field, FieldOption
@apply_opps_rules('containers')
class ContainerRelatedInline(admin.TabularInline):
    """Tabular inline for attaching related contents to a Container."""
    model = ContainerRelated
    fk_name = 'container'
    raw_id_fields = ['related']
    actions = None
    ordering = ('order',)
    extra = 1
    classes = ('collapse',)
    verbose_name = _(u'Related content')
    verbose_name_plural = _(u'Related contents')
@apply_opps_rules('containers')
class ContainerImageInline(admin.TabularInline):
    """Inline for attaching ordered, captioned images to a container."""

    model = ContainerImage
    fk_name = 'container'
    raw_id_fields = ['image']
    sortable_field_name = "order"
    actions = None
    extra = 0
    verbose_name = _(u"Image")
    verbose_name_plural = _(u"Images")
    ordering = ('order',)
    readonly_fields = ['image_thumb']
    fieldsets = [(None, {'fields': ('image', 'image_thumb',
                                    'order', 'caption')})]

    def image_thumb(self, obj):
        """Render a 60x60 preview of the row's image, or a placeholder."""
        if not obj.image:
            return _(u'No Image')
        thumb_src = image_url(obj.image.archive.url, width=60, height=60)
        return u'<img width="60px" height="60px" src="{0}" />'.format(thumb_src)
    image_thumb.short_description = _(u'Thumbnail')
    image_thumb.allow_tags = True
@apply_opps_rules('containers')
class ContainerBoxContainersInline(admin.StackedInline):
    # Stacked inline for through-model rows placing containers inside a box,
    # with per-entry presentation overrides and scheduling dates.
    model = ContainerBoxContainers
    form = ContainerBoxContainersInlineForm
    fk_name = 'containerbox'
    raw_id_fields = ['container', 'main_image']
    sortable_field_name = "order"
    actions = None
    ordering = ('order',)
    extra = 0
    fieldsets = [(None, {
        'fields': ('container', 'aggregate', 'highlight', 'order',
                   'date_available', 'date_end', 'hat', 'title',
                   'main_image', 'main_image_caption', 'url', 'url_target')})]
@apply_opps_rules('containers')
class ContainerAdmin(PublishableAdmin, AdminViewPermission):
    """Base admin for Container models; persists dynamic custom-field
    values posted as ``json_<slug>`` into the object's ``json`` blob."""

    inlines = [ContainerRelatedInline]
    prepopulated_fields = {"slug": ["title"]}
    readonly_fields = ['get_http_absolute_url', 'short_url',
                       'in_containerboxes', 'image_thumb']
    raw_id_fields = ['main_image', 'channel', 'mirror_channel']
    ordering = ('-date_available',)
    autocomplete_lookup_fields = {
        'fk': ['channel'],
    }

    def get_list_filter(self, request):
        """Prepend the channel filter to the inherited filter list."""
        inherited = super(ContainerAdmin, self).list_filter
        return [ChannelListFilter] + list(inherited)

    def save_model(self, request, obj, form, change):
        """Save normally, then gather custom-field values from the POST
        data and store them as JSON on the object."""
        super(ContainerAdmin, self).save_model(request, obj, form, change)
        collected = {}
        relevant_fields = Field.objects.filter(
            application__contains=obj.__class__.__name__)
        for field in relevant_fields:
            if field.type == 'checkbox':
                # One entry per option, keyed "<field>_<option>".
                for fo in FieldOption.objects.filter(field=field):
                    key = "{0}_{1}".format(field.slug, fo.option.slug)
                    collected[key] = request.POST.get('json_{0}'.format(key), '')
            else:
                collected[field.slug] = request.POST.get(
                    'json_{0}'.format(field.slug), '')
        obj.json = json.dumps(collected)
        obj.save()
@apply_opps_rules('containers')
class ContainerBoxAdmin(BaseBoxAdmin, AdminViewPermission):
    """Admin for ContainerBox: manually ordered container entries (inline)
    plus an optional dynamic queryset; superusers get extra diagnostics."""

    inlines = [ContainerBoxContainersInline]
    raw_id_fields = ['channel', 'queryset', 'main_image']
    list_display = ['name', 'site', 'channel_name', 'date_available',
                    'published']
    save_as = True
    fieldsets = (
        (_(u'Identification'), {
            'fields': ('site', 'name', 'slug', 'title', 'title_url',
                       'main_image', 'main_image_caption')}),
        (_(u'Relationships'), {
            'fields': ('channel', 'queryset')}),
        (_(u'Publication'), {
            # BUG FIX: 'classes' must be an iterable of CSS class names.
            # ('extrapretty') without a trailing comma is just the string,
            # which Django would iterate character by character.
            'classes': ('extrapretty',),
            'fields': ('content_group', 'published', 'date_available')}),
    )
    autocomplete_lookup_fields = {
        'fk': ['channel'],
    }
    actions = ('clean_ended_entries',)

    def clean_ended_entries(self, request, queryset):
        """Admin action: delete inline entries whose date_end has passed."""
        now = timezone.now()
        for box in queryset:
            ended = box.containerboxcontainers_containerboxes.filter(
                date_end__lt=now
            )
            if ended:
                ended.delete()
    clean_ended_entries.short_description = _(u'Clean ended containers')

    def get_list_display(self, request):
        """Superusers additionally see whether a box is queryset-driven."""
        list_display = getattr(self, 'list_display', [])
        if request.user.is_superuser:
            return list_display + ['is_dynamic']
        return list_display

    def get_list_filter(self, request):
        """Superusers can additionally filter on queryset presence."""
        list_filter = super(ContainerBoxAdmin, self).list_filter
        if request.user.is_superuser:
            list_filter = [HasQuerySet] + list_filter
        return list_filter

    def is_dynamic(self, obj):
        """True when the box is populated by a dynamic queryset."""
        return bool(obj.queryset)
    is_dynamic.short_description = _(u'Dynamic')
    is_dynamic.boolean = True
class HideContainerAdmin(PublishableAdmin, AdminViewPermission):
    """Read-only listing admin for Container rows.

    Hidden from the admin index (empty model perms, no "add"); opening an
    entry redirects to the change form of its concrete child class.
    """

    list_display = ['image_thumb', 'get_child_class', 'title', 'channel_name',
                    'date_available', 'published']
    list_display_links = ['image_thumb', 'title']
    readonly_fields = ['image_thumb']

    def get_child_class(self, obj):
        """Translated label of the row's concrete subclass."""
        return _(obj.child_class)
    get_child_class.short_description = _(u'Child class')
    get_child_class.admin_order_field = 'child_class'

    def get_model_perms(self, *args, **kwargs):
        # An empty perms dict keeps this model off the admin index page.
        return {}

    def has_add_permission(self, request):
        # Rows are only created through their concrete subclass admins.
        return False

    def change_view(self, request, object_id, form_url='', extra_context=None):
        """
        Redirects to specific child_class admin change form.
        """
        obj = self.queryset(request).get(pk=object_id)
        target = reverse(
            'admin:{}_{}_change'.format(obj._meta.app_label,
                                        obj._meta.module_name),
            args=(obj.pk,))
        return redirect(target)

    def get_list_filter(self, request):
        """Prepend the channel filter to the inherited filter list."""
        inherited = super(HideContainerAdmin, self).list_filter
        return [ChannelListFilter] + list(inherited)

    def queryset(self, request):
        qs = super(HideContainerAdmin, self).queryset(request)
        # TODO: Document this
        excluded = getattr(settings, 'OPPS_CONTAINERS_BLACKLIST', [])
        if excluded:
            qs = qs.exclude(child_class__in=excluded)
        return qs.select_related('main_image')
# Container and Mirror share the read-only "hide" admin; ContainerBox
# gets the full editing admin.
admin.site.register(Container, HideContainerAdmin)
admin.site.register(ContainerBox, ContainerBoxAdmin)
admin.site.register(Mirror, HideContainerAdmin)
| {
"content_hash": "0e6dd403ec2a5b006938fc4d73c65447",
"timestamp": "",
"source": "github",
"line_count": 207,
"max_line_length": 79,
"avg_line_length": 35.10144927536232,
"alnum_prop": 0.6241398293421415,
"repo_name": "jeanmask/opps",
"id": "9e74cc308cd19a6ded5ac663309e8b6e944d1e93",
"size": "7312",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "opps/containers/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "13004"
},
{
"name": "HTML",
"bytes": "56903"
},
{
"name": "JavaScript",
"bytes": "62514"
},
{
"name": "Makefile",
"bytes": "848"
},
{
"name": "Python",
"bytes": "1207954"
},
{
"name": "Shell",
"bytes": "661"
}
],
"symlink_target": ""
} |
"""
Clone an undirected graph. Each node in the graph contains a label and a list of its neighbors.
OJ's undirected graph serialization:
Nodes are labeled uniquely.
We use # as a separator for each node, and , as a separator for node label and each neighbor of the node.
As an example, consider the serialized graph {0,1,2#1,2#2,2}.
The graph has a total of three nodes, and therefore contains three parts as separated by #.
First node is labeled as 0. Connect node 0 to both nodes 1 and 2.
Second node is labeled as 1. Connect node 1 to node 2.
Third node is labeled as 2. Connect node 2 to node 2 (itself), thus forming a self-cycle.
Visually, the graph looks like the following:
1
/ \
/ \
0 --- 2
/ \
\_/
"""
# Definition for a undirected graph node
# class UndirectedGraphNode:
# def __init__(self, x):
# self.label = x
# self.neighbors = []
class Solution:
    # @param node, a undirected graph node
    # @return a undirected graph node
    def cloneGraph(self, node):
        """Deep-copy the undirected graph reachable from *node*.

        Iterative depth-first traversal. ``copies`` maps each visited
        label to its cloned node, so cycles and self-loops terminate.
        """
        if node is None:
            return None
        root_copy = UndirectedGraphNode(node.label)
        copies = {node.label: root_copy}
        pending = [node]
        while pending:
            original = pending.pop()
            clone = copies[original.label]
            for nb in original.neighbors:
                if nb.label not in copies:
                    copies[nb.label] = UndirectedGraphNode(nb.label)
                    pending.append(nb)
                clone.neighbors.append(copies[nb.label])
        return root_copy
"""
# Recursive dfs:
class Solution:
# @param node, a undirected graph node
# @return a undirected graph node
def __init__(self):
self.cloned = {}
def cloneGraph(self, node):
if not node:
return None
else:
return self.dfs(node)
def dfs(self, node):
if not node:
return None
new_node = UndirectedGraphNode(node.label)
self.cloned[node.label] = new_node
for neighbor in node.neighbors:
nb_label = neighbor.label
if nb_label in self.cloned:
new_node.neighbors.append(self.cloned[nb_label])
else:
new_node.neighbors.append(self.dfs(neighbor))
return new_node
"""
| {
"content_hash": "c74f1c1a5f465db451a0610dc876f663",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 109,
"avg_line_length": 30.53012048192771,
"alnum_prop": 0.5840568271507498,
"repo_name": "ufjfeng/leetcode-jf-soln",
"id": "439de7fe8a3a225963bdeb1af872ed0216438711",
"size": "2534",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/133_clone_graph.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "426793"
},
{
"name": "SQLPL",
"bytes": "738"
},
{
"name": "Shell",
"bytes": "1518"
}
],
"symlink_target": ""
} |
import logging
import sys
from cliff.app import App
from cliff.commandmanager import CommandManager
class DemoApp(App):
    """cliff demo application: registers the 'cliff.demo' command
    namespace and logs each lifecycle hook at debug level."""

    log = logging.getLogger(__name__)

    def __init__(self):
        super(DemoApp, self).__init__(
            description='cliff demo app',
            version='0.1',
            command_manager=CommandManager('cliff.demo'),
        )

    def initialize_app(self, argv):
        # cliff lifecycle hook; only logs its invocation.
        self.log.debug('initialize_app')

    def prepare_to_run_command(self, cmd):
        # cliff lifecycle hook; logs which command class is about to run.
        self.log.debug('prepare_to_run_command %s', cmd.__class__.__name__)

    def clean_up(self, cmd, result, err):
        # cliff lifecycle hook; logs the command class and any error.
        self.log.debug('clean_up %s', cmd.__class__.__name__)
        if err:
            self.log.debug('got an error: %s', err)
def main(argv=sys.argv[1:]):
    """Build the demo application and run it with *argv*, returning its
    exit status."""
    return DemoApp().run(argv)
if __name__ == '__main__':
    # Script entry point; exit status comes from the app's run().
    sys.exit(main(sys.argv[1:]))
| {
"content_hash": "23e6461843c7bccce6111dbf4058e3e8",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 75,
"avg_line_length": 23.783783783783782,
"alnum_prop": 0.5829545454545455,
"repo_name": "dreamhost/cliff-tablib",
"id": "85807c8e24734582062fb1f02cc2edea9f16ab7c",
"size": "880",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "demoapp/cliffdemo/main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "34824"
}
],
"symlink_target": ""
} |
"""
@summary:
Log to stdout.
@author: Ruben Reifenberg
"""
import sys
from rrlog import Log
from rrlog import server
from rrlog.server import textwriter
# Module-level output hook; unit tests monkeypatch this to capture output.
print_ = sys.stdout.write  # tests replace print_
def createServer(writer=None, tsFormat=None, filters=None, observers=None, writeMsgid=False, format_line=None):
    """Build a LogServer, creating a default writer when none is given.

    :param tsFormat: timestamp format. See L{rrlog.server.LogServer.__init__}
    :param filters: default = () See L{rrlog.server.LogServer.__init__}
    :param observers: default = () See L{rrlog.server.LogServer.__init__}
    :param format_line: See L{rrlog.server.textwriter.TextlineLogWriter.__init__}
    """
    log_writer = writer if writer is not None else LOGWRITER_CLASS(
        writeMsgid=writeMsgid,
        format_line=format_line,
    )
    return server.LogServer(
        writer=log_writer,
        filters=filters,
        observers=observers,
        tsFormat=tsFormat,
    )
def createLocalLog(
    writer=None,
    filters=None,
    observers=None,
    traceOffset=0,
    tsFormat=None,
    writeMsgid=False,
    stackMax=1,
    catsEnable=None,
    catsDisable=None,
    seFilesExclude=None,
    format_line=None,
    name=None,
    extractStack=True,
    ):
    """
    Create a Log wired to an in-process LogServer, with a default writer
    when none is supplied.

    :param catsEnable: see L{rrlog.Log.__init__}
    :param catsDisable: see L{rrlog.Log.__init__}
    :param seFilesExclude: see L{rrlog.Log.__init__}
    :param filters: see L{rrlog.server.LogServer.__init__}
    :param observers: see L{rrlog.server.LogServer.__init__}
    :param tsFormat: timestamp format. See L{rrlog.server.LogServer.__init__}
        Here, the default is None (shows no time stamps)
    :param stackMax: see L{rrlog.Log.__init__}, default: 1 (==log 1 stack level.)
    :param format_line: See L{rrlog.server.textwriter.TextlineLogWriter.__init__}
    :param name: The log can be identified by its optional name attribute (__repr__ method of the log will use it.)
    :param extractStack: see L{rrlog.Log.__init__}
    :returns: a Log ready to use
    """
    if writer is None:
        writer = LOGWRITER_CLASS(
            writeMsgid=writeMsgid,
            format_line=format_line,
        )
    return Log(
        server = createServer(
            writer=writer,
            tsFormat=tsFormat,
            filters=filters,
            observers=observers,
        ),
        traceOffset=traceOffset,
        stackMax=stackMax,
        catsEnable=catsEnable,
        catsDisable=catsDisable,
        seFilesExclude=seFilesExclude,
        name=name,
        extractStack=extractStack,
    )
class PrintLogWriter(textwriter.TextlineLogWriter):
    """Log writer that prints formatted lines to stdout through the
    module-level ``print_`` hook (which tests replace)."""

    def __init__(self, writeMsgid=False, format_line=None):
        textwriter.TextlineLogWriter.__init__(self, format_line)
        self.writeMsgid = writeMsgid  # prefix lines with "[msgid]" when True

    def _format_line(self, job):
        """
        Default formatting method.
        :rtype: str
        :returns: single log line
        """
        def pre():
            # Optional "[msgid]" prefix.
            if self.writeMsgid: return "[%s]"%(job.msgid)
            else: return ""
        return "%s%s %s %s %s\n"%(
            pre(),
            job.msg,
            self.cfn_cln(job),
            job.pathStr(1),
            job.ts,
        )

    def close(self):
        # Nothing to release for stdout.
        pass

    def writeNow(self, job):
        """
        Write without buffering, return when written
        """
        # NOTE(review): self._format_line is subscripted with [0] and called
        # with a lineCount kwarg, while the method defined above takes
        # neither -- this implies the base class replaces the attribute with
        # a sequence of formatters. Confirm against
        # rrlog.server.textwriter.TextlineLogWriter before touching this.
        print_(
            self._format_line[0](job,lineCount="")
        ) #todo:lineCount="" is a hack (for empty output without linecount)
# Default writer factory used by createServer()/createLocalLog() when the
# caller does not supply a writer.
LOGWRITER_CLASS = PrintLogWriter
| {
"content_hash": "138f43871907b144edaa60756963689a",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 112,
"avg_line_length": 23.045112781954888,
"alnum_prop": 0.6933115823817292,
"repo_name": "shful/python-rrlog",
"id": "45036cf8573873392fe1339dd417e53df902d281",
"size": "4165",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rrlog/server/printwriter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "162448"
}
],
"symlink_target": ""
} |
from flask import Flask, Response
import json
from config import *
import sys
if sys.version_info[0] < 3:
import MySQLdb
else:
import pymysql as MySQLdb
# Flask application object; also exported as ``application`` for WSGI
# servers at the bottom of this module.
app = Flask(__name__)
@app.route("/search/<keyword>/")
@app.route("/search/<keyword>/<encoding>")
def search(keyword=None, encoding=None):
if keyword == None:
return "Pass a keyword"
db = MySQLdb.connect(host=DB_HOST, user=DB_USER, passwd=DB_PASSWORD, db=DB_SCHEME)
cursor = db.cursor()
cursor.execute("SELECT name, quotation FROM quote WHERE quotation LIKE %s", ("%"+keyword+"%"))
rows = cursor.fetchall()
return format(rows, encoding)
@app.route("/quotes/<nick>/")
@app.route("/quotes/<nick>/<encoding>")
def lookupquote(nick=None, encoding=None):
if nick == None:
return "Pass a nick"
db = MySQLdb.connect(host=DB_HOST, user=DB_USER, passwd=DB_PASSWORD, db=DB_SCHEME)
cursor = db.cursor()
cursor.execute("SELECT name, quotation FROM quote WHERE name = %s", [nick])
rows = cursor.fetchall()
return format(rows, encoding)
def format(rows, encoding):
    """Render fetched (name, quotation) rows.

    encoding=None -> IRC-style plain text; encoding="json" -> JSON list.
    Any other encoding value produces no response (behaviour preserved).
    NOTE: intentionally shadows the builtin ``format`` -- callers rely on
    this name.
    """
    mimetype = "application/json" if encoding is not None else "text/plain"
    if not rows:
        if encoding is None:
            body = "No quotes found\n"
        else:
            body = """{"error": "No quotes found"}"""
        return Response(response=body, mimetype=mimetype)
    if encoding is None:
        lines = ["<%s> %s" % (name, decode(quote)) for name, quote in rows]
        return Response(response="\n".join(lines) + "\n", mimetype=mimetype)
    if encoding == "json":
        payload = [{"nick": name, "quote": decode(quote)} for name, quote in rows]
        return Response(response=json.dumps(payload), mimetype=mimetype)
def decode(data):
    """Re-interpret a latin-1-decoded string as UTF-8 text.

    The DB hands back UTF-8 bytes mis-decoded as latin-1; round-tripping
    through latin-1 recovers the original bytes, which are then decoded
    as UTF-8 (undecodable sequences become U+FFFD).
    """
    raw_bytes = data.encode("latin-1")
    return raw_bytes.decode("utf-8", errors="replace")
if __name__ == "__main__":
app.run(host="0.0.0.0", port=5000, debug=True)
else:
application = app
| {
"content_hash": "c5b972265a5bf328c97c755a80c2a037",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 95,
"avg_line_length": 30.112903225806452,
"alnum_prop": 0.6797000535618639,
"repo_name": "Nyubis/bhottu-api",
"id": "38b3b6cd61dba3fc9330bec1a8e270fd586e54dc",
"size": "1886",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1922"
}
],
"symlink_target": ""
} |
from tests.helper.voctomix_test import VoctomixTest
from lib.videomix import VideoMix
class VideomixerSetSources(VoctomixTest):
    """Checks VideoMix source selection, including the A/B swap rule."""

    def setUp(self):
        super().setUp()
        self.mixer = VideoMix()

    def test_can_set_source_a(self):
        self.mixer.setVideoSourceA(42)
        self.assertEqual(self.mixer.sourceA, 42)

    def test_can_set_source_b(self):
        self.mixer.setVideoSourceB(23)
        self.assertEqual(self.mixer.sourceB, 23)

    def test_setting_source_a_swaps_a_and_b_if_required(self):
        # Selecting the current B source as A exchanges the two.
        self.mixer.sourceA = 42
        self.mixer.sourceB = 23
        self.mixer.setVideoSourceA(23)
        self.assertEqual(self.mixer.sourceA, 23)
        self.assertEqual(self.mixer.sourceB, 42)

    def test_setting_source_b_swaps_a_and_b_if_required(self):
        # Selecting the current A source as B exchanges the two.
        self.mixer.sourceA = 13
        self.mixer.sourceB = 78
        self.mixer.setVideoSourceB(13)
        self.assertEqual(self.mixer.sourceA, 78)
        self.assertEqual(self.mixer.sourceB, 13)
| {
"content_hash": "144c1f7c889165349d10fe8601e7e27c",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 62,
"avg_line_length": 31.41176470588235,
"alnum_prop": 0.6900749063670412,
"repo_name": "h01ger/voctomix",
"id": "7b396bf244ad62f9aae353d813e6665326df3d8c",
"size": "1068",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "voctocore/tests/videomix/test_videomixer_set_sources.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "181"
},
{
"name": "Python",
"bytes": "213049"
},
{
"name": "Shell",
"bytes": "21902"
}
],
"symlink_target": ""
} |
from SystemPanic.Core.draw_util import draw_text
def draw_title_screen(game_surface, game_state):
    """Render the title screen: background image plus two text lines."""
    background = game_state["active_config"]["background"]
    game_surface.blit(background, [0, 0])
    garbled = game_state["garbled"]
    draw_text(game_surface, "SYSTEM PANIC!", (160, 120), garbled)
    draw_text(game_surface, "PRESS FIRE TO START", (160, 130), garbled)
def advance_title_screen(paks, game_state, time_since_start, delta_t):
    """Advance past the title screen once fire has been released and
    pressed again.

    The player likely dies with fire held, so require a release first;
    the flag is stored in game_state["mode_specific"]. Returns the
    (possibly replaced) game_state.
    """
    # Only next_level is used; the original import also pulled in the
    # unused change_mode and GAME_MODES names.
    from SystemPanic.Core.game_state import next_level

    if game_state["pressed_buttons"]["fire"] is False:
        game_state["mode_specific"]["fire_released"] = True
    elif game_state["mode_specific"].get("fire_released") is True:
        game_state = next_level(game_state)

    return game_state
| {
"content_hash": "9e394d08f5ef12cd897bb09b033f2168",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 106,
"avg_line_length": 38.72,
"alnum_prop": 0.6838842975206612,
"repo_name": "xaroth8088/SystemPanic",
"id": "687b7ebbc7cf600dfd27f57c98e7741266f69e24",
"size": "968",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SystemPanic/Core/Screens/title.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "245"
},
{
"name": "Python",
"bytes": "147122"
}
],
"symlink_target": ""
} |
"""Usage:
md2pdf <filepath> [-s <stylesheet>] [-o <output>]
md2pdf [-h|-v]
Options:
-h --help show help
-v --version show version
-o <output> output pdf/html path
-s <stylesheet> stylesheet path"""
import sys
from docopt import docopt
from . import __version__
from .generator import generator
def main():
    """CLI entry point: parse arguments via docopt and render the file.

    Exits with the usage text when no filepath was given, with the error
    message when generation fails, and with status 0 on success after
    printing "output to <path> (<seconds>s)".
    """
    args = docopt(__doc__, version=__version__)
    filepath = args['<filepath>']
    stylesheet = args['-s']
    output = args['-o']
    if not filepath:
        sys.exit(__doc__)
    try:
        results = generator.generate(
            filepath, stylesheet=stylesheet, output=output)
    except Exception, e:  # Python 2 syntax; any failure aborts with message
        sys.exit(e)
    # `results` is a (path, seconds) pair, per the format string below.
    message = 'output to %s (%.2fs)\n' % results
    sys.stdout.write(message)
    sys.stdout.flush()
    sys.exit(0)
if __name__ == '__main__':
    # Script execution; the exit status is set inside main().
    main()
| {
"content_hash": "fee5aceba93fc1aa38685ecb1f5eae5c",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 59,
"avg_line_length": 20.463414634146343,
"alnum_prop": 0.5804529201430274,
"repo_name": "hit9/md2pdf",
"id": "a5b774c048eee9d9e20fa3b4107484b6247aae02",
"size": "854",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "md2pdf/cli.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "207"
},
{
"name": "Python",
"bytes": "7033"
}
],
"symlink_target": ""
} |
import requests as req, zipfile, io, markdown2 as md, sqlite3, os, shutil, tarfile

# Page template; %content% is replaced with the rendered markdown body.
html_tmpl = """<html><head><link rel="stylesheet" type="text/css" href="../style.css"/></head><body><section id="tldr"><div id="page">%content%</div></section></body></html>"""
doc_source = "https://github.com/tldr-pages/tldr/archive/master.zip"
docset_path = "tldrpages.docset"
doc_path_contents = docset_path + "/Contents/"
doc_path_resources = docset_path + "/Contents/Resources/"
doc_path = docset_path + "/Contents/Resources/Documents/"
doc_pref = "tldr-master/pages"  # prefix of page files inside the zip

# Start from a clean Documents tree; makedirs also creates the parents.
if os.path.exists(doc_path):
    try: shutil.rmtree(doc_path)
    except OSError as e:
        print("Could not delete dirs " + e.strerror)
        raise SystemExit
os.makedirs(doc_path)

# Download the current tldr-pages archive into memory.
try: r = req.get(doc_source)
except req.exceptions.ConnectionError:
    print("Could not load tldr-pages from " + doc_source)
    raise SystemExit
if r.status_code != 200:
    print("Could not load tldr-pages.")
    raise SystemExit

# (Re)create the Dash search-index database.
db = sqlite3.connect(doc_path_resources+"/"+"docSet.dsidx")
cur = db.cursor()
try: cur.execute('DROP TABLE searchIndex;')
except: pass  # table may not exist yet
cur.execute('CREATE TABLE searchIndex(id INTEGER PRIMARY KEY, name TEXT, type TEXT, path TEXT);')
cur.execute('CREATE UNIQUE INDEX anchor ON searchIndex (name, type, path);')
# Generate tldr pages to HTML documents and register each in the index.
markdowner = md.Markdown()
with zipfile.ZipFile(io.BytesIO(r.content), "r") as archive:
    for path in archive.namelist():
        if path.startswith(doc_pref) and path.endswith(".md"):
            cmd_name = path[path.rfind("/")+1:-3]  # file name minus ".md"
            sub_dir = path[len(doc_pref)+1:path.rfind("/")]  # platform dir
            sub_path = os.path.join(doc_path, sub_dir)
            if not os.path.exists(sub_path):
                try: os.mkdir(sub_path)
                except OSError as e:
                    print("Could not create dir " + e.strerror)
                    raise SystemExit
            # Platform-specific commands are indexed as "<platform> <name>";
            # "common" pages use the bare command name.
            if sub_dir != "common":
                cur.execute('INSERT OR IGNORE INTO searchIndex(name, type, path) VALUES (?,?,?)', (sub_dir + " " + cmd_name, 'Command', sub_dir+'/'+cmd_name+".html"))
            else:
                cur.execute('INSERT OR IGNORE INTO searchIndex(name, type, path) VALUES (?,?,?)', (cmd_name, 'Command', sub_dir+'/'+cmd_name+".html"))
            doc = markdowner.convert(archive.read(path))
            doc = html_tmpl.replace("%content%", doc)
            with open(os.path.join(doc_path, path[len(doc_pref)+1:].replace(".md", ".html")), "wb") as html:
                html.write(doc.encode("utf-8"))
db.commit()
db.close()
# Generate tldr pages index.html: one section per platform directory,
# listing every generated page.
with open(os.path.join(doc_path, "index.html"), "w+") as html:
    html.write('<html><head></head><body><h1>TLDR pages Docset</h1><br/>powered by <a href="http://tldr-pages.github.io">tldr-pages.github.io/</a>')
    for dir in os.listdir(doc_path):
        if os.path.isdir(os.path.join(doc_path, dir)):
            html.write("<h2>%s</h2><ul>" % dir)
            # f[:-5] strips ".html" for the link label.
            html.writelines(['<li><a href="%s/%s">%s</a></li>' % (dir, f, f[:-5]) for f in os.listdir(os.path.join(doc_path, dir))])
            html.write("</ul>")
    html.write('</body></html>')
# copy static content: stylesheet, docset metadata and icons.
shutil.copyfile("static/style.css", doc_path+"/style.css")
shutil.copyfile("static/Info.plist", doc_path_contents+"/Info.plist")
shutil.copyfile("static/icon.png", docset_path+"/icon.png")
shutil.copyfile("static/icon@2x.png", docset_path+"/icon@2x.png")
# Package the docset as a gzip-compressed tarball. BUG FIX: the ".tgz"
# extension implies gzip, but mode "w" writes an uncompressed tar; use
# "w:gz" so consumers of the archive get what the name promises.
with tarfile.open("tldr_pages.tgz", "w:gz") as docset:
    docset.add(docset_path)
| {
"content_hash": "ce924f0d6bf869e7a78b60b7ba350a29",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 176,
"avg_line_length": 44.03703703703704,
"alnum_prop": 0.6232127838519764,
"repo_name": "Moddus/tldr-python-dash-docset",
"id": "d4076ad3335d8ed809363891161fe8b77ae99311",
"size": "3591",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "generator.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1850"
},
{
"name": "Python",
"bytes": "3591"
}
],
"symlink_target": ""
} |
TAB = '\t'
# Sample patterns from earlier experiments; the active pattern is below.
# regexp = "((ac|bc)*ad+)#"
# regexp = "((a(b|c))*c)#"
# regexp = "(1+|1*01(11|01)+)#"
# regexp = "((a|b|c|d|e|f|g|h|i|j|k|l|m|n|o|p|q|r|s|t|u|v|w|x|y|z)" \
#          "(a|b|c|d|e|f|g|h|i|j|k|l|m|n|o|p|q|r|s|t|u|v|w|x|y|z|0|1|2|3|4|5|6|7|8|9)*)#"
# regexp = "(a*b|b*(ab+|bc))#"
# regexp = "((1?1?0)*1?)#"  # Lab 1, task 2
# regexp = "(2\\*2\\=1\\+1\\+1\\+1)#"
# regexp = "(\".+\")#"
regexp = "(n..b)#"  # pattern under test; trailing '#' is the end marker
current = 0          # parser cursor into `regexp`
curposnum = 1        # next leaf position number to assign
hashposnum = 0       # position assigned to the trailing '#'
followpostable = {}  # position -> [symbol, followpos set]
anykeytable = set()  # positions holding the '.' ("<@>") wildcard
dfa = {}             # frozenset(positions) -> transition dict
class Node:
    """Base class for regexp syntax-tree nodes.

    The concrete subclass (BinaryNode/UnaryNode/Leaf) decides which child
    slots exist. Each node carries the followpos-construction attributes:
    n (nullable), f (firstpos), l (lastpos), position (leaf number).
    """

    def __init__(self, symbol, scr=None, ak=None):
        # Child slots depend on the concrete subclass being constructed.
        if isinstance(self, BinaryNode):
            self.lchild = None
            self.rchild = None
        elif isinstance(self, UnaryNode):
            self.child = None
        elif isinstance(self, Leaf):
            pass
        self.screened = scr    # True when the symbol was escaped with '\'
        self.anykey = ak       # True for the '.' wildcard leaf
        self.data = symbol
        self.position = None   # leaf position number, assigned by setp()
        self.n = False         # nullable flag
        self.f = set()         # firstpos set
        self.l = set()         # lastpos set

    def setp(self):
        """Assign the next position number to operand (non-operator or
        escaped) nodes and register them in the global followpos table."""
        if self.screened or\
                (self.data != "|" and self.data != "." and self.data != "*" and self.data != "+" and self.data != "?"):
            global curposnum, followpostable, anykeytable
            self.position = curposnum
            followpostable[curposnum] = [self.data, set()]
            if self.anykey:
                assert isinstance(anykeytable, set)
                anykeytable.add(self.position)
            curposnum += 1

    def nullable(self):
        """Set self.n: whether this subtree can match the empty string."""
        if self.data == "|" and not self.screened:
            self.n = self.lchild.n or self.rchild.n
        elif self.data == "." and not self.screened:
            self.n = self.lchild.n and self.rchild.n
        elif (self.data == "*" or self.data == "?") and not self.screened:
            self.n = True
        elif self.data == "+" and not self.screened:
            self.n = False
        else:
            self.n = False

    def firstpos(self):
        """Set self.f: positions that can match the first input symbol."""
        if self.data == "|" and not self.screened:
            self.f.update(self.lchild.f, self.rchild.f)
        elif self.data == "." and not self.screened:
            if self.lchild.n:
                self.f.update(self.lchild.f, self.rchild.f)
            else:
                self.f.update(self.lchild.f)
        elif (self.data == "*" or self.data == "+" or self.data == "?") and not self.screened:
            self.f.update(self.child.f)
        else:
            self.f = {self.position}

    def lastpos(self):
        """Set self.l: positions that can match the last input symbol."""
        if self.data == "|" and not self.screened:
            self.l.update(self.lchild.l | self.rchild.l)
        elif self.data == "." and not self.screened:
            if self.rchild.n:
                self.l.update(self.lchild.l | self.rchild.l)
            else:
                self.l.update(self.rchild.l)
        elif (self.data == "*" or self.data == "+" or self.data == "?") and not self.screened:
            self.l.update(self.child.l)
        else:
            self.l = {self.position}

    def followpos(self):
        """Update the global followpos table from this node.

        Concatenation: firstpos(right) follows every lastpos(left) position.
        '*'/'+': firstpos(child) follows every lastpos(child) position.
        """
        global followpostable
        if self.getdata() == '.' and not self.screened:
            for one in self.lchild.l:
                assert isinstance(followpostable, dict)
                followpostable[one][1].update(self.rchild.f)
        elif (self.getdata() == '*' or self.getdata() == '+') and not self.screened:
            for one in self.child.l:
                assert isinstance(followpostable, dict)
                followpostable[one][1].update(self.child.f)

    def getdata(self):
        # Accessor for the node's symbol.
        return self.data
class UnaryNode(Node):
    """Node with one child: the postfix operators '*', '+', '?'.

    The postorderwalk_* methods visit the child, then apply the matching
    Node pass to this node. (`if self:` is always true for an instance;
    kept as written.)
    """

    def setc(self, obj):
        # Attach the single child subtree.
        self.child = obj

    def printtree(self, h):
        # Indented dump of this subtree, h tabs deep (Python 2 print).
        print TAB * h + self.getdata()
        if self.child:
            self.child.printtree(h + 1)

    def postorderwalk_n(self):
        # Post-order nullable() pass.
        if self:
            self.child.postorderwalk_n()
            self.nullable()

    def postorderwalk_f(self):
        # Post-order firstpos() pass.
        if self:
            self.child.postorderwalk_f()
            self.firstpos()

    def postorderwalk_l(self):
        # Post-order lastpos() pass.
        if self:
            self.child.postorderwalk_l()
            self.lastpos()

    def postorderwalk_p(self):
        # Post-order position-numbering pass.
        if self:
            self.child.postorderwalk_p()
            self.setp()

    def postorderwalk_w(self):
        # Post-order followpos() pass.
        if self:
            self.child.postorderwalk_w()
            self.followpos()
class Leaf(Node):
    """Terminal node: a literal, an escaped symbol, or the '<@>' wildcard.

    The postorderwalk_* methods apply the matching Node pass to this node
    only. (`if self:` is always true for an instance; kept as written.)
    """

    def printtree(self, h):
        # Indented dump of this leaf (Python 2 print).
        print TAB * h + self.getdata()

    def postorderwalk_n(self):
        if self:
            self.nullable()

    def postorderwalk_f(self):
        if self:
            self.firstpos()

    def postorderwalk_l(self):
        if self:
            self.lastpos()

    def postorderwalk_p(self):
        if self:
            self.setp()

    def postorderwalk_w(self):
        if self:
            self.followpos()
class BinaryNode(Node):
    """Node with two children: the operators '|' and '.' (concatenation).

    The postorderwalk_* methods visit left, then right, then apply the
    matching Node pass to this node. (`if self:` is always true for an
    instance; kept as written.)
    """

    def setl(self, obj):
        # Attach the left child subtree.
        self.lchild = obj

    def setr(self, obj):
        # Attach the right child subtree.
        self.rchild = obj

    def printtree(self, h):
        # Dump right-root-left so the tree reads sideways (Python 2 print).
        if self.rchild:
            self.rchild.printtree(h + 1)
        print TAB * h + self.getdata()
        if self.lchild:
            self.lchild.printtree(h + 1)

    def postorderwalk_n(self):
        if self:
            self.lchild.postorderwalk_n()
            self.rchild.postorderwalk_n()
            self.nullable()

    def postorderwalk_f(self):
        if self:
            self.lchild.postorderwalk_f()
            self.rchild.postorderwalk_f()
            self.firstpos()

    def postorderwalk_l(self):
        if self:
            self.lchild.postorderwalk_l()
            self.rchild.postorderwalk_l()
            self.lastpos()

    def postorderwalk_p(self):
        if self:
            self.lchild.postorderwalk_p()
            self.rchild.postorderwalk_p()
            self.setp()

    def postorderwalk_w(self):
        if self:
            self.lchild.postorderwalk_w()
            self.rchild.postorderwalk_w()
            self.followpos()
def createunode(symbol, node):
    """Build a unary operator node ('*', '+', '?') wrapping *node*."""
    wrapper = UnaryNode(symbol)
    wrapper.setc(node)
    return wrapper
def createleaf(symbol, scrnd, ak):
    """Build a leaf node; *scrnd* marks escaped literals, *ak* the '.'
    wildcard."""
    return Leaf(symbol, scrnd, ak)
def createbinode(symbol, leftnode, rightnode):
    """Build a binary operator node ('|' or '.') over two subtrees."""
    joined = BinaryNode(symbol)
    joined.setl(leftnode)
    joined.setr(rightnode)
    return joined
def makeor(terms):
    """Fold the '|'-separated alternatives into a left-deep '|' tree."""
    result = makeand(terms[0])
    for alternative in terms[1:]:
        result = createbinode('|', result, makeand(alternative))
    return result
def makeand(term):
    """Fold a factor list into a left-deep '.' (concatenation) tree."""
    result = term[0]
    for factor in term[1:]:
        result = createbinode('.', result, factor)
    return result
def parse():
    """Recursive-descent parser over the global `regexp`, driven by the
    global cursor `current`; stops at ')' or end of pattern and returns
    the syntax-tree root.

    `terms` collects '|'-separated alternatives; `term` collects the
    factors of the current alternative.
    """
    global regexp, current
    terms = []
    term = []
    while current < len(regexp) and regexp[current] != ')':
        if regexp[current] == '(':
            current += 1
            term.append(parse())  # recurse into the group
        elif regexp[current] == '|':
            # Close the current alternative, start a new one.
            terms.append(term)
            term = []
        elif regexp[current] == '*' or regexp[current] == '+' or regexp[current] == '?':
            # Postfix operator binds to the previous factor.
            term.append(createunode(regexp[current], term.pop()))
        elif regexp[current] == '\\':
            # Escaped symbol: treat the next character as a literal.
            current += 1
            term.append(createleaf(regexp[current], True, False))
        elif regexp[current] == '.':
            # Any-character wildcard, represented internally as "<@>".
            term.append(createleaf("<@>", False, True))
        else:
            term.append(createleaf(regexp[current], False, False))
        current += 1
    terms.append(term)
    return makeor(terms)
def calc(root):
    """Run all followpos-construction passes over the finished tree."""
    root.postorderwalk_p()      # number the leaf positions
    global curposnum, hashposnum
    hashposnum = curposnum - 1  # last position assigned == the '#' marker
    root.postorderwalk_n()      # nullable
    root.postorderwalk_f()      # firstpos
    root.postorderwalk_l()      # lastpos
    root.postorderwalk_w()      # followpos
def dfabuild(posset):
    """Recursively extend the global `dfa` table starting from the DFA
    state `posset` (a frozenset of leaf positions).

    Per state: key 0 holds the accepting flag (the '#' position is in the
    set), key 1 holds the wildcard ('<@>') transition, and each literal
    symbol maps to the union of followpos sets of matching positions.
    """
    global dfa, followpostable, hashposnum
    if posset not in dfa:
        dfa[posset] = dict()
        for one in posset:
            symbol = followpostable[one][0]
            if symbol != '#':
                # Fresh copy so later updates never alias the table entry.
                symstate = set().union(followpostable[one][1])
                if symbol not in dfa[posset] and symbol != '<@>':
                    dfa[posset][symbol] = symstate
                elif symbol == '<@>':
                    # Wildcard transition only if a wildcard position is
                    # actually present in this state.
                    temp = anykeytable & posset
                    if any(temp):
                        dfa[posset][1] = symstate
                else:
                    dfa[posset][symbol].update(symstate)
        if hashposnum in posset:
            dfa[posset][0] = True
        else:
            dfa[posset][0] = False
        # Recurse into every non-empty successor state not yet built.
        for one in dfa[posset]:
            if one != 0:
                if dfa[posset][one]:
                    temp = frozenset().union(dfa[posset][one])
                    if temp not in dfa:
                        dfabuild(temp)
def dfareader(stt, state, word):
    """Simulate the DFA `stt` on `word`, starting from `state`.

    Literal transitions take precedence over the wildcard transition
    (stored under key 1). Prints the verdict (Python 2 print).
    """
    assert isinstance(word, str)
    for symbol in word:
        if symbol in stt[state]:
            state = frozenset().union(stt[state][symbol])
        elif 1 in stt[state] and stt[state][1]:
            # No literal match; fall back to the wildcard transition.
            state = frozenset().union(stt[state][1])
        else:
            print "Invalid word/expression!"
            return
    if stt[state][0]:
        print "The word fits."
        return
    else:
        print "Last terminal isn't finite!"
        return
def unittest():
    """Ad-hoc driver: parse the module-level pattern, dump diagnostics,
    build the DFA and test one user-supplied word (Python 2 prints)."""
    test = parse()
    calc(test)
    test.printtree(0)
    print "Hash is at position ", hashposnum
    print followpostable
    global dfa
    ffs = frozenset().union(test.f)  # start state = firstpos(root)
    dfabuild(ffs)
    print "========================"
    print dfa.keys()
    for one in dfa:
        print "========================"
        print dfa[one]
    print "========================"
    print
    # NOTE(review): Python 2 input() evaluates the entry, so the word must
    # be typed with quotes (e.g. "noob").
    w = input("Enter a word: ")
    dfareader(dfa, ffs, w)


# Run the driver when the module is executed/imported.
unittest()
| {
"content_hash": "a17c84b389a20307afc466601305cab6",
"timestamp": "",
"source": "github",
"line_count": 344,
"max_line_length": 119,
"avg_line_length": 27.59011627906977,
"alnum_prop": 0.5280792329575387,
"repo_name": "Katsutami7moto/study",
"id": "d8e670ab54c190a07247d60efc993d5468376027",
"size": "9941",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dfa_regexp/regexp_dfa_parsing.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "24544"
},
{
"name": "Python",
"bytes": "74579"
},
{
"name": "QMake",
"bytes": "1064"
}
],
"symlink_target": ""
} |
from contextlib import contextmanager
from datetime import datetime, timedelta
from fewerror.state import State
class Now:
    """Controllable clock for tests: captures the real time once at
    construction and only moves when advance() is called. Instances are
    callable, mimicking ``datetime.now``."""

    def __init__(self):
        self.now = datetime.now()

    def advance(self, td):
        """Move the clock forward by timedelta *td*."""
        self.now = self.now + td

    def __call__(self):
        return self.now
@contextmanager
def roundtripped_state(tmpdir, per_word_timeout_seconds=-1):
    """Yield a fresh (State, clock) pair; on exit, reload the state from
    disk and assert the persisted copy equals the in-memory one."""
    state_dir = str(tmpdir)
    clock = Now()

    def load():
        return State.load(
            "test", state_dir,
            per_word_timeout_seconds=per_word_timeout_seconds,
            now=clock)

    state = load()
    yield state, clock
    reloaded = load()
    assert state == reloaded
def test_str(tmpdir):
    """The human-readable form of a fresh State mentions a zero count."""
    with roundtripped_state(tmpdir) as (state, _clock):
        assert ' 0 ' in str(state)
def test_reply_once(tmpdir):
    """With rate limiting disabled, a status is answered at most once,
    while the same word may be answered again in any other status."""
    with roundtripped_state(tmpdir) as (state, _clock):
        # Checking is side-effect free: asking twice is still allowed.
        for _ in range(2):
            assert state.can_reply(123, ['blood'])
        state.record_reply(123, ['blood'], 124)
        # Never reply twice to the same status, whatever the word is.
        assert not state.can_reply(123, ['blood'])
        assert not state.can_reply(123, ['annoying'])
        # Other statuses are fair game immediately, even for the same word,
        # because rate limiting is off here.
        assert state.can_reply(456, ['blood'])
        assert state.can_reply(456, ['annoying'])
def test_word_rate_limit(tmpdir):
    """A 30-second per-word timeout embargoes a recently answered word
    until the fake clock moves past the timeout."""
    with roundtripped_state(tmpdir, per_word_timeout_seconds=30) as (state, clock):
        assert state.can_reply(123, ['blood'])
        state.record_reply(123, ['blood'], 124)
        assert not state.can_reply(123, ['blood'])
        # The word is embargoed in any new status, in any position...
        for words in (['blood'], ['blood', 'annoying'], ['annoying', 'blood']):
            assert not state.can_reply(456, words)
        # ...but an unrelated word goes through.
        assert state.can_reply(789, ['annoying'])
        clock.advance(timedelta(seconds=31))
        # Once the timeout has elapsed the word is available again.
        assert state.can_reply(456, ['blood'])
        assert state.can_reply(789, ['annoying'])
| {
"content_hash": "1c304c38985b9f0aaccf41be9ec43a3f",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 96,
"avg_line_length": 27.47887323943662,
"alnum_prop": 0.5981547924141466,
"repo_name": "wjt/fewerror",
"id": "3d95fd1afe8ebec66747e3311294000c9ab5a4ee",
"size": "1973",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_state.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "162"
},
{
"name": "Python",
"bytes": "63927"
},
{
"name": "Shell",
"bytes": "403"
}
],
"symlink_target": ""
} |
from flask import make_response, jsonify, abort, current_app
from werkzeug.http import HTTP_STATUS_CODES
import sys
import os
import traceback
def _make_json_response(response, code=200):
    """Serialize `response` to a JSON Flask response with HTTP status `code`.

    For error payloads (non-200) with no explicit error type set, fill in
    the standard reason phrase for the status code (e.g. 404 -> "Not Found").
    """
    # BUG FIX: the original used `code is not 200`, which compares an int by
    # identity and only works by accident of CPython's small-int caching
    # (CPython >= 3.8 emits a SyntaxWarning for it).  Compare by value.
    if code != 200 and not response['errors']['type']:
        response['errors']['type'] = HTTP_STATUS_CODES[code]
    return make_response(jsonify(response), code)
def make_success_resp(msg=None):
    """Return a 200 JSON response signalling success, with an optional message."""
    return _make_json_response({'success': True, 'message': msg or ''})
def make_data_resp(data, msg=None):
    """Return a 200 JSON response carrying `data` and an optional message."""
    payload = {
        'success': True,
        'data': data,
        'message': msg or '',
    }
    return _make_json_response(payload)
def make_error_resp(msg, type=None, code=400):
    """Return a JSON error response with HTTP status `code` (default 400).

    NOTE: `type` shadows the builtin but is part of the public keyword API.
    """
    errors = {
        'message': msg or "Something is wrong!",
        'type': type,
        'more info': '',
    }
    return _make_json_response({'errors': errors}, code)
def make_form_error_resp(form, msg=None):
    """Return a 422 response describing `form` validation failures.

    Falls back to the form's own error dict when no message is given.
    """
    return make_error_resp(msg=msg or form.errors,
                           type='Form validation error',
                           code=422)
def make_exception_resp(exception, type=None, code=500):
    """Build a 422 JSON response for the exception currently being handled.

    In DEBUG mode the response body exposes file, line number and traceback;
    otherwise the detail is logged as critical and a generic message is
    returned.

    NOTE(review): the `exception` and `code` arguments are accepted for API
    compatibility but unused here — the active exception is taken from
    sys.exc_info() and the response status is always 422.
    """
    exc_type, _exc_obj, exc_tb = sys.exc_info()
    filename = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
    detail = "Exception: %s: %s: %s %s" % (
        exc_type, filename, exc_tb.tb_lineno, traceback.format_exc())
    if current_app.config['DEBUG']:
        # Developers get the full detail in the response body.
        return make_error_resp(msg=detail, type=type, code=422)
    current_app.logger.critical('Exception caught: %s' % detail)
    return make_error_resp(msg="Internal Server Error. Report this problem!",
                           type=type, code=422)
| {
"content_hash": "8420e177160deb1e2657563aeee0220a",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 81,
"avg_line_length": 31.21875,
"alnum_prop": 0.6141141141141141,
"repo_name": "asnorkin/sentiment_analysis",
"id": "204c467547abf51224c5ccf071e96b4848c8f8a7",
"size": "1998",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/common/response.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "157128"
},
{
"name": "C++",
"bytes": "171536"
},
{
"name": "CSS",
"bytes": "6270"
},
{
"name": "Fortran",
"bytes": "14107"
},
{
"name": "HTML",
"bytes": "20857"
},
{
"name": "JavaScript",
"bytes": "14146"
},
{
"name": "MATLAB",
"bytes": "4346"
},
{
"name": "Makefile",
"bytes": "214"
},
{
"name": "Python",
"bytes": "30022411"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
} |
"""
The task plugin.
"""
import cli.http as http
from cli.exceptions import CLIException
from cli.plugins import PluginBase
from cli.util import Table
# Plugin identity and help-text constants — presumably read by the CLI's
# plugin loader (see cli.plugins.PluginBase); confirm against the loader.
PLUGIN_NAME = "task"
PLUGIN_CLASS = "Task"
VERSION = "Mesos CLI Task Plugin"
SHORT_HELP = "Interacts with the tasks running in a Mesos cluster"
class Task(PluginBase):
    """
    The task plugin.
    """

    # Command metadata consumed by the CLI dispatcher.
    COMMANDS = {
        "list": {
            "arguments": [],
            "flags": {},
            "short_help": "List all active tasks in a Mesos cluster",
            "long_help": "List all active tasks in a Mesos cluster"
        }
    }

    def list(self, argv):
        """
        List the tasks running in a cluster by checking the /tasks endpoint.
        """
        # pylint: disable=unused-argument
        try:
            master = self.config.master()
        except Exception as exc:
            raise CLIException("Unable to get leading master address: {error}"
                               .format(error=exc))

        try:
            tasks = http.get_json(master, "tasks")["tasks"]
        except Exception as exc:
            raise CLIException("Could not open '/tasks'"
                               " endpoint at '{addr}': {error}"
                               .format(addr=master, error=exc))

        if not tasks:
            print("There are no tasks running in the cluster.")
            return

        try:
            table = Table(["Task ID", "Framework ID", "Executor ID"])
            for task in tasks:
                row = [task["id"], task["framework_id"], task["executor_id"]]
                table.add_row(row)
        except Exception as exc:
            raise CLIException("Unable to build table of tasks: {error}"
                               .format(error=exc))

        print(str(table))
| {
"content_hash": "1ec321b17ce0c182114925f716db3b08",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 78,
"avg_line_length": 28.93846153846154,
"alnum_prop": 0.5279106858054227,
"repo_name": "verizonlabs/mesos",
"id": "cc6cff56c71262729a8870017bef2e97636abe5a",
"size": "2666",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/python/cli_new/lib/cli/plugins/task/main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "94"
},
{
"name": "Batchfile",
"bytes": "7980"
},
{
"name": "C++",
"bytes": "11049692"
},
{
"name": "CMake",
"bytes": "76942"
},
{
"name": "CSS",
"bytes": "6958"
},
{
"name": "HTML",
"bytes": "86859"
},
{
"name": "Java",
"bytes": "142025"
},
{
"name": "JavaScript",
"bytes": "1015168"
},
{
"name": "M4",
"bytes": "175144"
},
{
"name": "Makefile",
"bytes": "106936"
},
{
"name": "PowerShell",
"bytes": "2547"
},
{
"name": "Protocol Buffer",
"bytes": "473051"
},
{
"name": "Python",
"bytes": "250931"
},
{
"name": "Ruby",
"bytes": "9164"
},
{
"name": "Shell",
"bytes": "121093"
}
],
"symlink_target": ""
} |
"""Test StateVectorSimulatorPy."""
import unittest
import numpy as np
from qiskit.providers.basicaer import StatevectorSimulatorPy
from qiskit.test import ReferenceCircuits
from qiskit.test import providers
from qiskit import QuantumRegister, QuantumCircuit, execute
from qiskit.quantum_info.random import random_unitary
from qiskit.quantum_info import state_fidelity
class StatevectorSimulatorTest(providers.BackendTestCase):
    """Test BasicAer statevector simulator."""

    backend_cls = StatevectorSimulatorPy
    # Each test assigns its circuit here before delegating to the base
    # class's test_run_circuit, which executes self.circuit on the backend.
    circuit = None

    def test_run_circuit(self):
        """Test final state vector for single circuit run."""
        # Set test circuit
        self.circuit = ReferenceCircuits.bell_no_measure()
        # Execute
        result = super().test_run_circuit()
        actual = result.get_statevector(self.circuit)

        # state is 1/sqrt(2)|00> + 1/sqrt(2)|11>, up to a global phase
        self.assertAlmostEqual((abs(actual[0])) ** 2, 1 / 2)
        self.assertEqual(actual[1], 0)
        self.assertEqual(actual[2], 0)
        self.assertAlmostEqual((abs(actual[3])) ** 2, 1 / 2)

    def test_measure_collapse(self):
        """Test final measurement collapses statevector"""
        # Set test circuit
        self.circuit = ReferenceCircuits.bell()
        # Execute
        result = super().test_run_circuit()
        actual = result.get_statevector(self.circuit)

        # The final state should be EITHER |00> OR |11>
        diff_00 = np.linalg.norm(np.array([1, 0, 0, 0]) - actual) ** 2
        diff_11 = np.linalg.norm(np.array([0, 0, 0, 1]) - actual) ** 2
        success = np.allclose([diff_00, diff_11], [0, 2]) or np.allclose([diff_00, diff_11], [2, 0])
        self.assertTrue(success)

    def test_unitary(self):
        """Test unitary gate instruction"""
        num_trials = 10
        max_qubits = 3
        # Test 1 to max_qubits for random n-qubit unitary gate
        for i in range(max_qubits):
            num_qubits = i + 1
            psi_init = np.zeros(2**num_qubits)
            psi_init[0] = 1.0
            qr = QuantumRegister(num_qubits, "qr")
            for _ in range(num_trials):
                # Create random unitary
                unitary = random_unitary(2**num_qubits)
                # Compute expected output state
                psi_target = unitary.data.dot(psi_init)
                # Simulate output on circuit
                circuit = QuantumCircuit(qr)
                circuit.unitary(unitary, qr)
                job = execute(circuit, self.backend)
                result = job.result()
                psi_out = result.get_statevector(0)
                fidelity = state_fidelity(psi_target, psi_out)
                self.assertGreater(fidelity, 0.999)

    def test_global_phase(self):
        """Test that a circuit's global_phase reaches the output statevector."""
        n_qubits = 4
        qr = QuantumRegister(n_qubits)
        circ = QuantumCircuit(qr)
        circ.x(qr)
        circ.global_phase = 0.5
        self.circuit = circ
        result = super().test_run_circuit()
        actual = result.get_statevector(self.circuit)
        # X on every qubit maps |0...0> to |1...1>, so the single nonzero
        # amplitude is the last of the 2**n_qubits entries, carrying the
        # global phase.  BUG FIX: the original wrote `n_qubits**2 - 1`,
        # which equals 2**n_qubits - 1 only for n_qubits in {2, 4} and is
        # wrong for any other size.
        expected = np.exp(1j * circ.global_phase) * np.repeat([[0], [1]], [2**n_qubits - 1, 1])
        self.assertTrue(np.allclose(actual, expected))

    def test_global_phase_composite(self):
        """Test global_phase when the phased circuit is wrapped in a gate."""
        n_qubits = 4
        qr = QuantumRegister(n_qubits)
        circ = QuantumCircuit(qr)
        circ.x(qr)
        circ.global_phase = 0.5
        gate = circ.to_gate()

        comp = QuantumCircuit(qr)
        comp.append(gate, qr)
        comp.global_phase = 0.1
        self.circuit = comp
        result = super().test_run_circuit()
        actual = result.get_statevector(self.circuit)
        # Phases compose additively: 0.5 (inner gate) + 0.1 (outer circuit).
        # Same 2**n_qubits vector-length fix as in test_global_phase above.
        expected = np.exp(1j * 0.6) * np.repeat([[0], [1]], [2**n_qubits - 1, 1])
        self.assertTrue(np.allclose(actual, expected))
if __name__ == "__main__":
    # Allow running this test module directly with the unittest runner.
    unittest.main()
| {
"content_hash": "717ec8872cbe80ebae28b2499c18f43b",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 100,
"avg_line_length": 37.00934579439252,
"alnum_prop": 0.5936868686868687,
"repo_name": "QISKit/qiskit-sdk-py",
"id": "15561261cbf82c2179133db9f848edb3421f7842",
"size": "4443",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "test/python/basicaer/test_statevector_simulator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "2582"
},
{
"name": "C++",
"bytes": "327518"
},
{
"name": "CMake",
"bytes": "19294"
},
{
"name": "Makefile",
"bytes": "5608"
},
{
"name": "Pascal",
"bytes": "2444"
},
{
"name": "Python",
"bytes": "1312801"
},
{
"name": "Shell",
"bytes": "8385"
}
],
"symlink_target": ""
} |
import os
import setuptools # type: ignore
from typing import List
VERSION = '0.4.1'
def strip_comments(line: str) -> str:
    """Return `line` with any '#' comment removed and whitespace trimmed.

    Pip index lines (starting with '-i ') are dropped entirely by
    returning an empty string.
    """
    if line.startswith('-i '):
        return ''
    requirement, _sep, _comment = line.partition('#')
    return requirement.strip()
def req(filename: str) -> List[str]:
    """Read a pip requirements file (relative to the CWD) and return its
    requirement strings, with comments, index lines and blanks removed.

    Order is not preserved and duplicates are collapsed (set-based).
    """
    path = os.path.join(os.getcwd(), filename)
    with open(path) as fp:
        requires = {strip_comments(line) for line in fp}
    # Blank results come from comment-only, index and empty lines.
    requires.discard('')
    return list(requires)
# Keyword arguments for setuptools.setup(), kept in a dict so the metadata
# can be inspected without immediately triggering a build.
setup_params = dict(
    name="sierrapy",
    version=VERSION,
    url="https://github.com/hivdb/sierra-client/tree/master/python",
    author='Philip Tzou',
    author_email="philiptz@stanford.edu",
    description='A Client of HIVdb Sierra GraphQL Webservice.',
    packages=['sierrapy',
              'sierrapy/fragments',
              'sierrapy/recipes',
              'sierrapy/commands',
              'sierrapy/viruses'],
    # Runtime dependencies are read from requirements.txt at build time.
    install_requires=req('requirements.txt'),
    # tests_require=reqs('test-requirements.txt'),
    include_package_data=True,
    # Installs the `sierrapy` console command.
    entry_points={'console_scripts': [
        'sierrapy = sierrapy.cmds:main',
    ]},
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'Topic :: Scientific/Engineering :: Bio-Informatics'],
    # test_suite="nose.collector",
    zip_safe=True)
if __name__ == '__main__':
    # Run the build/installation only when executed as a script.
    setuptools.setup(**setup_params)
| {
"content_hash": "1d01e03efbf8f3393ed895bb63aa92bb",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 69,
"avg_line_length": 30.526315789473685,
"alnum_prop": 0.6045977011494252,
"repo_name": "hivdb/sierra-client",
"id": "bab6a920ed03315733916c68b4d7edf74c08a827",
"size": "1788",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "194"
},
{
"name": "Python",
"bytes": "57265"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import patterns, url
from .views import IndexView, DetailView
# URL routes for the volumes panel: an index listing and a per-volume
# detail page keyed by volume_id.
# NOTE(review): `django.conf.urls.defaults` and `patterns()` are from old
# Django releases — confirm against the project's pinned Django version.
urlpatterns = patterns('',
    url(r'^$', IndexView.as_view(), name='index'),
    url(r'^(?P<volume_id>[^/]+)/$', DetailView.as_view(), name='detail'),
)
| {
"content_hash": "1c3e1c433703d14415d5cbd835f1ea53",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 73,
"avg_line_length": 31.125,
"alnum_prop": 0.6546184738955824,
"repo_name": "1ukash/horizon",
"id": "f42c753f0930f12008a5581d4f685695998392ad",
"size": "249",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "horizon/dashboards/admin/volumes/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "234763"
},
{
"name": "Python",
"bytes": "1189371"
},
{
"name": "Shell",
"bytes": "12326"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.