text
stringlengths
4
1.02M
meta
dict
import time last = 0 def inittime(): global last last = time.time() def timecheck(label, echo=True): global last now = time.time() diff = now - last if echo: print(label, '%.2f ms' % (diff * 1000)) last = now return diff * 1000
{ "content_hash": "7e852e4e34289b374455b0ce5794c96a", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 47, "avg_line_length": 16.9375, "alnum_prop": 0.5682656826568265, "repo_name": "hughperkins/gpu-experiments", "id": "9290a36cde2ec0c541e7b291b0573c3c0933e0ed", "size": "271", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "gpuexperiments/timecheck.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "C", "bytes": "20127" }, { "name": "Python", "bytes": "162953" }, { "name": "Shell", "bytes": "777" } ], "symlink_target": "" }
from .constant import Constant __NR_exit = Constant("__NR_exit", 1) __NR_fork = Constant("__NR_fork", 2) __NR_read = Constant("__NR_read", 3) __NR_write = Constant("__NR_write", 4) __NR_open = Constant("__NR_open", 5) __NR_close = Constant("__NR_close", 6) __NR_waitpid = Constant("__NR_waitpid", 7) __NR_creat = Constant("__NR_creat", 8) __NR_link = Constant("__NR_link", 9) __NR_unlink = Constant("__NR_unlink", 10) __NR_execve = Constant("__NR_execve", 11) __NR_chdir = Constant("__NR_chdir", 12) __NR_time = Constant("__NR_time", 13) __NR_mknod = Constant("__NR_mknod", 14) __NR_chmod = Constant("__NR_chmod", 15) __NR_lchown = Constant("__NR_lchown", 16) __NR_break = Constant("__NR_break", 17) __NR_oldstat = Constant("__NR_oldstat", 18) __NR_lseek = Constant("__NR_lseek", 19) __NR_getpid = Constant("__NR_getpid", 20) __NR_mount = Constant("__NR_mount", 21) __NR_umount = Constant("__NR_umount", 22) __NR_setuid = Constant("__NR_setuid", 23) __NR_getuid = Constant("__NR_getuid", 24) __NR_stime = Constant("__NR_stime", 25) __NR_ptrace = Constant("__NR_ptrace", 26) __NR_alarm = Constant("__NR_alarm", 27) __NR_oldfstat = Constant("__NR_oldfstat", 28) __NR_pause = Constant("__NR_pause", 29) __NR_utime = Constant("__NR_utime", 30) __NR_stty = Constant("__NR_stty", 31) __NR_gtty = Constant("__NR_gtty", 32) __NR_access = Constant("__NR_access", 33) __NR_nice = Constant("__NR_nice", 34) __NR_ftime = Constant("__NR_ftime", 35) __NR_sync = Constant("__NR_sync", 36) __NR_kill = Constant("__NR_kill", 37) __NR_rename = Constant("__NR_rename", 38) __NR_mkdir = Constant("__NR_mkdir", 39) __NR_rmdir = Constant("__NR_rmdir", 40) __NR_dup = Constant("__NR_dup", 41) __NR_pipe = Constant("__NR_pipe", 42) __NR_times = Constant("__NR_times", 43) __NR_prof = Constant("__NR_prof", 44) __NR_brk = Constant("__NR_brk", 45) __NR_setgid = Constant("__NR_setgid", 46) __NR_getgid = Constant("__NR_getgid", 47) __NR_signal = Constant("__NR_signal", 48) __NR_geteuid = Constant("__NR_geteuid", 49) __NR_getegid = Constant("__NR_getegid", 50) __NR_acct = Constant("__NR_acct", 51) __NR_umount2 = Constant("__NR_umount2", 52) __NR_lock = Constant("__NR_lock", 53) __NR_ioctl = Constant("__NR_ioctl", 54) __NR_fcntl = Constant("__NR_fcntl", 55) __NR_mpx = Constant("__NR_mpx", 56) __NR_setpgid = Constant("__NR_setpgid", 57) __NR_ulimit = Constant("__NR_ulimit", 58) __NR_oldolduname = Constant("__NR_oldolduname", 59) __NR_umask = Constant("__NR_umask", 60) __NR_chroot = Constant("__NR_chroot", 61) __NR_ustat = Constant("__NR_ustat", 62) __NR_dup2 = Constant("__NR_dup2", 63) __NR_getppid = Constant("__NR_getppid", 64) __NR_getpgrp = Constant("__NR_getpgrp", 65) __NR_setsid = Constant("__NR_setsid", 66) __NR_sigaction = Constant("__NR_sigaction", 67) __NR_sgetmask = Constant("__NR_sgetmask", 68) __NR_ssetmask = Constant("__NR_ssetmask", 69) __NR_setreuid = Constant("__NR_setreuid", 70) __NR_setregid = Constant("__NR_setregid", 71) __NR_sigsuspend = Constant("__NR_sigsuspend", 72) __NR_sigpending = Constant("__NR_sigpending", 73) __NR_sethostname = Constant("__NR_sethostname", 74) __NR_setrlimit = Constant("__NR_setrlimit", 75) __NR_getrlimit = Constant("__NR_getrlimit", 76) __NR_getrusage = Constant("__NR_getrusage", 77) __NR_gettimeofday = Constant("__NR_gettimeofday", 78) __NR_settimeofday = Constant("__NR_settimeofday", 79) __NR_getgroups = Constant("__NR_getgroups", 80) __NR_setgroups = Constant("__NR_setgroups", 81) __NR_select = Constant("__NR_select", 82) __NR_symlink = Constant("__NR_symlink", 83) __NR_oldlstat = Constant("__NR_oldlstat", 84) __NR_readlink = Constant("__NR_readlink", 85) __NR_uselib = Constant("__NR_uselib", 86) __NR_swapon = Constant("__NR_swapon", 87) __NR_reboot = Constant("__NR_reboot", 88) __NR_readdir = Constant("__NR_readdir", 89) __NR_mmap = Constant("__NR_mmap", 90) __NR_munmap = Constant("__NR_munmap", 91) __NR_truncate = Constant("__NR_truncate", 92) __NR_ftruncate = Constant("__NR_ftruncate", 93) __NR_fchmod = Constant("__NR_fchmod", 94) __NR_fchown = Constant("__NR_fchown", 95) __NR_getpriority = Constant("__NR_getpriority", 96) __NR_setpriority = Constant("__NR_setpriority", 97) __NR_profil = Constant("__NR_profil", 98) __NR_statfs = Constant("__NR_statfs", 99) __NR_fstatfs = Constant("__NR_fstatfs", 100) __NR_ioperm = Constant("__NR_ioperm", 101) __NR_socketcall = Constant("__NR_socketcall", 102) __NR_syslog = Constant("__NR_syslog", 103) __NR_setitimer = Constant("__NR_setitimer", 104) __NR_getitimer = Constant("__NR_getitimer", 105) __NR_stat = Constant("__NR_stat", 106) __NR_lstat = Constant("__NR_lstat", 107) __NR_fstat = Constant("__NR_fstat", 108) __NR_olduname = Constant("__NR_olduname", 109) __NR_iopl = Constant("__NR_iopl", 110) __NR_vhangup = Constant("__NR_vhangup", 111) __NR_idle = Constant("__NR_idle", 112) __NR_vm86 = Constant("__NR_vm86", 113) __NR_wait4 = Constant("__NR_wait4", 114) __NR_swapoff = Constant("__NR_swapoff", 115) __NR_sysinfo = Constant("__NR_sysinfo", 116) __NR_ipc = Constant("__NR_ipc", 117) __NR_fsync = Constant("__NR_fsync", 118) __NR_sigreturn = Constant("__NR_sigreturn", 119) __NR_clone = Constant("__NR_clone", 120) __NR_setdomainname = Constant("__NR_setdomainname", 121) __NR_uname = Constant("__NR_uname", 122) __NR_modify_ldt = Constant("__NR_modify_ldt", 123) __NR_adjtimex = Constant("__NR_adjtimex", 124) __NR_mprotect = Constant("__NR_mprotect", 125) __NR_sigprocmask = Constant("__NR_sigprocmask", 126) __NR_create_module = Constant("__NR_create_module", 127) __NR_init_module = Constant("__NR_init_module", 128) __NR_delete_module = Constant("__NR_delete_module", 129) __NR_get_kernel_syms = Constant("__NR_get_kernel_syms", 130) __NR_quotactl = Constant("__NR_quotactl", 131) __NR_getpgid = Constant("__NR_getpgid", 132) __NR_fchdir = Constant("__NR_fchdir", 133) __NR_bdflush = Constant("__NR_bdflush", 134) __NR_sysfs = Constant("__NR_sysfs", 135) __NR_personality = Constant("__NR_personality", 136) __NR_afs_syscall = Constant("__NR_afs_syscall", 137) __NR_setfsuid = Constant("__NR_setfsuid", 138) __NR_setfsgid = Constant("__NR_setfsgid", 139) __NR__llseek = Constant("__NR__llseek", 140) __NR_getdents = Constant("__NR_getdents", 141) __NR__newselect = Constant("__NR__newselect", 142) __NR_flock = Constant("__NR_flock", 143) __NR_msync = Constant("__NR_msync", 144) __NR_readv = Constant("__NR_readv", 145) __NR_writev = Constant("__NR_writev", 146) __NR_getsid = Constant("__NR_getsid", 147) __NR_fdatasync = Constant("__NR_fdatasync", 148) __NR__sysctl = Constant("__NR__sysctl", 149) __NR_mlock = Constant("__NR_mlock", 150) __NR_munlock = Constant("__NR_munlock", 151) __NR_mlockall = Constant("__NR_mlockall", 152) __NR_munlockall = Constant("__NR_munlockall", 153) __NR_sched_setparam = Constant("__NR_sched_setparam", 154) __NR_sched_getparam = Constant("__NR_sched_getparam", 155) __NR_sched_setscheduler = Constant("__NR_sched_setscheduler", 156) __NR_sched_getscheduler = Constant("__NR_sched_getscheduler", 157) __NR_sched_yield = Constant("__NR_sched_yield", 158) __NR_sched_get_priority_max = Constant("__NR_sched_get_priority_max", 159) __NR_sched_get_priority_min = Constant("__NR_sched_get_priority_min", 160) __NR_sched_rr_get_interval = Constant("__NR_sched_rr_get_interval", 161) __NR_nanosleep = Constant("__NR_nanosleep", 162) __NR_mremap = Constant("__NR_mremap", 163) __NR_setresuid = Constant("__NR_setresuid", 164) __NR_getresuid = Constant("__NR_getresuid", 165) __NR_query_module = Constant("__NR_query_module", 166) __NR_poll = Constant("__NR_poll", 167) __NR_nfsservctl = Constant("__NR_nfsservctl", 168) __NR_setresgid = Constant("__NR_setresgid", 169) __NR_getresgid = Constant("__NR_getresgid", 170) __NR_prctl = Constant("__NR_prctl", 171) __NR_rt_sigreturn = Constant("__NR_rt_sigreturn", 172) __NR_rt_sigaction = Constant("__NR_rt_sigaction", 173) __NR_rt_sigprocmask = Constant("__NR_rt_sigprocmask", 174) __NR_rt_sigpending = Constant("__NR_rt_sigpending", 175) __NR_rt_sigtimedwait = Constant("__NR_rt_sigtimedwait", 176) __NR_rt_sigqueueinfo = Constant("__NR_rt_sigqueueinfo", 177) __NR_rt_sigsuspend = Constant("__NR_rt_sigsuspend", 178) __NR_pread = Constant("__NR_pread", 179) __NR_pwrite = Constant("__NR_pwrite", 180) __NR_chown = Constant("__NR_chown", 181) __NR_getcwd = Constant("__NR_getcwd", 182) __NR_capget = Constant("__NR_capget", 183) __NR_capset = Constant("__NR_capset", 184) __NR_sigaltstack = Constant("__NR_sigaltstack", 185) __NR_sendfile = Constant("__NR_sendfile", 186) __NR_getpmsg = Constant("__NR_getpmsg", 187) __NR_putpmsg = Constant("__NR_putpmsg", 188) __NR_vfork = Constant("__NR_vfork", 189) __NR_ugetrlimit = Constant("__NR_ugetrlimit", 190) __NR_readahead = Constant("__NR_readahead", 191) __NR_mmap2 = Constant("__NR_mmap2", 192) __NR_truncate64 = Constant("__NR_truncate64", 193) __NR_ftruncate64 = Constant("__NR_ftruncate64", 194) __NR_stat64 = Constant("__NR_stat64", 195) __NR_lstat64 = Constant("__NR_lstat64", 196) __NR_fstat64 = Constant("__NR_fstat64", 197) __NR_pciconfig_read = Constant("__NR_pciconfig_read", 198) __NR_pciconfig_write = Constant("__NR_pciconfig_write", 199) __NR_pciconfig_iobase = Constant("__NR_pciconfig_iobase", 200) __NR_multiplexer = Constant("__NR_multiplexer", 201) __NR_getdents64 = Constant("__NR_getdents64", 202) __NR_pivot_root = Constant("__NR_pivot_root", 203) __NR_fcntl64 = Constant("__NR_fcntl64", 204) __NR_madvise = Constant("__NR_madvise", 205) __NR_mincore = Constant("__NR_mincore", 206) __NR_gettid = Constant("__NR_gettid", 207) __NR_tkill = Constant("__NR_tkill", 208) __NR_setxattr = Constant("__NR_setxattr", 209) __NR_lsetxattr = Constant("__NR_lsetxattr", 210) __NR_fsetxattr = Constant("__NR_fsetxattr", 211) __NR_getxattr = Constant("__NR_getxattr", 212) __NR_lgetxattr = Constant("__NR_lgetxattr", 213) __NR_fgetxattr = Constant("__NR_fgetxattr", 214) __NR_listxattr = Constant("__NR_listxattr", 215) __NR_llistxattr = Constant("__NR_llistxattr", 216) __NR_flistxattr = Constant("__NR_flistxattr", 217) __NR_removexattr = Constant("__NR_removexattr", 218) __NR_lremovexattr = Constant("__NR_lremovexattr", 219) __NR_fremovexattr = Constant("__NR_fremovexattr", 220) __NR_futex = Constant("__NR_futex", 221) __NR_sched_setaffinity = Constant("__NR_sched_setaffinity", 222) __NR_sched_getaffinity = Constant("__NR_sched_getaffinity", 223) __NR_tuxcall = Constant("__NR_tuxcall", 225) __NR_sendfile64 = Constant("__NR_sendfile64", 226) __NR_io_setup = Constant("__NR_io_setup", 227) __NR_io_destroy = Constant("__NR_io_destroy", 228) __NR_io_getevents = Constant("__NR_io_getevents", 229) __NR_io_submit = Constant("__NR_io_submit", 230) __NR_io_cancel = Constant("__NR_io_cancel", 231) __NR_set_tid_address = Constant("__NR_set_tid_address", 232) __NR_fadvise64 = Constant("__NR_fadvise64", 233) __NR_exit_group = Constant("__NR_exit_group", 234) __NR_lookup_dcookie = Constant("__NR_lookup_dcookie", 235) __NR_epoll_create = Constant("__NR_epoll_create", 236) __NR_epoll_ctl = Constant("__NR_epoll_ctl", 237) __NR_epoll_wait = Constant("__NR_epoll_wait", 238) __NR_remap_file_pages = Constant("__NR_remap_file_pages", 239) __NR_timer_create = Constant("__NR_timer_create", 240) __NR_timer_settime = Constant("__NR_timer_settime", 241) __NR_timer_gettime = Constant("__NR_timer_gettime", 242) __NR_timer_getoverrun = Constant("__NR_timer_getoverrun", 243) __NR_timer_delete = Constant("__NR_timer_delete", 244) __NR_clock_settime = Constant("__NR_clock_settime", 245) __NR_clock_gettime = Constant("__NR_clock_gettime", 246) __NR_clock_getres = Constant("__NR_clock_getres", 247) __NR_clock_nanosleep = Constant("__NR_clock_nanosleep", 248) __NR_swapcontext = Constant("__NR_swapcontext", 249) __NR_tgkill = Constant("__NR_tgkill", 250) __NR_utimes = Constant("__NR_utimes", 251) __NR_statfs64 = Constant("__NR_statfs64", 252) __NR_fstatfs64 = Constant("__NR_fstatfs64", 253) __NR_fadvise64_64 = Constant("__NR_fadvise64_64", 254) __NR_rtas = Constant("__NR_rtas", 255) __NR_sys_debug_setcontext = Constant("__NR_sys_debug_setcontext", 256) __NR_mq_open = Constant("__NR_mq_open", 262) __NR_mq_unlink = Constant("__NR_mq_unlink", 263) __NR_mq_timedsend = Constant("__NR_mq_timedsend", 264) __NR_mq_timedreceive = Constant("__NR_mq_timedreceive", 265) __NR_mq_notify = Constant("__NR_mq_notify", 266) __NR_mq_getsetattr = Constant("__NR_mq_getsetattr", 267) __NR_kexec_load = Constant("__NR_kexec_load", 268) __NR_add_key = Constant("__NR_add_key", 269) __NR_request_key = Constant("__NR_request_key", 270) __NR_keyctl = Constant("__NR_keyctl", 271) __NR_waitid = Constant("__NR_waitid", 272) __NR_ioprio_set = Constant("__NR_ioprio_set", 273) __NR_ioprio_get = Constant("__NR_ioprio_get", 274) __NR_inotify_init = Constant("__NR_inotify_init", 275) __NR_inotify_add_watch = Constant("__NR_inotify_add_watch", 276) __NR_inotify_rm_watch = Constant("__NR_inotify_rm_watch", 277) __NR_spu_run = Constant("__NR_spu_run", 278) __NR_spu_create = Constant("__NR_spu_create", 279) __NR_pselect6 = Constant("__NR_pselect6", 280) __NR_ppoll = Constant("__NR_ppoll", 281) __NR_unshare = Constant("__NR_unshare", 282) __NR_splice = Constant("__NR_splice", 283) __NR_tee = Constant("__NR_tee", 284) __NR_vmsplice = Constant("__NR_vmsplice", 285) __NR_openat = Constant("__NR_openat", 286) __NR_mkdirat = Constant("__NR_mkdirat", 287) __NR_mknodat = Constant("__NR_mknodat", 288) __NR_fchownat = Constant("__NR_fchownat", 289) __NR_futimesat = Constant("__NR_futimesat", 290) __NR_fstatat64 = Constant("__NR_fstatat64", 291) __NR_unlinkat = Constant("__NR_unlinkat", 292) __NR_renameat = Constant("__NR_renameat", 293) __NR_linkat = Constant("__NR_linkat", 294) __NR_symlinkat = Constant("__NR_symlinkat", 295) __NR_readlinkat = Constant("__NR_readlinkat", 296) __NR_fchmodat = Constant("__NR_fchmodat", 297) __NR_faccessat = Constant("__NR_faccessat", 298) __NR_get_robust_list = Constant("__NR_get_robust_list", 299) __NR_set_robust_list = Constant("__NR_set_robust_list", 300) __NR_move_pages = Constant("__NR_move_pages", 301) __NR_getcpu = Constant("__NR_getcpu", 302) __NR_epoll_pwait = Constant("__NR_epoll_pwait", 303) __NR_utimensat = Constant("__NR_utimensat", 304) __NR_signalfd = Constant("__NR_signalfd", 305) __NR_timerfd = Constant("__NR_timerfd", 306) __NR_eventfd = Constant("__NR_eventfd", 307) __NR_sync_file_range2 = Constant("__NR_sync_file_range2", 308) __NR_fallocate = Constant("__NR_fallocate", 309) __NR_subpage_prot = Constant("__NR_subpage_prot", 310) __NR_timerfd_settime = Constant("__NR_timerfd_settime", 311) __NR_timerfd_gettime = Constant("__NR_timerfd_gettime", 312) __SYS_NERR = Constant("__SYS_NERR", ((129) + 1)) _SYS_TIME_H = Constant("_SYS_TIME_H", 1) SYS_access = Constant("SYS_access", 33) SYS_acct = Constant("SYS_acct", 51) SYS_add_key = Constant("SYS_add_key", 269) SYS_adjtimex = Constant("SYS_adjtimex", 124) SYS_afs_syscall = Constant("SYS_afs_syscall", 137) SYS_alarm = Constant("SYS_alarm", 27) SYS_bdflush = Constant("SYS_bdflush", 134) SYS_break = Constant("SYS_break", 17) SYS_brk = Constant("SYS_brk", 45) SYS_capget = Constant("SYS_capget", 183) SYS_capset = Constant("SYS_capset", 184) SYS_chdir = Constant("SYS_chdir", 12) SYS_chmod = Constant("SYS_chmod", 15) SYS_chown = Constant("SYS_chown", 181) SYS_chroot = Constant("SYS_chroot", 61) SYS_clock_getres = Constant("SYS_clock_getres", 247) SYS_clock_gettime = Constant("SYS_clock_gettime", 246) SYS_clock_nanosleep = Constant("SYS_clock_nanosleep", 248) SYS_clock_settime = Constant("SYS_clock_settime", 245) SYS_clone = Constant("SYS_clone", 120) SYS_close = Constant("SYS_close", 6) SYS_creat = Constant("SYS_creat", 8) SYS_create_module = Constant("SYS_create_module", 127) SYS_delete_module = Constant("SYS_delete_module", 129) SYS_dup = Constant("SYS_dup", 41) SYS_dup2 = Constant("SYS_dup2", 63) SYS_epoll_create = Constant("SYS_epoll_create", 236) SYS_epoll_ctl = Constant("SYS_epoll_ctl", 237) SYS_epoll_pwait = Constant("SYS_epoll_pwait", 303) SYS_epoll_wait = Constant("SYS_epoll_wait", 238) SYS_eventfd = Constant("SYS_eventfd", 307) SYS_execve = Constant("SYS_execve", 11) SYS_exit = Constant("SYS_exit", 1) SYS_exit_group = Constant("SYS_exit_group", 234) SYS_faccessat = Constant("SYS_faccessat", 298) SYS_fadvise64 = Constant("SYS_fadvise64", 233) SYS_fadvise64_64 = Constant("SYS_fadvise64_64", 254) SYS_fallocate = Constant("SYS_fallocate", 309) SYS_fchdir = Constant("SYS_fchdir", 133) SYS_fchmod = Constant("SYS_fchmod", 94) SYS_fchmodat = Constant("SYS_fchmodat", 297) SYS_fchown = Constant("SYS_fchown", 95) SYS_fchownat = Constant("SYS_fchownat", 289) SYS_fcntl = Constant("SYS_fcntl", 55) SYS_fcntl64 = Constant("SYS_fcntl64", 204) SYS_fdatasync = Constant("SYS_fdatasync", 148) SYS_fgetxattr = Constant("SYS_fgetxattr", 214) SYS_flistxattr = Constant("SYS_flistxattr", 217) SYS_flock = Constant("SYS_flock", 143) SYS_fork = Constant("SYS_fork", 2) SYS_fremovexattr = Constant("SYS_fremovexattr", 220) SYS_fsetxattr = Constant("SYS_fsetxattr", 211) SYS_fstat = Constant("SYS_fstat", 108) SYS_fstat64 = Constant("SYS_fstat64", 197) SYS_fstatat64 = Constant("SYS_fstatat64", 291) SYS_fstatfs = Constant("SYS_fstatfs", 100) SYS_fstatfs64 = Constant("SYS_fstatfs64", 253) SYS_fsync = Constant("SYS_fsync", 118) SYS_ftime = Constant("SYS_ftime", 35) SYS_ftruncate = Constant("SYS_ftruncate", 93) SYS_ftruncate64 = Constant("SYS_ftruncate64", 194) SYS_futex = Constant("SYS_futex", 221) SYS_futimesat = Constant("SYS_futimesat", 290) SYS_getcpu = Constant("SYS_getcpu", 302) SYS_getcwd = Constant("SYS_getcwd", 182) SYS_getdents = Constant("SYS_getdents", 141) SYS_getdents64 = Constant("SYS_getdents64", 202) SYS_getegid = Constant("SYS_getegid", 50) SYS_geteuid = Constant("SYS_geteuid", 49) SYS_getgid = Constant("SYS_getgid", 47) SYS_getgroups = Constant("SYS_getgroups", 80) SYS_getitimer = Constant("SYS_getitimer", 105) SYS_get_kernel_syms = Constant("SYS_get_kernel_syms", 130) SYS_getpgid = Constant("SYS_getpgid", 132) SYS_getpgrp = Constant("SYS_getpgrp", 65) SYS_getpid = Constant("SYS_getpid", 20) SYS_getpmsg = Constant("SYS_getpmsg", 187) SYS_getppid = Constant("SYS_getppid", 64) SYS_getpriority = Constant("SYS_getpriority", 96) SYS_getresgid = Constant("SYS_getresgid", 170) SYS_getresuid = Constant("SYS_getresuid", 165) SYS_getrlimit = Constant("SYS_getrlimit", 76) SYS_get_robust_list = Constant("SYS_get_robust_list", 299) SYS_getrusage = Constant("SYS_getrusage", 77) SYS_getsid = Constant("SYS_getsid", 147) SYS_gettid = Constant("SYS_gettid", 207) SYS_gettimeofday = Constant("SYS_gettimeofday", 78) SYS_getuid = Constant("SYS_getuid", 24) SYS_getxattr = Constant("SYS_getxattr", 212) SYS_gtty = Constant("SYS_gtty", 32) SYS_idle = Constant("SYS_idle", 112) SYS_init_module = Constant("SYS_init_module", 128) SYS_inotify_add_watch = Constant("SYS_inotify_add_watch", 276) SYS_inotify_init = Constant("SYS_inotify_init", 275) SYS_inotify_rm_watch = Constant("SYS_inotify_rm_watch", 277) SYS_io_cancel = Constant("SYS_io_cancel", 231) SYS_ioctl = Constant("SYS_ioctl", 54) SYS_io_destroy = Constant("SYS_io_destroy", 228) SYS_io_getevents = Constant("SYS_io_getevents", 229) SYS_ioperm = Constant("SYS_ioperm", 101) SYS_iopl = Constant("SYS_iopl", 110) SYS_ioprio_get = Constant("SYS_ioprio_get", 274) SYS_ioprio_set = Constant("SYS_ioprio_set", 273) SYS_io_setup = Constant("SYS_io_setup", 227) SYS_io_submit = Constant("SYS_io_submit", 230) SYS_ipc = Constant("SYS_ipc", 117) SYS_kexec_load = Constant("SYS_kexec_load", 268) SYS_keyctl = Constant("SYS_keyctl", 271) SYS_kill = Constant("SYS_kill", 37) SYS_lchown = Constant("SYS_lchown", 16) SYS_lgetxattr = Constant("SYS_lgetxattr", 213) SYS_link = Constant("SYS_link", 9) SYS_linkat = Constant("SYS_linkat", 294) SYS_listxattr = Constant("SYS_listxattr", 215) SYS_llistxattr = Constant("SYS_llistxattr", 216) SYS__llseek = Constant("SYS__llseek", 140) SYS_lock = Constant("SYS_lock", 53) SYS_lookup_dcookie = Constant("SYS_lookup_dcookie", 235) SYS_lremovexattr = Constant("SYS_lremovexattr", 219) SYS_lseek = Constant("SYS_lseek", 19) SYS_lsetxattr = Constant("SYS_lsetxattr", 210) SYS_lstat = Constant("SYS_lstat", 107) SYS_lstat64 = Constant("SYS_lstat64", 196) SYS_madvise = Constant("SYS_madvise", 205) SYS_mincore = Constant("SYS_mincore", 206) SYS_mkdir = Constant("SYS_mkdir", 39) SYS_mkdirat = Constant("SYS_mkdirat", 287) SYS_mknod = Constant("SYS_mknod", 14) SYS_mknodat = Constant("SYS_mknodat", 288) SYS_mlock = Constant("SYS_mlock", 150) SYS_mlockall = Constant("SYS_mlockall", 152) SYS_mmap = Constant("SYS_mmap", 90) SYS_mmap2 = Constant("SYS_mmap2", 192) SYS_modify_ldt = Constant("SYS_modify_ldt", 123) SYS_mount = Constant("SYS_mount", 21) SYS_move_pages = Constant("SYS_move_pages", 301) SYS_mprotect = Constant("SYS_mprotect", 125) SYS_mpx = Constant("SYS_mpx", 56) SYS_mq_getsetattr = Constant("SYS_mq_getsetattr", 267) SYS_mq_notify = Constant("SYS_mq_notify", 266) SYS_mq_open = Constant("SYS_mq_open", 262) SYS_mq_timedreceive = Constant("SYS_mq_timedreceive", 265) SYS_mq_timedsend = Constant("SYS_mq_timedsend", 264) SYS_mq_unlink = Constant("SYS_mq_unlink", 263) SYS_mremap = Constant("SYS_mremap", 163) SYS_msync = Constant("SYS_msync", 144) SYS_multiplexer = Constant("SYS_multiplexer", 201) SYS_munlock = Constant("SYS_munlock", 151) SYS_munlockall = Constant("SYS_munlockall", 153) SYS_munmap = Constant("SYS_munmap", 91) SYS_nanosleep = Constant("SYS_nanosleep", 162) SYS__newselect = Constant("SYS__newselect", 142) SYS_nfsservctl = Constant("SYS_nfsservctl", 168) SYS_nice = Constant("SYS_nice", 34) SYS_oldfstat = Constant("SYS_oldfstat", 28) SYS_oldlstat = Constant("SYS_oldlstat", 84) SYS_oldolduname = Constant("SYS_oldolduname", 59) SYS_oldstat = Constant("SYS_oldstat", 18) SYS_olduname = Constant("SYS_olduname", 109) SYS_open = Constant("SYS_open", 5) SYS_openat = Constant("SYS_openat", 286) SYS_pause = Constant("SYS_pause", 29) SYS_pciconfig_iobase = Constant("SYS_pciconfig_iobase", 200) SYS_pciconfig_read = Constant("SYS_pciconfig_read", 198) SYS_pciconfig_write = Constant("SYS_pciconfig_write", 199) SYS_personality = Constant("SYS_personality", 136) SYS_pipe = Constant("SYS_pipe", 42) SYS_pivot_root = Constant("SYS_pivot_root", 203) SYS_poll = Constant("SYS_poll", 167) SYS_ppoll = Constant("SYS_ppoll", 281) SYS_prctl = Constant("SYS_prctl", 171) SYS_pread = Constant("SYS_pread", 179) SYS_prof = Constant("SYS_prof", 44) SYS_profil = Constant("SYS_profil", 98) SYS_pselect6 = Constant("SYS_pselect6", 280) SYS_ptrace = Constant("SYS_ptrace", 26) SYS_putpmsg = Constant("SYS_putpmsg", 188) SYS_pwrite = Constant("SYS_pwrite", 180) SYS_query_module = Constant("SYS_query_module", 166) SYS_quotactl = Constant("SYS_quotactl", 131) SYS_read = Constant("SYS_read", 3) SYS_readahead = Constant("SYS_readahead", 191) SYS_readdir = Constant("SYS_readdir", 89) SYS_readlink = Constant("SYS_readlink", 85) SYS_readlinkat = Constant("SYS_readlinkat", 296) SYS_readv = Constant("SYS_readv", 145) SYS_reboot = Constant("SYS_reboot", 88) SYS_remap_file_pages = Constant("SYS_remap_file_pages", 239) SYS_removexattr = Constant("SYS_removexattr", 218) SYS_rename = Constant("SYS_rename", 38) SYS_renameat = Constant("SYS_renameat", 293) SYS_request_key = Constant("SYS_request_key", 270) SYS_rmdir = Constant("SYS_rmdir", 40) SYS_rtas = Constant("SYS_rtas", 255) SYS_rt_sigaction = Constant("SYS_rt_sigaction", 173) SYS_rt_sigpending = Constant("SYS_rt_sigpending", 175) SYS_rt_sigprocmask = Constant("SYS_rt_sigprocmask", 174) SYS_rt_sigqueueinfo = Constant("SYS_rt_sigqueueinfo", 177) SYS_rt_sigreturn = Constant("SYS_rt_sigreturn", 172) SYS_rt_sigsuspend = Constant("SYS_rt_sigsuspend", 178) SYS_rt_sigtimedwait = Constant("SYS_rt_sigtimedwait", 176) SYS_sched_getaffinity = Constant("SYS_sched_getaffinity", 223) SYS_sched_getparam = Constant("SYS_sched_getparam", 155) SYS_sched_get_priority_max = Constant("SYS_sched_get_priority_max", 159) SYS_sched_get_priority_min = Constant("SYS_sched_get_priority_min", 160) SYS_sched_getscheduler = Constant("SYS_sched_getscheduler", 157) SYS_sched_rr_get_interval = Constant("SYS_sched_rr_get_interval", 161) SYS_sched_setaffinity = Constant("SYS_sched_setaffinity", 222) SYS_sched_setparam = Constant("SYS_sched_setparam", 154) SYS_sched_setscheduler = Constant("SYS_sched_setscheduler", 156) SYS_sched_yield = Constant("SYS_sched_yield", 158) SYS_select = Constant("SYS_select", 82) SYS_sendfile = Constant("SYS_sendfile", 186) SYS_sendfile64 = Constant("SYS_sendfile64", 226) SYS_setdomainname = Constant("SYS_setdomainname", 121) SYS_setfsgid = Constant("SYS_setfsgid", 139) SYS_setfsuid = Constant("SYS_setfsuid", 138) SYS_setgid = Constant("SYS_setgid", 46) SYS_setgroups = Constant("SYS_setgroups", 81) SYS_sethostname = Constant("SYS_sethostname", 74) SYS_setitimer = Constant("SYS_setitimer", 104) SYS_setpgid = Constant("SYS_setpgid", 57) SYS_setpriority = Constant("SYS_setpriority", 97) SYS_setregid = Constant("SYS_setregid", 71) SYS_setresgid = Constant("SYS_setresgid", 169) SYS_setresuid = Constant("SYS_setresuid", 164) SYS_setreuid = Constant("SYS_setreuid", 70) SYS_setrlimit = Constant("SYS_setrlimit", 75) SYS_set_robust_list = Constant("SYS_set_robust_list", 300) SYS_setsid = Constant("SYS_setsid", 66) SYS_set_tid_address = Constant("SYS_set_tid_address", 232) SYS_settimeofday = Constant("SYS_settimeofday", 79) SYS_setuid = Constant("SYS_setuid", 23) SYS_setxattr = Constant("SYS_setxattr", 209) SYS_sgetmask = Constant("SYS_sgetmask", 68) SYS_sigaction = Constant("SYS_sigaction", 67) SYS_sigaltstack = Constant("SYS_sigaltstack", 185) SYS_signal = Constant("SYS_signal", 48) SYS_signalfd = Constant("SYS_signalfd", 305) SYS_sigpending = Constant("SYS_sigpending", 73) SYS_sigprocmask = Constant("SYS_sigprocmask", 126) SYS_sigreturn = Constant("SYS_sigreturn", 119) SYS_sigsuspend = Constant("SYS_sigsuspend", 72) SYS_socketcall = Constant("SYS_socketcall", 102) SYS_splice = Constant("SYS_splice", 283) SYS_spu_create = Constant("SYS_spu_create", 279) SYS_spu_run = Constant("SYS_spu_run", 278) SYS_ssetmask = Constant("SYS_ssetmask", 69) SYS_stat = Constant("SYS_stat", 106) SYS_stat64 = Constant("SYS_stat64", 195) SYS_statfs = Constant("SYS_statfs", 99) SYS_statfs64 = Constant("SYS_statfs64", 252) SYS_stime = Constant("SYS_stime", 25) SYS_stty = Constant("SYS_stty", 31) SYS_subpage_prot = Constant("SYS_subpage_prot", 310) SYS_swapcontext = Constant("SYS_swapcontext", 249) SYS_swapoff = Constant("SYS_swapoff", 115) SYS_swapon = Constant("SYS_swapon", 87) SYS_symlink = Constant("SYS_symlink", 83) SYS_symlinkat = Constant("SYS_symlinkat", 295) SYS_sync = Constant("SYS_sync", 36) SYS_sync_file_range2 = Constant("SYS_sync_file_range2", 308) SYS__sysctl = Constant("SYS__sysctl", 149) SYS_sys_debug_setcontext = Constant("SYS_sys_debug_setcontext", 256) SYS_sysfs = Constant("SYS_sysfs", 135) SYS_sysinfo = Constant("SYS_sysinfo", 116) SYS_syslog = Constant("SYS_syslog", 103) SYS_tee = Constant("SYS_tee", 284) SYS_tgkill = Constant("SYS_tgkill", 250) SYS_time = Constant("SYS_time", 13) SYS_timer_create = Constant("SYS_timer_create", 240) SYS_timer_delete = Constant("SYS_timer_delete", 244) SYS_timerfd = Constant("SYS_timerfd", 306) SYS_timerfd_gettime = Constant("SYS_timerfd_gettime", 312) SYS_timerfd_settime = Constant("SYS_timerfd_settime", 311) SYS_timer_getoverrun = Constant("SYS_timer_getoverrun", 243) SYS_timer_gettime = Constant("SYS_timer_gettime", 242) SYS_timer_settime = Constant("SYS_timer_settime", 241) SYS_times = Constant("SYS_times", 43) SYS_tkill = Constant("SYS_tkill", 208) SYS_truncate = Constant("SYS_truncate", 92) SYS_truncate64 = Constant("SYS_truncate64", 193) SYS_tuxcall = Constant("SYS_tuxcall", 225) SYS_ugetrlimit = Constant("SYS_ugetrlimit", 190) SYS_ulimit = Constant("SYS_ulimit", 58) SYS_umask = Constant("SYS_umask", 60) SYS_umount = Constant("SYS_umount", 22) SYS_umount2 = Constant("SYS_umount2", 52) SYS_uname = Constant("SYS_uname", 122) SYS_unlink = Constant("SYS_unlink", 10) SYS_unlinkat = Constant("SYS_unlinkat", 292) SYS_unshare = Constant("SYS_unshare", 282) SYS_uselib = Constant("SYS_uselib", 86) SYS_ustat = Constant("SYS_ustat", 62) SYS_utime = Constant("SYS_utime", 30) SYS_utimensat = Constant("SYS_utimensat", 304) SYS_utimes = Constant("SYS_utimes", 251) SYS_vfork = Constant("SYS_vfork", 189) SYS_vhangup = Constant("SYS_vhangup", 111) SYS_vm86 = Constant("SYS_vm86", 113) SYS_vmsplice = Constant("SYS_vmsplice", 285) SYS_wait4 = Constant("SYS_wait4", 114) SYS_waitid = Constant("SYS_waitid", 272) SYS_waitpid = Constant("SYS_waitpid", 7) SYS_write = Constant("SYS_write", 4) SYS_writev = Constant("SYS_writev", 146)
{ "content_hash": "3cb990cd3980ebb37d45e298eb34f7f8", "timestamp": "", "source": "github", "line_count": 616, "max_line_length": 74, "avg_line_length": 46.37987012987013, "alnum_prop": 0.6855092754637732, "repo_name": "pwndbg/pwndbg", "id": "41223f87fb187a54a845d776fb9eb3d2aee3aa66", "size": "28570", "binary": false, "copies": "1", "ref": "refs/heads/dev", "path": "pwndbg/constants/powerpc.py", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "1149" }, { "name": "C", "bytes": "16361" }, { "name": "Dockerfile", "bytes": "1762" }, { "name": "Go", "bytes": "58" }, { "name": "Makefile", "bytes": "2818" }, { "name": "Python", "bytes": "2472150" }, { "name": "Shell", "bytes": "17094" } ], "symlink_target": "" }
from mock import patch, Mock import unittest2 as unittest from lib.controller import * from lib.view import * from lib.cluster import Cluster from lib.prefixdict import PrefixDict from lib.node import Node import lib import sys from cStringIO import StringIO real_stdout = sys.stdout def reset_stdout(): sys.stdout = real_stdout class ControllerTest(unittest.TestCase): def setUp(self): self.cluster_patch = patch('lib.cluster.Cluster') #self.view_patch = patch('lib.view.CliView') real_stdoup = sys.stdout sys.stdout = StringIO() self.addCleanup(patch.stopall) self.addCleanup(reset_stdout) self.MockCluster = self.cluster_patch.start() #self.MockView = self.view_patch.start() Cluster._crawl = classmethod(lambda self: None) Cluster._callNodeMethod = classmethod( lambda self, nodes, method_name, *args, **kwargs: {"test":IOError("test error")}) n = Node("172.99.99.99") Cluster.getNode = classmethod( lambda self, key: [n]) pd = PrefixDict() pd['test'] = 'test' Cluster.getPrefixes = classmethod(lambda self: pd) self.rc = RootController() def test_infoController(self): ic = InfoController() ic.preCommand([""]) ic.do_service(["service"]) #ic.do_network(["network"]) # TODO: view.infoNetwork needs a "real" node ic.do_namespace(["namespace"]) ic.do_xdr(["xdr"]) def test_showDistributionController(self): sdc = ShowDistributionController() sdc.preCommand([""]) sdc.do_time_to_live(["time_to_live"]) sdc.do_eviction(["evict"]) sdc.do_object_size(["object_size"]) def test_showConfigController(self): scc = ShowConfigController() scc.preCommand([""]) scc.do_service(["service"]) scc.do_network(["network"]) scc.do_namespace(["namespace"]) scc.do_xdr(["xdr"]) def test_showLatencyController(self): slc = ShowLatencyController() slc.preCommand([""]) slc._do_default(["latency"]) def test_ShowStatisticsController(self): ssc = ShowStatisticsController() ssc.preCommand([""]) ssc.do_bins("bins") ssc.do_sets("sets") ssc.do_service("service") ssc.do_namespace("namespace") ssc.do_xdr("xdr")
{ "content_hash": "fb16047587fb2106bc150bd06fd29ecd", "timestamp": "", "source": "github", "line_count": 87, "max_line_length": 80, "avg_line_length": 28.011494252873565, "alnum_prop": 0.6064833812064013, "repo_name": "PavanGupta01/aerospike-admin", "id": "0516d87d8d3e9d642cf0eb172de423f86a1b4b91", "size": "3017", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/test_controller.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Makefile", "bytes": "1741" }, { "name": "Python", "bytes": "200740" }, { "name": "Shell", "bytes": "614" } ], "symlink_target": "" }
"""Basic tools for workers that handle TransportMessages.""" import time import os import socket from twisted.internet.defer import ( inlineCallbacks, succeed, maybeDeferred, gatherResults) from twisted.python import log from vumi.service import Worker from vumi.middleware import setup_middlewares_from_config from vumi.connectors import ReceiveInboundConnector, ReceiveOutboundConnector from vumi.config import Config, ConfigInt from vumi.errors import DuplicateConnectorError from vumi.utils import generate_worker_id from vumi.blinkenlights.heartbeat import (HeartBeatPublisher, HeartBeatMessage) def then_call(d, func, *args, **kw): return d.addCallback(lambda r: func(*args, **kw)) class BaseConfig(Config): """Base config definition for workers. You should subclass this and add worker-specific fields. """ amqp_prefetch_count = ConfigInt( "The number of messages fetched concurrently from each AMQP queue" " by each worker instance.", default=20, static=True) class BaseWorker(Worker): """Base class for a message processing worker. This contains common functionality used by application, transport and dispatcher workers. It should be subclassed by workers that need to manage their own connectors. """ CONFIG_CLASS = BaseConfig def __init__(self, options, config=None): super(BaseWorker, self).__init__(options, config=config) self.connectors = {} self.middlewares = [] self._static_config = self.CONFIG_CLASS(self.config, static=True) self._hb_pub = None self._worker_id = None def startWorker(self): log.msg('Starting a %s worker with config: %s' % (self.__class__.__name__, self.config)) d = maybeDeferred(self._validate_config) then_call(d, self.setup_heartbeat) then_call(d, self.setup_middleware) then_call(d, self.setup_connectors) then_call(d, self.setup_worker) return d def stopWorker(self): log.msg('Stopping a %s worker.' % (self.__class__.__name__,)) d = succeed(None) then_call(d, self.teardown_worker) then_call(d, self.teardown_connectors) then_call(d, self.teardown_middleware) then_call(d, self.teardown_heartbeat) return d def setup_connectors(self): raise NotImplementedError() @inlineCallbacks def setup_heartbeat(self): # Disable heartbeats if worker_name is not set. We're # currently using it as the primary identifier for a worker if 'worker_name' in self.config: self._worker_name = self.config.get("worker_name") self._system_id = self.options.get("system-id", "global") self._worker_id = generate_worker_id(self._system_id, self._worker_name) log.msg("Starting HeartBeat publisher with worker_name=%s" % self._worker_name) self._hb_pub = yield self.start_publisher(HeartBeatPublisher, self._gen_heartbeat_attrs) else: log.msg("HeartBeat publisher disabled. No worker_id " "field found in config.") def teardown_heartbeat(self): if self._hb_pub is not None: self._hb_pub.stop() self._hb_pub = None def _gen_heartbeat_attrs(self): # worker_name is guaranteed to be set here, otherwise this func would # not have been called attrs = { 'version': HeartBeatMessage.VERSION_20130319, 'worker_id': self._worker_id, 'system_id': self._system_id, 'worker_name': self._worker_name, 'hostname': socket.gethostname(), 'timestamp': time.time(), 'pid': os.getpid(), } attrs.update(self.custom_heartbeat_attrs()) return attrs def custom_heartbeat_attrs(self): """Worker subclasses can override this to add custom attributes""" return {} def teardown_connectors(self): d = succeed(None) for connector_name in self.connectors.keys(): then_call(d, self.teardown_connector, connector_name) return d def setup_worker(self): raise NotImplementedError() def teardown_worker(self): raise NotImplementedError() def setup_middleware(self): """Create middlewares from config.""" d = setup_middlewares_from_config(self, self.config) d.addCallback(self.middlewares.extend) return d def teardown_middleware(self): """Teardown middlewares.""" d = succeed(None) for mw in reversed(self.middlewares): then_call(d, mw.teardown_middleware) return d def get_static_config(self): """Return static (message independent) configuration.""" return self._static_config def get_config(self, msg, ctxt=None): """This should return a message and context specific config object. It deliberately returns a deferred even when this isn't strictly necessary to ensure that workers will continue to work when per-message configuration needs to be fetched from elsewhere. """ return succeed(self.CONFIG_CLASS(self.config)) def _validate_config(self): """Once subclasses call `super().validate_config` properly, this method can be removed. """ # TODO: remove this once all uses of validate_config have been fixed. self.validate_config() def validate_config(self): """ Application-specific config validation happens in here. Subclasses may override this method to perform extra config validation. """ # TODO: deprecate this in favour of a similar method on # config classes. pass def setup_connector(self, connector_cls, connector_name, middleware=False): if connector_name in self.connectors: raise DuplicateConnectorError("Attempt to add duplicate connector" " with name %r" % (connector_name,)) prefetch_count = self.get_static_config().amqp_prefetch_count middlewares = self.middlewares if middleware else None connector = connector_cls(self, connector_name, prefetch_count=prefetch_count, middlewares=middlewares) self.connectors[connector_name] = connector d = connector.setup() d.addCallback(lambda r: connector) return d def teardown_connector(self, connector_name): connector = self.connectors.pop(connector_name) d = connector.teardown() d.addCallback(lambda r: connector) return d def setup_ri_connector(self, connector_name, middleware=True): return self.setup_connector(ReceiveInboundConnector, connector_name, middleware=middleware) def setup_ro_connector(self, connector_name, middleware=True): return self.setup_connector(ReceiveOutboundConnector, connector_name, middleware=middleware) def pause_connectors(self): return gatherResults([ connector.pause() for connector in self.connectors.itervalues()]) def unpause_connectors(self): for connector in self.connectors.itervalues(): connector.unpause()
{ "content_hash": "63edfc5a8c919a1b8bb4a98c79dc4436", "timestamp": "", "source": "github", "line_count": 210, "max_line_length": 79, "avg_line_length": 36.24285714285714, "alnum_prop": 0.6237025358034424, "repo_name": "harrissoerja/vumi", "id": "3bc7d40a1ff3f95ca6bbd675bada6d5806be3718", "size": "7661", "binary": false, "copies": "3", "ref": "refs/heads/develop", "path": "vumi/worker.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Erlang", "bytes": "14311" }, { "name": "JavaScript", "bytes": "5556" }, { "name": "Puppet", "bytes": "2557" }, { "name": "Python", "bytes": "2980136" }, { "name": "Shell", "bytes": "2232" } ], "symlink_target": "" }
import colorsys import itertools import math import random import subprocess from pyfrax.canvas import * from pyfrax.model import * from pyfrax.rendertools import gif from pyfrax.common import base_dir class Canvas3DShaded(Canvas): def shade(self, z, c): W = self.image.shape[0] return tuple(color * min(W, max(0, W - z)) / W for color in c) FRAMES = 32 FACES = 16 FRAMES_PER_FACE = 8 ROTATION = .1 SIZE = 250 BORDER = 20 for frame in xrange(FRAMES): if frame % FRAMES_PER_FACE == 0: abstractshit = Model() for face in xrange(FACES): corners = [ tuple(random.randint(BORDER, SIZE - BORDER) for _ in xrange(3)) for _ in xrange(3) ] color = tuple(int(n * 256) for n in colorsys.hsv_to_rgb(random.random(), 1, 1)) abstractshit.face(*corners + [color]) abstractshit.rotate((SIZE / 2, SIZE / 2, SIZE / 2), (0, ROTATION, 0)) canvas = Canvas3DShaded(SIZE, SIZE) canvas.import_model(abstractshit) canvas.write('abstractshit.%s' % str(frame).zfill(3)) print str(frame).zfill(3) for frame in xrange(FRAMES): proc = subprocess.Popen((( 'convert ' + base_dir + 'texture/stripes-250.png -negate -write MPR:mask ' '+delete ' + base_dir + 'render/abstractshit.%s.png ' '-mask MPR:mask ' + base_dir + 'render/abstractshit.%s.png ' '-composite +mask ' + base_dir + 'render/signalerror.%s.png') % ( str(frame).zfill(3), str((frame + 1) % FRAMES).zfill(3), str(frame).zfill(3), )).split(' ')) proc.wait() print str(frame).zfill(3) gif('signalerror')
{ "content_hash": "ad7f5101704e59287dc248985e3bcf75", "timestamp": "", "source": "github", "line_count": 54, "max_line_length": 91, "avg_line_length": 30.944444444444443, "alnum_prop": 0.599640933572711, "repo_name": "fraxtil/pyfrax", "id": "4f2dd36e47c4f80eef436b73a683ed7250866cdb", "size": "1693", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "signalerror.py", "mode": "33261", "license": "bsd-2-clause", "language": [ { "name": "Python", "bytes": "56124" } ], "symlink_target": "" }
from typing import Optional, TYPE_CHECKING from cloudfoundry_client.v3.entities import EntityManager, Entity, ToOneRelationship if TYPE_CHECKING: from cloudfoundry_client.client import CloudFoundryClient class OrganizationManager(EntityManager): def __init__(self, target_endpoint: str, client: "CloudFoundryClient"): super(OrganizationManager, self).__init__(target_endpoint, client, "/v3/organizations") def create( self, name: str, suspended: bool, meta_labels: Optional[dict] = None, meta_annotations: Optional[dict] = None ) -> Entity: data = {"name": name, "suspended": suspended, "metadata": {"labels": meta_labels, "annotations": meta_annotations}} return super(OrganizationManager, self)._create(data) def update( self, guid: str, name: str, suspended: Optional[bool], meta_labels: Optional[dict] = None, meta_annotations: Optional[dict] = None, ) -> Entity: data = {"name": name, "suspended": suspended, "metadata": {"labels": meta_labels, "annotations": meta_annotations}} return super(OrganizationManager, self)._update(guid, data) def remove(self, guid: str): super(OrganizationManager, self)._remove(guid) def assign_default_isolation_segment(self, org_guid: str, iso_seg_guid: str) -> Entity: return ToOneRelationship.from_json_object( super(OrganizationManager, self)._patch( "%s%s/%s/relationships/default_isolation_segment" % (self.target_endpoint, self.entity_uri, org_guid), data=ToOneRelationship(iso_seg_guid), ) ) def get_default_isolation_segment(self, guid: str) -> ToOneRelationship: return ToOneRelationship.from_json_object( super(OrganizationManager, self).get(guid, "relationships", "default_isolation_segment") ) def get_default_domain(self, guid: str) -> Entity: return super(OrganizationManager, self).get(guid, "domains", "default") def get_usage_summary(self, guid: str) -> Entity: return super(OrganizationManager, self).get(guid, "usage_summary")
{ "content_hash": "4cf9b64ddc133d81f78dc346e21c342b", "timestamp": "", "source": "github", "line_count": 50, "max_line_length": 123, "avg_line_length": 43.18, "alnum_prop": 0.66512274201019, "repo_name": "antechrestos/cf-python-client", "id": "601cd8373f3d9cd7a2de9818802e44546fce4542", "size": "2159", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "main/cloudfoundry_client/v3/organizations.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Protocol Buffer", "bytes": "595" }, { "name": "Python", "bytes": "122987" } ], "symlink_target": "" }
import os from pants.base.exceptions import TaskError from pants.process.xargs import Xargs from pants.contrib.go.tasks.go_task import GoTask class GoRun(GoTask): """Runs an executable Go binary.""" @classmethod def supports_passthru_args(cls): return True @classmethod def prepare(cls, options, round_manager): super().prepare(options, round_manager) round_manager.require_data("exec_binary") def execute(self): target = self.require_single_root_target() if self.is_binary(target): binary_path = self.context.products.get_data("exec_binary")[target] # TODO(cgibb): Wrap with workunit and stdout/stderr plumbing. res = Xargs.subprocess([binary_path]).execute( [*self.get_passthru_args(), *self.get_options().args] ) if res != 0: raise TaskError(f"{os.path.basename(binary_path)} exited non-zero ({res})")
{ "content_hash": "72ec8bbdadf0b523315baef5b55791b8", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 91, "avg_line_length": 32.266666666666666, "alnum_prop": 0.6332644628099173, "repo_name": "tdyas/pants", "id": "af3cbce700ad3c490571efaf9209dbb1065e3b47", "size": "1100", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "contrib/go/src/python/pants/contrib/go/tasks/go_run.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "655" }, { "name": "C++", "bytes": "2010" }, { "name": "CSS", "bytes": "9444" }, { "name": "Dockerfile", "bytes": "5596" }, { "name": "GAP", "bytes": "1283" }, { "name": "Gherkin", "bytes": "919" }, { "name": "Go", "bytes": "2765" }, { "name": "HTML", "bytes": "44381" }, { "name": "Java", "bytes": "518180" }, { "name": "JavaScript", "bytes": "22906" }, { "name": "Python", "bytes": "7955590" }, { "name": "Rust", "bytes": "1031208" }, { "name": "Scala", "bytes": "106520" }, { "name": "Shell", "bytes": "109904" }, { "name": "Starlark", "bytes": "502255" }, { "name": "Thrift", "bytes": "2953" } ], "symlink_target": "" }
from sqlalchemy import Column, Integer, String, TEXT, Date, INTEGER, NUMERIC from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker Base = declarative_base() Session = sessionmaker() class Contract(Base): # type: ignore __tablename__ = 'contracts' id = Column(Integer, primary_key=True, autoincrement=True) uuid = Column(String(255)) vendor_name = Column(TEXT, nullable=True) reference_number = Column(TEXT, nullable=True) contract_date = Column(Date, nullable=False) contract_period_start = Column(Date, nullable=False) contract_period_end = Column(Date, nullable=False) contract_value = Column(NUMERIC(20, 2), nullable=True) department = Column(TEXT, nullable=True) source_fiscal = Column(Date, nullable=True) object_code = Column(TEXT, nullable=True) reporting_period_start = Column(Date, nullable=False) reporting_period_end = Column(Date, nullable=False) reporting_period_value = Column(NUMERIC(20, 2), nullable=False) def __repr__(self): data_string = ', '.join([f'{key}={getattr(self, key)}' for key in self.metadata.tables[self.__tablename__].columns.keys()]) return f'<Contract({data_string})>' def __eq__(self, other): if not isinstance(other, Contract): return False return all([getattr(self, key) == getattr(other, key) for key in self.metadata.tables[self.__tablename__].columns.keys()]) class RawContract(Base): # type: ignore __tablename__ = 'raw_contracts' id = Column(Integer, primary_key=True, autoincrement=True) uuid = Column(String(255)) vendorName = Column(TEXT, nullable=True) referenceNumber = Column(TEXT, nullable=True) contractDate = Column(TEXT, nullable=True) description = Column(TEXT, nullable=True) extraDescription = Column(TEXT, nullable=True) contractPeriodStart = Column(TEXT, nullable=True) contractPeriodEnd = Column(TEXT, nullable=True) startYear = Column(INTEGER, nullable=True) endYear = Column(INTEGER, nullable=True) deliveryDate = Column(TEXT, nullable=True) originalValue = Column(NUMERIC(20, 2), nullable=True) contractValue = Column(NUMERIC(20, 2), nullable=True) comments = Column(TEXT, nullable=True) ownerAcronym = Column(TEXT, nullable=True) sourceYear = Column(INTEGER, nullable=True) sourceQuarter = Column(INTEGER, nullable=True) sourceFiscal = Column(TEXT, nullable=True) sourceFilename = Column(TEXT, nullable=True) sourceURL = Column(TEXT, nullable=True) objectCode = Column(TEXT, nullable=True) vendorClean = Column(TEXT, nullable=True)
{ "content_hash": "fb9983407ce79a8697b386a2d3e915a9", "timestamp": "", "source": "github", "line_count": 64, "max_line_length": 101, "avg_line_length": 42.3125, "alnum_prop": 0.6887001477104875, "repo_name": "GoC-Spending/fuzzy-tribble", "id": "f61a751272a408b93b2079b53b9a0b7d6a5ee7a1", "size": "2708", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/tribble/contract.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "63854" }, { "name": "Shell", "bytes": "1216" } ], "symlink_target": "" }
from factory import Sequence, PostGenerationMethodCall from factory.alchemy import SQLAlchemyModelFactory from recruit_app.user.models import User from recruit_app.database import db class BaseFactory(SQLAlchemyModelFactory): """Base factory.""" class Meta: """Factory configuration.""" abstract = True sqlalchemy_session = db.session class UserFactory(BaseFactory): """User factory.""" email = Sequence(lambda n: 'user{0}@example.com'.format(n)) password = PostGenerationMethodCall('set_password', 'example') active = True class Meta: """Factory configuration.""" model = User
{ "content_hash": "b6634e21639e1ea9fc6e5da3b89bed44", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 66, "avg_line_length": 23.428571428571427, "alnum_prop": 0.6905487804878049, "repo_name": "tyler274/Recruitment-App", "id": "fb5f79445b7518ab2232a0afe0eba08ca510f749", "size": "680", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/factories.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "2845" }, { "name": "HTML", "bytes": "55851" }, { "name": "JavaScript", "bytes": "222936" }, { "name": "Python", "bytes": "140234" } ], "symlink_target": "" }
def parse_jar(flo): """Parse a Record-Jar from a file like object into a list of dictionaries. This method parses a file like object as described in "The Art of Unix Programming" <http://www.faqs.org/docs/artu/ch05s02.html#id2906931>. The records are divided by lines containing '%%'. Each record consists of one or more lines, each containing a key, a colon, and a value. Whitespace around both key and value are ignored. >>> import StringIO >>> flo = StringIO.StringIO("a:b\\nc:d\\n%%\\nx:y\\n") >>> out = parse_jar(flo) >>> print out [{'a': 'b', 'c': 'd'}, {'x': 'y'}] If a record contains a key more than once, the value for this key is a list containing the values in their order of occurence. >>> flo = StringIO.StringIO("a:b\\nc:d\\n%%\\nx:y\\nx:z\\n") >>> out = parse_jar(flo) >>> print out [{'a': 'b', 'c': 'd'}, {'x': ['y', 'z']}] Leading or trailing separator lines ('%%') and lines containing only whitespace are ignored. >>> flo = StringIO.StringIO("%%\\na:b\\nc:d\\n%%\\n\\nx:y\\nx:z\\n") >>> out = parse_jar(flo) >>> print out [{'a': 'b', 'c': 'd'}, {'x': ['y', 'z']}] """ records = [] for record in flo.read().split("%%"): dict = {} for line in [line for line in record.split("\n") if line.strip() != ""]: key, value = line.split(":", 1) key, value = key.strip(), value.strip() try: dict[key].append(value) except AttributeError: dict[key] = [dict[key], value] except KeyError: dict[key] = value if len(dict) > 0: records.append(dict) return records def _test(): import doctest, recordjar return doctest.testmod(recordjar) if __name__ == "__main__": _test()
{ "content_hash": "235c3dcdc506d6126651ba750ad0054a", "timestamp": "", "source": "github", "line_count": 54, "max_line_length": 80, "avg_line_length": 34.2037037037037, "alnum_prop": 0.5489983757444504, "repo_name": "ActiveState/code", "id": "426595cc63c8a1bd42a1d7299fb4423d2def4b0c", "size": "2025", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "recipes/Python/436229_RecordJar_Parser/recipe-436229.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "35894" }, { "name": "C", "bytes": "56048" }, { "name": "C++", "bytes": "90880" }, { "name": "HTML", "bytes": "11656" }, { "name": "Java", "bytes": "57468" }, { "name": "JavaScript", "bytes": "181218" }, { "name": "PHP", "bytes": "250144" }, { "name": "Perl", "bytes": "37296" }, { "name": "Perl 6", "bytes": "9914" }, { "name": "Python", "bytes": "17387779" }, { "name": "Ruby", "bytes": "40233" }, { "name": "Shell", "bytes": "190732" }, { "name": "Tcl", "bytes": "674650" } ], "symlink_target": "" }
import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models from froide.helper.auth_migration_util import USER_DB_NAME APP_MODEL, APP_MODEL_NAME = 'account.User', 'account.user' class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'FoiRequest.resolution' db.add_column(u'foirequest_foirequest', 'resolution', self.gf('django.db.models.fields.CharField')(default='', max_length=50, blank=True), keep_default=False) def backwards(self, orm): # Deleting field 'FoiRequest.resolution' db.delete_column(u'foirequest_foirequest', 'resolution') models = { u'auth.group': { 'Meta': {'object_name': 'Group'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, u'auth.permission': { 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, APP_MODEL_NAME: { 'Meta': {'object_name': 'User', 'db_table': "'%s'" % USER_DB_NAME}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, u'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, u'foirequest.deferredmessage': { 'Meta': {'ordering': "('timestamp',)", 'object_name': 'DeferredMessage'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'mail': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'recipient': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'request': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['foirequest.FoiRequest']", 'null': 'True', 'blank': 'True'}), 'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}) }, u'foirequest.foiattachment': { 'Meta': {'ordering': "('name',)", 'object_name': 'FoiAttachment'}, 'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'belongs_to': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['foirequest.FoiMessage']", 'null': 'True'}), 'can_approve': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'converted': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'original_set'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['foirequest.FoiAttachment']"}), 'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255'}), 'filetype': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}), 'format': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_converted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_redacted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'redacted': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'unredacted_set'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['foirequest.FoiAttachment']"}), 'size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}) }, u'foirequest.foievent': { 'Meta': {'ordering': "('-timestamp',)", 'object_name': 'FoiEvent'}, 'context_json': ('django.db.models.fields.TextField', [], {}), 'event_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'public_body': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['publicbody.PublicBody']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}), 'request': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['foirequest.FoiRequest']"}), 'timestamp': ('django.db.models.fields.DateTimeField', [], {}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['%s']" % APP_MODEL, 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}) }, u'foirequest.foimessage': { 'Meta': {'ordering': "('timestamp',)", 'object_name': 'FoiMessage'}, 'content_hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_escalation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_postal': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_response': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'not_publishable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'original': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'plaintext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'plaintext_redacted': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'recipient': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'recipient_email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'recipient_public_body': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'received_messages'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['publicbody.PublicBody']"}), 'redacted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'request': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['foirequest.FoiRequest']"}), 'sender_email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'sender_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'sender_public_body': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'send_messages'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['publicbody.PublicBody']"}), 'sender_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['%s']" % APP_MODEL, 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}), 'sent': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'status': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '50', 'null': 'True', 'blank': 'True'}), 'subject': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'subject_redacted': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'timestamp': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}) }, u'foirequest.foirequest': { 'Meta': {'ordering': "('last_message',)", 'object_name': 'FoiRequest'}, 'checked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'costs': ('django.db.models.fields.FloatField', [], {'default': '0.0'}), 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'due_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'first_message': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_foi': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['publicbody.Jurisdiction']", 'null': 'True', 'on_delete': 'models.SET_NULL'}), 'last_message': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'law': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['publicbody.FoiLaw']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}), 'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'public_body': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['publicbody.PublicBody']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}), 'refusal_reason': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}), 'resolution': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}), 'resolved_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'same_as': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['foirequest.FoiRequest']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}), 'same_as_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'secret': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}), 'secret_address': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}), 'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']", 'null': 'True', 'on_delete': 'models.SET_NULL'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}), 'status': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['%s']" % APP_MODEL, 'null': 'True', 'on_delete': 'models.SET_NULL'}), 'visibility': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}) }, u'foirequest.publicbodysuggestion': { 'Meta': {'ordering': "('timestamp',)", 'object_name': 'PublicBodySuggestion'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'public_body': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['publicbody.PublicBody']"}), 'reason': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}), 'request': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['foirequest.FoiRequest']"}), 'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['%s']" % APP_MODEL, 'null': 'True', 'on_delete': 'models.SET_NULL'}) }, u'foirequest.taggedfoirequest': { 'Meta': {'object_name': 'TaggedFoiRequest'}, 'content_object': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['foirequest.FoiRequest']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'foirequest_taggedfoirequest_items'", 'to': u"orm['taggit.Tag']"}) }, u'publicbody.foilaw': { 'Meta': {'object_name': 'FoiLaw'}, 'combined': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['publicbody.FoiLaw']", 'symmetrical': 'False', 'blank': 'True'}), 'created': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'email_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['publicbody.Jurisdiction']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}), 'letter_end': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'letter_start': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'long_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'max_response_time': ('django.db.models.fields.IntegerField', [], {'default': '30', 'null': 'True', 'blank': 'True'}), 'max_response_time_unit': ('django.db.models.fields.CharField', [], {'default': "'day'", 'max_length': '32', 'blank': 'True'}), 'mediator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'mediating_laws'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': u"orm['publicbody.PublicBody']", 'blank': 'True', 'null': 'True'}), 'meta': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'priority': ('django.db.models.fields.SmallIntegerField', [], {'default': '3'}), 'refusal_reasons': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'request_note': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'site': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': u"orm['sites.Site']", 'null': 'True', 'on_delete': 'models.SET_NULL'}), 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}), 'updated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}) }, u'publicbody.jurisdiction': { 'Meta': {'object_name': 'Jurisdiction'}, 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'rank': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}) }, u'publicbody.publicbody': { 'Meta': {'ordering': "('name',)", 'object_name': 'PublicBody'}, '_created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'public_body_creators'", 'on_delete': 'models.SET_NULL', 'default': '1', 'to': u"orm['%s']" % APP_MODEL, 'blank': 'True', 'null': 'True'}), '_updated_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'public_body_updaters'", 'on_delete': 'models.SET_NULL', 'default': '1', 'to': u"orm['%s']" % APP_MODEL, 'blank': 'True', 'null': 'True'}), 'address': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'classification': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'classification_slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}), 'confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'contact': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'depth': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['publicbody.Jurisdiction']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}), 'laws': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['publicbody.FoiLaw']", 'symmetrical': 'False'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'number_of_requests': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'other_names': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}), 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'children'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': u"orm['publicbody.PublicBody']", 'blank': 'True', 'null': 'True'}), 'request_note': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'root': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'descendants'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': u"orm['publicbody.PublicBody']", 'blank': 'True', 'null': 'True'}), 'site': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': u"orm['sites.Site']", 'null': 'True', 'on_delete': 'models.SET_NULL'}), 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}), 'topic': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['publicbody.PublicBodyTopic']", 'null': 'True', 'on_delete': 'models.SET_NULL'}), 'url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}), 'website_dump': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}) }, u'publicbody.publicbodytopic': { 'Meta': {'object_name': 'PublicBodyTopic'}, 'count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'rank': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}) }, u'sites.site': { 'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"}, 'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, u'taggit.tag': { 'Meta': {'object_name': 'Tag'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}) } } complete_apps = ['foirequest']
{ "content_hash": "114751e782414b49751d2175f16f8b3a", "timestamp": "", "source": "github", "line_count": 250, "max_line_length": 236, "avg_line_length": 89.596, "alnum_prop": 0.5578374034555114, "repo_name": "catcosmo/froide", "id": "4f683bfa276a535674c6fdb26e89f62b27edd588", "size": "22423", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "froide/foirequest/south_migrations/0032_auto__add_field_foirequest_resolution.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "17829" }, { "name": "HTML", "bytes": "162326" }, { "name": "Java", "bytes": "1814475" }, { "name": "JavaScript", "bytes": "52679" }, { "name": "Makefile", "bytes": "329" }, { "name": "Python", "bytes": "1614641" }, { "name": "Shell", "bytes": "1621" } ], "symlink_target": "" }
"""Compressed Sparse Row matrix format""" __docformat__ = "restructuredtext en" __all__ = ['csr_matrix', 'isspmatrix_csr'] import numpy as np from .base import spmatrix from ._sparsetools import (csr_tocsc, csr_tobsr, csr_count_blocks, get_csr_submatrix) from .sputils import upcast, get_index_dtype from .compressed import _cs_matrix class csr_matrix(_cs_matrix): """ Compressed Sparse Row matrix This can be instantiated in several ways: csr_matrix(D) with a dense matrix or rank-2 ndarray D csr_matrix(S) with another sparse matrix S (equivalent to S.tocsr()) csr_matrix((M, N), [dtype]) to construct an empty matrix with shape (M, N) dtype is optional, defaulting to dtype='d'. csr_matrix((data, (row_ind, col_ind)), [shape=(M, N)]) where ``data``, ``row_ind`` and ``col_ind`` satisfy the relationship ``a[row_ind[k], col_ind[k]] = data[k]``. csr_matrix((data, indices, indptr), [shape=(M, N)]) is the standard CSR representation where the column indices for row i are stored in ``indices[indptr[i]:indptr[i+1]]`` and their corresponding values are stored in ``data[indptr[i]:indptr[i+1]]``. If the shape parameter is not supplied, the matrix dimensions are inferred from the index arrays. Attributes ---------- dtype : dtype Data type of the matrix shape : 2-tuple Shape of the matrix ndim : int Number of dimensions (this is always 2) nnz Number of stored values, including explicit zeros data CSR format data array of the matrix indices CSR format index array of the matrix indptr CSR format index pointer array of the matrix has_sorted_indices Whether indices are sorted Notes ----- Sparse matrices can be used in arithmetic operations: they support addition, subtraction, multiplication, division, and matrix power. Advantages of the CSR format - efficient arithmetic operations CSR + CSR, CSR * CSR, etc. - efficient row slicing - fast matrix vector products Disadvantages of the CSR format - slow column slicing operations (consider CSC) - changes to the sparsity structure are expensive (consider LIL or DOK) Examples -------- >>> import numpy as np >>> from scipy.sparse import csr_matrix >>> csr_matrix((3, 4), dtype=np.int8).toarray() array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]], dtype=int8) >>> row = np.array([0, 0, 1, 2, 2, 2]) >>> col = np.array([0, 2, 2, 0, 1, 2]) >>> data = np.array([1, 2, 3, 4, 5, 6]) >>> csr_matrix((data, (row, col)), shape=(3, 3)).toarray() array([[1, 0, 2], [0, 0, 3], [4, 5, 6]]) >>> indptr = np.array([0, 2, 3, 6]) >>> indices = np.array([0, 2, 2, 0, 1, 2]) >>> data = np.array([1, 2, 3, 4, 5, 6]) >>> csr_matrix((data, indices, indptr), shape=(3, 3)).toarray() array([[1, 0, 2], [0, 0, 3], [4, 5, 6]]) Duplicate entries are summed together: >>> row = np.array([0, 1, 2, 0]) >>> col = np.array([0, 1, 1, 0]) >>> data = np.array([1, 2, 4, 8]) >>> csr_matrix((data, (row, col)), shape=(3, 3)).toarray() array([[9, 0, 0], [0, 2, 0], [0, 4, 0]]) As an example of how to construct a CSR matrix incrementally, the following snippet builds a term-document matrix from texts: >>> docs = [["hello", "world", "hello"], ["goodbye", "cruel", "world"]] >>> indptr = [0] >>> indices = [] >>> data = [] >>> vocabulary = {} >>> for d in docs: ... for term in d: ... index = vocabulary.setdefault(term, len(vocabulary)) ... indices.append(index) ... data.append(1) ... indptr.append(len(indices)) ... >>> csr_matrix((data, indices, indptr), dtype=int).toarray() array([[2, 1, 0, 0], [0, 1, 1, 1]]) """ format = 'csr' def transpose(self, axes=None, copy=False): if axes is not None: raise ValueError(("Sparse matrices do not support " "an 'axes' parameter because swapping " "dimensions is the only logical permutation.")) M, N = self.shape from .csc import csc_matrix return csc_matrix((self.data, self.indices, self.indptr), shape=(N, M), copy=copy) transpose.__doc__ = spmatrix.transpose.__doc__ def tolil(self, copy=False): from .lil import lil_matrix lil = lil_matrix(self.shape,dtype=self.dtype) self.sum_duplicates() ptr,ind,dat = self.indptr,self.indices,self.data rows, data = lil.rows, lil.data for n in range(self.shape[0]): start = ptr[n] end = ptr[n+1] rows[n] = ind[start:end].tolist() data[n] = dat[start:end].tolist() return lil tolil.__doc__ = spmatrix.tolil.__doc__ def tocsr(self, copy=False): if copy: return self.copy() else: return self tocsr.__doc__ = spmatrix.tocsr.__doc__ def tocsc(self, copy=False): idx_dtype = get_index_dtype((self.indptr, self.indices), maxval=max(self.nnz, self.shape[0])) indptr = np.empty(self.shape[1] + 1, dtype=idx_dtype) indices = np.empty(self.nnz, dtype=idx_dtype) data = np.empty(self.nnz, dtype=upcast(self.dtype)) csr_tocsc(self.shape[0], self.shape[1], self.indptr.astype(idx_dtype), self.indices.astype(idx_dtype), self.data, indptr, indices, data) from .csc import csc_matrix A = csc_matrix((data, indices, indptr), shape=self.shape) A.has_sorted_indices = True return A tocsc.__doc__ = spmatrix.tocsc.__doc__ def tobsr(self, blocksize=None, copy=True): from .bsr import bsr_matrix if blocksize is None: from .spfuncs import estimate_blocksize return self.tobsr(blocksize=estimate_blocksize(self)) elif blocksize == (1,1): arg1 = (self.data.reshape(-1,1,1),self.indices,self.indptr) return bsr_matrix(arg1, shape=self.shape, copy=copy) else: R,C = blocksize M,N = self.shape if R < 1 or C < 1 or M % R != 0 or N % C != 0: raise ValueError('invalid blocksize %s' % blocksize) blks = csr_count_blocks(M,N,R,C,self.indptr,self.indices) idx_dtype = get_index_dtype((self.indptr, self.indices), maxval=max(N//C, blks)) indptr = np.empty(M//R+1, dtype=idx_dtype) indices = np.empty(blks, dtype=idx_dtype) data = np.zeros((blks,R,C), dtype=self.dtype) csr_tobsr(M, N, R, C, self.indptr.astype(idx_dtype), self.indices.astype(idx_dtype), self.data, indptr, indices, data.ravel()) return bsr_matrix((data,indices,indptr), shape=self.shape) tobsr.__doc__ = spmatrix.tobsr.__doc__ # these functions are used by the parent class (_cs_matrix) # to remove redundancy between csc_matrix and csr_matrix def _swap(self, x): """swap the members of x if this is a column-oriented matrix """ return x def __iter__(self): indptr = np.zeros(2, dtype=self.indptr.dtype) shape = (1, self.shape[1]) i0 = 0 for i1 in self.indptr[1:]: indptr[1] = i1 - i0 indices = self.indices[i0:i1] data = self.data[i0:i1] yield csr_matrix((data, indices, indptr), shape=shape, copy=True) i0 = i1 def getrow(self, i): """Returns a copy of row i of the matrix, as a (1 x n) CSR matrix (row vector). """ M, N = self.shape i = int(i) if i < 0: i += M if i < 0 or i >= M: raise IndexError('index (%d) out of range' % i) indptr, indices, data = get_csr_submatrix( M, N, self.indptr, self.indices, self.data, i, i + 1, 0, N) return csr_matrix((data, indices, indptr), shape=(1, N), dtype=self.dtype, copy=False) def getcol(self, i): """Returns a copy of column i of the matrix, as a (m x 1) CSR matrix (column vector). """ M, N = self.shape i = int(i) if i < 0: i += N if i < 0 or i >= N: raise IndexError('index (%d) out of range' % i) indptr, indices, data = get_csr_submatrix( M, N, self.indptr, self.indices, self.data, 0, M, i, i + 1) return csr_matrix((data, indices, indptr), shape=(M, 1), dtype=self.dtype, copy=False) def _get_intXarray(self, row, col): return self.getrow(row)._minor_index_fancy(col) def _get_intXslice(self, row, col): if col.step in (1, None): return self._get_submatrix(row, col, copy=True) # TODO: uncomment this once it's faster: # return self.getrow(row)._minor_slice(col) M, N = self.shape start, stop, stride = col.indices(N) ii, jj = self.indptr[row:row+2] row_indices = self.indices[ii:jj] row_data = self.data[ii:jj] if stride > 0: ind = (row_indices >= start) & (row_indices < stop) else: ind = (row_indices <= start) & (row_indices > stop) if abs(stride) > 1: ind &= (row_indices - start) % stride == 0 row_indices = (row_indices[ind] - start) // stride row_data = row_data[ind] row_indptr = np.array([0, len(row_indices)]) if stride < 0: row_data = row_data[::-1] row_indices = abs(row_indices[::-1]) shape = (1, max(0, int(np.ceil(float(stop - start) / stride)))) return csr_matrix((row_data, row_indices, row_indptr), shape=shape, dtype=self.dtype, copy=False) def _get_sliceXint(self, row, col): if row.step in (1, None): return self._get_submatrix(row, col, copy=True) return self._major_slice(row)._get_submatrix(minor=col) def _get_sliceXarray(self, row, col): return self._major_slice(row)._minor_index_fancy(col) def _get_arrayXint(self, row, col): return self._major_index_fancy(row)._get_submatrix(minor=col) def _get_arrayXslice(self, row, col): if col.step not in (1, None): col = np.arange(*col.indices(self.shape[1])) return self._get_arrayXarray(row, col) return self._major_index_fancy(row)._get_submatrix(minor=col) def isspmatrix_csr(x): """Is x of csr_matrix type? Parameters ---------- x object to check for being a csr matrix Returns ------- bool True if x is a csr matrix, False otherwise Examples -------- >>> from scipy.sparse import csr_matrix, isspmatrix_csr >>> isspmatrix_csr(csr_matrix([[5]])) True >>> from scipy.sparse import csc_matrix, csr_matrix, isspmatrix_csc >>> isspmatrix_csr(csc_matrix([[5]])) False """ return isinstance(x, csr_matrix)
{ "content_hash": "63bf971fac34bfbb2631d508d190c013", "timestamp": "", "source": "github", "line_count": 358, "max_line_length": 79, "avg_line_length": 32.44972067039106, "alnum_prop": 0.5397262632349143, "repo_name": "Eric89GXL/scipy", "id": "54774d62f93d9d93b07e18652e70f290b178cad5", "size": "11617", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "scipy/sparse/csr.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "4207790" }, { "name": "C++", "bytes": "3692492" }, { "name": "Fortran", "bytes": "5573780" }, { "name": "HTML", "bytes": "124330" }, { "name": "Makefile", "bytes": "76425" }, { "name": "Matlab", "bytes": "4346" }, { "name": "Python", "bytes": "10892829" }, { "name": "Shell", "bytes": "2218" }, { "name": "TeX", "bytes": "52106" } ], "symlink_target": "" }
from __future__ import absolute_import import sys from os.path import basename from . import maybe_patch_concurrency __all__ = ['main'] DEPRECATED_FMT = """ The {old!r} command is deprecated, please use {new!r} instead: $ {new_argv} """ def _warn_deprecated(new): print(DEPRECATED_FMT.format( old=basename(sys.argv[0]), new=new, new_argv=' '.join([new] + sys.argv[1:])), ) def main(): if 'multi' not in sys.argv: maybe_patch_concurrency() from celery.bin.celery import main main() def _compat_worker(): maybe_patch_concurrency() _warn_deprecated('celery worker') from celery.bin.worker import main main() def _compat_multi(): _warn_deprecated('celery multi') from celery.bin.multi import main main() def _compat_beat(): maybe_patch_concurrency() _warn_deprecated('celery beat') from celery.bin.beat import main main() if __name__ == '__main__': # pragma: no cover main()
{ "content_hash": "c29aaa8e139b7103446248346f14c235", "timestamp": "", "source": "github", "line_count": 54, "max_line_length": 62, "avg_line_length": 18.203703703703702, "alnum_prop": 0.6307222787385555, "repo_name": "sunze/py_flask", "id": "04448e239b30e743e4a749fb1ef3be946b1db02c", "size": "983", "binary": false, "copies": "8", "ref": "refs/heads/master", "path": "venv/lib/python3.4/site-packages/celery/__main__.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "5939" }, { "name": "CSS", "bytes": "11745" }, { "name": "HTML", "bytes": "34870" }, { "name": "JavaScript", "bytes": "23176" }, { "name": "Mako", "bytes": "7564" }, { "name": "Python", "bytes": "12266826" }, { "name": "Shell", "bytes": "3634" } ], "symlink_target": "" }
from ._settings_operations import SettingsOperations from ._patch import __all__ as _patch_all from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import from ._patch import patch_sdk as _patch_sdk __all__ = [ "SettingsOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) _patch_sdk()
{ "content_hash": "b4f8b5e3b80c131cc8d885503d57a053", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 78, "avg_line_length": 30.272727272727273, "alnum_prop": 0.6876876876876877, "repo_name": "Azure/azure-sdk-for-python", "id": "0b0925b026d700f8a6fe2d0c2665aef2b23e258c", "size": "801", "binary": false, "copies": "4", "ref": "refs/heads/main", "path": "sdk/security/azure-mgmt-security/azure/mgmt/security/v2022_05_01/aio/operations/__init__.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1224" }, { "name": "Bicep", "bytes": "24196" }, { "name": "CSS", "bytes": "6089" }, { "name": "Dockerfile", "bytes": "4892" }, { "name": "HTML", "bytes": "12058" }, { "name": "JavaScript", "bytes": "8137" }, { "name": "Jinja", "bytes": "10377" }, { "name": "Jupyter Notebook", "bytes": "272022" }, { "name": "PowerShell", "bytes": "518535" }, { "name": "Python", "bytes": "715484989" }, { "name": "Shell", "bytes": "3631" } ], "symlink_target": "" }
from django.db import models from django.utils.translation import ugettext_lazy as _ from mptt.models import MPTTModel from ..product.models import Product __all__ = ('Category', 'CategoryManager') class CategoryManager(models.Manager): def get_product_url(self, product, category): if not category: if not product.categories.exists(): raise ValueError('Cannot generate url for product' ' without categories') category = product.categories.all()[0] return ('product:details', ('%s%s/' % (category.parents_slug_path(), category.slug), product.slug)) class Category(MPTTModel): name = models.CharField(_('name'), max_length=128) description = models.TextField(_('description'), blank=True) meta_description = models.TextField(_('meta description'), blank=True, help_text=_("Description used by search and indexing engines")) slug = models.SlugField(max_length=50) parent = models.ForeignKey('self', null=True, blank=True, related_name='children') products = models.ManyToManyField(Product, related_name='categories', blank=True) objects = CategoryManager() class Meta: verbose_name = _("category") verbose_name_plural = _("categories") def __unicode__(self): return self.name @models.permalink def get_absolute_url(self): return ('product:category-details', (self.parents_slug_path(), self.slug)) def parents_slug_path(self): parents = '/'.join(c.slug for c in self.get_ancestors()) return '%s/' % parents if parents else ''
{ "content_hash": "6449be63675f9ed24458ab4f9994a6aa", "timestamp": "", "source": "github", "line_count": 48, "max_line_length": 75, "avg_line_length": 37.0625, "alnum_prop": 0.5997751545812254, "repo_name": "fusionbox/satchless", "id": "ce5271eae0ac32bfd1b79229dc33d619ec5a9409", "size": "1779", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "satchless/category/models.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "47040" }, { "name": "HTML", "bytes": "87824" }, { "name": "JavaScript", "bytes": "23123" }, { "name": "Python", "bytes": "376774" } ], "symlink_target": "" }
"""Module for testing anything related to executors.""" import os from threading import Thread from django.test import TestCase from norc.core.models import Executor, DBQueue, CommandTask, Instance from norc.core.constants import Status, Request from norc.norc_utils import wait_until, log class ExecutorTest(TestCase): """Tests for a Norc executor.""" @property def executor(self): return Executor.objects.get(pk=self._executor.pk) def setUp(self): """Create the executor and thread objects.""" self.queue = DBQueue.objects.create(name='test') self._executor = Executor.objects.create(queue=self.queue, concurrent=4) self._executor.log = log.Log(os.devnull) self.thread = Thread(target=self._executor.start) def test_start_stop(self): self.assertEqual(self.executor.status, Status.CREATED) self.thread.start() wait_until(lambda: self.executor.status == Status.RUNNING, 3) self.assertEqual(self.executor.status, Status.RUNNING) self.executor.make_request(Request.STOP) wait_until(lambda: Status.is_final(self.executor.status), 5) self.assertEqual(self.executor.status, Status.ENDED) def test_kill(self): self.thread.start() wait_until(lambda: self.executor.status == Status.RUNNING, 3) self.assertEqual(self.executor.status, Status.RUNNING) self.executor.make_request(Request.KILL) wait_until(lambda: Status.is_final(self.executor.status), 5) self.assertEqual(self.executor.status, Status.KILLED) def test_pause_resume(self): self.thread.start() wait_until(lambda: self.executor.status == Status.RUNNING, 3) self.assertEqual(self.executor.status, Status.RUNNING) self.executor.make_request(Request.PAUSE) wait_until(lambda: self.executor.status == Status.PAUSED, 5) self.assertEqual(self.executor.status, Status.PAUSED) self.executor.make_request(Request.RESUME) wait_until(lambda: self.executor.status == Status.RUNNING, 5) self.assertEqual(self.executor.status, Status.RUNNING) # This test does not work because of an issue with subprocesses using # the Django test database. # def test_run_instance(self): # self.thread.start() # ct = CommandTask.objects.create(name='test', command='echo "blah"') # _instance = Instance.objects.create(task=ct, executor=self._executor) # instance = lambda: Instance.objects.get(pk=_instance.pk) # wait_until(lambda: self.executor.status == Status.RUNNING, 3) # self.queue.push(_instance) # wait_until(lambda: Status.is_final(instance().status), 5) # self.assertEqual(instance().status, Status.SUCCESS) def tearDown(self): if not Status.is_final(self._executor.status): print self._executor.make_request(Request.KILL) self.thread.join(7) self._executor.heart.join(7) assert not self.thread.isAlive() assert not self._executor.heart.isAlive()
{ "content_hash": "0f6da7466a394abb3c4b45d3f294a678", "timestamp": "", "source": "github", "line_count": 73, "max_line_length": 80, "avg_line_length": 42.657534246575345, "alnum_prop": 0.6705202312138728, "repo_name": "darrellsilver/norc", "id": "bc80a148a90d131413fc934ace4f4fbe9dfa3c55", "size": "3115", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "core/tests/executor_test.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "JavaScript", "bytes": "20532" }, { "name": "Python", "bytes": "165903" } ], "symlink_target": "" }
from django.db import models # Create your models here. class HouseRace(models.Model): state = models.CharField(max_length=2) district = models.IntegerField(help_text="they use integers for districts; at-large = 1") rating_id = models.IntegerField() rating_label = models.CharField(max_length=63) incumbent = models.CharField(max_length=127) update_time = models.DateTimeField(auto_now=True) cycle = models.CharField(max_length=4, blank=True, null=True, help_text="text cycle; even number.") class Meta: unique_together = ("state", "district", "cycle") class SenateRace(models.Model): state = models.CharField(max_length=2) seat_class = models.CharField(max_length=4, help_text="they use roman numerals for 1-3") rating_id = models.IntegerField() rating_segment = models.CharField(max_length=63, help_text="they use roman numerals for 1-3") rating_label = models.CharField(max_length=63) incumbent = models.CharField(max_length=127) update_time = models.DateTimeField(auto_now=True) cycle = models.CharField(max_length=4, blank=True, null=True, help_text="text cycle; even number.") class Meta: unique_together = ("state", "seat_class", "cycle")
{ "content_hash": "85694e5b9a5472a8194e220fc68a8a69", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 103, "avg_line_length": 41.86666666666667, "alnum_prop": 0.6942675159235668, "repo_name": "sunlightlabs/read_FEC", "id": "2d4b8fe941e55bf88320c2e52d1d55b0db2056e2", "size": "1256", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "fecreader/rothenberg/models.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "27432" }, { "name": "HTML", "bytes": "357960" }, { "name": "JavaScript", "bytes": "129989" }, { "name": "Python", "bytes": "1881514" }, { "name": "Shell", "bytes": "10604" } ], "symlink_target": "" }
from __future__ import absolute_import, print_function from django.contrib.auth import login from sentry import features from sentry.web.forms.accounts import AuthenticationForm from sentry.web.frontend.base import BaseView from sentry.utils.auth import get_auth_providers, get_login_redirect class AuthLoginView(BaseView): auth_required = False def handle(self, request): if request.user.is_authenticated(): return self.redirect(get_login_redirect(request)) form = AuthenticationForm(request, request.POST or None, captcha=bool(request.session.get('needs_captcha'))) if form.is_valid(): login(request, form.get_user()) request.session.pop('needs_captcha', None) return self.redirect(get_login_redirect(request)) elif request.POST and not request.session.get('needs_captcha'): request.session['needs_captcha'] = 1 form = AuthenticationForm(request, request.POST or None, captcha=True) form.errors.pop('captcha', None) request.session.set_test_cookie() context = { 'form': form, 'next': request.session.get('_next'), 'CAN_REGISTER': features.has('auth:register') or request.session.get('can_register'), 'AUTH_PROVIDERS': get_auth_providers(), 'SOCIAL_AUTH_CREATE_USERS': features.has('social-auth:register'), } return self.respond('sentry/login.html', context)
{ "content_hash": "5fc38554583bcf578c140b988a7ab3c9", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 97, "avg_line_length": 37.09756097560975, "alnum_prop": 0.6443129520052597, "repo_name": "jokey2k/sentry", "id": "12478c1cd4dc795f0c0d6e860d3dcc1de6e3b68d", "size": "1521", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/sentry/web/frontend/auth_login.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "580459" }, { "name": "Gettext Catalog", "bytes": "2933595" }, { "name": "HTML", "bytes": "292821" }, { "name": "JavaScript", "bytes": "608760" }, { "name": "Makefile", "bytes": "2710" }, { "name": "Python", "bytes": "5105385" } ], "symlink_target": "" }
from __future__ import unicode_literals, division, absolute_import from builtins import * # noqa pylint: disable=unused-import, redefined-builtin from future.moves.urllib.parse import urlparse import json import logging from collections import MutableSet import requests from requests import RequestException from flexget import plugin from flexget.entry import Entry from flexget.event import event log = logging.getLogger('sonarr_list') class SonarrSet(MutableSet): supported_ids = ['tvdb_id', 'tvrage_id', 'tvmaze_id', 'imdb_id', 'slug', 'sonarr_id'] schema = { 'type': 'object', 'properties': { 'base_url': {'type': 'string'}, 'port': {'type': 'number', 'default': 80}, 'api_key': {'type': 'string'}, 'include_ended': {'type': 'boolean', 'default': True}, 'only_monitored': {'type': 'boolean', 'default': True}, 'include_data': {'type': 'boolean', 'default': False}, 'search_missing_episodes': {'type': 'boolean', 'default': True}, 'ignore_episodes_without_files': {'type': 'boolean', 'default': False}, 'ignore_episodes_with_files': {'type': 'boolean', 'default': False} }, 'required': ['api_key', 'base_url'], 'additionalProperties': False } def series_request_builder(self, base_url, port, api_key): parsedurl = urlparse(base_url) log.debug('Received series list request') url = '%s://%s:%s%s/api/series' % (parsedurl.scheme, parsedurl.netloc, port, parsedurl.path) headers = {'X-Api-Key': api_key} return url, headers def lookup_request(self, base_url, port, api_key): parsedurl = urlparse(base_url) log.debug('Received series lookup request') url = '%s://%s:%s%s/api/series/lookup?term=' % (parsedurl.scheme, parsedurl.netloc, port, parsedurl.path) headers = {'X-Api-Key': api_key} return url, headers def profile_list_request(self, base_url, port, api_key): parsedurl = urlparse(base_url) log.debug('Received profile list request') url = '%s://%s:%s%s/api/profile' % (parsedurl.scheme, parsedurl.netloc, port, parsedurl.path) headers = {'X-Api-Key': api_key} return url, headers def rootfolder_request(self, base_url, port, api_key): parsedurl = urlparse(base_url) log.debug('Received rootfolder list request') url = '%s://%s:%s%s/api/Rootfolder' % (parsedurl.scheme, parsedurl.netloc, port, parsedurl.path) headers = {'X-Api-Key': api_key} return url, headers def get_json(self, url, headers): try: response = requests.get(url, headers=headers) if response.status_code == 200: return response.json() else: raise plugin.PluginError('Invalid response received from Sonarr: %s' % response.content) except RequestException as e: raise plugin.PluginError('Unable to connect to Sonarr at %s. Error: %s' % (url, e)) def post_json(self, url, headers, data): try: response = requests.post(url, headers=headers, data=data) if response.status_code == 201: return response.json() else: raise plugin.PluginError('Invalid response received from Sonarr: %s' % response.content) except RequestException as e: raise plugin.PluginError('Unable to connect to Sonarr at %s. Error: %s' % (url, e)) def request_builder(self, base_url, request_type, port, api_key): if request_type == 'series': return self.series_request_builder(base_url, port, api_key) elif request_type == 'profile': return self.profile_list_request(base_url, port, api_key) elif request_type == 'lookup': return self.lookup_request(base_url, port, api_key) elif request_type == 'rootfolder': return self.rootfolder_request(base_url, port, api_key) else: raise plugin.PluginError('Received unknown API request, aborting.') def translate_quality(self, quality_name): """ Translate Sonnar's qualities to ones recognize by Flexget """ if quality_name == 'Raw-HD': # No better match yet in Flexget return 'remux' elif quality_name == 'DVD': # No better match yet in Flexget return 'dvdrip' else: return quality_name.replace('-', ' ').lower() def quality_requirement_builder(self, quality_profile): allowed_qualities = [self.translate_quality(quality['quality']['name']) for quality in quality_profile['items'] if quality['allowed']] cutoff = self.translate_quality(quality_profile['cutoff']['name']) return allowed_qualities, cutoff def list_entries(self): series_url, series_headers = self.request_builder(self.config.get('base_url'), 'series', self.config.get('port'), self.config['api_key']) json = self.get_json(series_url, series_headers) # Retrieves Sonarr's profile list if include_data is set to true if self.config.get('include_data'): profile_url, profile_headers = self.request_builder(self.config.get('base_url'), 'profile', self.config.get('port'), self.config['api_key']) profiles_json = self.get_json(profile_url, profile_headers) entries = [] for show in json: fg_qualities = '' # Initializes the quality parameter fg_cutoff = '' path = None if not show['monitored'] and self.config.get( 'only_monitored'): # Checks if to retrieve just monitored shows continue if show['status'] == 'ended' and not self.config.get('include_ended'): # Checks if to retrieve ended shows continue if self.config.get('include_data') and profiles_json: # Check if to retrieve quality & path path = show.get('path') for profile in profiles_json: if profile['id'] == show['profileId']: # Get show's profile data from all possible profiles fg_qualities, fg_cutoff = self.quality_requirement_builder(profile) entry = Entry(title=show['title'], url='', series_name=show['title'], tvdb_id=show.get('tvdbId'), tvrage_id=show.get('tvRageId'), tvmaze_id=show.get('tvMazeId'), imdb_id=show.get('imdbid'), slug=show.get('titleSlug'), sonarr_id=show.get('id'), configure_series_target=fg_cutoff) if self.config.get('include_data'): if len(fg_qualities) > 1: entry['configure_series_qualities'] = fg_qualities elif len(fg_qualities) == 1: entry['configure_series_quality'] = fg_qualities[0] else: entry['configure_series_quality'] = fg_qualities if path: entry['configure_series_path'] = path if entry.isvalid(): log.debug('returning entry %s', entry) entries.append(entry) else: log.error('Invalid entry created? %s' % entry) continue return entries def add_show(self, entry): log.debug('searching for show match for %s using Sonarr', entry) lookup_series_url, lookup_series_headers = self.request_builder(self.config.get('base_url'), 'lookup', self.config.get('port'), self.config['api_key']) if entry.get('tvdb_id'): lookup_series_url += 'tvdb:%s' % entry.get('tvdb_id') else: lookup_series_url += entry.get('title') lookup_results = self.get_json(lookup_series_url, headers=lookup_series_headers) if not lookup_results: log.debug('could not find series match to %s', entry) return else: if len(lookup_results) > 1: log.debug('got multiple results for Sonarr, using first one') show = lookup_results[0] log.debug('using show %s', show) # Getting rootfolder rootfolder_series_url, rootfolder_series_headers = self.request_builder(self.config.get('base_url'), 'rootfolder', self.config.get('port'), self.config['api_key']) rootfolder = self.get_json(rootfolder_series_url, headers=rootfolder_series_headers) # Setting defaults for Sonarr show['profileId'] = 1 show['qualityProfileId '] = 1 show['rootFolderPath'] = rootfolder[0]['path'] show['addOptions'] = {"ignoreEpisodesWithFiles": self.config.get('ignore_episodes_with_files'), "ignoreEpisodesWithoutFiles": self.config.get('ignore_episodes_without_files'), "searchForMissingEpisodes": self.config.get('search_missing_episodes')} series_url, series_headers = self.request_builder(self.config.get('base_url'), 'series', self.config.get('port'), self.config['api_key']) log.debug('adding show %s to sonarr', show) returned_show = self.post_json(series_url, headers=series_headers, data=json.dumps(show)) return returned_show def remove_show(self, show): delete_series_url, delete_series_headers = self.request_builder(self.config.get('base_url'), 'series', self.config.get('port'), self.config['api_key']) delete_series_url += '/%s' % show.get('sonarr_id') requests.delete(delete_series_url, headers=delete_series_headers) @property def shows(self): if self._shows is None: self._shows = self.list_entries() return self._shows def _find_entry(self, entry): for sb_entry in self.shows: if any(entry.get(id) is not None and entry[id] == sb_entry[id] for id in self.supported_ids): return sb_entry if entry.get('title').lower() == sb_entry.get('title').lower(): return sb_entry def _from_iterable(self, it): # TODO: is this the right answer? the returned object won't have our custom __contains__ logic return set(it) def __init__(self, config): self.config = config self._shows = None def __iter__(self): return (entry for entry in self.shows) def __len__(self): return len(self.shows) def __contains__(self, entry): return self._find_entry(entry) is not None def add(self, entry): if not self._find_entry(entry): show = self.add_show(entry) self._shows = None log.verbose('Successfully added show %s to Sonarr', show['title']) else: log.debug('entry %s already exists in Sonarr list', entry) def discard(self, entry): show = self._find_entry(entry) if not show: log.debug('Did not find matching show in Sonarr for %s, skipping', entry) return self.remove_show(show) log.verbose('removed show %s from Sonarr', show['title']) @property def immutable(self): return False @property def online(self): """ Set the online status of the plugin, online plugin should be treated differently in certain situations, like test mode""" return True def get(self, entry): return self._find_entry(entry) class SonarrList(object): schema = SonarrSet.schema @staticmethod def get_list(config): return SonarrSet(config) def on_task_input(self, task, config): return list(SonarrSet(config)) @event('plugin.register') def register_plugin(): plugin.register(SonarrList, 'sonarr_list', api_ver=2, interfaces=['task', 'list'])
{ "content_hash": "d9c63a7803063670b0323c9b63129dd9", "timestamp": "", "source": "github", "line_count": 288, "max_line_length": 120, "avg_line_length": 43.708333333333336, "alnum_prop": 0.5644264378773435, "repo_name": "OmgOhnoes/Flexget", "id": "6400aeacc754ce3f3b63456a96ba866dab8f8128", "size": "12588", "binary": false, "copies": "2", "ref": "refs/heads/develop", "path": "flexget/plugins/list/sonarr_list.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "11875" }, { "name": "HTML", "bytes": "79376" }, { "name": "JavaScript", "bytes": "263723" }, { "name": "Python", "bytes": "3324701" }, { "name": "SRecode Template", "bytes": "3" } ], "symlink_target": "" }
""" parser.mobile package (imdb package). This package provides the IMDbMobileAccessSystem class used to access IMDb's data for mobile systems. the imdb.IMDb function will return an instance of this class when called with the 'accessSystem' argument set to "mobile". Copyright 2005-2012 Davide Alberani <da@erlug.linux.it> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA """ import re import logging from urllib import unquote from imdb.Movie import Movie from imdb.utils import analyze_title, analyze_name, canonicalName, \ date_and_notes from imdb._exceptions import IMDbDataAccessError from imdb.parser.http import IMDbHTTPAccessSystem from imdb.parser.http.utils import subXMLRefs, subSGMLRefs, build_person, \ build_movie, re_spaces # XXX NOTE: the first version of this module was heavily based on # regular expressions. This new version replace regexps with # find() strings' method calls; despite being less flexible, it # seems to be at least as fast and, hopefully, much more # lightweight. Yes: the regexp-based version was too heavyweight # for systems with very limited CPU power and memory footprint. re_spacessub = re_spaces.sub # Strip html. re_unhtml = re.compile(r'<.+?>') re_unhtmlsub = re_unhtml.sub # imdb person or movie ids. re_imdbID = re.compile(r'(?<=nm|tt|ch)([0-9]{7})\b') # movie AKAs. re_makas = re.compile('(<p class="find-aka">.*?</p>)') # Remove episode numbers. re_filmo_episodes = re.compile('<div class="filmo-episodes">.*?</div>', re.M | re.I) def _unHtml(s): """Return a string without tags and no multiple spaces.""" return subSGMLRefs(re_spacessub(' ', re_unhtmlsub('', s)).strip()) _inttype = type(0) def _getTagsWith(s, cont, toClosure=False, maxRes=None): """Return the html tags in the 's' string containing the 'cont' string; if toClosure is True, everything between the opening tag and the closing tag is returned.""" lres = [] bi = s.find(cont) if bi != -1: btag = s[:bi].rfind('<') if btag != -1: if not toClosure: etag = s[bi+1:].find('>') if etag != -1: endidx = bi+2+etag lres.append(s[btag:endidx]) if maxRes is not None and len(lres) >= maxRes: return lres lres += _getTagsWith(s[endidx:], cont, toClosure=toClosure) else: spaceidx = s[btag:].find(' ') if spaceidx != -1: ctag = '</%s>' % s[btag+1:btag+spaceidx] closeidx = s[bi:].find(ctag) if closeidx != -1: endidx = bi+closeidx+len(ctag) lres.append(s[btag:endidx]) if maxRes is not None and len(lres) >= maxRes: return lres lres += _getTagsWith(s[endidx:], cont, toClosure=toClosure) return lres def _findBetween(s, begins, ends, beginindx=0, maxRes=None, lres=None): """Return the list of strings from the 's' string which are included between the 'begins' and 'ends' strings.""" if lres is None: lres = [] bi = s.find(begins, beginindx) if bi != -1: lbegins = len(begins) if isinstance(ends, (list, tuple)): eset = [s.find(end, bi+lbegins) for end in ends] eset[:] = [x for x in eset if x != -1] if not eset: ei = -1 else: ei = min(eset) else: ei = s.find(ends, bi+lbegins) if ei != -1: match = s[bi+lbegins:ei] lres.append(match) if maxRes is not None and len(lres) >= maxRes: return lres _findBetween(s, begins, ends, beginindx=ei, maxRes=maxRes, lres=lres) return lres class IMDbMobileAccessSystem(IMDbHTTPAccessSystem): """The class used to access IMDb's data through the web for mobile terminals.""" accessSystem = 'mobile' _mobile_logger = logging.getLogger('imdbpy.parser.mobile') def __init__(self, isThin=0, *arguments, **keywords): self.accessSystem = 'mobile' IMDbHTTPAccessSystem.__init__(self, isThin, *arguments, **keywords) def _clean_html(self, html): """Normalize the retrieve html.""" html = re_spaces.sub(' ', html) # Remove silly &nbsp;&raquo; chars. html = html.replace('&nbsp;&raquo;', '') return subXMLRefs(html) def _mretrieve(self, url, size=-1): """Retrieve an html page and normalize it.""" cont = self._retrieve(url, size=size) return self._clean_html(cont) def _getPersons(self, s, sep='<br/>'): """Return a list of Person objects, from the string s; items are assumed to be separated by the sep string.""" names = s.split(sep) pl = [] plappend = pl.append counter = 1 for name in names: pid = re_imdbID.findall(name) if not pid: continue characters = _getTagsWith(name, 'class="char"', toClosure=True, maxRes=1) chpids = [] if characters: for ch in characters[0].split(' / '): chid = re_imdbID.findall(ch) if not chid: chpids.append(None) else: chpids.append(chid[-1]) if not chpids: chpids = None elif len(chpids) == 1: chpids = chpids[0] name = _unHtml(name) # Catch unclosed tags. gt_indx = name.find('>') if gt_indx != -1: name = name[gt_indx+1:].lstrip() if not name: continue if name.endswith('...'): name = name[:-3] p = build_person(name, personID=str(pid[0]), billingPos=counter, modFunct=self._defModFunct, roleID=chpids, accessSystem=self.accessSystem) plappend(p) counter += 1 return pl def _search_movie(self, title, results): ##params = urllib.urlencode({'tt': 'on','mx': str(results),'q': title}) ##params = 'q=%s&tt=on&mx=%s' % (urllib.quote_plus(title), str(results)) ##cont = self._mretrieve(imdbURL_search % params) cont = subXMLRefs(self._get_search_content('tt', title, results)) title = _findBetween(cont, '<title>', '</title>', maxRes=1) res = [] if not title: self._mobile_logger.error('no title tag searching for movie %s', title) return res tl = title[0].lower() if not tl.startswith('find - imdb'): # a direct hit! title = _unHtml(title[0]) mid = None midtag = _getTagsWith(cont, 'rel="canonical"', maxRes=1) if midtag: mid = _findBetween(midtag[0], '/title/tt', '/', maxRes=1) if not (mid and title): self._mobile_logger.error('no direct hit title/movieID for' \ ' title %s', title) return res if cont.find('<span class="tv-extra">TV mini-series</span>') != -1: title += ' (mini)' res[:] = [(str(mid[0]), analyze_title(title))] else: # XXX: this results*3 prevents some recursion errors, but... # it's not exactly understandable (i.e.: why 'results' is # not enough to get all the results?) lis = _findBetween(cont, 'td class="result_text">', '</td>', maxRes=results*3) for li in lis: akas = re_makas.findall(li) for idx, aka in enumerate(akas): aka = aka.replace('" - ', '::', 1) aka = _unHtml(aka) if aka.startswith('aka "'): aka = aka[5:].strip() if aka[-1] == '"': aka = aka[:-1] akas[idx] = aka imdbid = re_imdbID.findall(li) li = re_makas.sub('', li) mtitle = _unHtml(li) if not (imdbid and mtitle): self._mobile_logger.debug('no title/movieID parsing' \ ' %s searching for title %s', li, title) continue mtitle = mtitle.replace('(TV mini-series)', '(mini)') resd = analyze_title(mtitle) if akas: resd['akas'] = akas res.append((str(imdbid[0]), resd)) return res def get_movie_main(self, movieID): cont = self._mretrieve(self.urls['movie_main'] % movieID + 'maindetails') title = _findBetween(cont, '<title>', '</title>', maxRes=1) if not title: raise IMDbDataAccessError('unable to get movieID "%s"' % movieID) title = _unHtml(title[0]) if title.endswith(' - IMDb'): title = title[:-7] if cont.find('<span class="tv-extra">TV mini-series</span>') != -1: title += ' (mini)' d = analyze_title(title) kind = d.get('kind') tv_series = _findBetween(cont, 'TV Series:</h5>', '</a>', maxRes=1) if tv_series: mid = re_imdbID.findall(tv_series[0]) else: mid = None if tv_series and mid: s_title = _unHtml(tv_series[0]) s_data = analyze_title(s_title) m = Movie(movieID=str(mid[0]), data=s_data, accessSystem=self.accessSystem, modFunct=self._defModFunct) d['kind'] = kind = u'episode' d['episode of'] = m if kind in ('tv series', 'tv mini series'): years = _findBetween(cont, '<h1>', '</h1>', maxRes=1) if years: years[:] = _findBetween(years[0], 'TV series', '</span>', maxRes=1) if years: d['series years'] = years[0].strip() air_date = _findBetween(cont, 'Original Air Date:</h5>', '</div>', maxRes=1) if air_date: air_date = air_date[0] vi = air_date.find('(') if vi != -1: date = _unHtml(air_date[:vi]).strip() if date != '????': d['original air date'] = date air_date = air_date[vi:] season = _findBetween(air_date, 'Season', ',', maxRes=1) if season: season = season[0].strip() try: season = int(season) except: pass if season or type(season) is _inttype: d['season'] = season episode = _findBetween(air_date, 'Episode', ')', maxRes=1) if episode: episode = episode[0].strip() try: episode = int(episode) except: pass if episode or type(season) is _inttype: d['episode'] = episode direct = _findBetween(cont, '<h5>Director', ('</div>', '<br/> <br/>'), maxRes=1) if direct: direct = direct[0] h5idx = direct.find('/h5>') if h5idx != -1: direct = direct[h5idx+4:] direct = self._getPersons(direct) if direct: d['director'] = direct if kind in ('tv series', 'tv mini series', 'episode'): if kind != 'episode': seasons = _findBetween(cont, 'Seasons:</h5>', '</div>', maxRes=1) if seasons: d['number of seasons'] = seasons[0].count('|') + 1 creator = _findBetween(cont, 'Created by</h5>', ('class="tn15more"', '</div>', '<br/> <br/>'), maxRes=1) if not creator: # They change 'Created by' to 'Creator' and viceversa # from time to time... # XXX: is 'Creators' also used? creator = _findBetween(cont, 'Creator:</h5>', ('class="tn15more"', '</div>', '<br/> <br/>'), maxRes=1) if creator: creator = creator[0] if creator.find('tn15more'): creator = '%s>' % creator creator = self._getPersons(creator) if creator: d['creator'] = creator writers = _findBetween(cont, '<h5>Writer', ('</div>', '<br/> <br/>'), maxRes=1) if writers: writers = writers[0] h5idx = writers.find('/h5>') if h5idx != -1: writers = writers[h5idx+4:] writers = self._getPersons(writers) if writers: d['writer'] = writers cvurl = _getTagsWith(cont, 'name="poster"', toClosure=True, maxRes=1) if cvurl: cvurl = _findBetween(cvurl[0], 'src="', '"', maxRes=1) if cvurl: d['cover url'] = cvurl[0] genres = _findBetween(cont, 'href="/genre/', '"') if genres: d['genres'] = list(set(genres)) ur = _findBetween(cont, 'id="star-bar-user-rate">', '</div>', maxRes=1) if ur: rat = _findBetween(ur[0], '<b>', '</b>', maxRes=1) if rat: if rat: d['rating'] = rat[0].strip() else: self._mobile_logger.warn('wrong rating: %s', rat) vi = ur[0].rfind('href="ratings"') if vi != -1 and ur[0][vi+10:].find('await') == -1: try: votes = _findBetween(ur[0][vi:], "title='", " IMDb", maxRes=1) votes = int(votes[0].replace(',', '')) d['votes'] = votes except (ValueError, IndexError): self._mobile_logger.warn('wrong votes: %s', ur) top250 = _findBetween(cont, 'href="/chart/top?', '</a>', maxRes=1) if top250: fn = top250[0].rfind('#') if fn != -1: try: td = int(top250[0][fn+1:]) d['top 250 rank'] = td except ValueError: self._mobile_logger.warn('wrong top250: %s', top250) castdata = _findBetween(cont, 'Cast overview', '</table>', maxRes=1) if not castdata: castdata = _findBetween(cont, 'Credited cast', '</table>', maxRes=1) if not castdata: castdata = _findBetween(cont, 'Complete credited cast', '</table>', maxRes=1) if not castdata: castdata = _findBetween(cont, 'Series Cast Summary', '</table>', maxRes=1) if not castdata: castdata = _findBetween(cont, 'Episode Credited cast', '</table>', maxRes=1) if castdata: castdata = castdata[0] # Reintegrate the fist tag. fl = castdata.find('href=') if fl != -1: castdata = '<a ' + castdata[fl:] # Exclude the 'rest of cast listed alphabetically' row. smib = castdata.find('<tr><td align="center" colspan="4"><small>') if smib != -1: smie = castdata.rfind('</small></td></tr>') if smie != -1: castdata = castdata[:smib].strip() + \ castdata[smie+18:].strip() castdata = castdata.replace('/tr> <tr', '/tr><tr') cast = self._getPersons(castdata, sep='</tr><tr') if cast: d['cast'] = cast akas = _findBetween(cont, 'Also Known As:</h5>', '</div>', maxRes=1) if akas: # For some reason, here <br> is still used in place of <br/>. akas[:] = [x for x in akas[0].split('<br>') if x.strip()] akas = [_unHtml(x).replace('" - ','::', 1).lstrip('"').strip() for x in akas] if 'See more' in akas: akas.remove('See more') akas[:] = [x for x in akas if x] if akas: d['akas'] = akas mpaa = _findBetween(cont, 'MPAA</a>:', '</div>', maxRes=1) if mpaa: d['mpaa'] = _unHtml(mpaa[0]) runtimes = _findBetween(cont, 'Runtime:</h5>', '</div>', maxRes=1) if runtimes: runtimes = runtimes[0] runtimes = [x.strip().replace(' min', '').replace(' (', '::(', 1) for x in runtimes.split('|')] d['runtimes'] = [_unHtml(x).strip() for x in runtimes] if kind == 'episode': # number of episodes. epsn = _findBetween(cont, 'title="Full Episode List">', '</a>', maxRes=1) if epsn: epsn = epsn[0].replace(' Episodes', '').strip() if epsn: try: epsn = int(epsn) except: self._mobile_logger.warn('wrong episodes #: %s', epsn) d['number of episodes'] = epsn country = _findBetween(cont, 'Country:</h5>', '</div>', maxRes=1) if country: country[:] = country[0].split(' | ') country[:] = ['<a %s' % x for x in country if x] country[:] = [_unHtml(x.replace(' <i>', '::')) for x in country] if country: d['countries'] = country lang = _findBetween(cont, 'Language:</h5>', '</div>', maxRes=1) if lang: lang[:] = lang[0].split(' | ') lang[:] = ['<a %s' % x for x in lang if x] lang[:] = [_unHtml(x.replace(' <i>', '::')) for x in lang] if lang: d['languages'] = lang col = _findBetween(cont, '"/search/title?colors=', '</div>') if col: col[:] = col[0].split(' | ') col[:] = ['<a %s' % x for x in col if x] col[:] = [_unHtml(x.replace(' <i>', '::')) for x in col] if col: d['color info'] = col sm = _findBetween(cont, '/search/title?sound_mixes=', '</div>', maxRes=1) if sm: sm[:] = sm[0].split(' | ') sm[:] = ['<a %s' % x for x in sm if x] sm[:] = [_unHtml(x.replace(' <i>', '::')) for x in sm] if sm: d['sound mix'] = sm cert = _findBetween(cont, 'Certification:</h5>', '</div>', maxRes=1) if cert: cert[:] = cert[0].split(' | ') cert[:] = [_unHtml(x.replace(' <i>', '::')) for x in cert] if cert: d['certificates'] = cert plotoutline = _findBetween(cont, 'Plot:</h5>', ['<a ', '</div>'], maxRes=1) if plotoutline: plotoutline = plotoutline[0].strip() plotoutline = plotoutline.rstrip('|').rstrip() if plotoutline: d['plot outline'] = _unHtml(plotoutline) aratio = _findBetween(cont, 'Aspect Ratio:</h5>', ['<a ', '</div>'], maxRes=1) if aratio: aratio = aratio[0].strip().replace(' (', '::(', 1) if aratio: d['aspect ratio'] = _unHtml(aratio) return {'data': d} def get_movie_plot(self, movieID): cont = self._mretrieve(self.urls['movie_main'] % movieID + 'plotsummary') plot = _findBetween(cont, '<p class="plotpar">', '</p>') plot[:] = [_unHtml(x) for x in plot] for i in xrange(len(plot)): p = plot[i] wbyidx = p.rfind(' Written by ') if wbyidx != -1: plot[i] = '%s::%s' % \ (p[:wbyidx].rstrip(), p[wbyidx+12:].rstrip().replace('{','<').replace('}','>')) if plot: return {'data': {'plot': plot}} return {'data': {}} def _search_person(self, name, results): ##params = urllib.urlencode({'nm': 'on', 'mx': str(results), 'q': name}) ##params = 'q=%s&nm=on&mx=%s' % (urllib.quote_plus(name), str(results)) ##cont = self._mretrieve(imdbURL_search % params) cont = subXMLRefs(self._get_search_content('nm', name, results)) name = _findBetween(cont, '<title>', '</title>', maxRes=1) res = [] if not name: self._mobile_logger.warn('no title tag searching for name %s', name) return res nl = name[0].lower() if not nl.startswith('find - imdb'): # a direct hit! name = _unHtml(name[0]) name = name.replace('- Filmography by type' , '').strip() pid = None pidtag = _getTagsWith(cont, 'rel="canonical"', maxRes=1) if pidtag: pid = _findBetween(pidtag[0], '/name/nm', '/', maxRes=1) if not (pid and name): self._mobile_logger.error('no direct hit name/personID for' \ ' name %s', name) return res res[:] = [(str(pid[0]), analyze_name(name, canonical=1))] else: lis = _findBetween(cont, 'td class="result_text">', '</td>', maxRes=results*3) for li in lis: akas = _findBetween(li, '<em>"', '"</em>') for sep in ['<small', '<br> aka', '<br> birth name']: sepIdx = li.find(sep) if sepIdx != -1: li = li[:sepIdx] pid = re_imdbID.findall(li) pname = _unHtml(li) if not (pid and pname): self._mobile_logger.debug('no name/personID parsing' \ ' %s searching for name %s', li, name) continue resd = analyze_name(pname, canonical=1) if akas: resd['akas'] = akas res.append((str(pid[0]), resd)) return res def get_person_main(self, personID, _parseChr=False): if not _parseChr: url = self.urls['person_main'] % personID + 'maindetails' else: url = self.urls['character_main'] % personID s = self._mretrieve(url) r = {} name = _findBetween(s, '<title>', '</title>', maxRes=1) if not name: if _parseChr: w = 'characterID' else: w = 'personID' raise IMDbDataAccessError('unable to get %s "%s"' % (w, personID)) name = _unHtml(name[0].replace(' - IMDb', '')) if _parseChr: name = name.replace('(Character)', '').strip() name = name.replace('- Filmography by type', '').strip() else: name = name.replace('- Filmography by', '').strip() r = analyze_name(name, canonical=not _parseChr) for dKind in ('Born', 'Died'): date = _findBetween(s, '%s:</h4>' % dKind.capitalize(), ('<div class', '</div>', '<br/><br/>'), maxRes=1) if date: date = _unHtml(date[0]) if date: #date, notes = date_and_notes(date) # TODO: fix to handle real names. date_notes = date.split(' in ', 1) notes = u'' date = date_notes[0] if len(date_notes) == 2: notes = date_notes[1] dtitle = 'birth' if dKind == 'Died': dtitle = 'death' if date: r['%s date' % dtitle] = date if notes: r['%s notes' % dtitle] = notes akas = _findBetween(s, 'Alternate Names:</h4>', ('</div>', '<br/><br/>'), maxRes=1) if akas: akas = akas[0] if akas: akas = _unHtml(akas) if akas.find(' | ') != -1: akas = akas.split(' | ') else: akas = akas.split(' / ') if akas: r['akas'] = filter(None, [x.strip() for x in akas]) hs = _findBetween(s, "rel='image_src'", '>', maxRes=1) if not hs: hs = _findBetween(s, 'rel="image_src"', '>', maxRes=1) if not hs: hs = _findBetween(s, '<a name="headshot"', '</a>', maxRes=1) if hs: hsl = _findBetween(hs[0], "href='", "'", maxRes=1) if not hsl: hsl = _findBetween(hs[0], 'href="', '"', maxRes=1) if hsl and 'imdb-share-logo' not in hsl[0]: r['headshot'] = hsl[0] # Build a list of tuples such [('hrefLink', 'section name')] workkind = _findBetween(s, 'id="jumpto_', '</a>') ws = [] for work in workkind: sep = '" >' if '">' in work: sep = '">' wsplit = work.split(sep, 1) if len(wsplit) == 2: sect = wsplit[0] if '"' in sect: sect = sect[:sect.find('"')] ws.append((sect, wsplit[1].lower())) # XXX: I think "guest appearances" are gone. if s.find('<a href="#guest-appearances"') != -1: ws.append(('guest-appearances', 'notable tv guest appearances')) #if _parseChr: # ws.append(('filmography', 'filmography')) for sect, sectName in ws: raws = u'' if sectName == 'self': sect = 'Self' # Everything between the current section link and the end # of the <ol> tag. if _parseChr and sect == 'filmography': inisect = s.find('<div class="filmo">') else: inisect = s.find('<a name="%s' % sect) if inisect != -1: endsect = s[inisect:].find('<div id="filmo-head-') if endsect == -1: endsect = s[inisect:].find('<div class="article"') if endsect != -1: raws = s[inisect:inisect+endsect] #if not raws: continue mlist = _findBetween(raws, '<div class="filmo-row', ('<div class="clear"/>',)) for m in mlist: fCB = m.find('>') if fCB != -1: m = m[fCB+1:].lstrip() m = re_filmo_episodes.sub('', m) # For every movie in the current section. movieID = re_imdbID.findall(m) if not movieID: self._mobile_logger.debug('no movieID in %s', m) continue m = m.replace('<br/>', ' .... ', 1) if not _parseChr: chrIndx = m.find(' .... ') else: chrIndx = m.find(' Played by ') chids = [] if chrIndx != -1: chrtxt = m[chrIndx+6:] if _parseChr: chrtxt = chrtxt[5:] for ch in chrtxt.split(' / '): chid = re_imdbID.findall(ch) if not chid: chids.append(None) else: chids.append(chid[-1]) if not chids: chids = None elif len(chids) == 1: chids = chids[0] movieID = str(movieID[0]) # Search the status. stidx = m.find('<i>') status = u'' if stidx != -1: stendidx = m.rfind('</i>') if stendidx != -1: status = _unHtml(m[stidx+3:stendidx]) m = m.replace(m[stidx+3:stendidx], '') year = _findBetween(m, 'year_column">', '</span>', maxRes=1) if year: year = year[0] m = m.replace('<span class="year_column">%s</span>' % year, '') else: year = None m = _unHtml(m) if not m: self._mobile_logger.warn('no title for movieID %s', movieID) continue movie = build_movie(m, movieID=movieID, status=status, roleID=chids, modFunct=self._defModFunct, accessSystem=self.accessSystem, _parsingCharacter=_parseChr, year=year) sectName = sectName.split(':')[0] r.setdefault(sectName, []).append(movie) # If available, take the always correct name from a form. itag = _getTagsWith(s, 'NAME="primary"', maxRes=1) if not itag: itag = _getTagsWith(s, 'name="primary"', maxRes=1) if itag: vtag = _findBetween(itag[0], 'VALUE="', ('"', '>'), maxRes=1) if not vtag: vtag = _findBetween(itag[0], 'value="', ('"', '>'), maxRes=1) if vtag: try: vtag = unquote(str(vtag[0])) vtag = unicode(vtag, 'latin_1') r.update(analyze_name(vtag)) except UnicodeEncodeError: pass return {'data': r, 'info sets': ('main', 'filmography')} def get_person_biography(self, personID): cont = self._mretrieve(self.urls['person_main'] % personID + 'bio') d = {} spouses = _findBetween(cont, 'Spouse</h5>', ('</table>', '</dd>'), maxRes=1) if spouses: sl = [] for spouse in spouses[0].split('</tr>'): if spouse.count('</td>') > 1: spouse = spouse.replace('</td>', '::</td>', 1) spouse = _unHtml(spouse) spouse = spouse.replace(':: ', '::').strip() if spouse: sl.append(spouse) if sl: d['spouse'] = sl nnames = _findBetween(cont, '<h5>Nickname</h5>', ('<br/> <br/>','<h5>'), maxRes=1) if nnames: nnames = nnames[0] if nnames: nnames = [x.strip().replace(' (', '::(', 1) for x in nnames.split('<br/>')] if nnames: d['nick names'] = nnames misc_sects = _findBetween(cont, '<h5>', '<br/>') misc_sects[:] = [x.split('</h5>') for x in misc_sects] misc_sects[:] = [x for x in misc_sects if len(x) == 2] for sect, data in misc_sects: sect = sect.lower().replace(':', '').strip() if d.has_key(sect) and sect != 'mini biography': continue elif sect in ('spouse', 'nickname'): continue if sect == 'salary': sect = 'salary history' elif sect == 'where are they now': sect = 'where now' elif sect == 'personal quotes': sect = 'quotes' data = data.replace('</p><p>', '::') data = data.replace('<br><br>', ' ') # for multi-paragraphs 'bio' data = data.replace('</td> <td valign="top">', '@@@@') data = data.replace('</td> </tr>', '::') data = _unHtml(data) data = [x.strip() for x in data.split('::')] data[:] = [x.replace('@@@@', '::') for x in data if x] if sect == 'height' and data: data = data[0] elif sect == 'birth name': data = canonicalName(data[0]) elif sect == 'date of birth': date, notes = date_and_notes(data[0]) if date: d['birth date'] = date if notes: d['birth notes'] = notes continue elif sect == 'date of death': date, notes = date_and_notes(data[0]) if date: d['death date'] = date if notes: d['death notes'] = notes continue elif sect == 'mini biography': ndata = [] for bio in data: byidx = bio.rfind('IMDb Mini Biography By') if byidx != -1: bioAuth = bio[:byidx].rstrip() else: bioAuth = 'Anonymous' bio = u'%s::%s' % (bioAuth, bio[byidx+23:].lstrip()) ndata.append(bio) data[:] = ndata if 'mini biography' in d: d['mini biography'].append(ndata[0]) continue d[sect] = data return {'data': d} def _search_character(self, name, results): cont = subXMLRefs(self._get_search_content('ch', name, results)) name = _findBetween(cont, '<title>', '</title>', maxRes=1) res = [] if not name: self._mobile_logger.error('no title tag searching character %s', name) return res nl = name[0].lower() if not nl.startswith('find - imdb'): # a direct hit! name = _unHtml(name[0]).replace('(Character)', '').strip() pid = None pidtag = _getTagsWith(cont, 'rel="canonical"', maxRes=1) if pidtag: pid = _findBetween(pidtag[0], '/character/ch', '/', maxRes=1) if not (pid and name): self._mobile_logger.error('no direct hit name/characterID for' \ ' character %s', name) return res res[:] = [(str(pid[0]), analyze_name(name))] else: lis = _findBetween(cont, '<td class="result_text"', ['<small', '</td>', '<br']) for li in lis: li = '<%s' % li pid = re_imdbID.findall(li) pname = _unHtml(li) if not (pid and pname): self._mobile_logger.debug('no name/characterID' \ ' parsing %s searching for' \ ' character %s', li, name) continue res.append((str(pid[0]), analyze_name(pname))) return res def get_character_main(self, characterID): return self.get_person_main(characterID, _parseChr=True) def get_character_biography(self, characterID): cont = self._mretrieve(self.urls['character_main'] % characterID + 'bio') d = {} intro = _findBetween(cont, '<div class="display">', ('<span>', '<h4>'), maxRes=1) if intro: intro = _unHtml(intro[0]).strip() if intro: d['introduction'] = intro tocidx = cont.find('<table id="toc..') if tocidx != -1: cont = cont[tocidx:] bios = _findBetween(cont, '<h4>', ('<h4>', '</div>')) if bios: for bio in bios: bio = bio.replace('</h4>', '::') bio = bio.replace('\n', ' ') bio = bio.replace('<br>', '\n') bio = bio.replace('<br/>', '\n') bio = subSGMLRefs(re_unhtmlsub('', bio).strip()) bio = bio.replace(' ::', '::').replace(':: ', '::') bio = bio.replace('::', ': ', 1) if bio: d.setdefault('biography', []).append(bio) return {'data': d}
{ "content_hash": "7ec3aa8e5e4d712530464c04b1399519", "timestamp": "", "source": "github", "line_count": 838, "max_line_length": 81, "avg_line_length": 44.233890214797135, "alnum_prop": 0.4541113629006151, "repo_name": "logituit/Recbot", "id": "e7ab589d36f7b0d6db0293dba73fe5a8f6b00263", "size": "37068", "binary": false, "copies": "76", "ref": "refs/heads/master", "path": "IMDbPY-5.1.1/imdb/parser/mobile/__init__.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "53195" }, { "name": "C++", "bytes": "15806" }, { "name": "CSS", "bytes": "2156" }, { "name": "HTML", "bytes": "10260" }, { "name": "JavaScript", "bytes": "357948" }, { "name": "Python", "bytes": "2205513" }, { "name": "Shell", "bytes": "5118" } ], "symlink_target": "" }
""" Branching recursive sequence (~list or tuple like), except for downward & upward methods: .parent, .root, .map, and built-in support for recursion. History tracking (~paths), support for avoiding loops DirectoryTree: basic Tree, but __getitem__ populates .data via os.listdir Core class itself should be an ABC/interface. Then - a differently named 'hard' implementation should also be provided. """ import abc class Tree(object): __metaclass__ = abc.ABCMeta
{ "content_hash": "3f0a981a7f1d042b82ffb3d5b5371b49", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 73, "avg_line_length": 29.6875, "alnum_prop": 0.7410526315789474, "repo_name": "OaklandPeters/recursor", "id": "512297bfdc4d8cee3fd2ab8cef7be4caba44e929", "size": "475", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "recursor/tree/tree.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "36736" } ], "symlink_target": "" }
"""Test script for ftplib module.""" # Modified by Giampaolo Rodola' to test FTP class, IPv6 and TLS # environment import ftplib import asyncore import asynchat import socket import StringIO import errno import os try: import ssl except ImportError: ssl = None from unittest import TestCase from test import test_support from test.test_support import HOST threading = test_support.import_module('threading') # the dummy data returned by server over the data channel when # RETR, LIST and NLST commands are issued RETR_DATA = 'abcde12345\r\n' * 1000 LIST_DATA = 'foo\r\nbar\r\n' NLST_DATA = 'foo\r\nbar\r\n' class DummyDTPHandler(asynchat.async_chat): dtp_conn_closed = False def __init__(self, conn, baseclass): asynchat.async_chat.__init__(self, conn) self.baseclass = baseclass self.baseclass.last_received_data = '' def handle_read(self): self.baseclass.last_received_data += self.recv(1024) def handle_close(self): # XXX: this method can be called many times in a row for a single # connection, including in clear-text (non-TLS) mode. # (behaviour witnessed with test_data_connection) if not self.dtp_conn_closed: self.baseclass.push('226 transfer complete') self.close() self.dtp_conn_closed = True def handle_error(self): raise class DummyFTPHandler(asynchat.async_chat): dtp_handler = DummyDTPHandler def __init__(self, conn): asynchat.async_chat.__init__(self, conn) self.set_terminator("\r\n") self.in_buffer = [] self.dtp = None self.last_received_cmd = None self.last_received_data = '' self.next_response = '' self.rest = None self.push('220 welcome') def collect_incoming_data(self, data): self.in_buffer.append(data) def found_terminator(self): line = ''.join(self.in_buffer) self.in_buffer = [] if self.next_response: self.push(self.next_response) self.next_response = '' cmd = line.split(' ')[0].lower() self.last_received_cmd = cmd space = line.find(' ') if space != -1: arg = line[space + 1:] else: arg = "" if hasattr(self, 'cmd_' + cmd): method = getattr(self, 'cmd_' + cmd) method(arg) else: self.push('550 command "%s" not understood.' %cmd) def handle_error(self): raise def push(self, data): asynchat.async_chat.push(self, data + '\r\n') def cmd_port(self, arg): addr = map(int, arg.split(',')) ip = '%d.%d.%d.%d' %tuple(addr[:4]) port = (addr[4] * 256) + addr[5] s = socket.create_connection((ip, port), timeout=10) self.dtp = self.dtp_handler(s, baseclass=self) self.push('200 active data connection established') def cmd_pasv(self, arg): sock = socket.socket() sock.bind((self.socket.getsockname()[0], 0)) sock.listen(5) sock.settimeout(10) ip, port = sock.getsockname()[:2] ip = ip.replace('.', ',') p1, p2 = divmod(port, 256) self.push('227 entering passive mode (%s,%d,%d)' %(ip, p1, p2)) conn, addr = sock.accept() self.dtp = self.dtp_handler(conn, baseclass=self) def cmd_eprt(self, arg): af, ip, port = arg.split(arg[0])[1:-1] port = int(port) s = socket.create_connection((ip, port), timeout=10) self.dtp = self.dtp_handler(s, baseclass=self) self.push('200 active data connection established') def cmd_epsv(self, arg): sock = socket.socket(socket.AF_INET6) sock.bind((self.socket.getsockname()[0], 0)) sock.listen(5) sock.settimeout(10) port = sock.getsockname()[1] self.push('229 entering extended passive mode (|||%d|)' %port) conn, addr = sock.accept() self.dtp = self.dtp_handler(conn, baseclass=self) def cmd_echo(self, arg): # sends back the received string (used by the test suite) self.push(arg) def cmd_user(self, arg): self.push('331 username ok') def cmd_pass(self, arg): self.push('230 password ok') def cmd_acct(self, arg): self.push('230 acct ok') def cmd_rnfr(self, arg): self.push('350 rnfr ok') def cmd_rnto(self, arg): self.push('250 rnto ok') def cmd_dele(self, arg): self.push('250 dele ok') def cmd_cwd(self, arg): self.push('250 cwd ok') def cmd_size(self, arg): self.push('250 1000') def cmd_mkd(self, arg): self.push('257 "%s"' %arg) def cmd_rmd(self, arg): self.push('250 rmd ok') def cmd_pwd(self, arg): self.push('257 "pwd ok"') def cmd_type(self, arg): self.push('200 type ok') def cmd_quit(self, arg): self.push('221 quit ok') self.close() def cmd_stor(self, arg): self.push('125 stor ok') def cmd_rest(self, arg): self.rest = arg self.push('350 rest ok') def cmd_retr(self, arg): self.push('125 retr ok') if self.rest is not None: offset = int(self.rest) else: offset = 0 self.dtp.push(RETR_DATA[offset:]) self.dtp.close_when_done() self.rest = None def cmd_list(self, arg): self.push('125 list ok') self.dtp.push(LIST_DATA) self.dtp.close_when_done() def cmd_nlst(self, arg): self.push('125 nlst ok') self.dtp.push(NLST_DATA) self.dtp.close_when_done() class DummyFTPServer(asyncore.dispatcher, threading.Thread): handler = DummyFTPHandler def __init__(self, address, af=socket.AF_INET): threading.Thread.__init__(self) asyncore.dispatcher.__init__(self) self.create_socket(af, socket.SOCK_STREAM) self.bind(address) self.listen(5) self.active = False self.active_lock = threading.Lock() self.host, self.port = self.socket.getsockname()[:2] def start(self): assert not self.active self.__flag = threading.Event() threading.Thread.start(self) self.__flag.wait() def run(self): self.active = True self.__flag.set() while self.active and asyncore.socket_map: self.active_lock.acquire() asyncore.loop(timeout=0.1, count=1) self.active_lock.release() asyncore.close_all(ignore_all=True) def stop(self): assert self.active self.active = False self.join() def handle_accept(self): conn, addr = self.accept() self.handler = self.handler(conn) self.close() def handle_connect(self): self.close() handle_read = handle_connect def writable(self): return 0 def handle_error(self): raise if ssl is not None: CERTFILE = os.path.join(os.path.dirname(__file__), "keycert.pem") class SSLConnection(object, asyncore.dispatcher): """An asyncore.dispatcher subclass supporting TLS/SSL.""" _ssl_accepting = False _ssl_closing = False def secure_connection(self): self.socket = ssl.wrap_socket(self.socket, suppress_ragged_eofs=False, certfile=CERTFILE, server_side=True, do_handshake_on_connect=False, ssl_version=ssl.PROTOCOL_SSLv23) self._ssl_accepting = True def _do_ssl_handshake(self): try: self.socket.do_handshake() except ssl.SSLError, err: if err.args[0] in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE): return elif err.args[0] == ssl.SSL_ERROR_EOF: return self.handle_close() raise except socket.error, err: if err.args[0] == errno.ECONNABORTED: return self.handle_close() else: self._ssl_accepting = False def _do_ssl_shutdown(self): self._ssl_closing = True try: self.socket = self.socket.unwrap() except ssl.SSLError, err: if err.args[0] in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE): return except socket.error, err: # Any "socket error" corresponds to a SSL_ERROR_SYSCALL return # from OpenSSL's SSL_shutdown(), corresponding to a # closed socket condition. See also: # http://www.mail-archive.com/openssl-users@openssl.org/msg60710.html pass self._ssl_closing = False super(SSLConnection, self).close() def handle_read_event(self): if self._ssl_accepting: self._do_ssl_handshake() elif self._ssl_closing: self._do_ssl_shutdown() else: super(SSLConnection, self).handle_read_event() def handle_write_event(self): if self._ssl_accepting: self._do_ssl_handshake() elif self._ssl_closing: self._do_ssl_shutdown() else: super(SSLConnection, self).handle_write_event() def send(self, data): try: return super(SSLConnection, self).send(data) except ssl.SSLError, err: if err.args[0] in (ssl.SSL_ERROR_EOF, ssl.SSL_ERROR_ZERO_RETURN, ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE): return 0 raise def recv(self, buffer_size): try: return super(SSLConnection, self).recv(buffer_size) except ssl.SSLError, err: if err.args[0] in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE): return '' if err.args[0] in (ssl.SSL_ERROR_EOF, ssl.SSL_ERROR_ZERO_RETURN): self.handle_close() return '' raise def handle_error(self): raise def close(self): if (isinstance(self.socket, ssl.SSLSocket) and self.socket._sslobj is not None): self._do_ssl_shutdown() class DummyTLS_DTPHandler(SSLConnection, DummyDTPHandler): """A DummyDTPHandler subclass supporting TLS/SSL.""" def __init__(self, conn, baseclass): DummyDTPHandler.__init__(self, conn, baseclass) if self.baseclass.secure_data_channel: self.secure_connection() class DummyTLS_FTPHandler(SSLConnection, DummyFTPHandler): """A DummyFTPHandler subclass supporting TLS/SSL.""" dtp_handler = DummyTLS_DTPHandler def __init__(self, conn): DummyFTPHandler.__init__(self, conn) self.secure_data_channel = False def cmd_auth(self, line): """Set up secure control channel.""" self.push('234 AUTH TLS successful') self.secure_connection() def cmd_pbsz(self, line): """Negotiate size of buffer for secure data transfer. For TLS/SSL the only valid value for the parameter is '0'. Any other value is accepted but ignored. """ self.push('200 PBSZ=0 successful.') def cmd_prot(self, line): """Setup un/secure data channel.""" arg = line.upper() if arg == 'C': self.push('200 Protection set to Clear') self.secure_data_channel = False elif arg == 'P': self.push('200 Protection set to Private') self.secure_data_channel = True else: self.push("502 Unrecognized PROT type (use C or P).") class DummyTLS_FTPServer(DummyFTPServer): handler = DummyTLS_FTPHandler class TestFTPClass(TestCase): def setUp(self): self.server = DummyFTPServer((HOST, 0)) self.server.start() self.client = ftplib.FTP(timeout=10) self.client.connect(self.server.host, self.server.port) def tearDown(self): self.client.close() self.server.stop() def test_getwelcome(self): self.assertEqual(self.client.getwelcome(), '220 welcome') def test_sanitize(self): self.assertEqual(self.client.sanitize('foo'), repr('foo')) self.assertEqual(self.client.sanitize('pass 12345'), repr('pass *****')) self.assertEqual(self.client.sanitize('PASS 12345'), repr('PASS *****')) def test_exceptions(self): self.assertRaises(ftplib.error_temp, self.client.sendcmd, 'echo 400') self.assertRaises(ftplib.error_temp, self.client.sendcmd, 'echo 499') self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'echo 500') self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'echo 599') self.assertRaises(ftplib.error_proto, self.client.sendcmd, 'echo 999') def test_all_errors(self): exceptions = (ftplib.error_reply, ftplib.error_temp, ftplib.error_perm, ftplib.error_proto, ftplib.Error, IOError, EOFError) for x in exceptions: try: raise x('exception not included in all_errors set') except ftplib.all_errors: pass def test_set_pasv(self): # passive mode is supposed to be enabled by default self.assertTrue(self.client.passiveserver) self.client.set_pasv(True) self.assertTrue(self.client.passiveserver) self.client.set_pasv(False) self.assertFalse(self.client.passiveserver) def test_voidcmd(self): self.client.voidcmd('echo 200') self.client.voidcmd('echo 299') self.assertRaises(ftplib.error_reply, self.client.voidcmd, 'echo 199') self.assertRaises(ftplib.error_reply, self.client.voidcmd, 'echo 300') def test_login(self): self.client.login() def test_acct(self): self.client.acct('passwd') def test_rename(self): self.client.rename('a', 'b') self.server.handler.next_response = '200' self.assertRaises(ftplib.error_reply, self.client.rename, 'a', 'b') def test_delete(self): self.client.delete('foo') self.server.handler.next_response = '199' self.assertRaises(ftplib.error_reply, self.client.delete, 'foo') def test_size(self): self.client.size('foo') def test_mkd(self): dir = self.client.mkd('/foo') self.assertEqual(dir, '/foo') def test_rmd(self): self.client.rmd('foo') def test_pwd(self): dir = self.client.pwd() self.assertEqual(dir, 'pwd ok') def test_quit(self): self.assertEqual(self.client.quit(), '221 quit ok') # Ensure the connection gets closed; sock attribute should be None self.assertEqual(self.client.sock, None) def test_retrbinary(self): received = [] self.client.retrbinary('retr', received.append) self.assertEqual(''.join(received), RETR_DATA) def test_retrbinary_rest(self): for rest in (0, 10, 20): received = [] self.client.retrbinary('retr', received.append, rest=rest) self.assertEqual(''.join(received), RETR_DATA[rest:], msg='rest test case %d %d %d' % (rest, len(''.join(received)), len(RETR_DATA[rest:]))) def test_retrlines(self): received = [] self.client.retrlines('retr', received.append) self.assertEqual(''.join(received), RETR_DATA.replace('\r\n', '')) def test_storbinary(self): f = StringIO.StringIO(RETR_DATA) self.client.storbinary('stor', f) self.assertEqual(self.server.handler.last_received_data, RETR_DATA) # test new callback arg flag = [] f.seek(0) self.client.storbinary('stor', f, callback=lambda x: flag.append(None)) self.assertTrue(flag) def test_storbinary_rest(self): f = StringIO.StringIO(RETR_DATA) for r in (30, '30'): f.seek(0) self.client.storbinary('stor', f, rest=r) self.assertEqual(self.server.handler.rest, str(r)) def test_storlines(self): f = StringIO.StringIO(RETR_DATA.replace('\r\n', '\n')) self.client.storlines('stor', f) self.assertEqual(self.server.handler.last_received_data, RETR_DATA) # test new callback arg flag = [] f.seek(0) self.client.storlines('stor foo', f, callback=lambda x: flag.append(None)) self.assertTrue(flag) def test_nlst(self): self.client.nlst() self.assertEqual(self.client.nlst(), NLST_DATA.split('\r\n')[:-1]) def test_dir(self): l = [] self.client.dir(lambda x: l.append(x)) self.assertEqual(''.join(l), LIST_DATA.replace('\r\n', '')) def test_makeport(self): self.client.makeport() # IPv4 is in use, just make sure send_eprt has not been used self.assertEqual(self.server.handler.last_received_cmd, 'port') def test_makepasv(self): host, port = self.client.makepasv() conn = socket.create_connection((host, port), 10) conn.close() # IPv4 is in use, just make sure send_epsv has not been used self.assertEqual(self.server.handler.last_received_cmd, 'pasv') class TestIPv6Environment(TestCase): def setUp(self): self.server = DummyFTPServer((HOST, 0), af=socket.AF_INET6) self.server.start() self.client = ftplib.FTP() self.client.connect(self.server.host, self.server.port) def tearDown(self): self.client.close() self.server.stop() def test_af(self): self.assertEqual(self.client.af, socket.AF_INET6) def test_makeport(self): self.client.makeport() self.assertEqual(self.server.handler.last_received_cmd, 'eprt') def test_makepasv(self): host, port = self.client.makepasv() conn = socket.create_connection((host, port), 10) conn.close() self.assertEqual(self.server.handler.last_received_cmd, 'epsv') def test_transfer(self): def retr(): received = [] self.client.retrbinary('retr', received.append) self.assertEqual(''.join(received), RETR_DATA) self.client.set_pasv(True) retr() self.client.set_pasv(False) retr() class TestTLS_FTPClassMixin(TestFTPClass): """Repeat TestFTPClass tests starting the TLS layer for both control and data connections first. """ def setUp(self): self.server = DummyTLS_FTPServer((HOST, 0)) self.server.start() self.client = ftplib.FTP_TLS(timeout=10) self.client.connect(self.server.host, self.server.port) # enable TLS self.client.auth() self.client.prot_p() class TestTLS_FTPClass(TestCase): """Specific TLS_FTP class tests.""" def setUp(self): self.server = DummyTLS_FTPServer((HOST, 0)) self.server.start() self.client = ftplib.FTP_TLS(timeout=10) self.client.connect(self.server.host, self.server.port) def tearDown(self): self.client.close() self.server.stop() def test_control_connection(self): self.assertNotIsInstance(self.client.sock, ssl.SSLSocket) self.client.auth() self.assertIsInstance(self.client.sock, ssl.SSLSocket) def test_data_connection(self): # clear text sock = self.client.transfercmd('list') self.assertNotIsInstance(sock, ssl.SSLSocket) sock.close() self.assertEqual(self.client.voidresp(), "226 transfer complete") # secured, after PROT P self.client.prot_p() sock = self.client.transfercmd('list') self.assertIsInstance(sock, ssl.SSLSocket) sock.close() self.assertEqual(self.client.voidresp(), "226 transfer complete") # PROT C is issued, the connection must be in cleartext again self.client.prot_c() sock = self.client.transfercmd('list') self.assertNotIsInstance(sock, ssl.SSLSocket) sock.close() self.assertEqual(self.client.voidresp(), "226 transfer complete") def test_login(self): # login() is supposed to implicitly secure the control connection self.assertNotIsInstance(self.client.sock, ssl.SSLSocket) self.client.login() self.assertIsInstance(self.client.sock, ssl.SSLSocket) # make sure that AUTH TLS doesn't get issued again self.client.login() def test_auth_issued_twice(self): self.client.auth() self.assertRaises(ValueError, self.client.auth) def test_auth_ssl(self): try: self.client.ssl_version = ssl.PROTOCOL_SSLv3 self.client.auth() self.assertRaises(ValueError, self.client.auth) finally: self.client.ssl_version = ssl.PROTOCOL_TLSv1 class TestTimeouts(TestCase): def setUp(self): self.evt = threading.Event() self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.settimeout(3) self.port = test_support.bind_port(self.sock) threading.Thread(target=self.server, args=(self.evt,self.sock)).start() # Wait for the server to be ready. self.evt.wait() self.evt.clear() ftplib.FTP.port = self.port def tearDown(self): self.evt.wait() def server(self, evt, serv): # This method sets the evt 3 times: # 1) when the connection is ready to be accepted. # 2) when it is safe for the caller to close the connection # 3) when we have closed the socket serv.listen(5) # (1) Signal the caller that we are ready to accept the connection. evt.set() try: conn, addr = serv.accept() except socket.timeout: pass else: conn.send("1 Hola mundo\n") # (2) Signal the caller that it is safe to close the socket. evt.set() conn.close() finally: serv.close() # (3) Signal the caller that we are done. evt.set() def testTimeoutDefault(self): # default -- use global socket timeout self.assertTrue(socket.getdefaulttimeout() is None) socket.setdefaulttimeout(30) try: ftp = ftplib.FTP("localhost") finally: socket.setdefaulttimeout(None) self.assertEqual(ftp.sock.gettimeout(), 30) self.evt.wait() ftp.close() def testTimeoutNone(self): # no timeout -- do not use global socket timeout self.assertTrue(socket.getdefaulttimeout() is None) socket.setdefaulttimeout(30) try: ftp = ftplib.FTP("localhost", timeout=None) finally: socket.setdefaulttimeout(None) self.assertTrue(ftp.sock.gettimeout() is None) self.evt.wait() ftp.close() def testTimeoutValue(self): # a value ftp = ftplib.FTP(HOST, timeout=30) self.assertEqual(ftp.sock.gettimeout(), 30) self.evt.wait() ftp.close() def testTimeoutConnect(self): ftp = ftplib.FTP() ftp.connect(HOST, timeout=30) self.assertEqual(ftp.sock.gettimeout(), 30) self.evt.wait() ftp.close() def testTimeoutDifferentOrder(self): ftp = ftplib.FTP(timeout=30) ftp.connect(HOST) self.assertEqual(ftp.sock.gettimeout(), 30) self.evt.wait() ftp.close() def testTimeoutDirectAccess(self): ftp = ftplib.FTP() ftp.timeout = 30 ftp.connect(HOST) self.assertEqual(ftp.sock.gettimeout(), 30) self.evt.wait() ftp.close() def test_main(): tests = [TestFTPClass, TestTimeouts] if socket.has_ipv6: try: DummyFTPServer((HOST, 0), af=socket.AF_INET6) except socket.error: pass else: tests.append(TestIPv6Environment) if ssl is not None: tests.extend([TestTLS_FTPClassMixin, TestTLS_FTPClass]) thread_info = test_support.threading_setup() try: test_support.run_unittest(*tests) finally: test_support.threading_cleanup(*thread_info) if __name__ == '__main__': test_main()
{ "content_hash": "5452ca8b1ef788daf103a814eae3e846", "timestamp": "", "source": "github", "line_count": 778, "max_line_length": 85, "avg_line_length": 33.17737789203085, "alnum_prop": 0.5590423059042305, "repo_name": "ktan2020/legacy-automation", "id": "99d34e565c0d3cf2244fa016a7c938116033eebf", "size": "25812", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "win/Lib/test/test_ftplib.py", "mode": "33261", "license": "mit", "language": [ { "name": "ActionScript", "bytes": "913" }, { "name": "Ada", "bytes": "289" }, { "name": "Assembly", "bytes": "687" }, { "name": "Boo", "bytes": "540" }, { "name": "C", "bytes": "40116" }, { "name": "C#", "bytes": "474" }, { "name": "C++", "bytes": "393" }, { "name": "CSS", "bytes": "70883" }, { "name": "ColdFusion", "bytes": "1012" }, { "name": "Common Lisp", "bytes": "1034" }, { "name": "D", "bytes": "1858" }, { "name": "Eiffel", "bytes": "426" }, { "name": "Erlang", "bytes": "9243" }, { "name": "FORTRAN", "bytes": "1810" }, { "name": "Forth", "bytes": "182" }, { "name": "Groovy", "bytes": "2366" }, { "name": "Haskell", "bytes": "816" }, { "name": "Haxe", "bytes": "455" }, { "name": "Java", "bytes": "1155" }, { "name": "JavaScript", "bytes": "69444" }, { "name": "Lua", "bytes": "795" }, { "name": "Matlab", "bytes": "1278" }, { "name": "OCaml", "bytes": "350" }, { "name": "Objective-C++", "bytes": "885" }, { "name": "PHP", "bytes": "1411" }, { "name": "Pascal", "bytes": "388" }, { "name": "Perl", "bytes": "252651" }, { "name": "Pike", "bytes": "589" }, { "name": "Python", "bytes": "42085780" }, { "name": "R", "bytes": "1156" }, { "name": "Ruby", "bytes": "480" }, { "name": "Scheme", "bytes": "282" }, { "name": "Shell", "bytes": "30518" }, { "name": "Smalltalk", "bytes": "926" }, { "name": "Squirrel", "bytes": "697" }, { "name": "Stata", "bytes": "302" }, { "name": "SystemVerilog", "bytes": "3145" }, { "name": "Tcl", "bytes": "1039" }, { "name": "TeX", "bytes": "1746" }, { "name": "VHDL", "bytes": "985" }, { "name": "Vala", "bytes": "664" }, { "name": "Verilog", "bytes": "439" }, { "name": "Visual Basic", "bytes": "2142" }, { "name": "XSLT", "bytes": "152770" }, { "name": "ooc", "bytes": "890" }, { "name": "xBase", "bytes": "769" } ], "symlink_target": "" }
# Copyright 2015 Dejan D. M. Milosavljevic # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import platform import nucleotide import nucleotide.component import nucleotide.component.function def _component_linux_gcc_atom_module_opencv_CPPDEFINES( P_data ): Ir_list = ["OPENCV"] return Ir_list def _component_linux_gcc_atom_module_opencv_CPPPATH( P_data ): Ir_list = ['/usr/local/include/opencv', '/usr/local/include/opencv2'] return Ir_list def _component_linux_gcc_atom_module_opencv_LINKFLAGS( P_data ): Ir_list = [] return Ir_list def _component_linux_gcc_atom_module_opencv_LIBPATH( P_data ): Ir_list = [ "/usr/local/lib/", "/usr/lib" ] return Ir_list def _component_linux_gcc_atom_module_opencv_LIBS( P_data ): Ir_list = [ "libopencv_core.dll", "opencv_highgui.dll", "opencv_imgproc.dll", "opencv_videoio.dll", "opencv_imgproc.dll" ] return Ir_list atom_linux_configuration = { 'platform' : { 'host' : 'Linux', 'guest' : 'Linux' }, 'cc' : { 'vendor' : 'FSF', 'name' : 'gcc', 'version': 'X' }, 'config' : { 'CPPDEFINES' : _component_linux_gcc_atom_module_opencv_CPPDEFINES, 'CPPPATH' : _component_linux_gcc_atom_module_opencv_CPPPATH, 'LINKFLAGS' : _component_linux_gcc_atom_module_opencv_LINKFLAGS, 'LIBPATH' : _component_linux_gcc_atom_module_opencv_LIBPATH, 'LIBS' : _component_linux_gcc_atom_module_opencv_LIBS, }, 'name' :'package', 'class': [ 'package::opencv', 'linux:package:opencv' ] } class Configuration: def __init__(self): pass @staticmethod def extend( P_option ): nucleotide.component.function.extend( P_option, 'A:linux:compiler:configuration', atom_linux_configuration ) atom_linux_configuration['platform']['host']='X' nucleotide.component.function.extend( P_option, 'x:linux:compiler:configuration', atom_linux_configuration ) atom_linux_configuration['platform']['guest']='X' nucleotide.component.function.extend( P_option, 'y:linux:compiler:configuration', atom_linux_configuration ) @staticmethod def check(): pass
{ "content_hash": "876bfbe306f816f519351bbc7c784230", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 131, "avg_line_length": 34.45783132530121, "alnum_prop": 0.6339160839160839, "repo_name": "dmilos/nucleotide", "id": "7f707e87bf67e0b3f1e1308e962329d3b09c7b68", "size": "2884", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/nucleotide/component/linux/gcc/atom/module/opencv.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "3225" }, { "name": "C++", "bytes": "7281" }, { "name": "Python", "bytes": "250438" }, { "name": "Shell", "bytes": "973" } ], "symlink_target": "" }
from PyObjCTools.TestSupport import * from Quartz.CoreGraphics import * try: unicode except NameError: unicode = str try: long except NameError: long = int class TestCGWindow (TestCase): def testConstants(self): self.assertEqual(kCGWindowIDCFNumberType, kCFNumberSInt32Type) self.assertEqual(kCGNullWindowID, 0) self.assertEqual(kCGWindowSharingNone, 0) self.assertEqual(kCGWindowSharingReadOnly, 1) self.assertEqual(kCGWindowSharingReadWrite, 2) self.assertEqual(kCGBackingStoreRetained, 0) self.assertEqual(kCGBackingStoreNonretained, 1) self.assertEqual(kCGBackingStoreBuffered, 2) self.assertIsInstance(kCGWindowNumber, unicode) self.assertIsInstance(kCGWindowStoreType, unicode) self.assertIsInstance(kCGWindowLayer, unicode) self.assertIsInstance(kCGWindowBounds, unicode) self.assertIsInstance(kCGWindowSharingState, unicode) self.assertIsInstance(kCGWindowAlpha, unicode) self.assertIsInstance(kCGWindowOwnerPID, unicode) self.assertIsInstance(kCGWindowMemoryUsage, unicode) self.assertIsInstance(kCGWindowWorkspace, unicode) self.assertIsInstance(kCGWindowOwnerName, unicode) self.assertIsInstance(kCGWindowName, unicode) self.assertIsInstance(kCGWindowIsOnscreen, unicode) self.assertIsInstance(kCGWindowBackingLocationVideoMemory, unicode) self.assertEqual(kCGWindowListOptionAll, 0) self.assertEqual(kCGWindowListOptionOnScreenOnly, (1 << 0)) self.assertEqual(kCGWindowListOptionOnScreenAboveWindow, (1 << 1)) self.assertEqual(kCGWindowListOptionOnScreenBelowWindow, (1 << 2)) self.assertEqual(kCGWindowListOptionIncludingWindow, (1 << 3)) self.assertEqual(kCGWindowListExcludeDesktopElements, (1 << 4)) self.assertEqual(kCGWindowImageDefault, 0) self.assertEqual(kCGWindowImageBoundsIgnoreFraming, (1 << 0)) self.assertEqual(kCGWindowImageShouldBeOpaque, (1 << 1)) self.assertEqual(kCGWindowImageOnlyShadows, (1 << 2)) def testFunctions(self): self.assertResultIsCFRetained(CGWindowListCopyWindowInfo) v = CGWindowListCopyWindowInfo(0, 0) self.assertIsInstance(v, CFArrayRef) self.assertTrue(len(v) > 0) self.assertIsInstance(v[0], CFDictionaryRef) v = CGWindowListCreate(0, 0) self.assertIsInstance(v, tuple) self.assertTrue(len(v) > 0) self.assertIsInstance(v[0], (int, long)) aWindowID = v[0] windowArray = v v = CGWindowListCreateDescriptionFromArray(v) self.assertIsInstance(v, CFArrayRef) self.assertTrue(len(v) > 0) self.assertIsInstance(v[0], CFDictionaryRef) self.assertResultIsCFRetained(CGWindowListCreateImage) v = CGWindowListCreateImage(((0, 0), (100, 100)), aWindowID, 0, 0) self.assertIsInstance(v, CGImageRef) v = CGWindowListCreateImageFromArray(((0, 0), (100, 100)), windowArray, 0) self.assertIsInstance(v, CGImageRef) if __name__ == "__main__": main()
{ "content_hash": "8ec2ad16ac50889437f930c95bc3ef05", "timestamp": "", "source": "github", "line_count": 81, "max_line_length": 82, "avg_line_length": 38.72839506172839, "alnum_prop": 0.7054510678992668, "repo_name": "albertz/music-player", "id": "3182ceddfab4296d2136aae519446c2135778040", "size": "3138", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "mac/pyobjc-framework-Quartz/PyObjCTest/test_cgwindow.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Assembly", "bytes": "47481" }, { "name": "C", "bytes": "435926" }, { "name": "C++", "bytes": "149133" }, { "name": "CSS", "bytes": "16435" }, { "name": "HTML", "bytes": "914432" }, { "name": "JavaScript", "bytes": "52869" }, { "name": "M", "bytes": "10808" }, { "name": "Makefile", "bytes": "13304" }, { "name": "Mathematica", "bytes": "61418" }, { "name": "Objective-C", "bytes": "2082720" }, { "name": "Objective-C++", "bytes": "62427" }, { "name": "PostScript", "bytes": "2783" }, { "name": "Prolog", "bytes": "217" }, { "name": "Python", "bytes": "7789845" }, { "name": "QMake", "bytes": "9667" }, { "name": "Roff", "bytes": "8329" }, { "name": "Shell", "bytes": "3521" } ], "symlink_target": "" }
import time import math import random #setup for scooter unoPort = "COM6" i01 = Runtime.createAndStart("i01", "InMoov") # parallax propeller controller serial = Runtime.start("serial","Serial") serial.connect("COM3",115200, 8, 1, 0) #i01.setMute(True) # create parts rather than start them so that they may be customized before starting head = i01.createPeer("head") # i01.createPeer("head") - but we want a referenced handle on them so we can customize parameters jaw = head.createPeer("jaw") eyeX = head.createPeer("eyeX") #removed servo motor eyeY = head.createPeer("eyeY") #removed servo motor rothead = head.createPeer("rothead") neck = head.createPeer("neck") uno = head.createPeer("arduino") uno.connect(unoPort) print "pir is working" readDigitalPin = 4 uno.addListener("publishPin", "python", "input") uno.setSampleRate(4000) #i01.startPIR("COM6",4) #moved to later in code # used to be custom pins for uno board to be used to control head now using mega bd. jaw.setPin(26) eyeX.setPin(22) #not hooked up yet eyeY.setPin(24) #has servo but not connected rothead.setPin(13) neck.setPin(12) # i01.startHead(unoPort, "uno") i01.startHead(unoPort, "mega") i01.startMouthControl(unoPort) i01.startMouth() #i01.mouth.setLanguage("en") #i01.mouth.setGoogleURI("http://thehackettfamily.org/Voice_api/api2.php?voice=Ella&txt=") i01.mouth.setLanguage("au") i01.mouth.setGoogleURI("http://thehackettfamily.org/Voice_api/api2.php?voice=Liam&txt=") i01.head.eyeY.setMinMax(25,85) i01.head.eyeX.setMinMax(10,60) i01.head.neck.setMinMax(50,170) i01.head.rothead.setMinMax(10,170) i01.head.eyeY.setRest(80) i01.head.eyeX.setRest(40) i01.head.neck.setRest(110) i01.head.rothead.setRest(90) jaw.setMinMax(50, 110) i01.head.jaw.map(0,180,110,50) i01.mouthControl.setmouth(50,110) print("opencv") opencv = i01.startOpenCV() opencv.capture() #sleep(10) #tracker = Runtime.createAndStart("tracker", "Tracking") #ni = Runtime.createAndStart("ni", "OpenNI") #ni.startUserTracking() #3 for microsoft bluetooth joystick joystickId = 2 #speed for forward motors can be changed with button on xbox # reverse and turns are fixed at slower speed speed = 25 serdata = "" # I/R sensors data not great more like on/off distcen = 0 distright = 0 distleft = 0 # ping sensors left and right info very good distpingright = 0 distpingleft = 0 # battery level volts = 0 # added so i can limit scotter from waking up to soon as in any movement timespir = 0 # xbox joystick thanks to kwaters uberjoy = Runtime.createAndStart("uberjoy", "Joystick") uberjoy.setController(joystickId) uberjoy.startPolling() i01.head.neck.setSpeedControlOnUC(False) i01.head.rothead.setSpeedControlOnUC(False) i01.head.eyeY.setSpeedControlOnUC(False) i01.head.eyeX.setSpeedControlOnUC(False) def StickRYListener(value): absValue = math.fabs(value) if (absValue < 0.222): # print "Stop sweep neck up down" i01.head.neck.stop() return absValue = absValue-0.01 i01.head.neck.setSpeed(absValue) delay = int((1-absValue) * 200) if (value > 0.0): if (i01.head.neck.isSweeping()): i01.head.neck.setSweeperDelay(delay) else: i01.head.neck.sweep(i01.head.neck.pos, i01.head.neck.max, delay, 1, True) else: if (i01.head.neck.isSweeping()): i01.head.neck.setSweeperDelay(delay) else: i01.head.neck.sweep(i01.head.neck.min, i01.head.neck.pos, delay, -1, True) def StickRXListener(value): absValue = math.fabs(value) if (absValue < 0.222): # print "Stop sweep head rotate" i01.head.rothead.stop() return absValue = absValue-0.01 i01.head.rothead.setSpeed(absValue) delay = int((1-absValue) * 200) if (value > 0.0): if (i01.head.rothead.isSweeping()): i01.head.rothead.setSweeperDelay(delay) else: i01.head.rothead.sweep(i01.head.rothead.pos, i01.head.rothead.max, delay, 1, True) else: if (i01.head.rothead.isSweeping()): i01.head.rothead.setSweeperDelay(delay) else: i01.head.rothead.sweep(i01.head.rothead.min, i01.head.rothead.pos, delay, -1, True) def StickeyeYListener(value): absValue = math.fabs(value) if (absValue < 0.222): # print "Stop sweep eye Y" i01.head.eyeY.stop() return absValue = absValue-0.01 i01.head.eyeY.setSpeed(absValue) delay = int((1-absValue) * 200) if (value > 0.0): if (i01.head.eyeY.isSweeping()): i01.head.eyeY.setSweeperDelay(delay) else: i01.head.eyeY.sweep(i01.head.eyeY.pos, i01.head.eyeY.max, delay, 1, True) else: if (i01.head.eyeY.isSweeping()): i01.head.eyeY.setSweeperDelay(delay) else: i01.head.eyeY.sweep(i01.head.eyeY.min, i01.head.eyeY.pos, delay, -1, True) def StickeyeXListener(value): absValue = math.fabs(value) if (absValue < 0.222): # print "Stop sweep eye X" i01.head.eyeX.stop() return absValue = absValue-0.01 i01.head.eyeX.setSpeed(absValue) delay = int((1-absValue) * 200) if (value > 0.0): if (i01.head.eyeX.isSweeping()): i01.head.eyeX.setSweeperDelay(delay) else: i01.head.eyeX.sweep(i01.head.eyeX.pos, i01.head.eyeX.max, delay, 1, True) else: if (i01.head.eyeX.isSweeping()): i01.head.eyeX.setSweeperDelay(delay) else: i01.head.eyeX.sweep(i01.head.eyeX.min, i01.head.eyeX.pos, delay, -1, True) #set speed value def ZButtonListener(value): global speed absValue = math.fabs(value) print "Z raw value = " + str(value) if (value > 0.95): serial.write("HDLT 20\r") #set lights if speed >19: speed = 10 #set to slow for running into base print "speed=12" i01.mouth.speak("speed slow") sleep(2) else: speed = 25 print "speed= 25" i01.mouth.speak("speed normal") sleep(10) if (value < -0.95): serial.write("HDLT 00\r") speed = 35 print "speed= 35" i01.mouth.speak("speed fast") sleep(2) # lights in head 01=left face, 02=head-nose, 04=right face # 08=right eye, 10=kinect, 20=base, 40=red vertical, 00=all off # Attach the joystick to the inmoov service servos only activate when the value is 1.0 def AButtonListener(value): print("backward") if value == 1.0: serial.write("HDLT 40\r") #lights i01.setHeadSpeed(0.80, 0.80) i01.moveHead(120,88) #move head in direction of turn sleep(.5) serial.write("GO D0 D0\r") #turn move wheels if value == 0.0: sstop() def XButtonListener(value): print( "go lft") if value == 1.0: serial.write("HDLT 01\r") i01.setHeadSpeed(0.80, 0.80) i01.moveHead(90,140) sleep(.5) serial.write("GO E0 20\r") if value == 0.0: sstop() def BButtonListener(value): print("go rgt") if value == 1.0: serial.write("HDLT 04\r") i01.setHeadSpeed(0.80, 0.80) i01.moveHead(90,30) sleep(.5) serial.write("GO 20 E0\r") if value == 0.0: sstop() def fforward(): global speed getping() serial.write("HDLT 05\r") print("fwd") i01.setHeadSpeed(0.80, 0.80) i01.moveHead(60,88) # irsensors() REMOVED 1-12 if((distpingright >12) and (distpingleft >12)): serial.write("GOSPD " + str(speed) + " " + str(speed) + "\r") else: sstop() def sstop(): print("stop") serial.write("STOP 0\r") serial.write("HDLT 02\r") def YButtonListener(value): global speed if value == 1.0: fforward() if value == 0.0: sstop() def blights(data): # lights in head 01=left face, 02=head-nose, 04=right face # 08=right eye, 10=kinect, 20=base, 40=red vertical, 00=all off for i in range (0,data): serial.write("HDLT 07\r") time.sleep(.5) serial.write("HDLT 10\r") time.sleep(.5) serial.write("HDLT 20\r") time.sleep(.5) serial.write("HDLT 00\r") def RButtonListener(value): if value == 1.0: print("rgt-but") x = (random.randint(1, 4)) if x == 1: serial.write("HDLT 20\r") i01.mouth.speak("i'm looking for a way out of here") if x == 2: i01.mouth.speak("scooter is lost") if x == 3: serial.write("HDLT 40\r") i01.mouth.speak("scooter is happy") if x == 4: i01.mouth.speak("i'm hungery") blights(2) serial.write("HDLT 02\r") def LButtonListener(value): if value == 1.0: print("lft-but scooter at rest") # i01.mouth.speak("Scooter is taking a break") i01.powerDown() serial.write("HDLT 40\r") #red leds on showing at rest sleep(60) #let stuff become calm so does not retrigger uno.digitalReadPollingStart(readDigitalPin) def DirPadListener(value): if value == 1.0: print("auto range mode") serial.write("HDLT 10\r") i01.mouth.speak("automatic wondering") #want to spin around scooter and take measurements with ping sensors #determine FIRST longest distance and move in that direction #drive there checking ir sensors #will start out by turning 30 degrees or 12 measurements fwddist = 0 #distance to move distrgtold = 0 #track last mesasurement distlftold = 0 scootangle = 0 #need to remember where scooter is pointed global distpingright global distpingleft global distcen global distright global distleft lightpin = 1 i01.setHeadSpeed(0.70, 0.70) i01.moveHead(50,80) #move head in direction of travel for y in range(0,11): #do 12 measurements serial.write("HDLT " + str(lightpin) + "\r") #shift bit for next light each bit runs one light lightpin = lightpin << 1 print("find range position = " + str(y)) getping() #get ping distance # need to save the longest distance and angle if (distrgtold < distpingright): distrgtold = distpingright scootangle = y if (distlftold < distpingleft): distlftold = distpingleft scootangle = y print("turn rping=" + str(distpingright) + "in lping=" + str(distpingleft) + "in " + str(scootangle)) serial.write("TURN 30 25\r") #turn 30 deg at a speed of 25 # next line delay needed! sleep(.5) # have distance which is the best now need to turn there i01.moveHead(60,50) #move head in direction of travel serial.write("HDLT 07\r") #lights in head on print( "TURN=" + str(scootangle * 30)) i01.mouth.speak( "TURN TO " + str(scootangle * 30)) serial.write("TURN " + str(scootangle * 30) + " 25\r") #turn to new angle at a speed of 25 sleep(3) # move forward longest distance right or left if (distrgtold > distlftold): fwddist = distrgtold else: fwddist = distlftold i01.mouth.speak( "distance is " + str(fwddist)) getping() fwddist = fwddist - 12 print("forward distance = " + str(fwddist) ) # rothead neck eyex eyey jaw i01.moveHead(100,70) serial.write("ACC 200\r") if((distpingright >12) and (distpingleft >12)): #check for safe distance just incase # now let move forward but watch the ir sensors serial.write("trvl " + str(fwddist) + " 25\r") for fdwsteps in range(0,(fwddist / 4)): irsensors() if ((distleft > 1000) or (distcen > 1000) or (distright > 1000)): print("all stop ir sensor") i01.mouth.speak("ir sensor stop") break sstop() blights(2) i01.mouth.speak("that was fun") if value == 0.75: print("down arrow, at rest") serial.write("HDLT 00\r") i01.mouth.speak("scooter at rest") # head neck 50-170, rotatehead 10-170, eyeX, eyeY 5-60, jaw 50-110 i01.setHeadSpeed(0.60, 0.60) i01.moveHead(60,68) batvolts() irsensors() #reset distance counter which software does not use yet serial.write("RST\r") getping() i01.mouth.speak( "right ping" + str(distpingright) + "in") i01.mouth.speak( "left ping" + str(distpingleft) + "in") if value == 0.5: print("right arrow") serial.write("TURN 180 35\r") i01.mouth.speak("lets spin") blights(1) i01.mouth.speak("thats fun") #HEAD if value == 0.25: print("up arrow, head active") serial.write("HDLT 47\r") i01.mouth.speak("head") uberjoy.addListener("publishRX", "python", "StickRXheadListener") uberjoy.addListener("publishRY", "python", "StickRYheadListener") def clearbuffer(): crap = 0 crap = serial.available() print("cbuf " + str(crap)) #would like to clean out serial buffer for i in range(1,crap): junk = serial.read() # print("junk=" + str(junk)) def getadc(): global serdata clearbuffer() #send command to read 8 channel atod on eddie board # print("adc cmd") serial.write("ADC\r") time.sleep(1) code = 1 #looking for > which is the start of the ADC data for i in range(1,90): if(code == 0x3E): break code = serial.read() # print(str(code)) # now read data from ADC and end when you see cr serdata = "" for i in range(0,31): code = (serial.read() & 0xFF) serdata += chr(code) if ((code == 0x0D)): # print("exit") break # print("raw data=" + str(serdata)) def getping(): global distpingright global distpingleft global serdata clearbuffer() serial.write("PING\r") # need time delay for ping sensor sleep(1) code = 1 #looking for < which is the start of the ping data 2 sensors for i in range(1,60): code = serial.read() if(code == 0x3C): # print("found start") break # print("junk data=" + str(serdata)) #read data from ping sensors and end when you see cr serdata = "" code=1 for i in range(0,20): code = (serial.read() & 0xFF) serdata += chr(code) if ((code == 0x0D)): # print("exit ping") break # print("raw data=" + str(serdata)) # ping sensor output from 12 to B54 hex distpingright = (((int((serdata[0]), base=16)) * 256) + ((int((serdata[1]), base=16)) * 16) + (int((serdata[2]), base=16))) distpingright = distpingright / 23 if( distpingright == 0 ): print("pr>120") # set to max so you can move forward with no reading distpingright = 120 # i01.mouth.speak("out of range for right ping") else: print("pr=" + str(distpingright)) # i01.mouth.speak( str(distpingright) + "in") distpingleft = (((int((serdata[4]), base=16)) * 256) + ((int((serdata[5]), base=16)) * 16) + (int((serdata[6]), base=16))) distpingleft = distpingleft / 23 if( distpingleft == 0 ): print("pl>120") distpingleft = 120 # i01.mouth.speak("out of range for left ping") else: print("pl=" + str(round(distpingleft,1))) # i01.mouth.speak( str(distpingleft) + "in") def batvolts(): #get right values from character stream #the calculation figures out to be .0039volt per count or 1/.0039=256.4 (was using 238.2) #using that number shows a voltage that is lower than I see on a voltmeter #so I adjusted the divisor to match my meter and then round it to 1 places global serdata global volts getadc() volts1 = (int((serdata[28]), base=16)) * 256 volts = (int((serdata[29]), base=16)) * 16 volts = (volts1 + volts + (int((serdata[30]), base=16))) / 256.4 volts = volts + 0.65 #adjust for some offset needed print(str(round(volts,1)) + "VDC") i01.mouth.speak( str(round(volts,1)) + "volts") def irsensors(): # 3 I/R sensors on platform measure from 10 to 80cm or 3.94 to 31.5inches # ADC is 12 bit or FFF but eddie bd only uses 10 bits or 1024 counts # note spec says out voltage at 80cm is .25 to .55v and at 10cm is 1.85 to 2.7v # so these are more like on off for set distance not very accurate for real distance # 30cm or 11.8 inches should be good starting point or about 1 volt for center # may want closer for sides want to be able to go through doors # 1 volt should be around 1024 cnts/5v or about 205 counts # remember higher number is closer global serdata global distcen global distright global distleft getadc() distleft = (((int((serdata[8]), base=16)) * 256) + ((int((serdata[9]), base=16)) * 16) + (int(serdata[10], base=16))) distcen = (((int((serdata[4]), base=16)) * 256) + ((int((serdata[5]), base=16)) * 16) + (int(serdata[6], base=16))) distright = (((int((serdata[0]), base=16)) * 256) + ((int((serdata[0]), base=16)) * 16) + (int(serdata[2], base=16))) print("L=" + str(distleft) + " C=" + str(distcen) + " R=" + str(distright)) # note parallax pir is active high, small radio shack pir is active low # scooter has radio shack pir sensor # azul has parallax sensor def input(pin): global timespir # print( pin.pin, pin.value, pin.type, pin.source ) if (pin.value == 0): timespir = timespir + 1 #added because it was going off too munch if (timespir > 5): uno.digitalReadPollingStop(readDigitalPin) #turn off pir sensor timespir = 0 #reset counter for pir print pin.pin, pin.value, pin.type, pin.source, print("*** some one is here ***") #show me in code working i01.mouth.speak("howdy partner, i was resting") blights(2) for pos in range(0,2): #move head like waking up i01.setHeadSpeed(0.80, 0.80) i01.moveHead(100,60) sleep(2) i01.moveHead(60,110) sleep(2) i01.moveHead(90,90) serial.write("HDLT 05\r") #lights # invert controls # dont know how invert works tried but nothing seemed to change #uberjoy.map("y", -1, 1, 1, -1) #uberjoy.map("ry", -1, 1, 1, -1) #startup scooter and reset distance sensor, get battery voltage batvolts() irsensors() serial.write("RST\r") #reset wheel counters getping() # lights in head 01=left face, 02=head-nose, 04=right face # 08=right eye, 10=kinect, 20=base, 40=red vertical, 00=all off #blink lights to let me know system is ready blights(2) #now start up pir sensor i01.startPIR("COM6",4) #uberjoy.addListener("publishX", "python", "StickeyeXListener") #uberjoy.addListener("publishY", "python", "StickeyeYListener") uberjoy.addListener("publishRX", "python", "StickRXListener") uberjoy.addListener("publishRY", "python", "StickRYListener") uberjoy.addListener("publish0", "python", "AButtonListener") uberjoy.addListener("publish1", "python", "BButtonListener") uberjoy.addListener("publish2", "python", "XButtonListener") uberjoy.addListener("publish3", "python", "YButtonListener") uberjoy.addListener("publish4", "python", "LButtonListener") uberjoy.addListener("publish5", "python", "RButtonListener") uberjoy.addListener("publishZ", "python", "ZButtonListener") uberjoy.addListener("publishPOV", "python", "DirPadListener")
{ "content_hash": "2ef87c6940903b99dedc281656b781ca", "timestamp": "", "source": "github", "line_count": 569, "max_line_length": 125, "avg_line_length": 32.757469244288224, "alnum_prop": 0.6506250335318419, "repo_name": "sstocker46/pyrobotlab", "id": "2f7dd536e9ec0da1eadddfe06549b703c7819d49", "size": "19001", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "home/harland/scooter.xbox.v27.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Arduino", "bytes": "122885" }, { "name": "C", "bytes": "36143" }, { "name": "Java", "bytes": "6142" }, { "name": "Processing", "bytes": "4587" }, { "name": "Python", "bytes": "1259082" } ], "symlink_target": "" }
"""Python class that implements Sentencepiece tokenizer. It follows TF.text designers design. """ import tensorflow.compat.v2 as tf # pylint: disable=g-direct-tensorflow-import from tensorflow.python.ops.ragged import ragged_tensor # pylint: disable=g-direct-tensorflow-import from tensorflow_text.core.pybinds import pywrap_model_converter # pylint: disable=g-bad-import-order from tensorflow.python.framework import load_library from tensorflow.python.platform import resource_loader gen_fast_sentencepiece_tokenizer = load_library.load_op_library(resource_loader.get_path_to_datafile('_fast_sentencepiece_tokenizer.so')) class FastSentencepieceTokenizer: """Sentencepiece tokenizer with tf.text interface.""" def __init__(self, model, reverse=False, add_bos=False, add_eos=False): converted_model = pywrap_model_converter.convert_sentencepiece_model(model) converted_model_detokenizer = pywrap_model_converter.convert_sentencepiece_model_for_decoder( model) # Use uint8 tensor as a buffer for the model to avoid any possible changes, # for example truncation by '\0'. self._converted_model = tf.constant(list(converted_model), dtype=tf.uint8) self._converted_model_detokenizer = tf.constant( list(converted_model_detokenizer), dtype=tf.uint8) self._vocab_size = pywrap_model_converter.get_vocabulary_size( converted_model) self._reverse = reverse self._add_bos = add_bos self._add_eos = add_eos def tokenize(self, inputs): """The main tokenization function.""" input_tensor = ragged_tensor.convert_to_tensor_or_ragged_tensor(inputs) if input_tensor.shape.ndims is None: raise ValueError("Rank of input_tensor must be statically known.") if ragged_tensor.is_ragged(input_tensor): # Ensure that input has row_split_dtype is int32 input_tensor = input_tensor.with_row_splits_dtype(tf.int32) # Recursively process the values of the ragged tensor. tokens = self.tokenize(input_tensor.flat_values) return input_tensor.with_flat_values(tokens) else: if input_tensor.shape.ndims > 1: # Convert the input tensor to ragged and process it. return self.tokenize( tf.RaggedTensor.from_tensor( input_tensor, row_splits_dtype=tf.int32)) elif input_tensor.shape.ndims == 0: tokens = self.tokenize(tf.stack([input_tensor])) return tokens.values else: # Our rank 1 tensor is the correct shape, so we can process it as # normal. (output_values, row_splits) = ( gen_fast_sentencepiece_tokenizer .tf_text_fast_sentencepiece_tokenize( self._converted_model, input_tensor, 0, 0, self._add_bos, self._add_eos, self._reverse)) tokens = tf.RaggedTensor.from_nested_row_splits( flat_values=output_values, nested_row_splits=[row_splits], validate=False) return tokens def detokenize(self, input): # pylint: disable=redefined-builtin """Detokenizes tokens into preprocessed text. Args: input: A `RaggedTensor` or `Tensor` with int32 encoded text with rank >= 1. Returns: A N-1 dimensional string Tensor or RaggedTensor of the detokenized text. """ input_tensor = ragged_tensor.convert_to_tensor_or_ragged_tensor(input) if input_tensor.shape.ndims is None: raise ValueError("Rank of input_tensor must be statically known.") if input_tensor.shape.ndims == 0: raise ValueError("Rank of input_tensor must be at least 1.") if ragged_tensor.is_ragged(input_tensor): if input_tensor.flat_values.shape.ndims > 1: # If the flat_values of our ragged tensor is multi-dimensional, we can # process it separately and our output will have the same nested # splits as our input. tokens = self.detokenize(input_tensor.flat_values) return input_tensor.with_flat_values(tokens) elif input_tensor.ragged_rank > 1: # Recursively process the values of the ragged tensor. tokens = self.detokenize(input_tensor.values) return input_tensor.with_values(tokens) else: return gen_fast_sentencepiece_tokenizer.tf_text_fast_sentencepiece_detokenize( self._converted_model_detokenizer, input_tensor.flat_values, input_tensor.row_splits) else: if input_tensor.shape.ndims > 1: # Convert the input tensor to ragged and process it. return self.detokenize( tf.RaggedTensor.from_tensor( input_tensor, row_splits_dtype=tf.int32)) else: tokens = self.detokenize(tf.stack([input_tensor])) return tf.reshape(tokens, []) def vocab_size(self): """Returns size of the vocabulary in Sentencepiece model.""" return self._vocab_size
{ "content_hash": "4ac2c6b3007528ea8430b70b3ab4dc89", "timestamp": "", "source": "github", "line_count": 110, "max_line_length": 137, "avg_line_length": 44.345454545454544, "alnum_prop": 0.6849118491184912, "repo_name": "tensorflow/text", "id": "8bb767d63dfa1d4a9353183233eff11c59b8d865", "size": "6164", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tensorflow_text/python/ops/fast_sentencepiece_tokenizer.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C++", "bytes": "1236707" }, { "name": "PureBasic", "bytes": "2" }, { "name": "Python", "bytes": "1111081" }, { "name": "Shell", "bytes": "12515" }, { "name": "Starlark", "bytes": "113773" } ], "symlink_target": "" }
import inspect from sqlalchemy.types import \ BLOB, BOOLEAN, CHAR, CLOB, DATE, DATETIME, DECIMAL, FLOAT, INT, \ NCHAR, NUMERIC, SMALLINT, TEXT, TIME, TIMESTAMP, VARCHAR, \ Binary, Boolean, Date, DateTime, Float, Integer, Interval, Numeric, \ PickleType, SmallInteger, String, Text, Time, Unicode, UnicodeText from sqlalchemy.sql import \ func, modifier, text, literal, literal_column, null, alias, \ and_, or_, not_, \ select, subquery, union, union_all, insert, update, delete, \ join, outerjoin, \ bindparam, outparam, asc, desc, collate, \ except_, except_all, exists, intersect, intersect_all, \ between, case, cast, distinct, extract from sqlalchemy.schema import \ MetaData, ThreadLocalMetaData, Table, Column, ForeignKey, \ Sequence, Index, ForeignKeyConstraint, PrimaryKeyConstraint, \ CheckConstraint, UniqueConstraint, Constraint, \ PassiveDefault, ColumnDefault, DDL from sqlalchemy.engine import create_engine, engine_from_config __all__ = [ name for name, obj in locals().items() if not (name.startswith('_') or inspect.ismodule(obj)) ] __version__ = '0.4.7p1'
{ "content_hash": "5134c412b7075c784be5aa312332c438", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 73, "avg_line_length": 41.107142857142854, "alnum_prop": 0.6985230234578628, "repo_name": "carlgao/lenga", "id": "2889a4cde1e2babeb53a45167d40b721bdc3707e", "size": "1374", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "images/lenny64-peon/usr/share/python-support/python-sqlalchemy/sqlalchemy/__init__.py", "mode": "33188", "license": "mit", "language": [ { "name": "Awk", "bytes": "3281" }, { "name": "C#", "bytes": "97763" }, { "name": "CSS", "bytes": "39383" }, { "name": "Emacs Lisp", "bytes": "6274490" }, { "name": "Frege", "bytes": "463786" }, { "name": "IDL", "bytes": "377510" }, { "name": "JavaScript", "bytes": "1032063" }, { "name": "Mathematica", "bytes": "11862" }, { "name": "Perl", "bytes": "57841501" }, { "name": "Prolog", "bytes": "9867" }, { "name": "Python", "bytes": "10875379" }, { "name": "Ruby", "bytes": "72162" }, { "name": "Shell", "bytes": "22775" }, { "name": "Slash", "bytes": "126702" }, { "name": "SystemVerilog", "bytes": "105693" }, { "name": "TeX", "bytes": "742855" }, { "name": "VimL", "bytes": "1845" }, { "name": "XProc", "bytes": "22962" }, { "name": "XSLT", "bytes": "4075" } ], "symlink_target": "" }
from google.appengine.ext.appstats import recording appstats_CALC_RPC_COSTS = True def webapp_add_wsgi_middleware(app): app = recording.appstats_wsgi_middleware(app) return app
{ "content_hash": "fa84751b1d1723288297b18c59248761", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 51, "avg_line_length": 23.5, "alnum_prop": 0.7659574468085106, "repo_name": "sangwonl/audi", "id": "baf30fb0574472c80392605e9fcad7ea5456973d", "size": "188", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "appengine_config.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "11807" }, { "name": "Python", "bytes": "36442" } ], "symlink_target": "" }
from .dataObject import BaseDataObject CELL_RDF_TYPE = BaseDataObject.base_namespace.Cell
{ "content_hash": "58810d6dd87e667d141d72b455b3ce48", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 50, "avg_line_length": 30.333333333333332, "alnum_prop": 0.8351648351648352, "repo_name": "gsarma/PyOpenWorm", "id": "b77089e7edb6b9be0da2f6e6d9db2bff04c8c8d1", "size": "91", "binary": false, "copies": "1", "ref": "refs/heads/dev", "path": "PyOpenWorm/cell_common.py", "mode": "33188", "license": "mit", "language": [ { "name": "Prolog", "bytes": "149462" }, { "name": "Python", "bytes": "422141" }, { "name": "Shell", "bytes": "493" }, { "name": "TeX", "bytes": "7280" } ], "symlink_target": "" }
def register(model_or_iterable, **options): """ Registers the given model(s) with the given translation options. The model(s) should be Model classes, not instances. Fields declared for translation on a base class are inherited by subclasses. If the model or one of its subclasses is already registered for translation, this will raise an exception. @register(Author) class AuthorTranslation(TranslationOptions): pass """ from wagtail_modeltranslation.translator import translator, TranslationOptions def wrapper(opts_class): if not issubclass(opts_class, TranslationOptions): raise ValueError('Wrapped class must subclass TranslationOptions.') translator.register(model_or_iterable, opts_class, **options) return wrapper
{ "content_hash": "f74e035e2da6f743c2a450b90250b259", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 82, "avg_line_length": 36.86363636363637, "alnum_prop": 0.7237977805178791, "repo_name": "tomdyson/wagtail-modeltranslation", "id": "873eab4c1693243426ac0d39bb63ec9d601e3a26", "size": "837", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "wagtail_modeltranslation/decorators.py", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "JavaScript", "bytes": "1819" }, { "name": "Python", "bytes": "109340" } ], "symlink_target": "" }
"""Code to support homekit_controller tests.""" from datetime import timedelta import json import os from unittest import mock from aiohomekit.model import Accessories, Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes from aiohomekit.testing import FakeController from homeassistant.components.homekit_controller import config_flow from homeassistant.components.homekit_controller.const import ( CONTROLLER, DOMAIN, HOMEKIT_ACCESSORY_DISPATCH, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import MockConfigEntry, async_fire_time_changed, load_fixture class Helper: """Helper methods for interacting with HomeKit fakes.""" def __init__(self, hass, entity_id, pairing, accessory, config_entry): """Create a helper for a given accessory/entity.""" self.hass = hass self.entity_id = entity_id self.pairing = pairing self.accessory = accessory self.config_entry = config_entry self.characteristics = {} for service in self.accessory.services: service_name = ServicesTypes.get_short(service.type) for char in service.characteristics: char_name = CharacteristicsTypes.get_short(char.type) self.characteristics[(service_name, char_name)] = char async def update_named_service(self, service, characteristics): """Update a service.""" self.pairing.testing.update_named_service(service, characteristics) await self.hass.async_block_till_done() async def poll_and_get_state(self): """Trigger a time based poll and return the current entity state.""" await time_changed(self.hass, 60) state = self.hass.states.get(self.entity_id) assert state is not None return state async def time_changed(hass, seconds): """Trigger time changed.""" next_update = dt_util.utcnow() + timedelta(seconds) async_fire_time_changed(hass, next_update) await hass.async_block_till_done() async def setup_accessories_from_file(hass, path): """Load an collection of accessory defs from JSON data.""" accessories_fixture = await hass.async_add_executor_job( load_fixture, os.path.join("homekit_controller", path) ) accessories_json = json.loads(accessories_fixture) accessories = Accessories.from_list(accessories_json) return accessories async def setup_platform(hass): """Load the platform but with a fake Controller API.""" config = {"discovery": {}} with mock.patch("aiohomekit.Controller") as controller: fake_controller = controller.return_value = FakeController() await async_setup_component(hass, DOMAIN, config) return fake_controller async def setup_test_accessories(hass, accessories): """Load a fake homekit device based on captured JSON profile.""" fake_controller = await setup_platform(hass) pairing_id = "00:00:00:00:00:00" accessories_obj = Accessories() for accessory in accessories: accessories_obj.add_accessory(accessory) pairing = await fake_controller.add_paired_device(accessories_obj, pairing_id) config_entry = MockConfigEntry( version=1, domain="homekit_controller", entry_id="TestData", data={"AccessoryPairingID": pairing_id}, title="test", ) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() return config_entry, pairing async def device_config_changed(hass, accessories): """Discover new devices added to Home Assistant at runtime.""" # Update the accessories our FakePairing knows about controller = hass.data[CONTROLLER] pairing = controller.pairings["00:00:00:00:00:00"] accessories_obj = Accessories() for accessory in accessories: accessories_obj.add_accessory(accessory) pairing.accessories = accessories_obj discovery_info = { "name": "TestDevice", "host": "127.0.0.1", "port": 8080, "properties": { "md": "TestDevice", "id": "00:00:00:00:00:00", "c#": "2", "sf": "0", }, } # Config Flow will abort and notify us if the discovery event is of # interest - in this case c# has incremented flow = config_flow.HomekitControllerFlowHandler() flow.hass = hass flow.context = {} result = await flow.async_step_zeroconf(discovery_info) assert result["type"] == "abort" assert result["reason"] == "already_configured" # Wait for services to reconfigure await hass.async_block_till_done() await hass.async_block_till_done() async def setup_test_component(hass, setup_accessory, capitalize=False, suffix=None): """Load a fake homekit accessory based on a homekit accessory model. If capitalize is True, property names will be in upper case. If suffix is set, entityId will include the suffix """ accessory = Accessory.create_with_info( "TestDevice", "example.com", "Test", "0001", "0.1" ) setup_accessory(accessory) domain = None for service in accessory.services: service_name = ServicesTypes.get_short(service.type) if service_name in HOMEKIT_ACCESSORY_DISPATCH: domain = HOMEKIT_ACCESSORY_DISPATCH[service_name] break assert domain, "Cannot map test homekit services to Home Assistant domain" config_entry, pairing = await setup_test_accessories(hass, [accessory]) entity = "testdevice" if suffix is None else f"testdevice_{suffix}" return Helper(hass, ".".join((domain, entity)), pairing, accessory, config_entry)
{ "content_hash": "54c82ad65c9c736abd7e3b4448703bb5", "timestamp": "", "source": "github", "line_count": 170, "max_line_length": 85, "avg_line_length": 34.311764705882354, "alnum_prop": 0.6835247728441626, "repo_name": "FreekingDean/home-assistant", "id": "c3c182c8b51e1befeb9405a0927d9b74e1c243b6", "size": "5833", "binary": false, "copies": "6", "ref": "refs/heads/dev", "path": "tests/components/homekit_controller/common.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "2335" }, { "name": "Python", "bytes": "36746639" }, { "name": "Shell", "bytes": "4910" } ], "symlink_target": "" }
import tangelo import cherrypy #just a sample to test the service is up @tangelo.restful def get(sz): tangelo.content_type("application/json") return { "echo" : sz } @tangelo.restful def post(*pargs, **kwargs): body = cherrypy.request.body.read() path = '.'.join(pargs) tangelo.content_type("application/json") return { "echo" : { "path" : path, "body" : body }}
{ "content_hash": "1c23ce033b46786f44f70c81c0a736b5", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 55, "avg_line_length": 25.933333333333334, "alnum_prop": 0.6580976863753213, "repo_name": "Sotera/track-communities", "id": "9d79169be30b7a6f38d8c52d55102fec7a2e4b2c", "size": "985", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tangelo_html/echo.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "4779" }, { "name": "HTML", "bytes": "8062" }, { "name": "Java", "bytes": "44413" }, { "name": "JavaScript", "bytes": "226480" }, { "name": "Puppet", "bytes": "4311" }, { "name": "Python", "bytes": "48710" }, { "name": "Ruby", "bytes": "2567" }, { "name": "Shell", "bytes": "10547" } ], "symlink_target": "" }
from galaxy.web.base.controller import * from galaxy.model.orm import * from galaxy.datatypes import sniff from galaxy import util from galaxy.util.streamball import StreamBall import logging, tempfile, zipfile, tarfile, os, sys if sys.version_info[:2] < ( 2, 6 ): zipfile.BadZipFile = zipfile.error if sys.version_info[:2] < ( 2, 5 ): zipfile.LargeZipFile = zipfile.error log = logging.getLogger( __name__ ) # Test for available compression types tmpd = tempfile.mkdtemp() comptypes = [] for comptype in ( 'gz', 'bz2' ): tmpf = os.path.join( tmpd, 'compression_test.tar.' + comptype ) try: archive = tarfile.open( tmpf, 'w:' + comptype ) archive.close() comptypes.append( comptype ) except tarfile.CompressionError: log.exception( "Compression error when testing %s compression. This option will be disabled for library downloads." % comptype ) try: os.unlink( tmpf ) except OSError: pass ziptype = '32' tmpf = os.path.join( tmpd, 'compression_test.zip' ) try: archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True ) archive.close() comptypes.append( 'zip' ) ziptype = '64' except RuntimeError: log.exception( "Compression error when testing zip compression. This option will be disabled for library downloads." ) except (TypeError, zipfile.LargeZipFile): # ZIP64 is only in Python2.5+. Remove TypeError when 2.4 support is dropped log.warning( 'Max zip file size is 2GB, ZIP64 not supported' ) comptypes.append( 'zip' ) try: os.unlink( tmpf ) except OSError: pass os.rmdir( tmpd ) class Library( BaseController ): @web.expose def index( self, trans, **kwd ): params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) return trans.fill_template( "/library/index.mako", default_action=params.get( 'default_action', None ), msg=msg, messagetype=messagetype ) @web.expose def browse_libraries( self, trans, **kwd ): params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) all_libraries = trans.app.model.Library.filter( trans.app.model.Library.table.c.deleted==False ).order_by( trans.app.model.Library.name ).all() authorized_libraries = [] for library in all_libraries: if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_ADD, library_item=library ) or \ trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=library ) or \ trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=library ) or \ trans.app.security_agent.check_folder_contents( trans.user, library ) or \ trans.app.security_agent.show_library_item( trans.user, library ): authorized_libraries.append( library ) return trans.fill_template( '/library/browse_libraries.mako', libraries=authorized_libraries, default_action=params.get( 'default_action', None ), msg=msg, messagetype=messagetype ) @web.expose def browse_library( self, trans, **kwd ): params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) id = params.get( 'id', None ) if not id: msg = "You must specify a library id." return trans.response.send_redirect( web.url_for( controller='library', action='browse_libraries', default_action=params.get( 'default_action', None ), msg=util.sanitize_text( msg ), messagetype='error' ) ) library = library=trans.app.model.Library.get( id ) if not library: msg = "Invalid library id ( %s )." return trans.response.send_redirect( web.url_for( controller='library', action='browse_libraries', default_action=params.get( 'default_action', None ), msg=util.sanitize_text( msg ), messagetype='error' ) ) created_ldda_ids = params.get( 'created_ldda_ids', '' ) return trans.fill_template( '/library/browse_library.mako', library=trans.app.model.Library.get( id ), created_ldda_ids=created_ldda_ids, default_action=params.get( 'default_action', None ), comptypes=comptypes, msg=msg, messagetype=messagetype ) @web.expose def library( self, trans, id=None, **kwd ): params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) # TODO: eventually we'll want the ability for users to create libraries if params.get( 'delete', False ): action = 'delete' elif params.get( 'permissions', False ): action = 'permissions' else: action = 'information' if not id: msg = "You must specify a library." return trans.response.send_redirect( web.url_for( controller='library', action='browse_libraries', msg=util.sanitize_text( msg ), messagetype='error' ) ) library = trans.app.model.Library.get( int( id ) ) if not library: msg = "Invalid library id ( %s ) specified." % str( id ) return trans.response.send_redirect( web.url_for( controller='library', action='browse_libraries', msg=util.sanitize_text( msg ), messagetype='error' ) ) if action == 'information': if params.get( 'rename_library_button', False ): old_name = library.name new_name = util.restore_text( params.name ) new_description = util.restore_text( params.description ) if not new_name: msg = 'Enter a valid name' return trans.fill_template( '/library/library_info.mako', library=library, restrict=params.get( 'restrict', False ), render_templates=params.get( 'render_templates', False ), msg=msg, messagetype='error' ) else: library.name = new_name library.description = new_description library.flush() # Rename the root_folder library.root_folder.name = new_name library.root_folder.description = new_description library.root_folder.flush() msg = "Library '%s' has been renamed to '%s'" % ( old_name, new_name ) return trans.response.send_redirect( web.url_for( controller='library', action='library', id=id, edit_info=True, msg=util.sanitize_text( msg ), messagetype='done' ) ) return trans.fill_template( '/library/library_info.mako', library=library, restrict=params.get( 'restrict', False ), render_templates=params.get( 'render_templates', False ), msg=msg, messagetype=messagetype ) elif action == 'permissions': if params.get( 'update_roles_button', False ): # The user clicked the Save button on the 'Associate With Roles' form permissions = {} for k, v in trans.app.model.Library.permitted_actions.items(): in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( params.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles trans.app.security_agent.set_all_library_permissions( library, permissions ) library.refresh() # Copy the permissions to the root folder trans.app.security_agent.copy_library_permissions( library, library.root_folder ) msg = "Permissions updated for library '%s'" % library.name return trans.response.send_redirect( web.url_for( controller='library', action='library', id=id, permissions=True, msg=util.sanitize_text( msg ), messagetype='done' ) ) return trans.fill_template( '/library/library_permissions.mako', library=library, msg=msg, messagetype=messagetype ) @web.expose def datasets( self, trans, library_id, ldda_ids='', **kwd ): # This method is used by the select list labeled "Perform action on selected datasets" # on the analysis library browser. if not ldda_ids: msg = "You must select at least one dataset" return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='error' ) ) ldda_ids = util.listify( ldda_ids ) params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) if not params.do_action: msg = "You must select an action to perform on selected datasets" return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='error' ) ) if params.do_action == 'add': history = trans.get_history() for ldda_id in ldda_ids: ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( ldda_id ) hda = ldda.to_history_dataset_association( target_history=history, add_to_history = True ) history.flush() msg = "%i dataset(s) have been imported into your history" % len( ldda_ids ) return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='done' ) ) elif params.do_action == 'manage_permissions': # We need the folder containing the LibraryDatasetDatasetAssociation(s) ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( ldda_ids[0] ) trans.response.send_redirect( web.url_for( controller='library', action='library_dataset_dataset_association', library_id=library_id, folder_id=ldda.library_dataset.folder.id, id=','.join( ldda_ids ), permissions=True, msg=util.sanitize_text( msg ), messagetype=messagetype ) ) else: try: if params.do_action == 'zip': # Can't use mkstemp - the file must not exist first tmpd = tempfile.mkdtemp() tmpf = os.path.join( tmpd, 'library_download.' + params.do_action ) if ziptype == '64': archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True ) else: archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED ) archive.add = lambda x, y: archive.write( x, y.encode('CP437') ) elif params.do_action == 'tgz': archive = util.streamball.StreamBall( 'w|gz' ) elif params.do_action == 'tbz': archive = util.streamball.StreamBall( 'w|bz2' ) except (OSError, zipfile.BadZipFile): log.exception( "Unable to create archive for download" ) msg = "Unable to create archive for download, please report this error" return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='error' ) ) seen = [] for id in ldda_ids: ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( id ) if not ldda or not trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.DATASET_ACCESS, dataset = ldda.dataset ): continue path = "" parent_folder = ldda.library_dataset.folder while parent_folder is not None: # Exclude the now-hidden "root folder" if parent_folder.parent is None: path = os.path.join( parent_folder.library_root[0].name, path ) break path = os.path.join( parent_folder.name, path ) parent_folder = parent_folder.parent path += ldda.name while path in seen: path += '_' seen.append( path ) try: archive.add( ldda.dataset.file_name, path ) except IOError: log.exception( "Unable to write to temporary library download archive" ) msg = "Unable to create archive for download, please report this error" return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='error' ) ) if params.do_action == 'zip': archive.close() tmpfh = open( tmpf ) # clean up now try: os.unlink( tmpf ) os.rmdir( tmpd ) except OSError: log.exception( "Unable to remove temporary library download archive and directory" ) msg = "Unable to create archive for download, please report this error" return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='error' ) ) trans.response.set_content_type( "application/x-zip-compressed" ) trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyLibraryFiles.%s" % params.do_action return tmpfh else: trans.response.set_content_type( "application/x-tar" ) trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyLibraryFiles.%s" % params.do_action archive.wsgi_status = trans.response.wsgi_status() archive.wsgi_headeritems = trans.response.wsgi_headeritems() return archive.stream @web.expose def download_dataset_from_folder(self, trans, id, library_id=None, **kwd): """Catches the dataset id and displays file contents as directed""" # id must refer to a LibraryDatasetDatasetAssociation object ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( id ) if not ldda.dataset: msg = 'Invalid LibraryDatasetDatasetAssociation id %s received for file downlaod' % str( id ) return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=msg, messagetype='error' ) ) mime = trans.app.datatypes_registry.get_mimetype_by_extension( ldda.extension.lower() ) trans.response.set_content_type( mime ) fStat = os.stat( ldda.file_name ) trans.response.headers[ 'Content-Length' ] = int( fStat.st_size ) valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' fname = ldda.name fname = ''.join( c in valid_chars and c or '_' for c in fname )[ 0:150 ] trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyLibraryDataset-%s-[%s]" % ( str( id ), fname ) try: return open( ldda.file_name ) except: msg = 'This dataset contains no content' return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=msg, messagetype='error' ) ) @web.expose def library_dataset( self, trans, id, library_id, **kwd ): params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) if params.get( 'permissions', False ): action = 'permissions' else: action = 'information' library_dataset = trans.app.model.LibraryDataset.get( id ) if not library_dataset: msg = "Invalid library dataset specified, id: %s" %str( id ) return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='error' ) ) if action == 'information': if params.get( 'edit_attributes_button', False ): if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=library_dataset ): if params.get( 'edit_attributes_button', False ): old_name = library_dataset.name new_name = util.restore_text( params.get( 'name', '' ) ) new_info = util.restore_text( params.get( 'info', '' ) ) if not new_name: msg = 'Enter a valid name' messagetype = 'error' else: library_dataset.name = new_name library_dataset.info = new_info library_dataset.flush() msg = "Dataset '%s' has been renamed to '%s'" % ( old_name, new_name ) messagetype = 'done' else: msg = "You are not authorized to change the attributes of this dataset" messagetype = "error" return trans.fill_template( '/library/library_dataset_info.mako', library_dataset=library_dataset, library_id=library_id, restrict=params.get( 'restrict', True ), render_templates=params.get( 'render_templates', False ), msg=msg, messagetype=messagetype ) elif action == 'permissions': if params.get( 'update_roles_button', False ): if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=library_dataset ): # The user clicked the Save button on the 'Associate With Roles' form permissions = {} for k, v in trans.app.model.Library.permitted_actions.items(): in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles # Set the LIBRARY permissions on the LibraryDataset # NOTE: the LibraryDataset and LibraryDatasetDatasetAssociation will be set with the same permissions trans.app.security_agent.set_all_library_permissions( library_dataset, permissions ) library_dataset.refresh() # Set the LIBRARY permissions on the LibraryDatasetDatasetAssociation trans.app.security_agent.set_all_library_permissions( library_dataset.library_dataset_dataset_association, permissions ) library_dataset.library_dataset_dataset_association.refresh() msg = 'Permissions and roles have been updated for library dataset %s' % library_dataset.name messagetype = 'done' else: msg = "You are not authorized to managed the permissions of this dataset" messagetype = "error" return trans.fill_template( '/library/library_dataset_permissions.mako', library_dataset=library_dataset, library_id=library_id, msg=msg, messagetype=messagetype ) @web.expose def library_dataset_dataset_association( self, trans, library_id, folder_id, id=None, **kwd ): params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) dbkey = params.get( 'dbkey', None ) if isinstance( dbkey, list ): last_used_build = dbkey[0] else: last_used_build = dbkey folder = trans.app.model.LibraryFolder.get( folder_id ) if folder and last_used_build in [ 'None', None, '?' ]: last_used_build = folder.genome_build replace_id = params.get( 'replace_id', None ) if replace_id: replace_dataset = trans.app.model.LibraryDataset.get( params.get( 'replace_id', None ) ) if not last_used_build: last_used_build = replace_dataset.library_dataset_dataset_association.dbkey else: replace_dataset = None # Let's not overwrite the imported datatypes module with the variable datatypes? # The built-in 'id' is overwritten in lots of places as well ldatatypes = [ x for x in trans.app.datatypes_registry.datatypes_by_extension.iterkeys() ] ldatatypes.sort() if id: if params.get( 'permissions', False ): action = 'permissions' elif params.get( 'edit_info', False ): action = 'edit_info' else: action = 'info' if id.count( ',' ): ids = id.split( ',' ) id = None else: ids = None else: ids = None if id: # ldda_id specified, display attributes form ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( id ) if not ldda: msg = "Invalid LibraryDatasetDatasetAssociation specified, id: %s" % str( id ) return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='error' ) ) if action == 'permissions': if params.get( 'update_roles_button', False ): # The user clicked the Save button on the 'Associate With Roles' form if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=ldda ) and \ trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS, dataset=ldda.dataset ): permissions = {} for k, v in trans.app.model.Dataset.permitted_actions.items(): in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( params.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles # Set the DATASET permissions on the Dataset trans.app.security_agent.set_all_dataset_permissions( ldda.dataset, permissions ) ldda.dataset.refresh() permissions = {} for k, v in trans.app.model.Library.permitted_actions.items(): in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles # Set the LIBRARY permissions on the LibraryDataset # NOTE: the LibraryDataset and LibraryDatasetDatasetAssociation will be set with the same permissions trans.app.security_agent.set_all_library_permissions( ldda.library_dataset, permissions ) ldda.library_dataset.refresh() # Set the LIBRARY permissions on the LibraryDatasetDatasetAssociation trans.app.security_agent.set_all_library_permissions( ldda, permissions ) ldda.refresh() msg = "Permissions updated for dataset '%s'" % ldda.name messagetype = 'done' else: msg = "You are not authorized to change the permissions of dataset '%s'" % ldda.name messagetype = 'error' return trans.fill_template( '/library/ldda_permissions.mako', ldda=ldda, library_id=library_id, msg=msg, messagetype=messagetype ) elif action == 'info': return trans.fill_template( '/library/ldda_info.mako', ldda=ldda, library_id=library_id, msg=msg, messagetype=messagetype ) elif action == 'edit_info': if params.get( 'change', False ): # The user clicked the Save button on the 'Change data type' form if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=ldda ): trans.app.datatypes_registry.change_datatype( ldda, params.datatype ) trans.app.model.flush() msg = "Data type changed for library dataset '%s'" % ldda.name messagetype = 'done' else: msg = "You are not authorized to change the data type of dataset '%s'" % ldda.name messagetype = 'error' return trans.fill_template( "/library/ldda_edit_info.mako", ldda=ldda, library_id=library_id, datatypes=ldatatypes, restrict=params.get( 'restrict', True ), render_templates=params.get( 'render_templates', False ), msg=msg, messagetype=messagetype ) elif params.get( 'save', False ): # The user clicked the Save button on the 'Edit Attributes' form if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=ldda ): old_name = ldda.name new_name = util.restore_text( params.get( 'name', '' ) ) new_info = util.restore_text( params.get( 'info', '' ) ) new_message = util.restore_text( params.get( 'message', '' ) ) if not new_name: msg = 'Enter a valid name' messagetype = 'error' else: ldda.name = new_name ldda.info = new_info ldda.message = new_message # The following for loop will save all metadata_spec items for name, spec in ldda.datatype.metadata_spec.items(): if spec.get("readonly"): continue optional = params.get( "is_" + name, None ) if optional and optional == 'true': # optional element... == 'true' actually means it is NOT checked (and therefore ommitted) setattr( ldda.metadata, name, None ) else: setattr( ldda.metadata, name, spec.unwrap( params.get ( name, None ) ) ) ldda.metadata.dbkey = dbkey ldda.datatype.after_edit( ldda ) trans.app.model.flush() msg = 'Attributes updated for library dataset %s' % ldda.name messagetype = 'done' else: msg = "you are not authorized to edit the attributes of dataset '%s'" % ldda.name messagetype = 'error' return trans.fill_template( "/library/ldda_edit_info.mako", ldda=ldda, library_id=library_id, datatypes=ldatatypes, restrict=params.get( 'restrict', True ), render_templates=params.get( 'render_templates', False ), msg=msg, messagetype=messagetype ) elif params.get( 'detect', False ): # The user clicked the Auto-detect button on the 'Edit Attributes' form if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=ldda ): for name, spec in ldda.datatype.metadata_spec.items(): # We need to be careful about the attributes we are resetting if name not in [ 'name', 'info', 'dbkey' ]: if spec.get( 'default' ): setattr( ldda.metadata, name, spec.unwrap( spec.get( 'default' ) ) ) ldda.datatype.set_meta( ldda ) ldda.datatype.after_edit( ldda ) trans.app.model.flush() msg = 'Attributes updated for library dataset %s' % ldda.name messagetype = 'done' else: msg = "you are not authorized to edit the attributes of dataset '%s'" % ldda.name messagetype = 'error' return trans.fill_template( "/library/ldda_edit_info.mako", ldda=ldda, library_id=library_id, datatypes=ldatatypes, restrict=params.get( 'restrict', True ), render_templates=params.get( 'render_templates', False ), msg=msg, messagetype=messagetype ) elif params.get( 'delete', False ): if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=folder ): ldda.deleted = True ldda.flush() msg = 'Dataset %s has been removed from this library' % ldda.name messagetype = 'done' else: msg = "you are not authorized to delete dataset '%s'" % ldda.name messagetype = 'error' return trans.fill_template( "/library/ldda_edit_info.mako", ldda=ldda, library_id=library_id, datatypes=ldatatypes, restrict=params.get( 'restrict', True ), render_templates=params.get( 'render_templates', False ), msg=msg, messagetype=messagetype ) if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=ldda ): ldda.datatype.before_edit( ldda ) if "dbkey" in ldda.datatype.metadata_spec and not ldda.metadata.dbkey: # Copy dbkey into metadata, for backwards compatability # This looks like it does nothing, but getting the dbkey # returns the metadata dbkey unless it is None, in which # case it resorts to the old dbkey. Setting the dbkey # sets it properly in the metadata ldda.metadata.dbkey = ldda.dbkey return trans.fill_template( "/library/ldda_edit_info.mako", ldda=ldda, library_id=library_id, datatypes=ldatatypes, restrict=params.get( 'restrict', True ), render_templates=params.get( 'render_templates', False ), msg=msg, messagetype=messagetype ) elif ids: # Multiple ids specfied, display permission form, permissions will be updated for all simultaneously. lddas = [] for id in [ int( id ) for id in ids ]: ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( id ) if ldda is None: msg = 'You specified an invalid LibraryDatasetDatasetAssociation id: %s' %str( id ) trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='error' ) ) lddas.append( ldda ) if len( lddas ) < 2: msg = 'You must specify at least two datasets on which to modify permissions, ids you sent: %s' % str( ids ) trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='error' ) ) if action == 'permissions': if params.get( 'update_roles_button', False ): if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=ldda ) and \ trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS, dataset=ldda.dataset ): permissions = {} for k, v in trans.app.model.Dataset.permitted_actions.items(): in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( params.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles for ldda in lddas: # Set the DATASET permissions on the Dataset trans.app.security_agent.set_all_dataset_permissions( ldda.dataset, permissions ) ldda.dataset.refresh() permissions = {} for k, v in trans.app.model.Library.permitted_actions.items(): in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles for ldda in lddas: # Set the LIBRARY permissions on the LibraryDataset # NOTE: the LibraryDataset and LibraryDatasetDatasetAssociation will be set with the same permissions trans.app.security_agent.set_all_library_permissions( ldda.library_dataset, permissions ) ldda.library_dataset.refresh() # Set the LIBRARY permissions on the LibraryDatasetDatasetAssociation trans.app.security_agent.set_all_library_permissions( ldda, permissions ) ldda.refresh() msg = 'Permissions and roles have been updated on %d datasets' % len( lddas ) messagetype = 'done' else: msg = "You are not authorized to change the permissions of dataset '%s'" % ldda.name messagetype = 'error' return trans.fill_template( "/library/ldda_permissions.mako", ldda=lddas, library_id=library_id, msg=msg, messagetype=messagetype ) if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=ldda ) and \ trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS, dataset=ldda.dataset ): # Ensure that the permissions across all library items are identical, otherwise we can't update them together. check_list = [] for ldda in lddas: permissions = [] # Check the library level permissions - the permissions on the LibraryDatasetDatasetAssociation # will always be the same as the permissions on the associated LibraryDataset, so we only need to # check one Library object for library_permission in trans.app.security_agent.get_library_dataset_permissions( ldda.library_dataset ): if library_permission.action not in permissions: permissions.append( library_permission.action ) for dataset_permission in trans.app.security_agent.get_dataset_permissions( ldda.dataset ): if dataset_permission.action not in permissions: permissions.append( dataset_permission.action ) permissions.sort() if not check_list: check_list = permissions if permissions != check_list: msg = 'The datasets you selected do not have identical permissions, so they can not be updated together' trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='error' ) ) else: msg = "You are not authorized to change the permissions of dataset '%s'" % ldda.name messagetype = 'error' return trans.fill_template( "/library/ldda_permissions.mako", ldda=lddas, library_id=library_id, msg=msg, messagetype=messagetype ) if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_ADD, library_item=folder ) or \ ( replace_dataset and trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=replace_dataset ) ): if params.get( 'new_dataset_button', False ): upload_option = params.get( 'upload_option', 'upload_file' ) created_ldda_ids = trans.webapp.controllers[ 'library_dataset' ].upload_dataset( trans, controller='library', library_id=library_id, folder_id=folder_id, replace_dataset=replace_dataset, **kwd ) if created_ldda_ids: ldda_id_list = created_ldda_ids.split( ',' ) total_added = len( ldda_id_list ) if replace_dataset: msg = "Added %d dataset versions to the library dataset '%s' in the folder '%s'." % ( total_added, replace_dataset.name, folder.name ) else: if not folder.parent: # Libraries have the same name as their root_folder msg = "Added %d datasets to the library '%s' ( each is selected ). " % ( total_added, folder.name ) else: msg = "Added %d datasets to the folder '%s' ( each is selected ). " % ( total_added, folder.name ) # Since permissions on all LibraryDatasetDatasetAssociations must be the same at this point, we only need # to check one of them to see if the current user can manage permissions on them. check_ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( ldda_id_list[0] ) if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=check_ldda ): if replace_dataset: default_action = '' else: msg += "Click the Go button at the bottom of this page to edit the permissions on these datasets if necessary." default_action = 'manage_permissions' else: default_action = 'add' trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, default_action=default_action, created_ldda_ids=created_ldda_ids, msg=util.sanitize_text( msg ), messagetype='done' ) ) else: msg = "Upload failed" trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, created_ldda_ids=created_ldda_ids, msg=util.sanitize_text( msg ), messagetype='error' ) ) if not id or replace_dataset: upload_option = params.get( 'upload_option', 'upload_file' ) # No dataset(s) specified, so display the upload form. Send list of data formats to the form # so the "extension" select list can be populated dynamically file_formats = trans.app.datatypes_registry.upload_file_formats # Send list of genome builds to the form so the "dbkey" select list can be populated dynamically def get_dbkey_options( last_used_build ): for dbkey, build_name in util.dbnames: yield build_name, dbkey, ( dbkey==last_used_build ) dbkeys = get_dbkey_options( last_used_build ) # Send list of roles to the form so the dataset can be associated with 1 or more of them. roles = trans.app.model.Role.filter( trans.app.model.Role.table.c.deleted==False ).order_by( trans.app.model.Role.c.name ).all() # Send the current history to the form to enable importing datasets from history to library history = trans.get_history() history.refresh() return trans.fill_template( '/library/new_dataset.mako', upload_option=upload_option, library_id=library_id, folder_id=folder_id, replace_id=replace_id, file_formats=file_formats, dbkeys=dbkeys, last_used_build=last_used_build, roles=roles, history=history, msg=msg, messagetype=messagetype, replace_dataset=replace_dataset ) @web.expose def add_history_datasets_to_library( self, trans, library_id, folder_id, hda_ids='', **kwd ): params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) try: folder = trans.app.model.LibraryFolder.get( int( folder_id ) ) except: msg = "Invalid folder id: %s" % str( folder_id ) return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='error' ) ) replace_id = params.get( 'replace_id', None ) if replace_id: replace_dataset = trans.app.model.LibraryDataset.get( replace_id ) else: replace_dataset = None # See if the current history is empty history = trans.get_history() history.refresh() if not history.active_datasets: msg = 'Your current history is empty' return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='error' ) ) if params.get( 'add_history_datasets_to_library_button', False ): hda_ids = util.listify( hda_ids ) if hda_ids: dataset_names = [] created_ldda_ids = '' for hda_id in hda_ids: hda = trans.app.model.HistoryDatasetAssociation.get( hda_id ) if hda: ldda = hda.to_library_dataset_dataset_association( target_folder=folder, replace_dataset=replace_dataset ) created_ldda_ids = '%s,%s' % ( created_ldda_ids, str( ldda.id ) ) dataset_names.append( ldda.name ) if not replace_dataset: # If replace_dataset is None, the Library level permissions will be taken from the folder and applied to the new # LDDA and LibraryDataset. trans.app.security_agent.copy_library_permissions( folder, ldda ) trans.app.security_agent.copy_library_permissions( folder, ldda.library_dataset ) # Permissions must be the same on the LibraryDatasetDatasetAssociation and the associated LibraryDataset trans.app.security_agent.copy_library_permissions( ldda.library_dataset, ldda ) else: msg = "The requested HistoryDatasetAssociation id %s is invalid" % str( hda_id ) return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='error' ) ) if created_ldda_ids: created_ldda_ids = created_ldda_ids.lstrip( ',' ) ldda_id_list = created_ldda_ids.split( ',' ) total_added = len( ldda_id_list ) if replace_dataset: msg = "Added %d dataset versions to the library dataset '%s' in the folder '%s'." % ( total_added, replace_dataset.name, folder.name ) else: if not folder.parent: # Libraries have the same name as their root_folder msg = "Added %d datasets to the library '%s' ( each is selected ). " % ( total_added, folder.name ) else: msg = "Added %d datasets to the folder '%s' ( each is selected ). " % ( total_added, folder.name ) # Since permissions on all LibraryDatasetDatasetAssociations must be the same at this point, we only need # to check one of them to see if the current user can manage permissions on them. check_ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( ldda_id_list[0] ) if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=check_ldda ): if replace_dataset: default_action = '' else: msg += "Click the Go button at the bottom of this page to edit the permissions on these datasets if necessary." default_action = 'manage_permissions' else: default_action = 'add' return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, created_ldda_ids=created_ldda_ids.lstrip( ',' ), default_action=default_action, msg=util.sanitize_text( msg ), messagetype='done' ) ) else: msg = 'Select at least one dataset from the list of active datasets in your current history' messagetype = 'error' last_used_build = folder.genome_build upload_option = params.get( 'upload_option', 'import_from_history' ) # Send list of data formats to the form so the "extension" select list can be populated dynamically file_formats = trans.app.datatypes_registry.upload_file_formats # Send list of genome builds to the form so the "dbkey" select list can be populated dynamically def get_dbkey_options( last_used_build ): for dbkey, build_name in util.dbnames: yield build_name, dbkey, ( dbkey==last_used_build ) dbkeys = get_dbkey_options( last_used_build ) # Send list of roles to the form so the dataset can be associated with 1 or more of them. roles = trans.app.model.Role.filter( trans.app.model.Role.table.c.deleted==False ).order_by( trans.app.model.Role.c.name ).all() return trans.fill_template( "/library/new_dataset.mako", upload_option=upload_option, library_id=library_id, folder_id=folder_id, replace_id=replace_id, file_formats=file_formats, dbkeys=dbkeys, last_used_build=last_used_build, roles=roles, history=history, msg=msg, messagetype=messagetype ) @web.expose def folder( self, trans, id, library_id, **kwd ): params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) if params.get( 'new', False ): action = 'new' elif params.get( 'delete', False ): action = 'delete' elif params.get( 'permissions', False ): action = 'permissions' else: # 'information' will be the default action = 'information' folder = trans.app.model.LibraryFolder.get( int( id ) ) if not folder: msg = "Invalid folder specified, id: %s" % str( id ) return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='error' ) ) if action == 'new': if params.new == 'submitted': new_folder = trans.app.model.LibraryFolder( name=util.restore_text( params.name ), description=util.restore_text( params.description ) ) # We are associating the last used genome build with folders, so we will always # initialize a new folder with the first dbkey in util.dbnames which is currently # ? unspecified (?) new_folder.genome_build = util.dbnames.default_value folder.add_folder( new_folder ) new_folder.flush() # New folders default to having the same permissions as their parent folder trans.app.security_agent.copy_library_permissions( folder, new_folder ) msg = "New folder named '%s' has been added to the library" % new_folder.name return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='done' ) ) return trans.fill_template( '/library/new_folder.mako', library_id=library_id, folder=folder, msg=msg, messagetype=messagetype ) elif action == 'information': if params.get( 'rename_folder_button', False ): if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=folder ): old_name = folder.name new_name = util.restore_text( params.name ) new_description = util.restore_text( params.description ) if not new_name: msg = 'Enter a valid name' return trans.fill_template( "/library/folder_info.mako", folder=folder, library_id=library_id, restrict=params.get( 'restrict', True ), render_templates=params.get( 'render_templates', False ), msg=msg, messagetype='error' ) else: folder.name = new_name folder.description = new_description folder.flush() msg = "Folder '%s' has been renamed to '%s'" % ( old_name, new_name ) return trans.response.send_redirect( web.url_for( controller='library', action='folder', id=id, library_id=library_id, rename=True, msg=util.sanitize_text( msg ), messagetype='done' ) ) else: msg = "You are not authorized to edit this folder" return trans.fill_template( "/library/folder_info.mako", folder=folder, library_id=library_id, restrict=params.get( 'restrict', True ), render_templates=params.get( 'render_templates', False ), msg=msg, messagetype='error' ) return trans.fill_template( '/library/folder_info.mako', folder=folder, library_id=library_id, restrict=params.get( 'restrict', True ), render_templates=params.get( 'render_templates', False ), msg=msg, messagetype=messagetype ) elif action == 'permissions': if params.get( 'update_roles_button', False ): # The user clicked the Save button on the 'Associate With Roles' form if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=folder ): permissions = {} for k, v in trans.app.model.Library.permitted_actions.items(): in_roles = [ trans.app.model.Role.get( int( x ) ) for x in util.listify( params.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles trans.app.security_agent.set_all_library_permissions( folder, permissions ) folder.refresh() msg = 'Permissions updated for folder %s' % folder.name return trans.response.send_redirect( web.url_for( controller='library', action='folder', id=id, library_id=library_id, permissions=True, msg=util.sanitize_text( msg ), messagetype='done' ) ) else: msg = "You are not authorized to manage permissions on this folder" return trans.response.send_redirect( web.url_for( controller='library', action='folder', id=id, library_id=library_id, permissions=True, msg=util.sanitize_text( msg ), messagetype='error' ) ) return trans.fill_template( '/library/folder_permissions.mako', folder=folder, library_id=library_id, msg=msg, messagetype=messagetype ) @web.expose def info_template( self, trans, library_id, id=None, num_fields=0, folder_id=None, ldda_id=None, library_dataset_id=None, **kwd ): params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) if not num_fields: num_fields = 0 else: num_fields = int( num_fields ) if params.get( 'new_template', False ): action = 'new_template' elif params.get( 'permissions', False ): action = 'permissions' else: action = 'edit_template' if id: library_item = trans.app.model.LibraryItemInfoTemplate.get( int( id ) ) library_item_desc = 'information template' response_action = 'info_template' response_id = id elif folder_id: library_item = trans.app.model.LibraryFolder.get( int( folder_id ) ) library_item_desc = 'folder' response_action = 'folder' response_id = folder_id elif ldda_id: library_item = trans.app.model.LibraryDatasetDatasetAssociation.get( int( ldda_id ) ) library_item_desc = 'library dataset' response_action = 'library_dataset_dataset_association' response_id = ldda_id elif library_dataset_id: library_item = trans.app.model.LibraryDataset.get( int( library_dataset_id ) ) library_item_desc = 'dataset' response_action = 'library_dataset_dataset_association' response_id = library_item.library_dataset_dataset_association.id else: library_item = trans.app.model.Library.get( int( library_id ) ) library_item_desc = 'library' response_action = 'browse_library' response_id = library_id if action == 'new_template': if params.get( 'create_info_template_button', False ): return trans.fill_template( '/library/new_info_template.mako', library_item=library_item, library_item_name=library_item.name, library_item_desc=library_item_desc, num_fields=num_fields, library_id=library_id, folder_id=folder_id, ldda_id=ldda_id, library_dataset_id=library_dataset_id, msg=msg, messagetype=messagetype ) elif params.get( 'new_info_template_button', False ): # Make sure at least 1 template field is filled in # TODO: Eventually we'll enhance templates to allow for required and optional fields. proceed = False for i in range( int( params.get( 'set_num_fields', 0 ) ) ): elem_name = params.get( 'new_element_name_%i' % i, None ) elem_description = params.get( 'new_element_description_%i' % i, None ) if elem_name or elem_description: proceed = True break if not proceed: msg = "At least 1 of the fields must be completed." return trans.fill_template( '/library/new_info_template.mako', library_item=library_item, library_item_name=library_item.name, library_item_desc=library_item_desc, num_fields=num_fields, library_id=library_id, folder_id=folder_id, ldda_id=ldda_id, library_dataset_id=library_dataset_id, msg=msg, messagetype=messagetype ) # Create template liit = trans.app.model.LibraryItemInfoTemplate() liit.name = util.restore_text( params.get( 'name', '' ) ) liit.description = util.restore_text( params.get( 'description', '' ) ) liit.flush() # Inherit the template's permissions from the library_item trans.app.security_agent.copy_library_permissions( liit, library_item ) # Create template association if folder_id: liit_assoc = trans.app.model.LibraryFolderInfoTemplateAssociation() liit_assoc.folder = trans.app.model.LibraryFolder.get( folder_id ) elif ldda_id: liit_assoc = trans.app.model.LibraryDatasetDatasetInfoTemplateAssociation() ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( ldda_id ) liit_assoc.library_dataset_dataset_association = ldda # This response_action method requires a folder_id folder_id = ldda.library_dataset.folder.id elif library_dataset_id: liit_assoc = trans.app.model.LibraryDatasetInfoTemplateAssociation() library_dataset = trans.app.model.LibraryDataset.get( library_dataset_id ) liit_assoc.library_dataset = library_dataset # This response_action method requires a folder_id folder_id = library_dataset.folder.id else: # We'll always be sent a library_id liit_assoc = trans.app.model.LibraryInfoTemplateAssociation() liit_assoc.library = trans.app.model.Library.get( library_id ) liit_assoc.library_item_info_template = liit liit_assoc.flush() # Create and add elements for i in range( int( params.get( 'set_num_fields', 0 ) ) ): elem_name = params.get( 'new_element_name_%i' % i, None ) elem_description = params.get( 'new_element_description_%i' % i, None ) if elem_description and not elem_name: # If we have a description but no name, the description will be both # ( a name cannot be empty, but a description can ) elem_name = elem_description if elem_name: # Skip any elements that have a missing name liit.add_element( name=elem_name, description=elem_description ) msg = "The new information template has been created." return trans.response.send_redirect( web.url_for( controller='library', action=response_action, id=response_id, library_id=library_id, folder_id=folder_id, msg=util.sanitize_text( msg ), messagetype='done' ) ) return trans.fill_template( '/library/create_info_template.mako', library_item=library_item, library_id=library_id, msg=msg, messagetype=messagetype ) elif action == 'edit_template': define_or_save = 'define' edit_info_template_button = params.get( 'edit_info_template_button', False ) if edit_info_template_button: if edit_info_template_button == 'Define fields': define_or_save = 'save' else: define_or_save = 'define' # Save changes to existing attributes, only set name if nonempty/nonNone is passed, but always set description name = params.get( 'name', None ) if name: library_item.name = name library_item.description = params.get( 'description', '' ) library_item.flush() # Save changes to exisiting elements for elem_id in util.listify( params.get( 'element_ids', [] ) ): liit_element = trans.app.model.LibraryItemInfoTemplateElement.get( elem_id ) name = params.get( 'element_name_%s' % elem_id, None ) if name: liit_element.name = name liit_element.description = params.get( 'element_description_%s' % elem_id, None ) liit_element.flush() # Add new elements for i in range( int( params.get( 'set_num_fields', 0 ) ) ): elem_name = params.get( 'new_element_name_%i' % i, None ) elem_description = params.get( 'new_element_description_%i' % i, None ) # Skip any elements that have a missing name and description if not elem_name: # If we have a description but no name, the description will be both # ( a name cannot be empty, but a description can ) elem_name = elem_description if elem_name: library_item.add_element( name=elem_name, description=elem_description ) library_item.refresh() msg = 'Information template %s has been updated' % library_item.name return trans.fill_template( "/library/edit_info_template.mako", liit=library_item, num_fields=num_fields, library_id=library_id, library_dataset_id=library_dataset_id, ldda_id=ldda_id, folder_id=folder_id, library_item_name=library_item.name, library_item_desc=library_item_desc, define_or_save=define_or_save, msg=msg, messagetype=messagetype ) elif action == 'permissions': if params.get( 'update_roles_button', False ): # The user clicked the Save button on the 'Associate With Roles' form permissions = {} for k, v in trans.app.model.Library.permitted_actions.items(): in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles trans.app.security_agent.set_all_library_permissions( library_item, permissions ) library_item.refresh() msg = "Permissions updated for information template '%s'" % library_item.name return trans.response.send_redirect( web.url_for( controller='library', action='info_template', library_id=library_id, id=id, permissions=True, msg=util.sanitize_text( msg ), messagetype='done' ) ) return trans.fill_template( '/library/info_template_permissions.mako', liit=library_item, library_id=library_id, msg=msg, messagetype=messagetype ) @web.expose def library_item_info( self, trans, library_id, id=None, library_item_id=None, library_item_type=None, **kwd ): params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) folder_id = None if library_item_type == 'library': library_item = trans.app.model.Library.get( library_item_id ) elif library_item_type == 'library_dataset': library_item = trans.app.model.LibraryDataset.get( library_item_id ) elif library_item_type == 'folder': library_item = trans.app.model.LibraryFolder.get( library_item_id ) elif library_item_type == 'library_dataset_dataset_association': library_item = trans.app.model.LibraryDatasetDatasetAssociation.get( library_item_id ) # This response_action method requires a folder_id folder_id = library_item.library_dataset.folder.id elif library_item_type == 'library_item_info_elememt': library_item = trans.app.model.LibraryItemInfoElement.get( library_item_id ) else: msg = "Invalid library item type ( %s ) specified, id ( %s )" % ( str( library_item_type ), str( library_item_id ) ) return trans.response.send_redirect( web.url_for( controller='library', action='browse_library', id=library_id, msg=util.sanitize_text( msg ), messagetype='error' ) ) if params.get( 'new_info', False ): if library_item: if params.get( 'create_new_info_button', False ): library_item_info_template_id = params.get( 'library_item_info_template_id', None ) library_item_info_template = trans.app.model.LibraryItemInfoTemplate.get( int( library_item_info_template_id ) ) # Make sure at least 1 template field is filled in # TODO: Eventually we'll enhance templates to allow for required and optional fields. proceed = False for template_element in library_item_info_template.elements: if params.get( "info_element_%s_%s" % ( library_item_info_template.id, template_element.id ), None ): proceed = True break if not proceed: msg = "At least 1 of the fields must be completed." return trans.response.send_redirect( web.url_for( controller='admin', action=library_item_type, id=library_item.id, library_id=library_id, folder_id=folder_id, edit_info=True, msg=util.sanitize_text( msg ), messagetype='error' ) ) user = trans.get_user() library_item_info = trans.app.model.LibraryItemInfo( user=user ) library_item_info.library_item_info_template = library_item_info_template library_item_info.flush() trans.app.security_agent.copy_library_permissions( library_item_info_template, library_item_info ) for template_element in library_item_info_template.elements: info_element_value = params.get( "info_element_%s_%s" % ( library_item_info_template.id, template_element.id ), None ) info_element = trans.app.model.LibraryItemInfoElement() info_element.contents = info_element_value info_element.library_item_info_template_element = template_element info_element.library_item_info = library_item_info info_element.flush() info_association_class = None for item_class, permission_class, info_association_class in trans.app.security_agent.library_item_assocs: if isinstance( library_item, item_class ): break if info_association_class: library_item_info_association = info_association_class( user=user ) library_item_info_association.set_library_item( library_item ) library_item_info_association.library_item_info = library_item_info library_item_info_association.flush() else: raise 'Invalid class (%s) specified for library_item (%s)' % ( library_item.__class__, library_item.__class__.__name__ ) msg = 'The information has been saved.' return trans.response.send_redirect( web.url_for( controller='library', action=library_item_type, id=library_item.id, library_id=library_id, folder_id=folder_id, edit_info=True, msg=util.sanitize_text( msg ), messagetype='done' ) ) return trans.fill_template( "/library/new_info.mako", library_id=library_id, library_item=library_item, library_item_type=library_item_type, msg=msg, messagetype=messagetype ) elif params.get( 'edit_info', False ): if params.get( 'edit_info_button', False ): ids = util.listify( id ) for id in ids: library_item_info_element = trans.app.model.LibraryItemInfoElement.get( int( id ) ) new_contents = util.restore_text( params.get( ( 'info_element_%s' % id ), '' ) ) library_item_info_element.contents = new_contents library_item_info_element.flush() msg = 'The information has been updated.' return trans.response.send_redirect( web.url_for( controller='library', action=library_item_type, id=library_item.id, library_id=library_id, folder_id=folder_id, edit_info=True, msg=util.sanitize_text( msg ), messagetype='done' ) ) elif params.get( 'permissions', False ): if params.get( 'update_roles_button', False ): permissions = {} for k, v in trans.app.model.Library.permitted_actions.items(): in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles trans.app.security_agent.set_all_library_permissions( library_item.library_item_info, permissions ) library_item.library_item_info.refresh() library_item.refresh() msg = "Permissions updated for field '%s'" % library_item.library_item_info_template_element.name return trans.response.send_redirect( web.url_for( controller='library', action='library_item_info', library_id=library_id, id=id, library_item_id=library_item_id, library_item_type=library_item_type, permissions=True, msg=util.sanitize_text( msg ), messagetype='done' ) ) return trans.fill_template( '/library/info_permissions.mako', library_item_info_element=library_item, library_id=library_id, msg=msg, messagetype=messagetype )
{ "content_hash": "c7150795cf8c786c45a0156d7eb1523b", "timestamp": "", "source": "github", "line_count": 1365, "max_line_length": 158, "avg_line_length": 68.52673992673992, "alnum_prop": 0.44183709468777727, "repo_name": "dbcls/dbcls-galaxy", "id": "95d114444378ab236bedc89f8589d1e9391dfb49", "size": "93539", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/galaxy/web/controllers/library.py", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "646729" }, { "name": "Perl", "bytes": "40970" }, { "name": "Python", "bytes": "2646651" }, { "name": "Ruby", "bytes": "145028" }, { "name": "Shell", "bytes": "21457" } ], "symlink_target": "" }
from .kb_helper import * from .state_tracker import * from .dialog_manager import * from .dict_reader import * from .utils import *
{ "content_hash": "fece31bd0b0ab922da90ef2310f2c924", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 29, "avg_line_length": 27, "alnum_prop": 0.725925925925926, "repo_name": "MiuLab/TC-Bot", "id": "fecba3bb857cdfb105a8f3e16319b9e30a423bd2", "size": "135", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/deep_dialog/dialog_system/__init__.py", "mode": "33188", "license": "mit", "language": [ { "name": "OpenEdge ABL", "bytes": "34853336" }, { "name": "Python", "bytes": "180734" } ], "symlink_target": "" }
from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('deal', '0004_remove_deal_features_benefits'), ] operations = [ migrations.AddField( model_name='deal', name='features_benefits', field=models.TextField(default=b' ', max_length=500), preserve_default=True, ), ]
{ "content_hash": "c29410e058db7fcd4677b1040d5cab4e", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 65, "avg_line_length": 23.31578947368421, "alnum_prop": 0.5959367945823928, "repo_name": "raytung/Slice", "id": "09a1145b34ff9a24fa577b26fadea34b7415be79", "size": "467", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "deal/migrations/0005_deal_features_benefits.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "317788" }, { "name": "JavaScript", "bytes": "67473" }, { "name": "Perl", "bytes": "1404" }, { "name": "Python", "bytes": "165203" } ], "symlink_target": "" }
import argparse import os import subprocess import sys ANDROID_LOG_CLASS = 'android.util.Log' FLUTTER_LOG_CLASS = 'io.flutter.Log' def main(): parser = argparse.ArgumentParser(description='Checks Flutter Android library for forbidden imports') parser.add_argument('--stamp', type=str, required=True) parser.add_argument('--files', type=str, required=True, nargs='+') args = parser.parse_args() open(args.stamp, 'wa').close() bad_files = [] for file in args.files: if file.endswith(os.path.join('io', 'flutter', 'Log.java')): continue with open(file) as f: if ANDROID_LOG_CLASS in f.read(): bad_files.append(file) if bad_files: print('') print('Illegal import %s detected in the following files:' % ANDROID_LOG_CLASS) for bad_file in bad_files: print(' - ' + bad_file) print('Use %s instead.' % FLUTTER_LOG_CLASS) print('') return 1 return 0 if __name__ == '__main__': sys.exit(main())
{ "content_hash": "5296fc21936eae7c1a4d907c58315c90", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 102, "avg_line_length": 24.94871794871795, "alnum_prop": 0.6485097636176773, "repo_name": "chinmaygarde/sky_engine", "id": "2a2d5ac1fbf1f7b85da7af852c1aae3ad00faf02", "size": "1157", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "tools/android_illegal_imports.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Assembly", "bytes": "79" }, { "name": "C", "bytes": "272156" }, { "name": "C++", "bytes": "11061165" }, { "name": "Dart", "bytes": "548670" }, { "name": "Groff", "bytes": "26536" }, { "name": "Java", "bytes": "446530" }, { "name": "JavaScript", "bytes": "6995" }, { "name": "Makefile", "bytes": "402" }, { "name": "Objective-C", "bytes": "27508" }, { "name": "Objective-C++", "bytes": "240243" }, { "name": "Python", "bytes": "2075400" }, { "name": "Shell", "bytes": "186422" } ], "symlink_target": "" }
import shutil import tarfile from unittest import TestCase from ddsc.core.localstore import LocalFile, LocalFolder, LocalProject, KindType, LocalItemsCounter, ItemsToSendCounter from mock import patch, Mock INCLUDE_ALL = '' def get_file_or_folder_paths(item, prefix=""): results = [] item_path = "" if item.kind != KindType.project_str: item_path = "{}{}".format(prefix, item.name) results.append(item_path) if item.kind != KindType.file_str: for child in item.children: results.extend(get_file_or_folder_paths(child, item_path + "/")) return sorted(results) class TestProjectFolderFile(TestCase): def test_file_str(self): f = LocalFile('setup.py') self.assertEqual(get_file_or_folder_paths(f), ['setup.py']) def test_empty_folder_str(self): f = LocalFolder('stuff') self.assertEqual(get_file_or_folder_paths(f), ['stuff']) def test_folder_one_child_str(self): folder = LocalFolder('stuff') folder.add_child(LocalFile('setup.py')) self.assertEqual(get_file_or_folder_paths(folder), ['stuff', 'stuff/setup.py']) def test_folder_two_children_str(self): folder = LocalFolder('stuff') folder.add_child(LocalFile('setup.py')) folder.add_child(LocalFile('requirements.txt')) self.assertEqual(get_file_or_folder_paths(folder), [ 'stuff', 'stuff/requirements.txt', 'stuff/setup.py', ]) def test_nested_folder_str(self): grand = LocalFolder('grand') parent = LocalFolder('parent') parent.add_child(LocalFile('setup.py')) parent.add_child(LocalFile('requirements.txt')) otherparent = LocalFolder('otherparent') grand.add_child(parent) grand.add_child(otherparent) self.assertEqual(get_file_or_folder_paths(grand), [ 'grand', 'grand/otherparent', 'grand/parent', 'grand/parent/requirements.txt', 'grand/parent/setup.py', ]) class TestProjectContent(TestCase): """ These tests exercise code that interacts with the file system. We extract ddsc/core/tests/testfolder.tar to temp and tests are specific to that tar file. """ @classmethod def setUpClass(cls): test_folder = tarfile.TarFile('ddsc/core/tests/testfolder.tar') test_folder.extractall('/tmp') test_folder.close() @classmethod def tearDownClass(cls): shutil.rmtree('/tmp/DukeDsClientTestFolder') def test_folder_dot_name(self): content = LocalFolder('.') self.assertEqual('DukeDSClient', content.name) def test_folder_name_removes_slash(self): content = LocalFolder('/tmp/DukeDsClientTestFolder/') self.assertEqual('DukeDsClientTestFolder', content.name) self.assertEqual('/tmp/DukeDsClientTestFolder', content.path) def test_folder_name_no_slash(self): content = LocalFolder('/tmp/DukeDsClientTestFolder') self.assertEqual('DukeDsClientTestFolder', content.name) self.assertEqual('/tmp/DukeDsClientTestFolder', content.path) def test_folder_up_and_back(self): content = LocalFolder('../DukeDSClient') self.assertEqual('DukeDSClient', content.name) def test_empty_str(self): content = LocalProject(False, file_exclude_regex=INCLUDE_ALL) self.assertEqual(get_file_or_folder_paths(content), []) def test_top_level_file_str(self): content = LocalProject(False, file_exclude_regex=INCLUDE_ALL) content.add_path('/tmp/DukeDsClientTestFolder/note.txt') self.assertEqual(get_file_or_folder_paths(content), [ '/note.txt' ]) @patch('ddsc.core.localstore.isfile') @patch('ddsc.core.localstore.print') def test_top_level_non_regular_file(self, mock_print, mock_isfile): mock_isfile.return_value = False content = LocalProject(False, file_exclude_regex=INCLUDE_ALL) content.add_path('/tmp/DukeDsClientTestFolder/note.txt') self.assertEqual(get_file_or_folder_paths(content), []) mock_print.assert_called_with('Warning: Skipping /tmp/DukeDsClientTestFolder/note.txt. ' 'This is an unsupported type of file.') def test_empty_folder_str(self): content = LocalProject(False, file_exclude_regex=INCLUDE_ALL) content.add_path('/tmp/DukeDsClientTestFolder/emptyfolder') self.assertEqual(get_file_or_folder_paths(content), [ '/emptyfolder' ]) def test_empty_folder_and_file_str(self): content = LocalProject(False, file_exclude_regex=INCLUDE_ALL) content.add_path('/tmp/DukeDsClientTestFolder/emptyfolder') content.add_path('/tmp/DukeDsClientTestFolder/note.txt') self.assertEqual(get_file_or_folder_paths(content), [ '/emptyfolder', '/note.txt' ]) def test_one_folder_str(self): content = LocalProject(False, file_exclude_regex=INCLUDE_ALL) content.add_path('/tmp/DukeDsClientTestFolder/scripts') self.assertEqual(get_file_or_folder_paths(content), [ '/scripts', '/scripts/makemoney.sh', ]) @patch('ddsc.core.localstore.isfile') @patch('ddsc.core.localstore.print') def test_one_folder_containing_non_regular_file(self, mock_print, mock_isfile): mock_isfile.return_value = False content = LocalProject(False, file_exclude_regex=INCLUDE_ALL) content.add_path('/tmp/DukeDsClientTestFolder/scripts') self.assertEqual(get_file_or_folder_paths(content), [ '/scripts' ]) mock_print.assert_called_with('Warning: Skipping /tmp/DukeDsClientTestFolder/scripts/makemoney.sh. ' 'This is an unsupported type of file.') def test_nested_folder_str(self): content = LocalProject(False, file_exclude_regex=INCLUDE_ALL) content.add_path('/tmp/DukeDsClientTestFolder/results') self.assertEqual(get_file_or_folder_paths(content), [ '/results', '/results/result1929.txt', '/results/result2929.txt', '/results/subresults', '/results/subresults/result1002.txt', '/results/subresults/result13.txt', '/results/subresults/result15.txt', '/results/subresults2', ]) def test_big_folder_str(self): content = LocalProject(False, file_exclude_regex=INCLUDE_ALL) content.add_path('/tmp/DukeDsClientTestFolder') child_names = [child.name for child in content.children[0].children] self.assertEqual(set(['note.txt', 'emptyfolder', 'results', 'scripts']), set(child_names)) self.assertEqual(get_file_or_folder_paths(content), [ '/DukeDsClientTestFolder', '/DukeDsClientTestFolder/emptyfolder', '/DukeDsClientTestFolder/note.txt', '/DukeDsClientTestFolder/results', '/DukeDsClientTestFolder/results/result1929.txt', '/DukeDsClientTestFolder/results/result2929.txt', '/DukeDsClientTestFolder/results/subresults', '/DukeDsClientTestFolder/results/subresults/result1002.txt', '/DukeDsClientTestFolder/results/subresults/result13.txt', '/DukeDsClientTestFolder/results/subresults/result15.txt', '/DukeDsClientTestFolder/results/subresults2', '/DukeDsClientTestFolder/scripts', '/DukeDsClientTestFolder/scripts/makemoney.sh' ]) def test_include_dot_files(self): content = LocalProject(False, file_exclude_regex=INCLUDE_ALL) content.add_path('test_scripts') self.assertIn('/test_scripts/.hidden_file', get_file_or_folder_paths(content)) def test_exclude_dot_files(self): content = LocalProject(False, file_exclude_regex='^\.') content.add_path('test_scripts') self.assertNotIn('/test_scripts/.hidden_file', get_file_or_folder_paths(content)) def test_ignore_one_dir(self): with open("/tmp/DukeDsClientTestFolder/.ddsignore", "w") as text_file: text_file.write("emptyfolder") content = LocalProject(False, file_exclude_regex='^\.') content.add_path('test_scripts') self.assertNotIn('.hidden_file', str(content)) content = LocalProject(False, file_exclude_regex=INCLUDE_ALL) content.add_path('/tmp/DukeDsClientTestFolder') child_names = [child.name for child in content.children[0].children] self.assertEqual(set(['.ddsignore', 'note.txt', 'results', 'scripts']), set(child_names)) class TestLocalFile(TestCase): @patch('ddsc.core.localstore.os') @patch('ddsc.core.localstore.PathData') def test_count_chunks_values(self, mock_path_data, mock_os): values = [ # file_size, bytes_per_chunk, expected (200, 10, 20), (200, 150, 2), (3, 150, 1), (0, 10, 1), # Empty files must send 1 empty chunk to DukeDS ] f = LocalFile('fakefile.txt') for file_size, bytes_per_chunk, expected in values: f.size = file_size self.assertEqual(expected, f.count_chunks(bytes_per_chunk)) @patch('ddsc.core.localstore.os') @patch('ddsc.core.localstore.PathData') def test_set_remote_values_after_send(self, mock_path_data, mock_os): f = LocalFile('fakefile.txt') self.assertEqual(f.remote_id, '') self.assertEqual(f.remote_file_hash_alg, None) self.assertEqual(f.remote_file_hash, None) self.assertEqual(f.sent_to_remote, False) f.set_remote_values_after_send( remote_id='abc123', remote_hash_alg='md5', remote_file_hash='defjkl' ) self.assertEqual(f.remote_id, 'abc123') self.assertEqual(f.remote_file_hash_alg, 'md5') self.assertEqual(f.remote_file_hash, 'defjkl') self.assertEqual(f.sent_to_remote, True) class TestLocalItemsCounter(TestCase): def test_to_str(self): local_project = Mock() local_project.children = [ Mock(kind='dds-file'), Mock(kind='dds-folder', children=[ Mock(kind='dds-file'), ]), ] counter = LocalItemsCounter(local_project) self.assertEqual(counter.to_str(prefix="Checking"), 'Checking 2 files and 1 folder.') class TestItemsToSendCounter(TestCase): def test_to_str(self): local_project = Mock(kind='dds-project') mock_file1 = Mock(kind='dds-file') mock_file1.count_chunks.return_value = 10 mock_file2 = Mock(kind='dds-file') mock_file2.count_chunks.return_value = 10 local_project.children = [ mock_file1, Mock(kind='dds-folder', children=[ mock_file2, ]), ] counter = ItemsToSendCounter(local_project, bytes_per_chunk=100) counter_str = counter.to_str(prefix="Synchronizing", local_items_count=Mock(files=3, folders=3)) self.assertEqual(counter_str, 'Synchronizing 1 new file, 2 existing files, 2 new folders and 1 existing folder.')
{ "content_hash": "b8e7806559ddabd7221eec6bc16d8a22", "timestamp": "", "source": "github", "line_count": 276, "max_line_length": 118, "avg_line_length": 40.93840579710145, "alnum_prop": 0.6327108593680857, "repo_name": "Duke-GCB/DukeDSClient", "id": "e6de966a93e997a0fe3ee0bfc02ceff64fb09f62", "size": "11299", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ddsc/core/tests/test_localstore.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "787619" }, { "name": "Shell", "bytes": "4472" } ], "symlink_target": "" }
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: avi_alertsyslogconfig author: Gaurav Rastogi (grastogi@avinetworks.com) short_description: Module for setup of AlertSyslogConfig Avi RESTful Object description: - This module is used to configure AlertSyslogConfig object - more examples at U(https://github.com/avinetworks/devops) requirements: [ avisdk ] version_added: "2.4" options: state: description: - The state that should be applied on the entity. default: present choices: ["absent","present"] description: description: - User defined description for alert syslog config. name: description: - A user-friendly name of the syslog notification. required: true syslog_servers: description: - The list of syslog servers. tenant_ref: description: - It is a reference to an object of type tenant. url: description: - Avi controller URL of the object. uuid: description: - Unique object identifier of the object. extends_documentation_fragment: - avi ''' EXAMPLES = ''' - name: Create Alert Syslog object to forward all events to external syslog server avi_alertsyslogconfig: controller: '' name: Roberts-syslog password: '' syslog_servers: - syslog_server: 10.10.0.100 syslog_server_port: 514 udp: true tenant_ref: admin username: '' ''' RETURN = ''' obj: description: AlertSyslogConfig (api/alertsyslogconfig) object returned: success, changed type: dict ''' from ansible.module_utils.basic import AnsibleModule try: from ansible.module_utils.avi import ( avi_common_argument_spec, HAS_AVI, avi_ansible_api) except ImportError: HAS_AVI = False def main(): argument_specs = dict( state=dict(default='present', choices=['absent', 'present']), description=dict(type='str',), name=dict(type='str', required=True), syslog_servers=dict(type='list',), tenant_ref=dict(type='str',), url=dict(type='str',), uuid=dict(type='str',), ) argument_specs.update(avi_common_argument_spec()) module = AnsibleModule( argument_spec=argument_specs, supports_check_mode=True) if not HAS_AVI: return module.fail_json(msg=( 'Avi python API SDK (avisdk>=17.1) is not installed. ' 'For more details visit https://github.com/avinetworks/sdk.')) return avi_ansible_api(module, 'alertsyslogconfig', set([])) if __name__ == '__main__': main()
{ "content_hash": "aafebf258dc48775d7c387c890667d3c", "timestamp": "", "source": "github", "line_count": 96, "max_line_length": 84, "avg_line_length": 29.28125, "alnum_prop": 0.6125933831376734, "repo_name": "e-gob/plataforma-kioscos-autoatencion", "id": "584feb518cca72711a03b4f179d77e31ea43df1e", "size": "3648", "binary": false, "copies": "27", "ref": "refs/heads/master", "path": "scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/network/avi/avi_alertsyslogconfig.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "41110" }, { "name": "C++", "bytes": "3804" }, { "name": "CSS", "bytes": "34823" }, { "name": "CoffeeScript", "bytes": "8521" }, { "name": "HTML", "bytes": "61168" }, { "name": "JavaScript", "bytes": "7206" }, { "name": "Makefile", "bytes": "1347" }, { "name": "PowerShell", "bytes": "584344" }, { "name": "Python", "bytes": "25506593" }, { "name": "Ruby", "bytes": "245726" }, { "name": "Shell", "bytes": "5075" } ], "symlink_target": "" }
import unittest from ctypes import * formats = "bBhHiIlLqQfd" formats = c_byte, c_ubyte, c_short, c_ushort, c_int, c_uint, \ c_long, c_ulonglong, c_float, c_double, c_longdouble class ArrayTestCase(unittest.TestCase): def test_simple(self): # create classes holding simple numeric types, and check # various properties. init = range(15, 25) for fmt in formats: alen = len(init) int_array = ARRAY(fmt, alen) ia = int_array(*init) # length of instance ok? self.assertEqual(len(ia), alen) # slot values ok? values = [ia[i] for i in range(len(init))] self.assertEqual(values, init) # change the items from operator import setitem new_values = range(42, 42+alen) [setitem(ia, n, new_values[n]) for n in range(alen)] values = [ia[i] for i in range(len(init))] self.assertEqual(values, new_values) # are the items initialized to 0? ia = int_array() values = [ia[i] for i in range(len(init))] self.assertEqual(values, [0] * len(init)) # Too many initializers should be caught self.assertRaises(IndexError, int_array, *range(alen*2)) CharArray = ARRAY(c_char, 3) ca = CharArray("a", "b", "c") # Should this work? It doesn't: # CharArray("abc") self.assertRaises(TypeError, CharArray, "abc") self.assertEqual(ca[0], "a") self.assertEqual(ca[1], "b") self.assertEqual(ca[2], "c") self.assertEqual(ca[-3], "a") self.assertEqual(ca[-2], "b") self.assertEqual(ca[-1], "c") self.assertEqual(len(ca), 3) # slicing is now supported, but not extended slicing (3-argument)! from operator import getslice, delitem self.assertRaises(TypeError, getslice, ca, 0, 1, -1) # cannot delete items self.assertRaises(TypeError, delitem, ca, 0) def test_numeric_arrays(self): alen = 5 numarray = ARRAY(c_int, alen) na = numarray() values = [na[i] for i in range(alen)] self.assertEqual(values, [0] * alen) na = numarray(*[c_int()] * alen) values = [na[i] for i in range(alen)] self.assertEqual(values, [0]*alen) na = numarray(1, 2, 3, 4, 5) values = [i for i in na] self.assertEqual(values, [1, 2, 3, 4, 5]) na = numarray(*map(c_int, (1, 2, 3, 4, 5))) values = [i for i in na] self.assertEqual(values, [1, 2, 3, 4, 5]) def test_classcache(self): self.assertTrue(not ARRAY(c_int, 3) is ARRAY(c_int, 4)) self.assertTrue(ARRAY(c_int, 3) is ARRAY(c_int, 3)) def test_from_address(self): # Failed with 0.9.8, reported by JUrner p = create_string_buffer("foo") sz = (c_char * 3).from_address(addressof(p)) self.assertEqual(sz[:], "foo") self.assertEqual(sz[::], "foo") self.assertEqual(sz[::-1], "oof") self.assertEqual(sz[::3], "f") self.assertEqual(sz[1:4:2], "o") self.assertEqual(sz.value, "foo") try: create_unicode_buffer except NameError: pass else: def test_from_addressW(self): p = create_unicode_buffer("foo") sz = (c_wchar * 3).from_address(addressof(p)) self.assertEqual(sz[:], "foo") self.assertEqual(sz[::], "foo") self.assertEqual(sz[::-1], "oof") self.assertEqual(sz[::3], "f") self.assertEqual(sz[1:4:2], "o") self.assertEqual(sz.value, "foo") def test_cache(self): # Array types are cached internally in the _ctypes extension, # in a WeakValueDictionary. Make sure the array type is # removed from the cache when the itemtype goes away. This # test will not fail, but will show a leak in the testsuite. # Create a new type: class my_int(c_int): pass # Create a new array type based on it: t1 = my_int * 1 t2 = my_int * 1 self.assertTrue(t1 is t2) if __name__ == '__main__': unittest.main()
{ "content_hash": "093aeda45da7bb608904f373fd5d71b7", "timestamp": "", "source": "github", "line_count": 134, "max_line_length": 74, "avg_line_length": 32.85820895522388, "alnum_prop": 0.531910061321826, "repo_name": "ktan2020/legacy-automation", "id": "11c44c7c4b71277486e40cd1790d44b511563ff3", "size": "4403", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "win/Lib/ctypes/test/test_arrays.py", "mode": "33261", "license": "mit", "language": [ { "name": "ActionScript", "bytes": "913" }, { "name": "Ada", "bytes": "289" }, { "name": "Assembly", "bytes": "687" }, { "name": "Boo", "bytes": "540" }, { "name": "C", "bytes": "40116" }, { "name": "C#", "bytes": "474" }, { "name": "C++", "bytes": "393" }, { "name": "CSS", "bytes": "70883" }, { "name": "ColdFusion", "bytes": "1012" }, { "name": "Common Lisp", "bytes": "1034" }, { "name": "D", "bytes": "1858" }, { "name": "Eiffel", "bytes": "426" }, { "name": "Erlang", "bytes": "9243" }, { "name": "FORTRAN", "bytes": "1810" }, { "name": "Forth", "bytes": "182" }, { "name": "Groovy", "bytes": "2366" }, { "name": "Haskell", "bytes": "816" }, { "name": "Haxe", "bytes": "455" }, { "name": "Java", "bytes": "1155" }, { "name": "JavaScript", "bytes": "69444" }, { "name": "Lua", "bytes": "795" }, { "name": "Matlab", "bytes": "1278" }, { "name": "OCaml", "bytes": "350" }, { "name": "Objective-C++", "bytes": "885" }, { "name": "PHP", "bytes": "1411" }, { "name": "Pascal", "bytes": "388" }, { "name": "Perl", "bytes": "252651" }, { "name": "Pike", "bytes": "589" }, { "name": "Python", "bytes": "42085780" }, { "name": "R", "bytes": "1156" }, { "name": "Ruby", "bytes": "480" }, { "name": "Scheme", "bytes": "282" }, { "name": "Shell", "bytes": "30518" }, { "name": "Smalltalk", "bytes": "926" }, { "name": "Squirrel", "bytes": "697" }, { "name": "Stata", "bytes": "302" }, { "name": "SystemVerilog", "bytes": "3145" }, { "name": "Tcl", "bytes": "1039" }, { "name": "TeX", "bytes": "1746" }, { "name": "VHDL", "bytes": "985" }, { "name": "Vala", "bytes": "664" }, { "name": "Verilog", "bytes": "439" }, { "name": "Visual Basic", "bytes": "2142" }, { "name": "XSLT", "bytes": "152770" }, { "name": "ooc", "bytes": "890" }, { "name": "xBase", "bytes": "769" } ], "symlink_target": "" }
import _plotly_utils.basevalidators class ColorValidator(_plotly_utils.basevalidators.ColorValidator): def __init__( self, plotly_name="color", parent_name="histogram.legendgrouptitle.font", **kwargs, ): super(ColorValidator, self).__init__( plotly_name=plotly_name, parent_name=parent_name, edit_type=kwargs.pop("edit_type", "style"), **kwargs, )
{ "content_hash": "0f452951ea553c2318548b2b8d270c88", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 66, "avg_line_length": 28.375, "alnum_prop": 0.579295154185022, "repo_name": "plotly/plotly.py", "id": "e7081da9a71891debfca6f9eea0d048155926278", "size": "454", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packages/python/plotly/plotly/validators/histogram/legendgrouptitle/font/_color.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "545" }, { "name": "JavaScript", "bytes": "2074" }, { "name": "PostScript", "bytes": "565328" }, { "name": "Python", "bytes": "31506317" }, { "name": "TypeScript", "bytes": "71337" } ], "symlink_target": "" }
from __future__ import unicode_literals from django.shortcuts import redirect from django.utils.http import urlencode from django.utils.six import string_types from django_core.utils.loading import get_setting try: # python 3 from urllib.parse import urlparse from urllib.parse import parse_qsl except ImportError: from urlparse import urlparse from urlparse import parse_qsl def is_legit_next_url(next_url): if not next_url: return False is_fully_qualified_url = (next_url.startswith('http') or next_url.startswith('//')) site_domain = get_setting('SITE_DOMAIN', default=None) parsed_url = urlparse(next_url) try: portless_netloc = parsed_url.netloc.split(':')[0] except: portless_netloc = None if is_fully_qualified_url: # domain validaition required if not site_domain: # can't validate without a SITE_DOMAIN setting return False if portless_netloc and portless_netloc.endswith(site_domain): # trusted domain return True return False if next_url.startswith('/'): # url relative to the current site return True return False def safe_redirect(next_url, default=None): """Makes sure it's a legit site to redirect to. :param default: this is the default url or named url to redirect to in the event where next_url is not legit. """ if is_legit_next_url(next_url): return redirect(next_url) if default: return redirect(default) return redirect('/') def build_url(url, querystring_params=None): """Builds a url string with properly encoded queryparams. :params url: the primary url with no querystring params. I.E. http://somesite.com/path/to/page :param querystring_params: dict of querystring key value pairs. """ if not querystring_params: return url return '{0}?{1}'.format(url, urlencode(querystring_params)) def replace_url_query_values(url, replace_vals): """Replace querystring values in a url string. >>> url = 'http://helloworld.com/some/path?test=5' >>> replace_vals = {'test': 10} >>> replace_url_query_values(url=url, replace_vals=replace_vals) 'http://helloworld.com/some/path?test=10' """ if '?' not in url: return url parsed_url = urlparse(url) query = dict(parse_qsl(parsed_url.query)) query.update(replace_vals) return '{0}?{1}'.format(url.split('?')[0], urlencode(query)) def get_query_values_from_url(url, keys=None): """Gets query string values from a url. if a list of keys are provided, then a dict will be returned. If only a single string key is provided, then only a single value will be returned. >>> url = 'http://helloworld.com/some/path?test=5&hello=world&john=doe' >>> get_query_values_from_url(url=url, keys='test') "5" >>> get_query_values_from_url(url=url, keys=['test']) {'test': '5'} >>> get_query_values_from_url(url=url, keys=['test', 'john']) {'test': '5', 'john': 'doe'} >>> get_query_values_from_url(url=url, keys=['test', 'john', 'blah']) {'test': '5', 'john': 'doe', 'blah': None} """ if not url or '?' not in url: # no query params return None parsed_url = urlparse(url) query = dict(parse_qsl(parsed_url.query)) if keys is None: return query if isinstance(keys, string_types): return query.get(keys) return {k: query.get(k) for k in keys}
{ "content_hash": "092dfecbe12848997c91650438166400", "timestamp": "", "source": "github", "line_count": 128, "max_line_length": 78, "avg_line_length": 27.9140625, "alnum_prop": 0.6328015673103834, "repo_name": "InfoAgeTech/django-core", "id": "4dc625edbc908a61e8fd80727ddd08fcec2bc9eb", "size": "3573", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "django_core/utils/urls.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "452" }, { "name": "Python", "bytes": "180676" } ], "symlink_target": "" }
""" Write the pseudocode for a function which returns the highest perfect square which is less or equal to its parameter (a positive integer). Implement this in a programming language of your choice. """ def power(number): x = 1 """ check if every square number going up from 1 fits. """ while x * x < number: x = x + 1 """ when the square number goes over the limit roll back the result by 1. """ if x * x > number: x = x - 1 return x """ number to be checked <- input number squared <- 1 while squared * squared < number to be checked squared <- squared + 1 if squared * squared > number to be checked squared <- squared - 1 output squared O(N) complexity """
{ "content_hash": "cce84c1738449daa765bcbdc164cf772", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 203, "avg_line_length": 29.84, "alnum_prop": 0.6260053619302949, "repo_name": "megasan/210-CT", "id": "c818f8692158f994102b0e27d94e25e4d344e460", "size": "746", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "coursework 3.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "13196" } ], "symlink_target": "" }
"""Camera image classification demo code. Runs continuous image classification on camera frames and prints detected object classes. Example: image_classification_camera.py --num_frames 10 """ import argparse import contextlib from aiy.vision.inference import CameraInference from aiy.vision.models import image_classification from picamera import PiCamera def classes_info(classes): return ', '.join('%s (%.2f)' % pair for pair in classes) @contextlib.contextmanager def CameraPreview(camera, enabled): if enabled: camera.start_preview() try: yield finally: if enabled: camera.stop_preview() def main(): parser = argparse.ArgumentParser('Image classification camera inference example.') parser.add_argument('--num_frames', '-n', type=int, default=None, help='Sets the number of frames to run for, otherwise runs forever.') parser.add_argument('--num_objects', '-c', type=int, default=3, help='Sets the number of object interences to print.') parser.add_argument('--nopreview', dest='preview', action='store_false', default=True, help='Enable camera preview') args = parser.parse_args() with PiCamera(sensor_mode=4, framerate=30) as camera, \ CameraPreview(camera, enabled=args.preview), \ CameraInference(image_classification.model()) as inference: for result in inference.run(args.num_frames): classes = image_classification.get_classes(result, top_k=args.num_objects) print(classes_info(classes)) if classes: camera.annotate_text = '%s (%.2f)' % classes[0] if __name__ == '__main__': main()
{ "content_hash": "3824173e3a7370a022dc7583927be908", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 90, "avg_line_length": 34.3469387755102, "alnum_prop": 0.6761734997029115, "repo_name": "google/aiyprojects-raspbian", "id": "f70548590f17b2348d1c6961c358ea744b865263", "size": "2283", "binary": false, "copies": "1", "ref": "refs/heads/aiyprojects", "path": "src/examples/vision/image_classification_camera.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "333768" }, { "name": "HTML", "bytes": "468" }, { "name": "JavaScript", "bytes": "31073" }, { "name": "Jupyter Notebook", "bytes": "19786" }, { "name": "Makefile", "bytes": "10938" }, { "name": "Python", "bytes": "400565" }, { "name": "Shell", "bytes": "7540" } ], "symlink_target": "" }
from django import forms from django.forms.formsets import all_valid from django.core.urlresolvers import reverse from django.contrib.admin.exceptions import DisallowedModelAdminToField from django.contrib.admin import widgets, helpers from django.contrib.admin.utils import ( unquote, flatten_fieldsets, get_deleted_objects, ) from django.contrib.admin.options import ( TO_FIELD_VAR, IS_POPUP_VAR, get_ul_class, csrf_protect_m, ) from django.utils import six from django.utils.html import escape from django.core.exceptions import PermissionDenied try: from django.db.models.related import RelatedObject except ImportError: from django.db.models.fields.related import ForeignObjectRel as RelatedObject # noqa from django.http import Http404 from django.template.response import TemplateResponse from django.utils.functional import curry from django.utils.text import capfirst from django.utils.translation import ugettext as _ from django.forms.forms import pretty_name from django.conf import settings from django.apps import apps from django_mongoengine.utils import force_text from django_mongoengine.fields import (ListField, EmbeddedDocumentField, ReferenceField, StringField) from django_mongoengine.mongo_admin.util import RelationWrapper from django_mongoengine.utils.wrappers import copy_class from django_mongoengine.utils.monkey import get_patched_django_module from django_mongoengine.forms.documents import ( DocumentForm, inlineformset_factory, BaseInlineDocumentFormSet) def get_content_type_for_model(obj): return apps.get_model("contenttypes.ContentType")() djmod = get_patched_django_module( "django.contrib.admin.options", get_content_type_for_model=get_content_type_for_model, ) class BaseDocumentAdmin(djmod.BaseModelAdmin): """Functionality common to both ModelAdmin and InlineAdmin.""" form = DocumentForm def formfield_for_dbfield(self, db_field, **kwargs): """ Hook for specifying the form Field instance for a given database Field instance. If kwargs are given, they're passed to the form Field's constructor. """ request = kwargs.pop("request", None) # If the field specifies choices, we don't need to look for special # admin widgets - we just need to use a select widget of some kind. if db_field.choices is not None: return self.formfield_for_choice_field(db_field, request, **kwargs) if isinstance(db_field, ListField) and isinstance(db_field.field, ReferenceField): return self.formfield_for_manytomany(db_field, request, **kwargs) # handle RelatedFields if isinstance(db_field, ReferenceField): # For non-raw_id fields, wrap the widget with a wrapper that adds # extra HTML -- the "add other" interface -- to the end of the # rendered output. formfield can be None if it came from a # OneToOneField with parent_link=True or a M2M intermediary. form_field = db_field.formfield(**kwargs) if db_field.name not in self.raw_id_fields: related_modeladmin = self.admin_site._registry.get(db_field.document_type) can_add_related = bool(related_modeladmin and related_modeladmin.has_add_permission(request)) form_field.widget = widgets.RelatedFieldWidgetWrapper( form_field.widget, RelationWrapper(db_field.document_type), self.admin_site, can_add_related=can_add_related) return form_field if isinstance(db_field, StringField): if db_field.max_length is None: kwargs = dict({'widget': widgets.AdminTextareaWidget}, **kwargs) else: kwargs = dict({'widget': widgets.AdminTextInputWidget}, **kwargs) return db_field.formfield(**kwargs) # If we've got overrides for the formfield defined, use 'em. **kwargs # passed to formfield_for_dbfield override the defaults. for klass in db_field.__class__.mro(): if klass in self.formfield_overrides: kwargs = dict(self.formfield_overrides[klass], **kwargs) return db_field.formfield(**kwargs) # For any other type of field, just call its formfield() method. return db_field.formfield(**kwargs) def formfield_for_choice_field(self, db_field, request=None, **kwargs): """ Get a form Field for a database Field that has declared choices. """ # If the field is named as a radio_field, use a RadioSelect if db_field.name in self.radio_fields: # Avoid stomping on custom widget/choices arguments. if 'widget' not in kwargs: kwargs['widget'] = widgets.AdminRadioSelect(attrs={ 'class': get_ul_class(self.radio_fields[db_field.name]), }) if 'choices' not in kwargs: kwargs['choices'] = db_field.get_choices( include_blank = db_field.blank, blank_choice=[('', _('None'))] ) return db_field.formfield(**kwargs) def formfield_for_manytomany(self, db_field, request=None, **kwargs): """ Get a form Field for a ManyToManyField. """ db = kwargs.get('using') if db_field.name in self.raw_id_fields: kwargs['widget'] = widgets.ManyToManyRawIdWidget(db_field.rel, using=db) kwargs['help_text'] = '' elif db_field.name in (list(self.filter_vertical) + list(self.filter_horizontal)): kwargs['widget'] = widgets.FilteredSelectMultiple(pretty_name(db_field.name), (db_field.name in self.filter_vertical)) return db_field.formfield(**kwargs) def get_view_on_site_url(self, obj=None): if obj is None or not self.view_on_site: return None if callable(self.view_on_site): return self.view_on_site(obj) elif self.view_on_site and hasattr(obj, 'get_absolute_url'): # use the ContentType lookup if view_on_site is True return reverse('admin:view_on_site', kwargs={ 'content_type_id': 0, 'object_id': obj.pk }) @copy_class(djmod.ModelAdmin) class DocumentAdmin(BaseDocumentAdmin): "Encapsulates all admin options and functionality for a given model." def __init__(self, model, admin_site): self.model = model self.opts = model._meta self.admin_site = admin_site super(DocumentAdmin, self).__init__() self.log = not settings.DATABASES.get('default', {}).get( 'ENGINE', 'django.db.backends.dummy' ).endswith('dummy') # XXX: add inline init somewhere def _get_inline_instances(self): for f in six.itervalues(self.model._fields): if not (isinstance(f, ListField) and isinstance(getattr(f, 'field', None), EmbeddedDocumentField)) and not isinstance(f, EmbeddedDocumentField): continue # Should only reach here if there is an embedded document... if f.name in self.exclude: continue document = self.model() if hasattr(f, 'field') and f.field is not None: embedded_document = f.field.document_type elif hasattr(f, 'document_type'): embedded_document = f.document_type else: # For some reason we found an embedded field were either # the field attribute or the field's document type is None. # This shouldn't happen, but apparently does happen: # https://github.com/jschrewe/django-mongoadmin/issues/4 # The solution for now is to ignore that field entirely. continue inline_admin = EmbeddedStackedDocumentAdmin # check if there is an admin for the embedded document in # self.inlines. If there is, use this, else use default. for inline_class in self.inlines: if inline_class.document == embedded_document: inline_admin = inline_class inline_instance = inline_admin(f, document, self.admin_site) # if f is an EmbeddedDocumentField set the maximum allowed form instances to one if isinstance(f, EmbeddedDocumentField): inline_instance.max_num = 1 # exclude field from normal form if f.name not in self.exclude: self.exclude.append(f.name) if f.name == 'created_at' and f.name not in self.exclude: self.exclude.append(f.name) self.inline_instances.append(inline_instance) def get_changelist_form(self, request, **kwargs): kwargs.setdefault("form", DocumentForm) return super(DocumentAdmin, self).get_changelist_form(request, **kwargs) def get_changelist_formset(self, request, **kwargs): kwargs.setdefault("form", DocumentForm) return super(DocumentAdmin, self).get_changelist_formset(request, **kwargs) def log_addition(self, request, object, message): """ Log that an object has been successfully added. The default implementation creates an admin LogEntry object. """ if not self.log: return super(DocumentAdmin, self).log_addition(request, object, message) def log_change(self, request, object, message): """ Log that an object has been successfully changed. The default implementation creates an admin LogEntry object. """ if not self.log: return super(DocumentAdmin, self).log_change(request, object, message) def log_deletion(self, request, object, object_repr): """ Log that an object will be deleted. Note that this method is called before the deletion. The default implementation creates an admin LogEntry object. """ if not self.log: return super(DocumentAdmin, self).log_deletion(request, object, object_repr) @csrf_protect_m def changeform_view(self, request, object_id=None, form_url='', extra_context=None): to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR)) if to_field and not self.to_field_allowed(request, to_field): raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field) model = self.model opts = model._meta if request.method == 'POST' and '_saveasnew' in request.POST: object_id = None add = object_id is None if add: if not self.has_add_permission(request): raise PermissionDenied obj = None else: obj = self.get_object(request, unquote(object_id), to_field) if not self.has_change_permission(request, obj): raise PermissionDenied if obj is None: raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % { 'name': force_text(opts.verbose_name), 'key': escape(object_id)}) ModelForm = self.get_form(request, obj) if request.method == 'POST': form = ModelForm(request.POST, request.FILES, instance=obj) if form.is_valid(): form_validated = True new_object = self.save_form(request, form, change=not add) else: form_validated = False new_object = form.instance formsets, inline_instances = self._create_formsets(request, new_object, change=not add) if all_valid(formsets) and form_validated: self.save_model(request, new_object, form, not add) self.save_related(request, form, formsets, not add) change_message = self.construct_change_message(request, form, formsets, add) if add: self.log_addition(request, new_object, change_message) return self.response_add(request, new_object) else: self.log_change(request, new_object, change_message) return self.response_change(request, new_object) else: form_validated = False else: if add: initial = self.get_changeform_initial_data(request) form = ModelForm(initial=initial) formsets, inline_instances = self._create_formsets(request, form.instance, change=False) else: form = ModelForm(instance=obj) formsets, inline_instances = self._create_formsets(request, obj, change=True) adminForm = helpers.AdminForm( form, list(self.get_fieldsets(request, obj)), self.get_prepopulated_fields(request, obj), self.get_readonly_fields(request, obj), model_admin=self) media = self.media + adminForm.media inline_formsets = self.get_inline_formsets(request, formsets, inline_instances, obj) for inline_formset in inline_formsets: media = media + inline_formset.media context = dict(self.admin_site.each_context(request), title=(_('Add %s') if add else _('Change %s')) % force_text(opts.verbose_name), adminform=adminForm, object_id=object_id, original=obj, is_popup=(IS_POPUP_VAR in request.POST or IS_POPUP_VAR in request.GET), to_field=to_field, media=media, inline_admin_formsets=inline_formsets, errors=helpers.AdminErrorList(form, formsets), preserved_filters=self.get_preserved_filters(request), ) # Hide the "Save" and "Save and continue" buttons if "Save as New" was # previously chosen to prevent the interface from getting confusing. if request.method == 'POST' and not form_validated and "_saveasnew" in request.POST: context['show_save'] = False context['show_save_and_continue'] = False # Use the change template instead of the add template. add = False context.update(extra_context or {}) return self.render_change_form(request, context, add=add, change=not add, obj=obj, form_url=form_url) @csrf_protect_m def delete_view(self, request, object_id, extra_context=None): "The 'delete' admin view for this model." opts = self.model._meta app_label = opts.app_label to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR)) if to_field and not self.to_field_allowed(request, to_field): raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field) obj = self.get_object(request, unquote(object_id), to_field) if not self.has_delete_permission(request, obj): raise PermissionDenied if obj is None: raise Http404( _('%(name)s object with primary key %(key)r does not exist.') % {'name': force_text(opts.verbose_name), 'key': escape(object_id)} ) from django.db import router using = router.db_for_write(self.model) # Populate deleted_objects, a data structure of all related objects that # will also be deleted. (deleted_objects, model_count, perms_needed, protected) = get_deleted_objects( [obj], opts, request.user, self.admin_site, using) if request.POST: # The user has already confirmed the deletion. if perms_needed: raise PermissionDenied obj_display = force_text(obj) attr = str(to_field) if to_field else opts.pk.attname obj_id = obj.serializable_value(attr) self.log_deletion(request, obj, obj_display) self.delete_model(request, obj) return self.response_delete(request, obj_display, obj_id) object_name = force_text(opts.verbose_name) if perms_needed or protected: title = _("Cannot delete %(name)s") % {"name": object_name} else: title = _("Are you sure?") context = dict( self.admin_site.each_context(request), title=title, object_name=object_name, object=obj, deleted_objects=deleted_objects, model_count=dict(model_count).items(), perms_lacking=perms_needed, protected=protected, opts=opts, app_label=app_label, preserved_filters=self.get_preserved_filters(request), is_popup=(IS_POPUP_VAR in request.POST or IS_POPUP_VAR in request.GET), to_field=to_field, ) context.update(extra_context or {}) return self.render_delete_form(request, context) def history_view(self, request, object_id, extra_context=None): "The 'history' admin view for this model." from django.contrib.admin.models import LogEntry # First check if the user can see this history. model = self.model obj = self.get_object(request, unquote(object_id)) if obj is None: raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % { 'name': force_text(model._meta.verbose_name), 'key': escape(object_id), }) if not self.has_change_permission(request, obj): raise PermissionDenied # Then get the history for this object. opts = model._meta app_label = opts.app_label action_list = LogEntry.objects.filter( object_id=unquote(object_id), content_type=get_content_type_for_model(model) ).select_related().order_by('action_time') context = dict(self.admin_site.each_context(request), title=_('Change history: %s') % force_text(obj), action_list=action_list, module_name=capfirst(force_text(opts.verbose_name_plural)), object=obj, opts=opts, preserved_filters=self.get_preserved_filters(request), ) context.update(extra_context or {}) request.current_app = self.admin_site.name return TemplateResponse(request, self.object_history_template or [ "admin/%s/%s/object_history.html" % (app_label, opts.model_name), "admin/%s/object_history.html" % app_label, "admin/object_history.html" ], context) class InlineDocumentAdmin(BaseDocumentAdmin): """ Options for inline editing of ``model`` instances. Provide ``name`` to specify the attribute name of the ``ForeignKey`` from ``model`` to its parent. This is required if ``model`` has more than one ``ForeignKey`` to its parent. """ document = None fk_name = None formset = BaseInlineDocumentFormSet extra = 1 max_num = None template = None verbose_name = None verbose_name_plural = None can_delete = True def __init__(self, parent_document, admin_site): self.admin_site = admin_site self.parent_document = parent_document self.opts = self.model._meta super(InlineDocumentAdmin, self).__init__() if self.verbose_name is None: self.verbose_name = self.model._meta.verbose_name if self.verbose_name_plural is None: self.verbose_name_plural = self.model._meta.verbose_name_plural def _media(self): from django.conf import settings js = ['js/jquery.min.js', 'js/jquery.init.js', 'js/inlines.min.js'] if self.prepopulated_fields: js.append('js/urlify.js') js.append('js/prepopulate.min.js') if self.filter_vertical or self.filter_horizontal: js.extend(['js/SelectBox.js' , 'js/SelectFilter2.js']) return forms.Media(js=['%s%s' % (settings.ADMIN_MEDIA_PREFIX, url) for url in js]) media = property(_media) def get_formset(self, request, obj=None, **kwargs): """Returns a BaseInlineFormSet class for use in admin add/change views.""" if self.declared_fieldsets: fields = flatten_fieldsets(self.declared_fieldsets) else: fields = None if self.exclude is None: exclude = [] else: exclude = list(self.exclude) exclude.extend(kwargs.get("exclude", [])) exclude.extend(self.get_readonly_fields(request, obj)) # if exclude is an empty list we use None, since that's the actual # default exclude = exclude or None defaults = { "form": self.form, "formset": self.formset, "fields": fields, "exclude": exclude, "formfield_callback": curry(self.formfield_for_dbfield, request=request), "extra": self.extra, "max_num": self.max_num, "can_delete": self.can_delete, } defaults.update(kwargs) return inlineformset_factory(self.model, **defaults) def get_fieldsets(self, request, obj=None): if self.declared_fieldsets: return self.declared_fieldsets form = self.get_formset(request).form fields = form.base_fields.keys() + list(self.get_readonly_fields(request, obj)) return [(None, {'fields': fields})] class EmbeddedDocumentAdmin(InlineDocumentAdmin): def __init__(self, field, parent_document, admin_site): if hasattr(field, 'field'): self.model = field.field.document_type else: self.model = field.document_type self.doc_list = getattr(parent_document, field.name) self.field = field if not isinstance(self.doc_list, list): self.doc_list = [] self.rel_name = field.name if self.verbose_name is None: self.verbose_name = "Field: %s (Document: %s)" % (capfirst(field.name), self.model._meta.verbose_name) if self.verbose_name_plural is None: self.verbose_name_plural = "Field: %s (Document: %s)" % (capfirst(field.name), self.model._meta.verbose_name_plural) super(EmbeddedDocumentAdmin, self).__init__(parent_document, admin_site) def queryset(self, request): if isinstance(self.field, ListField): # list field self.doc_list = getattr(self.parent_document, self.rel_name) else: # embedded field emb_doc = getattr(self.parent_document, self.rel_name) if emb_doc is None: self.doc_list = [] else: self.doc_list = [emb_doc] return self.doc_list class StackedDocumentInline(InlineDocumentAdmin): template = 'admin/edit_inline/stacked.html' class EmbeddedStackedDocumentAdmin(EmbeddedDocumentAdmin): template = 'admin/edit_inline/stacked.html' class TabularDocumentInline(InlineDocumentAdmin): template = 'admin/edit_inline/tabular.html'
{ "content_hash": "f420e5210310bbe7da1846e726aa349b", "timestamp": "", "source": "github", "line_count": 569, "max_line_length": 156, "avg_line_length": 41.065026362038665, "alnum_prop": 0.6140974064880595, "repo_name": "unixhot/opencmdb", "id": "ca293c7acb9b3ca7b0a61ef6b1ade2b6a9cc1e77", "size": "23366", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "django_mongoengine/mongo_admin/options.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "61198" }, { "name": "HTML", "bytes": "44545" }, { "name": "JavaScript", "bytes": "503133" }, { "name": "Python", "bytes": "244232" }, { "name": "Vue", "bytes": "95038" } ], "symlink_target": "" }
from jedi.parser import Parser from jedi._compatibility import u def test_import_is_nested(): imp = Parser(u('import ')).module.imports[0] # should not raise an error, even if it's not a complete import assert not imp.is_nested()
{ "content_hash": "917adfedea2b180e55c621cfe9cabd72", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 67, "avg_line_length": 30.5, "alnum_prop": 0.7090163934426229, "repo_name": "blueyed/jedi", "id": "850f1e3bcf5859f15330af44c2b0aa04509b8921", "size": "244", "binary": false, "copies": "8", "ref": "refs/heads/master", "path": "test/test_parser/test_representation.py", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
"""Hierarchical Agglomerative Clustering These routines perform some hierarchical agglomerative clustering of some input data. Authors : Vincent Michel, Bertrand Thirion, Alexandre Gramfort, Gael Varoquaux License: BSD 3 clause """ from heapq import heapify, heappop, heappush, heappushpop import warnings import sys import numpy as np from scipy import sparse from ..base import BaseEstimator, ClusterMixin from ..externals.joblib import Memory from ..externals import six from ..metrics.pairwise import paired_distances, pairwise_distances from ..utils import check_array from ..utils.sparsetools import connected_components from . import _hierarchical from ._feature_agglomeration import AgglomerationTransform from ..utils.fast_dict import IntFloatDict if sys.version_info[0] > 2: xrange = range ############################################################################### # For non fully-connected graphs def _fix_connectivity(X, connectivity, n_components=None, affinity="euclidean"): """ Fixes the connectivity matrix - copies it - makes it symmetric - converts it to LIL if necessary - completes it if necessary """ n_samples = X.shape[0] if (connectivity.shape[0] != n_samples or connectivity.shape[1] != n_samples): raise ValueError('Wrong shape for connectivity matrix: %s ' 'when X is %s' % (connectivity.shape, X.shape)) # Make the connectivity matrix symmetric: connectivity = connectivity + connectivity.T # Convert connectivity matrix to LIL if not sparse.isspmatrix_lil(connectivity): if not sparse.isspmatrix(connectivity): connectivity = sparse.lil_matrix(connectivity) else: connectivity = connectivity.tolil() # Compute the number of nodes n_components, labels = connected_components(connectivity) if n_components > 1: warnings.warn("the number of connected components of the " "connectivity matrix is %d > 1. Completing it to avoid " "stopping the tree early." % n_components, stacklevel=2) # XXX: Can we do without completing the matrix? for i in xrange(n_components): idx_i = np.where(labels == i)[0] Xi = X[idx_i] for j in xrange(i): idx_j = np.where(labels == j)[0] Xj = X[idx_j] D = pairwise_distances(Xi, Xj, metric=affinity) ii, jj = np.where(D == np.min(D)) ii = ii[0] jj = jj[0] connectivity[idx_i[ii], idx_j[jj]] = True connectivity[idx_j[jj], idx_i[ii]] = True n_components = 1 return connectivity ############################################################################### # Hierarchical tree building functions def ward_tree(X, connectivity=None, n_components=None, n_clusters=None, return_distance=False): """Ward clustering based on a Feature matrix. Recursively merges the pair of clusters that minimally increases within-cluster variance. The inertia matrix uses a Heapq-based representation. This is the structured version, that takes into account some topological structure between samples. Parameters ---------- X : array, shape (n_samples, n_features) feature matrix representing n_samples samples to be clustered connectivity : sparse matrix (optional). connectivity matrix. Defines for each sample the neighboring samples following a given structure of the data. The matrix is assumed to be symmetric and only the upper triangular half is used. Default is None, i.e, the Ward algorithm is unstructured. n_components : int (optional) Number of connected components. If None the number of connected components is estimated from the connectivity matrix. n_clusters : int (optional) Stop early the construction of the tree at n_clusters. This is useful to decrease computation time if the number of clusters is not small compared to the number of samples. In this case, the complete tree is not computed, thus the 'children' output is of limited use, and the 'parents' output should rather be used. This option is valid only when specifying a connectivity matrix. return_distance: bool (optional) If True, return the distance between the clusters. Returns ------- children : 2D array, shape (n_nodes, 2) The children of each non-leaf node. Values less than `n_samples` correspond to leaves of the tree which are the original samples. A node `i` greater than or equal to `n_samples` is a non-leaf node and has children `children_[i - n_samples]`. Alternatively at the i-th iteration, children[i][0] and children[i][1] are merged to form node `n_samples + i` n_components : int The number of connected components in the graph. n_leaves : int The number of leaves in the tree parents : 1D array, shape (n_nodes, ) or None The parent of each node. Only returned when a connectivity matrix is specified, elsewhere 'None' is returned. distances : 1D array, shape (n_nodes, ) Only returned if return_distance is set to True (for compatibility). The distances between the centers of the nodes. `distances[i]` corresponds to a weighted euclidean distance between the nodes `children[i, 1]` and `children[i, 2]`. If the nodes refer to leaves of the tree, then `distances[i]` is their unweighted euclidean distance. Distances are updated in the following way (from scipy.hierarchy.linkage): The new entry :math:`d(u,v)` is computed as follows, .. math:: d(u,v) = \\sqrt{\\frac{|v|+|s|} {T}d(v,s)^2 + \\frac{|v|+|t|} {T}d(v,t)^2 - \\frac{|v|} {T}d(s,t)^2} where :math:`u` is the newly joined cluster consisting of clusters :math:`s` and :math:`t`, :math:`v` is an unused cluster in the forest, :math:`T=|v|+|s|+|t|`, and :math:`|*|` is the cardinality of its argument. This is also known as the incremental algorithm. """ X = np.asarray(X) if X.ndim == 1: X = np.reshape(X, (-1, 1)) n_samples, n_features = X.shape if connectivity is None: from scipy.cluster import hierarchy # imports PIL if n_clusters is not None: warnings.warn('Partial build of the tree is implemented ' 'only for structured clustering (i.e. with ' 'explicit connectivity). The algorithm ' 'will build the full tree and only ' 'retain the lower branches required ' 'for the specified number of clusters', stacklevel=2) out = hierarchy.ward(X) children_ = out[:, :2].astype(np.intp) if return_distance: distances = out[:, 2] return children_, 1, n_samples, None, distances else: return children_, 1, n_samples, None connectivity = _fix_connectivity(X, connectivity, n_components=n_components) if n_clusters is None: n_nodes = 2 * n_samples - 1 else: if n_clusters > n_samples: raise ValueError('Cannot provide more clusters than samples. ' '%i n_clusters was asked, and there are %i samples.' % (n_clusters, n_samples)) n_nodes = 2 * n_samples - n_clusters # create inertia matrix coord_row = [] coord_col = [] A = [] for ind, row in enumerate(connectivity.rows): A.append(row) # We keep only the upper triangular for the moments # Generator expressions are faster than arrays on the following row = [i for i in row if i < ind] coord_row.extend(len(row) * [ind, ]) coord_col.extend(row) coord_row = np.array(coord_row, dtype=np.intp, order='C') coord_col = np.array(coord_col, dtype=np.intp, order='C') # build moments as a list moments_1 = np.zeros(n_nodes, order='C') moments_1[:n_samples] = 1 moments_2 = np.zeros((n_nodes, n_features), order='C') moments_2[:n_samples] = X inertia = np.empty(len(coord_row), dtype=np.float, order='C') _hierarchical.compute_ward_dist(moments_1, moments_2, coord_row, coord_col, inertia) inertia = list(six.moves.zip(inertia, coord_row, coord_col)) heapify(inertia) # prepare the main fields parent = np.arange(n_nodes, dtype=np.intp) used_node = np.ones(n_nodes, dtype=bool) children = [] if return_distance: distances = np.empty(n_nodes - n_samples) not_visited = np.empty(n_nodes, dtype=np.int8, order='C') # recursive merge loop for k in range(n_samples, n_nodes): # identify the merge while True: inert, i, j = heappop(inertia) if used_node[i] and used_node[j]: break parent[i], parent[j] = k, k children.append((i, j)) used_node[i] = used_node[j] = False if return_distance: # store inertia value distances[k - n_samples] = inert # update the moments moments_1[k] = moments_1[i] + moments_1[j] moments_2[k] = moments_2[i] + moments_2[j] # update the structure matrix A and the inertia matrix coord_col = [] not_visited.fill(1) not_visited[k] = 0 _hierarchical._get_parents(A[i], coord_col, parent, not_visited) _hierarchical._get_parents(A[j], coord_col, parent, not_visited) # List comprehension is faster than a for loop [A[l].append(k) for l in coord_col] A.append(coord_col) coord_col = np.array(coord_col, dtype=np.intp, order='C') coord_row = np.empty(coord_col.shape, dtype=np.intp, order='C') coord_row.fill(k) n_additions = len(coord_row) ini = np.empty(n_additions, dtype=np.float, order='C') _hierarchical.compute_ward_dist(moments_1, moments_2, coord_row, coord_col, ini) # List comprehension is faster than a for loop [heappush(inertia, (ini[idx], k, coord_col[idx])) for idx in range(n_additions)] # Separate leaves in children (empty lists up to now) n_leaves = n_samples # sort children to get consistent output with unstructured version children = [c[::-1] for c in children] children = np.array(children) # return numpy array for efficient caching if return_distance: # 2 is scaling factor to compare w/ unstructured version distances = np.sqrt(2. * distances) return children, n_components, n_leaves, parent, distances else: return children, n_components, n_leaves, parent # average and complete linkage def linkage_tree(X, connectivity=None, n_components=None, n_clusters=None, linkage='complete', affinity="euclidean", return_distance=False): """Linkage agglomerative clustering based on a Feature matrix. The inertia matrix uses a Heapq-based representation. This is the structured version, that takes into account some topological structure between samples. Parameters ---------- X : array, shape (n_samples, n_features) feature matrix representing n_samples samples to be clustered connectivity : sparse matrix (optional). connectivity matrix. Defines for each sample the neighboring samples following a given structure of the data. The matrix is assumed to be symmetric and only the upper triangular half is used. Default is None, i.e, the Ward algorithm is unstructured. n_components : int (optional) Number of connected components. If None the number of connected components is estimated from the connectivity matrix. n_clusters : int (optional) Stop early the construction of the tree at n_clusters. This is useful to decrease computation time if the number of clusters is not small compared to the number of samples. In this case, the complete tree is not computed, thus the 'children' output is of limited use, and the 'parents' output should rather be used. This option is valid only when specifying a connectivity matrix. linkage : {"average", "complete"}, optional, default: "complete" Which linkage critera to use. The linkage criterion determines which distance to use between sets of observation. - average uses the average of the distances of each observation of the two sets - complete or maximum linkage uses the maximum distances between all observations of the two sets. affinity : string or callable, optional, default: "euclidean". which metric to use. Can be "euclidean", "manhattan", or any distance know to paired distance (see metric.pairwise) return_distance : bool, default False whether or not to return the distances between the clusters. Returns ------- children : 2D array, shape (n_nodes, 2) The children of each non-leaf node. Values less than `n_samples` correspond to leaves of the tree which are the original samples. A node `i` greater than or equal to `n_samples` is a non-leaf node and has children `children_[i - n_samples]`. Alternatively at the i-th iteration, children[i][0] and children[i][1] are merged to form node `n_samples + i` n_components : int The number of connected components in the graph. n_leaves : int The number of leaves in the tree. parents : 1D array, shape (n_nodes, ) or None The parent of each node. Only returned when a connectivity matrix is specified, elsewhere 'None' is returned. distances : ndarray, shape (n_nodes,) Returned when return_distance is set to True. distances[i] refers to the distance between children[i][0] and children[i][1] when they are merged. See also -------- ward_tree : hierarchical clustering with ward linkage """ X = np.asarray(X) if X.ndim == 1: X = np.reshape(X, (-1, 1)) n_samples, n_features = X.shape linkage_choices = {'complete': _hierarchical.max_merge, 'average': _hierarchical.average_merge, } try: join_func = linkage_choices[linkage] except KeyError: raise ValueError( 'Unknown linkage option, linkage should be one ' 'of %s, but %s was given' % (linkage_choices.keys(), linkage)) if connectivity is None: from scipy.cluster import hierarchy # imports PIL if n_clusters is not None: warnings.warn('Partial build of the tree is implemented ' 'only for structured clustering (i.e. with ' 'explicit connectivity). The algorithm ' 'will build the full tree and only ' 'retain the lower branches required ' 'for the specified number of clusters', stacklevel=2) if affinity == 'precomputed': # for the linkage function of hierarchy to work on precomputed # data, provide as first argument an ndarray of the shape returned # by pdist: it is a flat array containing the upper triangular of # the distance matrix. i, j = np.triu_indices(X.shape[0], k=1) X = X[i, j] elif affinity == 'l2': # Translate to something understood by scipy affinity = 'euclidean' elif affinity in ('l1', 'manhattan'): affinity = 'cityblock' elif callable(affinity): X = affinity(X) i, j = np.triu_indices(X.shape[0], k=1) X = X[i, j] out = hierarchy.linkage(X, method=linkage, metric=affinity) children_ = out[:, :2].astype(np.int) if return_distance: distances = out[:, 2] return children_, 1, n_samples, None, distances return children_, 1, n_samples, None connectivity = _fix_connectivity(X, connectivity, n_components=n_components) connectivity = connectivity.tocoo() # Put the diagonal to zero diag_mask = (connectivity.row != connectivity.col) connectivity.row = connectivity.row[diag_mask] connectivity.col = connectivity.col[diag_mask] connectivity.data = connectivity.data[diag_mask] del diag_mask if affinity == 'precomputed': distances = X[connectivity.row, connectivity.col] else: # FIXME We compute all the distances, while we could have only computed # the "interesting" distances distances = paired_distances(X[connectivity.row], X[connectivity.col], metric=affinity) connectivity.data = distances if n_clusters is None: n_nodes = 2 * n_samples - 1 else: assert n_clusters <= n_samples n_nodes = 2 * n_samples - n_clusters if return_distance: distances = np.empty(n_nodes - n_samples) # create inertia heap and connection matrix A = np.empty(n_nodes, dtype=object) inertia = list() # LIL seems to the best format to access the rows quickly, # without the numpy overhead of slicing CSR indices and data. connectivity = connectivity.tolil() # We are storing the graph in a list of IntFloatDict for ind, (data, row) in enumerate(zip(connectivity.data, connectivity.rows)): A[ind] = IntFloatDict(np.asarray(row, dtype=np.intp), np.asarray(data, dtype=np.float64)) # We keep only the upper triangular for the heap # Generator expressions are faster than arrays on the following inertia.extend(_hierarchical.WeightedEdge(d, ind, r) for r, d in zip(row, data) if r < ind) del connectivity heapify(inertia) # prepare the main fields parent = np.arange(n_nodes, dtype=np.intp) used_node = np.ones(n_nodes, dtype=np.intp) children = [] # recursive merge loop for k in xrange(n_samples, n_nodes): # identify the merge while True: edge = heappop(inertia) if used_node[edge.a] and used_node[edge.b]: break i = edge.a j = edge.b if return_distance: # store distances distances[k - n_samples] = edge.weight parent[i] = parent[j] = k children.append((i, j)) # Keep track of the number of elements per cluster n_i = used_node[i] n_j = used_node[j] used_node[k] = n_i + n_j used_node[i] = used_node[j] = False # update the structure matrix A and the inertia matrix # a clever 'min', or 'max' operation between A[i] and A[j] coord_col = join_func(A[i], A[j], used_node, n_i, n_j) for l, d in coord_col: A[l].append(k, d) # Here we use the information from coord_col (containing the # distances) to update the heap heappush(inertia, _hierarchical.WeightedEdge(d, k, l)) A[k] = coord_col # Clear A[i] and A[j] to save memory A[i] = A[j] = 0 # Separate leaves in children (empty lists up to now) n_leaves = n_samples # # return numpy array for efficient caching children = np.array(children)[:, ::-1] if return_distance: return children, n_components, n_leaves, parent, distances return children, n_components, n_leaves, parent # Matching names to tree-building strategies def _complete_linkage(*args, **kwargs): kwargs['linkage'] = 'complete' return linkage_tree(*args, **kwargs) def _average_linkage(*args, **kwargs): kwargs['linkage'] = 'average' return linkage_tree(*args, **kwargs) _TREE_BUILDERS = dict( ward=ward_tree, complete=_complete_linkage, average=_average_linkage, ) ############################################################################### # Functions for cutting hierarchical clustering tree def _hc_cut(n_clusters, children, n_leaves): """Function cutting the ward tree for a given number of clusters. Parameters ---------- n_clusters : int or ndarray The number of clusters to form. children : list of pairs. Length of n_nodes The children of each non-leaf node. Values less than `n_samples` refer to leaves of the tree. A greater value `i` indicates a node with children `children[i - n_samples]`. n_leaves : int Number of leaves of the tree. Returns ------- labels : array [n_samples] cluster labels for each point """ if n_clusters > n_leaves: raise ValueError('Cannot extract more clusters than samples: ' '%s clusters where given for a tree with %s leaves.' % (n_clusters, n_leaves)) # In this function, we store nodes as a heap to avoid recomputing # the max of the nodes: the first element is always the smallest # We use negated indices as heaps work on smallest elements, and we # are interested in largest elements # children[-1] is the root of the tree nodes = [-(max(children[-1]) + 1)] for i in xrange(n_clusters - 1): # As we have a heap, nodes[0] is the smallest element these_children = children[-nodes[0] - n_leaves] # Insert the 2 children and remove the largest node heappush(nodes, -these_children[0]) heappushpop(nodes, -these_children[1]) label = np.zeros(n_leaves, dtype=np.intp) for i, node in enumerate(nodes): label[_hierarchical._hc_get_descendent(-node, children, n_leaves)] = i return label ############################################################################### class AgglomerativeClustering(BaseEstimator, ClusterMixin): """ Agglomerative Clustering Recursively merges the pair of clusters that minimally increases a given linkage distance. Parameters ---------- n_clusters : int, default=2 The number of clusters to find. connectivity : array-like or callable, optional Connectivity matrix. Defines for each sample the neighboring samples following a given structure of the data. This can be a connectivity matrix itself or a callable that transforms the data into a connectivity matrix, such as derived from kneighbors_graph. Default is None, i.e, the hierarchical clustering algorithm is unstructured. affinity : string or callable, default: "euclidean" Metric used to compute the linkage. Can be "euclidean", "l1", "l2", "manhattan", "cosine", or 'precomputed'. If linkage is "ward", only "euclidean" is accepted. memory : Instance of joblib.Memory or string (optional) Used to cache the output of the computation of the tree. By default, no caching is done. If a string is given, it is the path to the caching directory. n_components : int (optional) The number of connected components in the graph defined by the connectivity matrix. If not set, it is estimated. compute_full_tree : bool or 'auto' (optional) Stop early the construction of the tree at n_clusters. This is useful to decrease computation time if the number of clusters is not small compared to the number of samples. This option is useful only when specifying a connectivity matrix. Note also that when varying the number of clusters and using caching, it may be advantageous to compute the full tree. linkage : {"ward", "complete", "average"}, optional, default: "ward" Which linkage criterion to use. The linkage criterion determines which distance to use between sets of observation. The algorithm will merge the pairs of cluster that minimize this criterion. - ward minimizes the variance of the clusters being merged. - average uses the average of the distances of each observation of the two sets. - complete or maximum linkage uses the maximum distances between all observations of the two sets. pooling_func : callable, default=np.mean This combines the values of agglomerated features into a single value, and should accept an array of shape [M, N] and the keyword argument ``axis=1``, and reduce it to an array of size [M]. Attributes ---------- labels_ : array [n_samples] cluster labels for each point n_leaves_ : int Number of leaves in the hierarchical tree. n_components_ : int The estimated number of connected components in the graph. children_ : array-like, shape (n_nodes, 2) The children of each non-leaf node. Values less than `n_samples` correspond to leaves of the tree which are the original samples. A node `i` greater than or equal to `n_samples` is a non-leaf node and has children `children_[i - n_samples]`. Alternatively at the i-th iteration, children[i][0] and children[i][1] are merged to form node `n_samples + i` """ def __init__(self, n_clusters=2, affinity="euclidean", memory=Memory(cachedir=None, verbose=0), connectivity=None, n_components=None, compute_full_tree='auto', linkage='ward', pooling_func=np.mean): self.n_clusters = n_clusters self.memory = memory self.n_components = n_components self.connectivity = connectivity self.compute_full_tree = compute_full_tree self.linkage = linkage self.affinity = affinity self.pooling_func = pooling_func def fit(self, X): """Fit the hierarchical clustering on the data Parameters ---------- X : array-like, shape = [n_samples, n_features] The samples a.k.a. observations. Returns ------- self """ X = check_array(X) memory = self.memory if isinstance(memory, six.string_types): memory = Memory(cachedir=memory, verbose=0) if self.linkage == "ward" and self.affinity != "euclidean": raise ValueError("%s was provided as affinity. Ward can only " "work with euclidean distances." % (self.affinity, )) if self.linkage not in _TREE_BUILDERS: raise ValueError("Unknown linkage type %s." "Valid options are %s" % (self.linkage, _TREE_BUILDERS.keys())) tree_builder = _TREE_BUILDERS[self.linkage] connectivity = self.connectivity if self.connectivity is not None: if callable(self.connectivity): connectivity = self.connectivity(X) connectivity = check_array( connectivity, accept_sparse=['csr', 'coo', 'lil']) n_samples = len(X) compute_full_tree = self.compute_full_tree if self.connectivity is None: compute_full_tree = True if compute_full_tree == 'auto': # Early stopping is likely to give a speed up only for # a large number of clusters. The actual threshold # implemented here is heuristic compute_full_tree = self.n_clusters < max(100, .02 * n_samples) n_clusters = self.n_clusters if compute_full_tree: n_clusters = None # Construct the tree kwargs = {} if self.linkage != 'ward': kwargs['linkage'] = self.linkage kwargs['affinity'] = self.affinity self.children_, self.n_components_, self.n_leaves_, parents = \ memory.cache(tree_builder)(X, connectivity, n_components=self.n_components, n_clusters=n_clusters, **kwargs) # Cut the tree if compute_full_tree: self.labels_ = _hc_cut(self.n_clusters, self.children_, self.n_leaves_) else: labels = _hierarchical.hc_get_heads(parents, copy=False) # copy to avoid holding a reference on the original array labels = np.copy(labels[:n_samples]) # Reasign cluster numbers self.labels_ = np.searchsorted(np.unique(labels), labels) return self class FeatureAgglomeration(AgglomerativeClustering, AgglomerationTransform): """Agglomerate features. Similar to AgglomerativeClustering, but recursively merges features instead of samples. Parameters ---------- n_clusters : int, default 2 The number of clusters to find. connectivity : array-like or callable, optional Connectivity matrix. Defines for each feature the neighboring features following a given structure of the data. This can be a connectivity matrix itself or a callable that transforms the data into a connectivity matrix, such as derived from kneighbors_graph. Default is None, i.e, the hierarchical clustering algorithm is unstructured. affinity : string or callable, default "euclidean" Metric used to compute the linkage. Can be "euclidean", "l1", "l2", "manhattan", "cosine", or 'precomputed'. If linkage is "ward", only "euclidean" is accepted. memory : Instance of joblib.Memory or string, optional Used to cache the output of the computation of the tree. By default, no caching is done. If a string is given, it is the path to the caching directory. n_components : int, optional The number of connected components in the graph defined by the connectivity matrix. If not set, it is estimated. compute_full_tree : bool or 'auto', optional, default "auto" Stop early the construction of the tree at n_clusters. This is useful to decrease computation time if the number of clusters is not small compared to the number of features. This option is useful only when specifying a connectivity matrix. Note also that when varying the number of clusters and using caching, it may be advantageous to compute the full tree. linkage : {"ward", "complete", "average"}, optional, default "ward" Which linkage criterion to use. The linkage criterion determines which distance to use between sets of features. The algorithm will merge the pairs of cluster that minimize this criterion. - ward minimizes the variance of the clusters being merged. - average uses the average of the distances of each feature of the two sets. - complete or maximum linkage uses the maximum distances between all features of the two sets. pooling_func : callable, default np.mean This combines the values of agglomerated features into a single value, and should accept an array of shape [M, N] and the keyword argument `axis=1`, and reduce it to an array of size [M]. Attributes ---------- labels_ : array-like, (n_features,) cluster labels for each feature. n_leaves_ : int Number of leaves in the hierarchical tree. n_components_ : int The estimated number of connected components in the graph. children_ : array-like, shape (n_nodes, 2) The children of each non-leaf node. Values less than `n_features` correspond to leaves of the tree which are the original samples. A node `i` greater than or equal to `n_features` is a non-leaf node and has children `children_[i - n_features]`. Alternatively at the i-th iteration, children[i][0] and children[i][1] are merged to form node `n_features + i` """ def fit(self, X, y=None, **params): """Fit the hierarchical clustering on the data Parameters ---------- X : array-like, shape = [n_samples, n_features] The data Returns ------- self """ X = check_array(X, accept_sparse=['csr', 'csc', 'coo']) if not (len(X.shape) == 2 and X.shape[0] > 0): raise ValueError('At least one sample is required to fit the ' 'model. A data matrix of shape %s was given.' % (X.shape, )) return AgglomerativeClustering.fit(self, X.T, **params) @property def fit_predict(self): raise AttributeError ############################################################################### # Backward compatibility: class for Ward hierarchical clustering class Ward(AgglomerativeClustering): """Ward hierarchical clustering: constructs a tree and cuts it. Recursively merges the pair of clusters that minimally increases within-cluster variance. Parameters ---------- n_clusters : int or ndarray The number of clusters to find. connectivity : sparse matrix (optional) Connectivity matrix. Defines for each sample the neighboring samples following a given structure of the data. Default is None, i.e, the hierarchical clustering algorithm is unstructured. memory : Instance of joblib.Memory or string (optional) Used to cache the output of the computation of the tree. By default, no caching is done. If a string is given, it is the path to the caching directory. n_components : int (optional) The number of connected components in the graph defined by the connectivity matrix. If not set, it is estimated. compute_full_tree : bool or 'auto' (optional) Stop early the construction of the tree at n_clusters. This is useful to decrease computation time if the number of clusters is not small compared to the number of samples. This option is useful only when specifying a connectivity matrix. Note also that when varying the number of clusters and using caching, it may be advantageous to compute the full tree. Attributes ---------- labels_ : array [n_features] cluster labels for each feature n_leaves_ : int Number of leaves in the hierarchical tree. n_components_ : int The estimated number of connected components in the graph. children_ : array-like, shape (n_nodes, 2) The children of each non-leaf node. Values less than `n_samples` refer to leaves of the tree. A greater value `i` indicates a node with children `children_[i - n_samples]`. See also -------- AgglomerativeClustering : agglomerative hierarchical clustering """ linkage = 'ward' def __init__(self, n_clusters=2, memory=Memory(cachedir=None, verbose=0), connectivity=None, n_components=None, compute_full_tree='auto', pooling_func=np.mean): warnings.warn("The Ward class is deprecated since 0.14 and will be " "removed in 0.17. Use the AgglomerativeClustering " "instead.", DeprecationWarning) self.n_clusters = n_clusters self.memory = memory self.n_components = n_components self.connectivity = connectivity self.compute_full_tree = compute_full_tree self.affinity = "euclidean" self.pooling_func = pooling_func class WardAgglomeration(AgglomerationTransform, Ward): """Feature agglomeration based on Ward hierarchical clustering Parameters ---------- n_clusters : int or ndarray The number of clusters. connectivity : array-like or callable, optional Connectivity matrix. Defines for each sample the neighboring samples following a given structure of the data. This can be a connectivity matrix itself or a callable that transforms the data into a connectivity matrix, such as derived from kneighbors_graph. Default is None, i.e, the hierarchical clustering algorithm is unstructured. memory : Instance of joblib.Memory or string, optional Used to cache the output of the computation of the tree. By default, no caching is done. If a string is given, it is the path to the caching directory. n_components : int (optional) The number of connected components in the graph defined by the connectivity matrix. If not set, it is estimated. compute_full_tree : bool or 'auto' (optional) Stop early the construction of the tree at n_clusters. This is useful to decrease computation time if the number of clusters is not small compared to the number of samples. This option is useful only when specifying a connectivity matrix. Note also that when varying the number of cluster and using caching, it may be advantageous to compute the full tree. pooling_func : callable, default=np.mean This combines the values of agglomerated features into a single value, and should accept an array of shape [M, N] and the keyword argument `axis=1`, and reduce it to an array of size [M]. Attributes ---------- children_ : array-like, shape (n_nodes, 2) The children of each non-leaf node. Values less than `n_features` correspond to leaves of the tree which are the original samples. A node `i` greater than or equal to `n_features` is a non-leaf node and has children `children_[i - n_features]`. Alternatively at the i-th iteration, children[i][0] and children[i][1] are merged to form node `n_features + i` labels_ : array [n_features] cluster labels for each feature n_leaves_ : int Number of leaves in the hierarchical tree. n_components_ : int The estimated number of connected components in the graph. """ def fit(self, X, y=None, **params): """Fit the hierarchical clustering on the data Parameters ---------- X : array-like, shape = [n_samples, n_features] The data Returns ------- self """ X = check_array(X) return Ward.fit(self, X.T, **params) @property def fit_predict(self): raise AttributeError
{ "content_hash": "aa749306190ba5a6a997f9821fda7e58", "timestamp": "", "source": "github", "line_count": 1009, "max_line_length": 81, "avg_line_length": 39.00594648166501, "alnum_prop": 0.6138679269253245, "repo_name": "ycaihua/scikit-learn", "id": "5e834a6c6dbed7d366a049b1b8bf90e12a1b3b98", "size": "39357", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "sklearn/cluster/hierarchical.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "18569015" }, { "name": "C++", "bytes": "1810938" }, { "name": "CSS", "bytes": "1503" }, { "name": "JavaScript", "bytes": "20564" }, { "name": "Makefile", "bytes": "4897" }, { "name": "PowerShell", "bytes": "13427" }, { "name": "Python", "bytes": "5887845" }, { "name": "Shell", "bytes": "8730" } ], "symlink_target": "" }
""" Stresstest the LCS algorithm. Result when I tried this on my laptop was that I had to go to 100_000_000 tokens before getting over 1s of processing time. So let's assume there are no computational arguments for limiting the input size. """ import sys import time import subprocess token_count = int(sys.argv[1]) removes = b"-" + b"." * token_count adds = b"+" + b"#" * token_count t0 = time.time() print(f"Launching riff with {token_count} tokens mismatching...") riff = subprocess.Popen( ["cargo", "run"], stdin=subprocess.PIPE, stdout=subprocess.DEVNULL ) assert riff.stdin riff.stdin.write(removes) riff.stdin.write(adds) riff.stdin.close() riff.wait() t1 = time.time() dt_seconds = t1 - t0 print(f"Riff done processing {token_count} differences in {dt_seconds}s")
{ "content_hash": "1fcc84b34e4033b10c5d388a3a09c80a", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 80, "avg_line_length": 24.4375, "alnum_prop": 0.7225063938618926, "repo_name": "walles/riff", "id": "f2c0fded40b6e5798a7512c070af5ed1ae2e7540", "size": "806", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "stresstest.py", "mode": "33261", "license": "mit", "language": [ { "name": "Python", "bytes": "6947" }, { "name": "Rust", "bytes": "59047" }, { "name": "Shell", "bytes": "7619" } ], "symlink_target": "" }
import random from six import moves from tempest.api.object_storage import base from tempest import clients from tempest.common import custom_matchers from tempest.common.utils import data_utils from tempest import config from tempest import exceptions from tempest import test CONF = config.CONF class AccountTest(base.BaseObjectTest): containers = [] @classmethod @test.safe_setup def setUpClass(cls): super(AccountTest, cls).setUpClass() for i in moves.xrange(ord('a'), ord('f') + 1): name = data_utils.rand_name(name='%s-' % chr(i)) cls.container_client.create_container(name) cls.containers.append(name) cls.containers_count = len(cls.containers) @classmethod def tearDownClass(cls): cls.delete_containers(cls.containers) cls.data.teardown_all() super(AccountTest, cls).tearDownClass() @test.attr(type='smoke') def test_list_containers(self): # list of all containers should not be empty resp, container_list = self.account_client.list_account_containers() self.assertHeaders(resp, 'Account', 'GET') self.assertIsNotNone(container_list) for container_name in self.containers: self.assertIn(container_name, container_list) @test.attr(type='smoke') def test_list_no_containers(self): # List request to empty account # To test listing no containers, create new user other than # the base user of this instance. self.data.setup_test_user() os_test_user = clients.Manager( self.data.test_credentials) # Retrieve the id of an operator role of object storage test_role_id = None swift_role = CONF.object_storage.operator_role try: _, roles = self.os_admin.identity_client.list_roles() test_role_id = next(r['id'] for r in roles if r['name'] == swift_role) except StopIteration: msg = "%s role found" % swift_role raise exceptions.NotFound(msg) # Retrieve the test_user id _, users = self.os_admin.identity_client.get_users() test_user_id = next(usr['id'] for usr in users if usr['name'] == self.data.test_user) # Retrieve the test_tenant id _, tenants = self.os_admin.identity_client.list_tenants() test_tenant_id = next(tnt['id'] for tnt in tenants if tnt['name'] == self.data.test_tenant) # Assign the newly created user the appropriate operator role self.os_admin.identity_client.assign_user_role( test_tenant_id, test_user_id, test_role_id) resp, container_list = \ os_test_user.account_client.list_account_containers() self.assertIn(int(resp['status']), test.HTTP_SUCCESS) # When sending a request to an account which has not received a PUT # container request, the response does not contain 'accept-ranges' # header. This is a special case, therefore the existence of response # headers is checked without custom matcher. self.assertIn('content-length', resp) self.assertIn('x-timestamp', resp) self.assertIn('x-account-bytes-used', resp) self.assertIn('x-account-container-count', resp) self.assertIn('x-account-object-count', resp) self.assertIn('content-type', resp) self.assertIn('x-trans-id', resp) self.assertIn('date', resp) # Check only the format of common headers with custom matcher self.assertThat(resp, custom_matchers.AreAllWellFormatted()) self.assertEqual(len(container_list), 0) @test.attr(type='smoke') def test_list_containers_with_format_json(self): # list containers setting format parameter to 'json' params = {'format': 'json'} resp, container_list = self.account_client.list_account_containers( params=params) self.assertIn(int(resp['status']), test.HTTP_SUCCESS) self.assertHeaders(resp, 'Account', 'GET') self.assertIsNotNone(container_list) self.assertTrue([c['name'] for c in container_list]) self.assertTrue([c['count'] for c in container_list]) self.assertTrue([c['bytes'] for c in container_list]) @test.attr(type='smoke') def test_list_containers_with_format_xml(self): # list containers setting format parameter to 'xml' params = {'format': 'xml'} resp, container_list = self.account_client.list_account_containers( params=params) self.assertIn(int(resp['status']), test.HTTP_SUCCESS) self.assertHeaders(resp, 'Account', 'GET') self.assertIsNotNone(container_list) self.assertEqual(container_list.tag, 'account') self.assertTrue('name' in container_list.keys()) self.assertEqual(container_list.find(".//container").tag, 'container') self.assertEqual(container_list.find(".//name").tag, 'name') self.assertEqual(container_list.find(".//count").tag, 'count') self.assertEqual(container_list.find(".//bytes").tag, 'bytes') @test.attr(type='smoke') def test_list_extensions(self): resp, extensions = self.account_client.list_extensions() self.assertIn(int(resp['status']), test.HTTP_SUCCESS) self.assertThat(resp, custom_matchers.AreAllWellFormatted()) @test.attr(type='smoke') def test_list_containers_with_limit(self): # list containers one of them, half of them then all of them for limit in (1, self.containers_count / 2, self.containers_count): params = {'limit': limit} resp, container_list = \ self.account_client.list_account_containers(params=params) self.assertHeaders(resp, 'Account', 'GET') self.assertEqual(len(container_list), limit) @test.attr(type='smoke') def test_list_containers_with_marker(self): # list containers using marker param # first expect to get 0 container as we specified last # the container as marker # second expect to get the bottom half of the containers params = {'marker': self.containers[-1]} resp, container_list = \ self.account_client.list_account_containers(params=params) self.assertHeaders(resp, 'Account', 'GET') self.assertEqual(len(container_list), 0) params = {'marker': self.containers[self.containers_count / 2]} resp, container_list = \ self.account_client.list_account_containers(params=params) self.assertHeaders(resp, 'Account', 'GET') self.assertEqual(len(container_list), self.containers_count / 2 - 1) @test.attr(type='smoke') def test_list_containers_with_end_marker(self): # list containers using end_marker param # first expect to get 0 container as we specified first container as # end_marker # second expect to get the top half of the containers params = {'end_marker': self.containers[0]} resp, container_list = \ self.account_client.list_account_containers(params=params) self.assertHeaders(resp, 'Account', 'GET') self.assertEqual(len(container_list), 0) params = {'end_marker': self.containers[self.containers_count / 2]} resp, container_list = \ self.account_client.list_account_containers(params=params) self.assertHeaders(resp, 'Account', 'GET') self.assertEqual(len(container_list), self.containers_count / 2) @test.attr(type='smoke') def test_list_containers_with_marker_and_end_marker(self): # list containers combining marker and end_marker param params = {'marker': self.containers[0], 'end_marker': self.containers[self.containers_count - 1]} resp, container_list = self.account_client.list_account_containers( params=params) self.assertIn(int(resp['status']), test.HTTP_SUCCESS) self.assertHeaders(resp, 'Account', 'GET') self.assertEqual(len(container_list), self.containers_count - 2) @test.attr(type='smoke') def test_list_containers_with_limit_and_marker(self): # list containers combining marker and limit param # result are always limitated by the limit whatever the marker for marker in random.choice(self.containers): limit = random.randint(0, self.containers_count - 1) params = {'marker': marker, 'limit': limit} resp, container_list = \ self.account_client.list_account_containers(params=params) self.assertHeaders(resp, 'Account', 'GET') self.assertTrue(len(container_list) <= limit, str(container_list)) @test.attr(type='smoke') def test_list_containers_with_limit_and_end_marker(self): # list containers combining limit and end_marker param limit = random.randint(1, self.containers_count) params = {'limit': limit, 'end_marker': self.containers[self.containers_count / 2]} resp, container_list = self.account_client.list_account_containers( params=params) self.assertIn(int(resp['status']), test.HTTP_SUCCESS) self.assertHeaders(resp, 'Account', 'GET') self.assertEqual(len(container_list), min(limit, self.containers_count / 2)) @test.attr(type='smoke') def test_list_containers_with_limit_and_marker_and_end_marker(self): # list containers combining limit, marker and end_marker param limit = random.randint(1, self.containers_count) params = {'limit': limit, 'marker': self.containers[0], 'end_marker': self.containers[self.containers_count - 1]} resp, container_list = self.account_client.list_account_containers( params=params) self.assertIn(int(resp['status']), test.HTTP_SUCCESS) self.assertHeaders(resp, 'Account', 'GET') self.assertEqual(len(container_list), min(limit, self.containers_count - 2)) @test.attr(type='smoke') def test_list_account_metadata(self): # list all account metadata # set metadata to account metadata = {'test-account-meta1': 'Meta1', 'test-account-meta2': 'Meta2'} resp, _ = self.account_client.create_account_metadata(metadata) resp, _ = self.account_client.list_account_metadata() self.assertIn(int(resp['status']), test.HTTP_SUCCESS) self.assertHeaders(resp, 'Account', 'HEAD') self.assertIn('x-account-meta-test-account-meta1', resp) self.assertIn('x-account-meta-test-account-meta2', resp) self.account_client.delete_account_metadata(metadata) @test.attr(type='smoke') def test_list_no_account_metadata(self): # list no account metadata resp, _ = self.account_client.list_account_metadata() self.assertIn(int(resp['status']), test.HTTP_SUCCESS) self.assertHeaders(resp, 'Account', 'HEAD') self.assertNotIn('x-account-meta-', str(resp)) @test.attr(type='smoke') def test_update_account_metadata_with_create_metadata(self): # add metadata to account metadata = {'test-account-meta1': 'Meta1'} resp, _ = self.account_client.create_account_metadata(metadata) self.assertIn(int(resp['status']), test.HTTP_SUCCESS) self.assertHeaders(resp, 'Account', 'POST') resp, body = self.account_client.list_account_metadata() self.assertIn('x-account-meta-test-account-meta1', resp) self.assertEqual(resp['x-account-meta-test-account-meta1'], metadata['test-account-meta1']) self.account_client.delete_account_metadata(metadata) @test.attr(type='smoke') def test_update_account_metadata_with_delete_matadata(self): # delete metadata from account metadata = {'test-account-meta1': 'Meta1'} self.account_client.create_account_metadata(metadata) resp, _ = self.account_client.delete_account_metadata(metadata) self.assertIn(int(resp['status']), test.HTTP_SUCCESS) self.assertHeaders(resp, 'Account', 'POST') resp, _ = self.account_client.list_account_metadata() self.assertNotIn('x-account-meta-test-account-meta1', resp) @test.attr(type='smoke') def test_update_account_metadata_with_create_matadata_key(self): # if the value of metadata is not set, the metadata is not # registered at a server metadata = {'test-account-meta1': ''} resp, _ = self.account_client.create_account_metadata(metadata) self.assertIn(int(resp['status']), test.HTTP_SUCCESS) self.assertHeaders(resp, 'Account', 'POST') resp, _ = self.account_client.list_account_metadata() self.assertNotIn('x-account-meta-test-account-meta1', resp) @test.attr(type='smoke') def test_update_account_metadata_with_delete_matadata_key(self): # Although the value of metadata is not set, the feature of # deleting metadata is valid metadata_1 = {'test-account-meta1': 'Meta1'} self.account_client.create_account_metadata(metadata_1) metadata_2 = {'test-account-meta1': ''} resp, _ = self.account_client.delete_account_metadata(metadata_2) self.assertIn(int(resp['status']), test.HTTP_SUCCESS) self.assertHeaders(resp, 'Account', 'POST') resp, _ = self.account_client.list_account_metadata() self.assertNotIn('x-account-meta-test-account-meta1', resp) @test.attr(type='smoke') def test_update_account_metadata_with_create_and_delete_metadata(self): # Send a request adding and deleting metadata requests simultaneously metadata_1 = {'test-account-meta1': 'Meta1'} self.account_client.create_account_metadata(metadata_1) metadata_2 = {'test-account-meta2': 'Meta2'} resp, body = self.account_client.create_and_delete_account_metadata( metadata_2, metadata_1) self.assertIn(int(resp['status']), test.HTTP_SUCCESS) self.assertHeaders(resp, 'Account', 'POST') resp, _ = self.account_client.list_account_metadata() self.assertNotIn('x-account-meta-test-account-meta1', resp) self.assertIn('x-account-meta-test-account-meta2', resp) self.assertEqual(resp['x-account-meta-test-account-meta2'], metadata_2['test-account-meta2']) self.account_client.delete_account_metadata(metadata_2)
{ "content_hash": "64d46a4ee87d9beb9ba7c1e84a8fceee", "timestamp": "", "source": "github", "line_count": 338, "max_line_length": 78, "avg_line_length": 43.75443786982248, "alnum_prop": 0.6375684630468591, "repo_name": "cloudbase/lis-tempest", "id": "d615374acb45a08a3b8d5aa65a7c464d7f6554b6", "size": "15425", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "tempest/api/object_storage/test_account_services.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "3377111" }, { "name": "Shell", "bytes": "8663" } ], "symlink_target": "" }
"""This module contains the Site Model.""" from google.appengine.ext import db from django.utils.translation import ugettext import soc.models.presence_with_tos import soc.models.program class Site(soc.models.presence_with_tos.PresenceWithToS): """Model of a Site, which stores per site configuration. The Site Model stores configuration information unique to the Melange web site as a whole (in addition to any configuration that is common to any "presence" on the site, such as a Group or Program). """ #: The official name of the site site_name = db.StringProperty(default="Melange", verbose_name=ugettext('Site Name')) site_name.help_text = ugettext('The official name of the Site') #: A notice that should be displayed site-wide site_notice = db.StringProperty(verbose_name=ugettext('Site Notice')) site_notice.help_text = ugettext('A notice that will be displayed site-wide') maintenance_start = db.DateTimeProperty( verbose_name=ugettext('Maintenance start date')) maintenance_end = db.DateTimeProperty( verbose_name=ugettext('Maintenance end date')) #: Valid Google Custom Search Engine key. Used to load the appropriate #: search box in the search page. cse_key = db.StringProperty(verbose_name=ugettext('Custom Search Engine key')) cse_key.help_text = ugettext( 'Google Custom Search Engine key for embedding a ' 'CSE search box into the website.') #: Valid Google Analytics tracking number, if entered every page #: is going to have Google Analytics JS initialization code in #: the footer with the given tracking number. ga_tracking_num = db.StringProperty( verbose_name=ugettext('Google Analytics')) ga_tracking_num.help_text = ugettext( 'Valid Google Analytics tracking number. If the number is ' 'entered every page is going to have Google Analytics ' 'initialization code in footer.') #: Valid Google API Key. Used to embed Google services. google_api_key = db.StringProperty(verbose_name=ugettext('Google API')) google_api_key.help_text = ugettext( 'Valid Google API Key. This key is used for ' 'embedding Google services into the website.') #: Valid Google API Key. Used to embed Google services. secondary_google_api_key = db.StringProperty(verbose_name=ugettext('Secondary Google API')) secondary_google_api_key.help_text = ugettext( 'Valid Google API Key. This secondary key is used for ' 'embedding Google services into the website when ' 'accessed through the "hostname" url.') #: Optional field storing the consumer key for GData APIs gdata_consumer_key = db.StringProperty( verbose_name=ugettext('GData Consumer Key'), multiline=False) gdata_consumer_key.help_text = ugettext( 'OAuth Consumer Key that is provided by Google after ' 'registering your domain. This is used in authentication ' 'that is required to use particular GData APIs.') #: Optional field storing the consumer secret for GData APIs gdata_consumer_secret = db.StringProperty( verbose_name=ugettext('GData Consumer Secret'), multiline=False) gdata_consumer_secret.help_text = ugettext( 'OAuth Consumer Secret that is also provided by Google after ' 'registering your domain.') #: Secondary consumer key to be used with multiple domains secondary_gdata_consumer_key = db.StringProperty( verbose_name=ugettext('Secondary GData Consumer Key'), multiline=False) secondary_gdata_consumer_key.help_text = ugettext( 'Same with "GData Consumer Key" except this is used when ' 'not accessed through the "hostname" url. To be used with ' 'multiple domains.') #: Secondary consumer secret to be used with multiple domains secondary_gdata_consumer_secret = db.StringProperty( verbose_name=ugettext('Secondary GData Consumer Secret'), multiline=False) secondary_gdata_consumer_secret.help_text = ugettext( 'Same with "GData Consumer Secret" except this is used when ' 'not accessed through the "hostname" url. To be used with ' 'multiple domains.') #: No Reply Email address used for sending notification emails to site users noreply_email = db.EmailProperty(verbose_name=ugettext('No reply email')) noreply_email.help_text = ugettext( 'No reply email address is used for sending emails to site users. ' 'Email address provided in this field needs to be added as Developer ' 'in GAE admin console.') #: Optional field storing the url of the site logo. logo_url = db.LinkProperty( verbose_name=ugettext('Site logo')) logo_url.help_text = ugettext( 'URL of the site logo.') #: XSRF tokens are generated using a secret key. This field is not visible in #: /site/edit because it is hidden in soc.views.models.site, and is populated #: automatically by soc.logic.models.site. xsrf_secret_key = db.StringProperty(multiline=False) xsrf_secret_key.help_text = ugettext('An automatically generated random ' 'value used to prevent cross-site request forgery attacks.') #: Optional field storing the hostname hostname = db.StringProperty( verbose_name=ugettext('Hostname')) hostname.help_text = ugettext( 'URL of the hostname.') #: Reference to Program which is currently active active_program = db.ReferenceProperty( reference_class=soc.models.program.Program) active_program.help_text = ugettext( 'The Program which is currently active.')
{ "content_hash": "59b22ede57ac776f2b95cb62e839bb7c", "timestamp": "", "source": "github", "line_count": 130, "max_line_length": 93, "avg_line_length": 42.34615384615385, "alnum_prop": 0.7238873751135332, "repo_name": "adviti/melange", "id": "8de5904a81271f8d903188e1c163fa2100af995b", "size": "6115", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/soc/models/site.py", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
import asyncio import uuid import json import re from aiokafka import AIOKafkaConsumer, AIOKafkaProducer # local imports from nautilus.conventions.actions import serialize_action, hydrate_action class KafkaBroker: """ This class handles two way communication with the kafka server. Also allows for a question/answer interface served over the kafka stream. Args: consumer_pattern = None server (str): The location of the kafka stream. consumer_channel (optional, str): The channel to listen for events on. consumer_pattern (optional, regex): A regex pattern to match against the action types. The action handler is called for every matching event. If none is provided, the action handler is called for every action. producer_channel (optional, str): The default channel to user when producing events. initial_offset (optional, one of 'latest' or 'earliest'): Where to start on the event stream when run. loop (optional, ayncio.EventLoop): The event loop that the broker should run on. Example: .. code-block:: python from .kafka import KafkaBroker class ActionHandler(KafkaBroker): consumer_channel = 'myEvents' server = 'localhost:9092' async def handle_message(self, action_type, payload, **kwds): print("recieved action with type: {}".format(action_type)) print("and payload: {}".format(payload)) """ loop = None server = None consumer_channel = None producer_channel = None initial_offset = 'latest' consumer_pattern = None def __init__(self): # a dictionary to keep the question/answer correlation ids self._request_handlers = {} self._pending_outbound = {} # if there is no loop assigned if not self.loop: # use the current one self.loop = asyncio.get_event_loop() # a placeholder for the event consumer task self._consumer_task = None # create a consumer instance self._consumer = AIOKafkaConsumer( self.consumer_channel, loop=self.loop, bootstrap_servers=self.server, auto_offset_reset=self.initial_offset ) self._producer = AIOKafkaProducer(loop=self.loop, bootstrap_servers=self.server) def start(self): """ This function starts the brokers interaction with the kafka stream """ self.loop.run_until_complete(self._consumer.start()) self.loop.run_until_complete(self._producer.start()) self._consumer_task = self.loop.create_task(self._consume_event_callback()) def stop(self): """ This method stops the brokers interaction with the kafka stream """ self.loop.run_until_complete(self._consumer.stop()) self.loop.run_until_complete(self._producer.stop()) # attempt try: # to cancel the service self._consumer_task.cancel() # if there was no service except AttributeError: # keep going pass async def send(self, payload='', action_type='', channel=None, **kwds): """ This method sends a message over the kafka stream. """ # use a custom channel if one was provided channel = channel or self.producer_channel # serialize the action type for the message = serialize_action(action_type=action_type, payload=payload, **kwds) # send the message return await self._producer.send(channel, message.encode()) async def ask(self, action_type, **kwds): # create a correlation id for the question correlation_id = uuid.uuid4() # make sure its unique while correlation_id in self._request_handlers: # create a new correlation id correlation_id = uuid.uuid4() # use the integer form of the uuid correlation_id = correlation_id.int # create a future to wait on before we ask the question question_future = asyncio.Future() # register the future's callback with the request handler self._request_handlers[correlation_id] = question_future.set_result # add the entry to the outbound dictionary self._pending_outbound[correlation_id] = action_type # publish the question await self.send( correlation_id=correlation_id, action_type=action_type, **kwds ) # return the response return await question_future ## internal implementations async def handle_message(self, props, action_type=None, payload=None, **kwds): raise NotImplementedError() async def _consume_event_callback(self): # continuously loop while True: # grab the next message msg = await self._consumer.getone() # parse the message as json message = hydrate_action(msg.value.decode()) # the correlation_id associated with this message correlation_id = message.get('correlation_id') # the action type of the message action_type = message['action_type'] # if there is a consumer pattern if self.consumer_pattern: # if the action_type does not satisfy the pattern if not re.match(self.consumer_pattern, message['action_type']): # don't do anything continue # if we know how to respond to this message if correlation_id and correlation_id in self._request_handlers \ and action_type != self._pending_outbound[correlation_id]: # pass the message to the handler self._request_handlers[correlation_id](message['payload']) # remove the entry in the handler dict del self._request_handlers[correlation_id] del self._pending_outbound[correlation_id] # otherwise there was no correlation id, pass it along to the general handlers else: # build the dictionary of message properties message_props = { 'correlation_id': correlation_id } # pass it to the handler await self.handle_message( props=message_props, **message )
{ "content_hash": "04f5066844c618f3d19940624ef12ea7", "timestamp": "", "source": "github", "line_count": 201, "max_line_length": 90, "avg_line_length": 33.72636815920398, "alnum_prop": 0.5874022717214928, "repo_name": "AlecAivazis/nautilus", "id": "082c99656a1f6a28963bc3fd8f6d863d19fe51fd", "size": "6798", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "nautilus/network/events/consumers/kafka.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "57814" }, { "name": "HTML", "bytes": "260" }, { "name": "JavaScript", "bytes": "386" }, { "name": "Python", "bytes": "235605" } ], "symlink_target": "" }
from __future__ import unicode_literals from django.db import migrations from django.contrib.auth.models import Group def create_group(apps, schema_editor): Group.objects.get_or_create(name='Data Entry Users') return class Migration(migrations.Migration): dependencies = [ ('whats_fresh_api', '0001_initial'), ('auth', '0001_initial') ] operations = [ migrations.RunPython(create_group) ]
{ "content_hash": "fa2dce1430556cd94f48407f3c7138d8", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 56, "avg_line_length": 21.142857142857142, "alnum_prop": 0.6711711711711712, "repo_name": "osu-cass/whats-fresh-api", "id": "671027f8974886a726c7963fc9c2052ec63225ed", "size": "468", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "whats_fresh/whats_fresh_api/migrations/0002_auto_20141120_2246.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "12322" }, { "name": "HTML", "bytes": "51855" }, { "name": "JavaScript", "bytes": "18395" }, { "name": "Python", "bytes": "330478" }, { "name": "Ruby", "bytes": "867" } ], "symlink_target": "" }
"""OnscreenText module: contains the OnscreenText class""" __all__ = ['OnscreenText', 'Plain', 'ScreenTitle', 'ScreenPrompt', 'NameConfirm', 'BlackOnWhite'] from pandac.PandaModules import * import DirectGuiGlobals as DGG from direct.showbase.DirectObject import DirectObject import string,types ## These are the styles of text we might commonly see. They set the ## overall appearance of the text according to one of a number of ## pre-canned styles. You can further customize the appearance of the ## text by specifying individual parameters as well. Plain = 1 ScreenTitle = 2 ScreenPrompt = 3 NameConfirm = 4 BlackOnWhite = 5 class OnscreenText(DirectObject, NodePath): def __init__(self, text = '', style = Plain, pos = (0, 0), roll = 0, scale = None, fg = None, bg = None, shadow = None, shadowOffset = (0.04, 0.04), frame = None, align = None, wordwrap = None, drawOrder = None, decal = 0, font = None, parent = None, sort = 0, mayChange = True): """ Make a text node from string, put it into the 2d sg and set it up with all the indicated parameters. The parameters are as follows: text: the actual text to display. This may be omitted and specified later via setText() if you don't have it available, but it is better to specify it up front. style: one of the pre-canned style parameters defined at the head of this file. This sets up the default values for many of the remaining parameters if they are unspecified; however, a parameter may still be specified to explicitly set it, overriding the pre-canned style. pos: the x, y position of the text on the screen. scale: the size of the text. This may either be a single float (and it will usually be a small number like 0.07) or it may be a 2-tuple of floats, specifying a different x, y scale. fg: the (r, g, b, a) foreground color of the text. This is normally a 4-tuple of floats or ints. bg: the (r, g, b, a) background color of the text. If the fourth value, a, is nonzero, a card is created to place behind the text and set to the given color. shadow: the (r, g, b, a) color of the shadow behind the text. If the fourth value, a, is nonzero, a little drop shadow is created and placed behind the text. frame: the (r, g, b, a) color of the frame drawn around the text. If the fourth value, a, is nonzero, a frame is created around the text. align: one of TextNode.ALeft, TextNode.ARight, or TextNode.ACenter. wordwrap: either the width to wordwrap the text at, or None to specify no automatic word wrapping. drawOrder: the drawing order of this text with respect to all other things in the 'fixed' bin within render2d. The text will actually use drawOrder through drawOrder + 2. decal: if this is True, the text is decalled onto its background card. Useful when the text will be parented into the 3-D scene graph. font: the font to use for the text. parent: the NodePath to parent the text to initially. mayChange: pass true if the text or its properties may need to be changed at runtime, false if it is static once created (which leads to better memory optimization). """ if parent == None: parent = aspect2d # make a text node textNode = TextNode('') self.textNode = textNode # We ARE a node path. Initially, we're an empty node path. NodePath.__init__(self) # Choose the default parameters according to the selected # style. if style == Plain: scale = scale or 0.07 fg = fg or (0, 0, 0, 1) bg = bg or (0, 0, 0, 0) shadow = shadow or (0, 0, 0, 0) frame = frame or (0, 0, 0, 0) if align == None: align = TextNode.ACenter elif style == ScreenTitle: scale = scale or 0.15 fg = fg or (1, 0.2, 0.2, 1) bg = bg or (0, 0, 0, 0) shadow = shadow or (0, 0, 0, 1) frame = frame or (0, 0, 0, 0) if align == None: align = TextNode.ACenter elif style == ScreenPrompt: scale = scale or 0.1 fg = fg or (1, 1, 0, 1) bg = bg or (0, 0, 0, 0) shadow = shadow or (0, 0, 0, 1) frame = frame or (0, 0, 0, 0) if align == None: align = TextNode.ACenter elif style == NameConfirm: scale = scale or 0.1 fg = fg or (0, 1, 0, 1) bg = bg or (0, 0, 0, 0) shadow = shadow or (0, 0, 0, 0) frame = frame or (0, 0, 0, 0) if align == None: align = TextNode.ACenter elif style == BlackOnWhite: scale = scale or 0.1 fg = fg or (0, 0, 0, 1) bg = bg or (1, 1, 1, 1) shadow = shadow or (0, 0, 0, 0) frame = frame or (0, 0, 0, 0) if align == None: align = TextNode.ACenter else: raise ValueError if not isinstance(scale, types.TupleType): # If the scale is already a tuple, it's a 2-d (x, y) scale. # Otherwise, it's a uniform scale--make it a tuple. scale = (scale, scale) # Save some of the parameters for posterity. self.scale = scale self.pos = pos self.roll = roll self.wordwrap = wordwrap if decal: textNode.setCardDecal(1) if font == None: font = DGG.getDefaultFont() textNode.setFont(font) textNode.setTextColor(fg[0], fg[1], fg[2], fg[3]) textNode.setAlign(align) if wordwrap: textNode.setWordwrap(wordwrap) if bg[3] != 0: # If we have a background color, create a card. textNode.setCardColor(bg[0], bg[1], bg[2], bg[3]) textNode.setCardAsMargin(0.1, 0.1, 0.1, 0.1) if shadow[3] != 0: # If we have a shadow color, create a shadow. # Can't use the *shadow interface because it might be a VBase4. #textNode.setShadowColor(*shadow) textNode.setShadowColor(shadow[0], shadow[1], shadow[2], shadow[3]) textNode.setShadow(*shadowOffset) if frame[3] != 0: # If we have a frame color, create a frame. textNode.setFrameColor(frame[0], frame[1], frame[2], frame[3]) textNode.setFrameAsMargin(0.1, 0.1, 0.1, 0.1) # Create a transform for the text for our scale and position. # We'd rather do it here, on the text itself, rather than on # our NodePath, so we have one fewer transforms in the scene # graph. self.updateTransformMat() if drawOrder != None: textNode.setBin('fixed') textNode.setDrawOrder(drawOrder) self.setText(text) if not text: # If we don't have any text, assume we'll be changing it later. self.mayChange = 1 else: self.mayChange = mayChange # Ok, now update the node. if not self.mayChange: # If we aren't going to change the text later, we can # throw away the TextNode. self.textNode = textNode.generate() self.isClean = 0 # Set ourselves up as the NodePath that points to this node. self.assign(parent.attachNewNode(self.textNode, sort)) def cleanup(self): self.textNode = None if self.isClean == 0: self.isClean = 1 self.removeNode() def destroy(self): self.cleanup() def freeze(self): pass def thaw(self): pass # Allow changing of several of the parameters after the text has # been created. These should be used with caution; it is better # to set all the parameters up front. These functions are # primarily intended for interactive placement of the initial # text, and for those rare occasions when you actually want to # change a text's property after it has been created. def setDecal(self, decal): self.textNode.setCardDecal(decal) def getDecal(self): return self.textNode.getCardDecal() def setFont(self, font): self.textNode.setFont(font) def getFont(self): return self.textNode.getFont() def clearText(self): self.textNode.clearText() def setText(self, text): self.unicodeText = isinstance(text, types.UnicodeType) if self.unicodeText: self.textNode.setWtext(text) else: self.textNode.setText(text) def appendText(self, text): if isinstance(text, types.UnicodeType): self.unicodeText = 1 if self.unicodeText: self.textNode.appendWtext(text) else: self.textNode.appendText(text) def getText(self): if self.unicodeText: return self.textNode.getWtext() else: return self.textNode.getText() def setX(self, x): self.setPos(x, self.pos[1]) def setY(self, y): self.setPos(self.pos[0], y) def setPos(self, x, y): """setPos(self, float, float) Position the onscreen text in 2d screen space """ self.pos = (x, y) self.updateTransformMat() def getPos(self): return self.pos def setRoll(self, roll): """setRoll(self, float) Rotate the onscreen text around the screen's normal """ self.roll = roll self.updateTransformMat() def getRoll(self): return self.roll def setScale(self, sx, sy = None): """setScale(self, float, float) Scale the text in 2d space. You may specify either a single uniform scale, or two scales, or a tuple of two scales. """ if sy == None: if isinstance(sx, types.TupleType): self.scale = sx else: self.scale = (sx, sx) else: self.scale = (sx, sy) self.updateTransformMat() def updateTransformMat(self): assert(isinstance(self.textNode, TextNode)) mat = ( Mat4.scaleMat(Vec3.rfu(self.scale[0], 1, self.scale[1])) * Mat4.rotateMat(self.roll, Vec3.back()) * Mat4.translateMat(Point3.rfu(self.pos[0], 0, self.pos[1])) ) self.textNode.setTransform(mat) def getScale(self): return self.scale def setWordwrap(self, wordwrap): self.wordwrap = wordwrap if wordwrap: self.textNode.setWordwrap(wordwrap) else: self.textNode.clearWordwrap() def getWordwrap(self): return self.wordwrap def setFg(self, fg): self.textNode.setTextColor(fg[0], fg[1], fg[2], fg[3]) def setBg(self, bg): if bg[3] != 0: # If we have a background color, create a card. self.textNode.setCardColor(bg[0], bg[1], bg[2], bg[3]) self.textNode.setCardAsMargin(0.1, 0.1, 0.1, 0.1) else: # Otherwise, remove the card. self.textNode.clearCard() def setShadow(self, shadow): if shadow[3] != 0: # If we have a shadow color, create a shadow. self.textNode.setShadowColor(shadow[0], shadow[1], shadow[2], shadow[3]) self.textNode.setShadow(0.04, 0.04) else: # Otherwise, remove the shadow. self.textNode.clearShadow() def setFrame(self, frame): if frame[3] != 0: # If we have a frame color, create a frame. self.textNode.setFrameColor(frame[0], frame[1], frame[2], frame[3]) self.textNode.setFrameAsMargin(0.1, 0.1, 0.1, 0.1) else: # Otherwise, remove the frame. self.textNode.clearFrame() def configure(self, option=None, **kw): # These is for compatibility with DirectGui functions if not self.mayChange: print 'OnscreenText.configure: mayChange == 0' return for option, value in kw.items(): # Use option string to access setter function try: setter = getattr(self, 'set' + option[0].upper() + option[1:]) if setter == self.setPos: setter(value[0], value[1]) else: setter(value) except AttributeError: print 'OnscreenText.configure: invalid option:', option # Allow index style references def __setitem__(self, key, value): apply(self.configure, (), {key: value}) def cget(self, option): # Get current configuration setting. # This is for compatibility with DirectGui functions getter = getattr(self, 'get' + option[0].upper() + option[1:]) return getter() def setAlign(self, align): self.textNode.setAlign(align) # Allow index style refererences __getitem__ = cget
{ "content_hash": "d348f39615782611036e713d252a77ff", "timestamp": "", "source": "github", "line_count": 407, "max_line_length": 97, "avg_line_length": 33.85012285012285, "alnum_prop": 0.5553458662989039, "repo_name": "jjkoletar/panda3d", "id": "ed212d264322abf84f31b65f51b0bfd96d70af3a", "size": "13777", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "direct/src/gui/OnscreenText.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "6435690" }, { "name": "C++", "bytes": "31389522" }, { "name": "Emacs Lisp", "bytes": "166274" }, { "name": "HTML", "bytes": "8081" }, { "name": "Java", "bytes": "3777" }, { "name": "JavaScript", "bytes": "7003" }, { "name": "Logos", "bytes": "5504" }, { "name": "NSIS", "bytes": "91955" }, { "name": "Nemerle", "bytes": "7733" }, { "name": "Objective-C", "bytes": "12290" }, { "name": "Objective-C++", "bytes": "298197" }, { "name": "Pascal", "bytes": "53710" }, { "name": "Perl", "bytes": "206982" }, { "name": "Perl6", "bytes": "30612" }, { "name": "Puppet", "bytes": "752377" }, { "name": "Python", "bytes": "5860103" }, { "name": "Rebol", "bytes": "421" }, { "name": "Shell", "bytes": "59984" }, { "name": "Visual Basic", "bytes": "136" } ], "symlink_target": "" }
from oslo_log import log import re from vitrage.evaluator.actions.recipes.execute_mistral import INPUT from vitrage.evaluator.actions.recipes.execute_mistral import WORKFLOW from vitrage.evaluator.template_fields import TemplateFields from vitrage.evaluator.template_functions.function_resolver import is_function from vitrage.evaluator.template_validation.content.base import \ ActionValidator from vitrage.evaluator.template_validation.content.base import \ get_content_correct_result from vitrage.evaluator.template_validation.content.base import \ get_content_fault_result from vitrage.evaluator.template_validation.content.base import \ get_content_warning_result from vitrage.evaluator.template_validation.status_messages import status_msgs LOG = log.getLogger(__name__) class ExecuteMistralValidator(ActionValidator): @staticmethod def validate(action, definitions_index): properties = action[TemplateFields.PROPERTIES] if WORKFLOW not in properties or not properties[WORKFLOW]: LOG.error('%s status code: %s' % (status_msgs[133], 133)) return get_content_fault_result(133) for prop in properties: if prop not in {WORKFLOW, INPUT}: LOG.error('%s status code: %s' % (status_msgs[136], 136)) return get_content_fault_result(136) inputs = properties[INPUT] if INPUT in properties else {} for key, value in inputs.items(): if re.findall('[(),]', value) and not is_function(value): LOG.error('%s status code: %s' % (status_msgs[138], 138)) return get_content_warning_result(138) return get_content_correct_result()
{ "content_hash": "a9d9717904cc121432aab0531c7cf0ec", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 78, "avg_line_length": 38.90909090909091, "alnum_prop": 0.7091121495327103, "repo_name": "openstack/vitrage", "id": "f77525f8331d3f422446d4a6bd164b22770b5b4a", "size": "2285", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vitrage/evaluator/template_validation/content/v2/execute_mistral_validator.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "26541" }, { "name": "Mako", "bytes": "896" }, { "name": "Python", "bytes": "2074427" }, { "name": "Shell", "bytes": "17668" } ], "symlink_target": "" }
""" Submodule: defaluts Default parameters for all other submodules, including scf, fci, be and fbe """ from .default_params import *
{ "content_hash": "73d0d8ca3392ddbe0c6a5a3205002a1d", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 54, "avg_line_length": 15.666666666666666, "alnum_prop": 0.7163120567375887, "repo_name": "hongzhouye/frankenstein", "id": "b949607fc18ab7d7691cbbdfc440a6ce89ee75ae", "size": "141", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "default_params/__init__.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C++", "bytes": "230081" }, { "name": "CMake", "bytes": "1825" }, { "name": "Python", "bytes": "1127607" } ], "symlink_target": "" }
from utils import * import sys def format(propName, propList, version): return '// All code points with the `' + propName + '` derived core property as per Unicode v' + version + ':\n[\n\t' + ',\n\t'.join(propList) + '\n];' def main(sourceFile, version): dictionary = parseScriptsOrProps(sourceFile) for item in sorted(dictionary.items()): prop = item[0] codePoints = map(hexify, item[1]) result = format(prop, codePoints, version) writeFile(version + '/properties/' + prop + '-code-points.js', result) if __name__ == '__main__': main(sys.argv[1], sys.argv[2])
{ "content_hash": "7c966db163447cb8ec778e411d22cb7e", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 152, "avg_line_length": 36, "alnum_prop": 0.6666666666666666, "repo_name": "mathiasbynens/unicode-data", "id": "ee32b1dd48212ce96a0556c7da15714dc1c4a1bc", "size": "595", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "scripts/derived-core-property-code-points.py", "mode": "33261", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "2008063149" }, { "name": "Python", "bytes": "15187" }, { "name": "Shell", "bytes": "8147" } ], "symlink_target": "" }
from bs4 import BeautifulSoup import os import re import requests import subprocess import sys import tabulate class OutColors: DEFAULT = '\033[0m' BW = '\033[1m' LG = '\033[0m\033[32m' LR = '\033[0m\033[31m' SEEDER = '\033[1m\033[32m' LEECHER = '\033[1m\033[31m' def helper(): print(OutColors.DEFAULT + "\nSearch torrents from Kickass.to ;)") def select_torrent(): torrent = input('>> ') return torrent def download_torrent(url): fname = os.getcwd() + '/' + url.split('title=')[-1] + '.torrent' # http://stackoverflow.com/a/14114741/1302018 try: r = requests.get(url, stream=True) with open(fname, 'wb') as f: for chunk in r.iter_content(chunk_size=1024): if chunk: f.write(chunk) f.flush() except requests.exceptions.RequestException as e: print('\n' + OutColors.LR + str(e)) sys.exit(1) return fname def aksearch(): helper() tmp_url = 'http://kickass.to/usearch/' query = input('Type query: ') url = tmp_url + query + '/' try: cont = requests.get(url) except requests.exceptions.RequestException as e: raise SystemExit('\n' + OutColors.LR + str(e)) # check if no torrents found if not re.findall(r'Download torrent file', str(cont.content)): print('Torrents found: 0') aksearch() else: soup = BeautifulSoup(cont.content) # to use by age, seeders, and leechers # sample: # 700.46 MB # 5 # 2 years # 1852 # 130 al = [s.get_text() for s in soup.find_all('td', {'class':'center'})] href = [a.get('href') for a in soup.find_all('a', {'title':'Download torrent file'})] size = [t.get_text() for t in soup.find_all('td', {'class':'nobr'}) ] title = [ti.get_text() for ti in soup.find_all('a', {'class':'cellMainLink'})] age = al[2::5] seeders = al[3::5] leechers = al[4::5] # for table printing table = [[OutColors.BW + str(i+1) + OutColors.DEFAULT if (i+1) % 2 == 0 else i+1, OutColors.BW + title[i] + OutColors.DEFAULT if (i+1) % 2 == 0 else title[i], OutColors.BW + size[i] + OutColors.DEFAULT if (i+1) % 2 == 0 else size[i], OutColors.BW + age[i] + OutColors.DEFAULT if (i+1) % 2 == 0 else age[i], OutColors.SEEDER + seeders[i] + OutColors.DEFAULT if (i+1) % 2 == 0 else OutColors.LG + seeders[i] + OutColors.DEFAULT, OutColors.LEECHER + leechers[i] + OutColors.DEFAULT if (i+1) % 2 == 0 else OutColors.LR + leechers[i] + OutColors.DEFAULT] for i in range(len(href))] print() print(tabulate.tabulate(table, headers=['No', 'Title', 'Size', 'Age', 'Seeders', 'Leechers'])) # torrent selection if len(href) == 1: torrent = 1 else: print('\nSelect torrent: [ 1 - ' + str(len(href)) + ' ] or [ M ] to go back to main menu or [ Q ] to quit') torrent = select_torrent() if torrent == 'Q' or torrent == 'q': sys.exit(0) elif torrent == 'M' or torrent == 'm': aksearch() else: if int(torrent) <= 0 or int(torrent) > len(href): print('Use eyeglasses...') else: print('Download >> ' + href[int(torrent)-1].split('title=')[-1] + '.torrent') fname = download_torrent(href[int(torrent)-1]) subprocess.Popen(['xdg-open', fname], stdout=subprocess.PIPE, stderr=subprocess.PIPE) aksearch() if __name__ == '__main__': try: aksearch() except KeyboardInterrupt: print('\nHuha!')
{ "content_hash": "7e63690a0a98aecd1f0f524e9100b863", "timestamp": "", "source": "github", "line_count": 114, "max_line_length": 169, "avg_line_length": 33.675438596491226, "alnum_prop": 0.5319093513935921, "repo_name": "ActiveState/code", "id": "7989f674cb7331c4bc61870230c5b5f81a767950", "size": "4324", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "recipes/Python/578940_asskickpy/recipe-578940.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "35894" }, { "name": "C", "bytes": "56048" }, { "name": "C++", "bytes": "90880" }, { "name": "HTML", "bytes": "11656" }, { "name": "Java", "bytes": "57468" }, { "name": "JavaScript", "bytes": "181218" }, { "name": "PHP", "bytes": "250144" }, { "name": "Perl", "bytes": "37296" }, { "name": "Perl 6", "bytes": "9914" }, { "name": "Python", "bytes": "17387779" }, { "name": "Ruby", "bytes": "40233" }, { "name": "Shell", "bytes": "190732" }, { "name": "Tcl", "bytes": "674650" } ], "symlink_target": "" }
import base64 import datetime import decimal import unittest class Test_not_null(unittest.TestCase): def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _not_null return _not_null(value, field) def test_w_none_nullable(self): self.assertFalse(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): self.assertTrue(self._call_fut(None, _Field("REQUIRED"))) def test_w_value(self): self.assertTrue(self._call_fut(object(), object())) class Test_int_from_json(unittest.TestCase): def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _int_from_json return _int_from_json(value, field) def test_w_none_nullable(self): self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): self._call_fut(None, _Field("REQUIRED")) def test_w_string_value(self): coerced = self._call_fut("42", object()) self.assertEqual(coerced, 42) def test_w_float_value(self): coerced = self._call_fut(42, object()) self.assertEqual(coerced, 42) class Test_float_from_json(unittest.TestCase): def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _float_from_json return _float_from_json(value, field) def test_w_none_nullable(self): self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): self._call_fut(None, _Field("REQUIRED")) def test_w_string_value(self): coerced = self._call_fut("3.1415", object()) self.assertEqual(coerced, 3.1415) def test_w_float_value(self): coerced = self._call_fut(3.1415, object()) self.assertEqual(coerced, 3.1415) class Test_decimal_from_json(unittest.TestCase): def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _decimal_from_json return _decimal_from_json(value, field) def test_w_none_nullable(self): self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): self._call_fut(None, _Field("REQUIRED")) def test_w_string_value(self): coerced = self._call_fut("3.1415", object()) self.assertEqual(coerced, decimal.Decimal("3.1415")) def test_w_float_value(self): coerced = self._call_fut(3.1415, object()) # There is no exact float representation of 3.1415. self.assertEqual(coerced, decimal.Decimal(3.1415)) class Test_bool_from_json(unittest.TestCase): def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _bool_from_json return _bool_from_json(value, field) def test_w_none_nullable(self): self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(AttributeError): self._call_fut(None, _Field("REQUIRED")) def test_w_value_t(self): coerced = self._call_fut("T", object()) self.assertTrue(coerced) def test_w_value_true(self): coerced = self._call_fut("True", object()) self.assertTrue(coerced) def test_w_value_1(self): coerced = self._call_fut("1", object()) self.assertTrue(coerced) def test_w_value_other(self): coerced = self._call_fut("f", object()) self.assertFalse(coerced) class Test_string_from_json(unittest.TestCase): def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _string_from_json return _string_from_json(value, field) def test_w_none_nullable(self): self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): self.assertIsNone(self._call_fut(None, _Field("REQUIRED"))) def test_w_string_value(self): coerced = self._call_fut("Wonderful!", object()) self.assertEqual(coerced, "Wonderful!") class Test_bytes_from_json(unittest.TestCase): def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _bytes_from_json return _bytes_from_json(value, field) def test_w_none_nullable(self): self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): self._call_fut(None, _Field("REQUIRED")) def test_w_base64_encoded_bytes(self): expected = b"Wonderful!" encoded = base64.standard_b64encode(expected) coerced = self._call_fut(encoded, object()) self.assertEqual(coerced, expected) def test_w_base64_encoded_text(self): expected = b"Wonderful!" encoded = base64.standard_b64encode(expected).decode("ascii") coerced = self._call_fut(encoded, object()) self.assertEqual(coerced, expected) class Test_timestamp_from_json(unittest.TestCase): def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _timestamp_from_json return _timestamp_from_json(value, field) def test_w_none_nullable(self): self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): self._call_fut(None, _Field("REQUIRED")) def test_w_string_value(self): from google.cloud._helpers import _EPOCH coerced = self._call_fut("1.234567", object()) self.assertEqual( coerced, _EPOCH + datetime.timedelta(seconds=1, microseconds=234567) ) def test_w_float_value(self): from google.cloud._helpers import _EPOCH coerced = self._call_fut(1.234567, object()) self.assertEqual( coerced, _EPOCH + datetime.timedelta(seconds=1, microseconds=234567) ) class Test_timestamp_query_param_from_json(unittest.TestCase): def _call_fut(self, value, field): from google.cloud.bigquery import _helpers return _helpers._timestamp_query_param_from_json(value, field) def test_w_none_nullable(self): self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_timestamp_valid(self): from google.cloud._helpers import UTC samples = [ ( "2016-12-20 15:58:27.339328+00:00", datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC), ), ( "2016-12-20 15:58:27+00:00", datetime.datetime(2016, 12, 20, 15, 58, 27, tzinfo=UTC), ), ( "2016-12-20T15:58:27.339328+00:00", datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC), ), ( "2016-12-20T15:58:27+00:00", datetime.datetime(2016, 12, 20, 15, 58, 27, tzinfo=UTC), ), ( "2016-12-20 15:58:27.339328Z", datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC), ), ( "2016-12-20 15:58:27Z", datetime.datetime(2016, 12, 20, 15, 58, 27, tzinfo=UTC), ), ( "2016-12-20T15:58:27.339328Z", datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC), ), ( "2016-12-20T15:58:27Z", datetime.datetime(2016, 12, 20, 15, 58, 27, tzinfo=UTC), ), ] for timestamp_str, expected_result in samples: self.assertEqual( self._call_fut(timestamp_str, _Field("NULLABLE")), expected_result ) def test_w_timestamp_invalid(self): with self.assertRaises(ValueError): self._call_fut("definitely-not-a-timestamp", _Field("NULLABLE")) class Test_datetime_from_json(unittest.TestCase): def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _datetime_from_json return _datetime_from_json(value, field) def test_w_none_nullable(self): self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): self._call_fut(None, _Field("REQUIRED")) def test_w_string_value(self): coerced = self._call_fut("2016-12-02T18:51:33", object()) self.assertEqual(coerced, datetime.datetime(2016, 12, 2, 18, 51, 33)) def test_w_microseconds(self): coerced = self._call_fut("2015-05-22T10:11:12.987654", object()) self.assertEqual(coerced, datetime.datetime(2015, 5, 22, 10, 11, 12, 987654)) class Test_date_from_json(unittest.TestCase): def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _date_from_json return _date_from_json(value, field) def test_w_none_nullable(self): self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): self._call_fut(None, _Field("REQUIRED")) def test_w_string_value(self): coerced = self._call_fut("1987-09-22", object()) self.assertEqual(coerced, datetime.date(1987, 9, 22)) class Test_time_from_json(unittest.TestCase): def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _time_from_json return _time_from_json(value, field) def test_w_none_nullable(self): self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): self._call_fut(None, _Field("REQUIRED")) def test_w_string_value(self): coerced = self._call_fut("12:12:27", object()) self.assertEqual(coerced, datetime.time(12, 12, 27)) def test_w_subsecond_string_value(self): coerced = self._call_fut("12:12:27.123456", object()) self.assertEqual(coerced, datetime.time(12, 12, 27, 123456)) def test_w_bogus_string_value(self): with self.assertRaises(ValueError): self._call_fut("12:12:27.123", object()) class Test_record_from_json(unittest.TestCase): def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _record_from_json return _record_from_json(value, field) def test_w_none_nullable(self): self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): self._call_fut(None, _Field("REQUIRED")) def test_w_nullable_subfield_none(self): subfield = _Field("NULLABLE", "age", "INTEGER") field = _Field("REQUIRED", fields=[subfield]) value = {"f": [{"v": None}]} coerced = self._call_fut(value, field) self.assertEqual(coerced, {"age": None}) def test_w_scalar_subfield(self): subfield = _Field("REQUIRED", "age", "INTEGER") field = _Field("REQUIRED", fields=[subfield]) value = {"f": [{"v": 42}]} coerced = self._call_fut(value, field) self.assertEqual(coerced, {"age": 42}) def test_w_scalar_subfield_geography(self): subfield = _Field("REQUIRED", "geo", "GEOGRAPHY") field = _Field("REQUIRED", fields=[subfield]) value = {"f": [{"v": "POINT(1, 2)"}]} coerced = self._call_fut(value, field) self.assertEqual(coerced, {"geo": "POINT(1, 2)"}) def test_w_repeated_subfield(self): subfield = _Field("REPEATED", "color", "STRING") field = _Field("REQUIRED", fields=[subfield]) value = {"f": [{"v": [{"v": "red"}, {"v": "yellow"}, {"v": "blue"}]}]} coerced = self._call_fut(value, field) self.assertEqual(coerced, {"color": ["red", "yellow", "blue"]}) def test_w_record_subfield(self): full_name = _Field("REQUIRED", "full_name", "STRING") area_code = _Field("REQUIRED", "area_code", "STRING") local_number = _Field("REQUIRED", "local_number", "STRING") rank = _Field("REQUIRED", "rank", "INTEGER") phone = _Field( "NULLABLE", "phone", "RECORD", fields=[area_code, local_number, rank] ) person = _Field("REQUIRED", "person", "RECORD", fields=[full_name, phone]) value = { "f": [ {"v": "Phred Phlyntstone"}, {"v": {"f": [{"v": "800"}, {"v": "555-1212"}, {"v": 1}]}}, ] } expected = { "full_name": "Phred Phlyntstone", "phone": {"area_code": "800", "local_number": "555-1212", "rank": 1}, } coerced = self._call_fut(value, person) self.assertEqual(coerced, expected) class Test_field_to_index_mapping(unittest.TestCase): def _call_fut(self, schema): from google.cloud.bigquery._helpers import _field_to_index_mapping return _field_to_index_mapping(schema) def test_w_empty_schema(self): self.assertEqual(self._call_fut([]), {}) def test_w_non_empty_schema(self): schema = [ _Field("REPEATED", "first", "INTEGER"), _Field("REQUIRED", "second", "INTEGER"), _Field("REPEATED", "third", "INTEGER"), ] self.assertEqual(self._call_fut(schema), {"first": 0, "second": 1, "third": 2}) class Test_row_tuple_from_json(unittest.TestCase): def _call_fut(self, row, schema): from google.cloud.bigquery._helpers import _row_tuple_from_json return _row_tuple_from_json(row, schema) def test_w_single_scalar_column(self): # SELECT 1 AS col col = _Field("REQUIRED", "col", "INTEGER") row = {u"f": [{u"v": u"1"}]} self.assertEqual(self._call_fut(row, schema=[col]), (1,)) def test_w_single_scalar_geography_column(self): # SELECT 1 AS col col = _Field("REQUIRED", "geo", "GEOGRAPHY") row = {u"f": [{u"v": u"POINT(1, 2)"}]} self.assertEqual(self._call_fut(row, schema=[col]), ("POINT(1, 2)",)) def test_w_single_struct_column(self): # SELECT (1, 2) AS col sub_1 = _Field("REQUIRED", "sub_1", "INTEGER") sub_2 = _Field("REQUIRED", "sub_2", "INTEGER") col = _Field("REQUIRED", "col", "RECORD", fields=[sub_1, sub_2]) row = {u"f": [{u"v": {u"f": [{u"v": u"1"}, {u"v": u"2"}]}}]} self.assertEqual(self._call_fut(row, schema=[col]), ({"sub_1": 1, "sub_2": 2},)) def test_w_single_array_column(self): # SELECT [1, 2, 3] as col col = _Field("REPEATED", "col", "INTEGER") row = {u"f": [{u"v": [{u"v": u"1"}, {u"v": u"2"}, {u"v": u"3"}]}]} self.assertEqual(self._call_fut(row, schema=[col]), ([1, 2, 3],)) def test_w_struct_w_nested_array_column(self): # SELECT ([1, 2], 3, [4, 5]) as col first = _Field("REPEATED", "first", "INTEGER") second = _Field("REQUIRED", "second", "INTEGER") third = _Field("REPEATED", "third", "INTEGER") col = _Field("REQUIRED", "col", "RECORD", fields=[first, second, third]) row = { u"f": [ { u"v": { u"f": [ {u"v": [{u"v": u"1"}, {u"v": u"2"}]}, {u"v": u"3"}, {u"v": [{u"v": u"4"}, {u"v": u"5"}]}, ] } } ] } self.assertEqual( self._call_fut(row, schema=[col]), ({u"first": [1, 2], u"second": 3, u"third": [4, 5]},), ) def test_w_array_of_struct(self): # SELECT [(1, 2, 3), (4, 5, 6)] as col first = _Field("REQUIRED", "first", "INTEGER") second = _Field("REQUIRED", "second", "INTEGER") third = _Field("REQUIRED", "third", "INTEGER") col = _Field("REPEATED", "col", "RECORD", fields=[first, second, third]) row = { u"f": [ { u"v": [ {u"v": {u"f": [{u"v": u"1"}, {u"v": u"2"}, {u"v": u"3"}]}}, {u"v": {u"f": [{u"v": u"4"}, {u"v": u"5"}, {u"v": u"6"}]}}, ] } ] } self.assertEqual( self._call_fut(row, schema=[col]), ( [ {u"first": 1, u"second": 2, u"third": 3}, {u"first": 4, u"second": 5, u"third": 6}, ], ), ) def test_w_array_of_struct_w_array(self): # SELECT [([1, 2, 3], 4), ([5, 6], 7)] first = _Field("REPEATED", "first", "INTEGER") second = _Field("REQUIRED", "second", "INTEGER") col = _Field("REPEATED", "col", "RECORD", fields=[first, second]) row = { u"f": [ { u"v": [ { u"v": { u"f": [ {u"v": [{u"v": u"1"}, {u"v": u"2"}, {u"v": u"3"}]}, {u"v": u"4"}, ] } }, { u"v": { u"f": [ {u"v": [{u"v": u"5"}, {u"v": u"6"}]}, {u"v": u"7"}, ] } }, ] } ] } self.assertEqual( self._call_fut(row, schema=[col]), ([{u"first": [1, 2, 3], u"second": 4}, {u"first": [5, 6], u"second": 7}],), ) class Test_rows_from_json(unittest.TestCase): def _call_fut(self, rows, schema): from google.cloud.bigquery._helpers import _rows_from_json return _rows_from_json(rows, schema) def test_w_record_subfield(self): from google.cloud.bigquery.table import Row full_name = _Field("REQUIRED", "full_name", "STRING") area_code = _Field("REQUIRED", "area_code", "STRING") local_number = _Field("REQUIRED", "local_number", "STRING") rank = _Field("REQUIRED", "rank", "INTEGER") phone = _Field( "NULLABLE", "phone", "RECORD", fields=[area_code, local_number, rank] ) color = _Field("REPEATED", "color", "STRING") schema = [full_name, phone, color] rows = [ { "f": [ {"v": "Phred Phlyntstone"}, {"v": {"f": [{"v": "800"}, {"v": "555-1212"}, {"v": 1}]}}, {"v": [{"v": "orange"}, {"v": "black"}]}, ] }, { "f": [ {"v": "Bharney Rhubble"}, {"v": {"f": [{"v": "877"}, {"v": "768-5309"}, {"v": 2}]}}, {"v": [{"v": "brown"}]}, ] }, {"f": [{"v": "Wylma Phlyntstone"}, {"v": None}, {"v": []}]}, ] phred_phone = {"area_code": "800", "local_number": "555-1212", "rank": 1} bharney_phone = {"area_code": "877", "local_number": "768-5309", "rank": 2} f2i = {"full_name": 0, "phone": 1, "color": 2} expected = [ Row(("Phred Phlyntstone", phred_phone, ["orange", "black"]), f2i), Row(("Bharney Rhubble", bharney_phone, ["brown"]), f2i), Row(("Wylma Phlyntstone", None, []), f2i), ] coerced = self._call_fut(rows, schema) self.assertEqual(coerced, expected) def test_w_int64_float64_bool(self): from google.cloud.bigquery.table import Row # "Standard" SQL dialect uses 'INT64', 'FLOAT64', 'BOOL'. candidate = _Field("REQUIRED", "candidate", "STRING") votes = _Field("REQUIRED", "votes", "INT64") percentage = _Field("REQUIRED", "percentage", "FLOAT64") incumbent = _Field("REQUIRED", "incumbent", "BOOL") schema = [candidate, votes, percentage, incumbent] rows = [ {"f": [{"v": "Phred Phlyntstone"}, {"v": 8}, {"v": 0.25}, {"v": "true"}]}, {"f": [{"v": "Bharney Rhubble"}, {"v": 4}, {"v": 0.125}, {"v": "false"}]}, { "f": [ {"v": "Wylma Phlyntstone"}, {"v": 20}, {"v": 0.625}, {"v": "false"}, ] }, ] f2i = {"candidate": 0, "votes": 1, "percentage": 2, "incumbent": 3} expected = [ Row(("Phred Phlyntstone", 8, 0.25, True), f2i), Row(("Bharney Rhubble", 4, 0.125, False), f2i), Row(("Wylma Phlyntstone", 20, 0.625, False), f2i), ] coerced = self._call_fut(rows, schema) self.assertEqual(coerced, expected) class Test_int_to_json(unittest.TestCase): def _call_fut(self, value): from google.cloud.bigquery._helpers import _int_to_json return _int_to_json(value) def test_w_int(self): self.assertEqual(self._call_fut(123), "123") def test_w_string(self): self.assertEqual(self._call_fut("123"), "123") class Test_float_to_json(unittest.TestCase): def _call_fut(self, value): from google.cloud.bigquery._helpers import _float_to_json return _float_to_json(value) def test_w_float(self): self.assertEqual(self._call_fut(1.23), 1.23) class Test_decimal_to_json(unittest.TestCase): def _call_fut(self, value): from google.cloud.bigquery._helpers import _decimal_to_json return _decimal_to_json(value) def test_w_float(self): self.assertEqual(self._call_fut(1.23), 1.23) def test_w_string(self): self.assertEqual(self._call_fut("1.23"), "1.23") def test_w_decimal(self): self.assertEqual(self._call_fut(decimal.Decimal("1.23")), "1.23") class Test_bool_to_json(unittest.TestCase): def _call_fut(self, value): from google.cloud.bigquery._helpers import _bool_to_json return _bool_to_json(value) def test_w_true(self): self.assertEqual(self._call_fut(True), "true") def test_w_false(self): self.assertEqual(self._call_fut(False), "false") def test_w_string(self): self.assertEqual(self._call_fut("false"), "false") class Test_bytes_to_json(unittest.TestCase): def _call_fut(self, value): from google.cloud.bigquery._helpers import _bytes_to_json return _bytes_to_json(value) def test_w_non_bytes(self): non_bytes = object() self.assertIs(self._call_fut(non_bytes), non_bytes) def test_w_bytes(self): source = b"source" expected = u"c291cmNl" converted = self._call_fut(source) self.assertEqual(converted, expected) class Test_timestamp_to_json_parameter(unittest.TestCase): def _call_fut(self, value): from google.cloud.bigquery._helpers import _timestamp_to_json_parameter return _timestamp_to_json_parameter(value) def test_w_float(self): self.assertEqual(self._call_fut(1.234567), 1.234567) def test_w_string(self): ZULU = "2016-12-20 15:58:27.339328+00:00" self.assertEqual(self._call_fut(ZULU), ZULU) def test_w_datetime_wo_zone(self): ZULU = "2016-12-20 15:58:27.339328+00:00" when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328) self.assertEqual(self._call_fut(when), ZULU) def test_w_datetime_w_non_utc_zone(self): class _Zone(datetime.tzinfo): def utcoffset(self, _): return datetime.timedelta(minutes=-240) ZULU = "2016-12-20 19:58:27.339328+00:00" when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=_Zone()) self.assertEqual(self._call_fut(when), ZULU) def test_w_datetime_w_utc_zone(self): from google.cloud._helpers import UTC ZULU = "2016-12-20 15:58:27.339328+00:00" when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC) self.assertEqual(self._call_fut(when), ZULU) class Test_timestamp_to_json_row(unittest.TestCase): def _call_fut(self, value): from google.cloud.bigquery._helpers import _timestamp_to_json_row return _timestamp_to_json_row(value) def test_w_float(self): self.assertEqual(self._call_fut(1.234567), 1.234567) def test_w_string(self): ZULU = "2016-12-20 15:58:27.339328+00:00" self.assertEqual(self._call_fut(ZULU), ZULU) def test_w_datetime(self): from google.cloud._helpers import _microseconds_from_datetime when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328) self.assertEqual(self._call_fut(when), _microseconds_from_datetime(when) / 1e6) class Test_datetime_to_json(unittest.TestCase): def _call_fut(self, value): from google.cloud.bigquery._helpers import _datetime_to_json return _datetime_to_json(value) def test_w_string(self): RFC3339 = "2016-12-03T14:14:51Z" self.assertEqual(self._call_fut(RFC3339), RFC3339) def test_w_datetime(self): from google.cloud._helpers import UTC when = datetime.datetime(2016, 12, 3, 14, 11, 27, 123456, tzinfo=UTC) self.assertEqual(self._call_fut(when), "2016-12-03T14:11:27.123456") class Test_date_to_json(unittest.TestCase): def _call_fut(self, value): from google.cloud.bigquery._helpers import _date_to_json return _date_to_json(value) def test_w_string(self): RFC3339 = "2016-12-03" self.assertEqual(self._call_fut(RFC3339), RFC3339) def test_w_datetime(self): when = datetime.date(2016, 12, 3) self.assertEqual(self._call_fut(when), "2016-12-03") class Test_time_to_json(unittest.TestCase): def _call_fut(self, value): from google.cloud.bigquery._helpers import _time_to_json return _time_to_json(value) def test_w_string(self): RFC3339 = "12:13:41" self.assertEqual(self._call_fut(RFC3339), RFC3339) def test_w_datetime(self): when = datetime.time(12, 13, 41) self.assertEqual(self._call_fut(when), "12:13:41") class Test_snake_to_camel_case(unittest.TestCase): def _call_fut(self, value): from google.cloud.bigquery._helpers import _snake_to_camel_case return _snake_to_camel_case(value) def test_w_snake_case_string(self): self.assertEqual(self._call_fut("friendly_name"), "friendlyName") def test_w_camel_case_string(self): self.assertEqual(self._call_fut("friendlyName"), "friendlyName") class Test__get_sub_prop(unittest.TestCase): def _call_fut(self, container, keys, **kw): from google.cloud.bigquery._helpers import _get_sub_prop return _get_sub_prop(container, keys, **kw) def test_w_empty_container_default_default(self): self.assertIsNone(self._call_fut({}, ["key1"])) def test_w_missing_key_explicit_default(self): self.assertEqual(self._call_fut({"key2": 2}, ["key1"], default=1), 1) def test_w_matching_single_key(self): self.assertEqual(self._call_fut({"key1": 1}, ["key1"]), 1) def test_w_matching_first_key_missing_second_key(self): self.assertIsNone(self._call_fut({"key1": {"key3": 3}}, ["key1", "key2"])) def test_w_matching_first_key_matching_second_key(self): self.assertEqual(self._call_fut({"key1": {"key2": 2}}, ["key1", "key2"]), 2) class Test__set_sub_prop(unittest.TestCase): def _call_fut(self, container, keys, value): from google.cloud.bigquery._helpers import _set_sub_prop return _set_sub_prop(container, keys, value) def test_w_empty_container_single_key(self): container = {} self._call_fut(container, ["key1"], "value") self.assertEqual(container, {"key1": "value"}) def test_w_empty_container_nested_keys(self): container = {} self._call_fut(container, ["key1", "key2", "key3"], "value") self.assertEqual(container, {"key1": {"key2": {"key3": "value"}}}) def test_w_existing_value(self): container = {"key1": "before"} self._call_fut(container, ["key1"], "after") self.assertEqual(container, {"key1": "after"}) def test_w_nested_keys_existing_value(self): container = {"key1": {"key2": {"key3": "before"}}} self._call_fut(container, ["key1", "key2", "key3"], "after") self.assertEqual(container, {"key1": {"key2": {"key3": "after"}}}) class Test__del_sub_prop(unittest.TestCase): def _call_fut(self, container, keys): from google.cloud.bigquery._helpers import _del_sub_prop return _del_sub_prop(container, keys) def test_w_single_key(self): container = {"key1": "value"} self._call_fut(container, ["key1"]) self.assertEqual(container, {}) def test_w_empty_container_nested_keys(self): container = {} self._call_fut(container, ["key1", "key2", "key3"]) self.assertEqual(container, {"key1": {"key2": {}}}) def test_w_existing_value_nested_keys(self): container = {"key1": {"key2": {"key3": "value"}}} self._call_fut(container, ["key1", "key2", "key3"]) self.assertEqual(container, {"key1": {"key2": {}}}) class Test__int_or_none(unittest.TestCase): def _call_fut(self, value): from google.cloud.bigquery._helpers import _int_or_none return _int_or_none(value) def test_w_num_string(self): self.assertEqual(self._call_fut("123"), 123) def test_w_none(self): self.assertIsNone(self._call_fut(None)) def test_w_int(self): self.assertEqual(self._call_fut(123), 123) def test_w_non_num_string(self): with self.assertRaises(ValueError): self._call_fut("ham") class Test__str_or_none(unittest.TestCase): def _call_fut(self, value): from google.cloud.bigquery._helpers import _str_or_none return _str_or_none(value) def test_w_int(self): self.assertEqual(self._call_fut(123), "123") def test_w_none(self): self.assertIsNone(self._call_fut(None)) def test_w_str(self): self.assertEqual(self._call_fut("ham"), "ham") class _Field(object): def __init__(self, mode, name="unknown", field_type="UNKNOWN", fields=()): self.mode = mode self.name = name self.field_type = field_type self.fields = fields
{ "content_hash": "1c3668c945969482fad585eb60de6e7f", "timestamp": "", "source": "github", "line_count": 890, "max_line_length": 88, "avg_line_length": 34.6561797752809, "alnum_prop": 0.5584554532486059, "repo_name": "dhermes/google-cloud-python", "id": "c2c4f9f7f7874e014d896d9b6b6306df07b9660a", "size": "31419", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "bigquery/tests/unit/test__helpers.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "936" }, { "name": "Makefile", "bytes": "1779" }, { "name": "Python", "bytes": "13118304" }, { "name": "Shell", "bytes": "8606" } ], "symlink_target": "" }
""" Tests For Filter Scheduler. """ import mox from nova.compute import utils as compute_utils from nova.compute import vm_states from nova import context from nova import db from nova import exception from nova.scheduler import driver from nova.scheduler import filter_scheduler from nova.scheduler import host_manager from nova.scheduler import least_cost from nova.tests.scheduler import fakes from nova.tests.scheduler import test_scheduler def fake_filter_hosts(hosts, filter_properties): return list(hosts) class FilterSchedulerTestCase(test_scheduler.SchedulerTestCase): """Test case for Filter Scheduler.""" driver_cls = filter_scheduler.FilterScheduler def test_run_instance_no_hosts(self): def _fake_empty_call_zone_method(*args, **kwargs): return [] sched = fakes.FakeFilterScheduler() uuid = 'fake-uuid1' fake_context = context.RequestContext('user', 'project') request_spec = {'instance_type': {'memory_mb': 1, 'root_gb': 1, 'ephemeral_gb': 0}, 'instance_properties': {'project_id': 1}, 'instance_uuids': [uuid]} self.mox.StubOutWithMock(compute_utils, 'add_instance_fault_from_exc') self.mox.StubOutWithMock(db, 'instance_update_and_get_original') compute_utils.add_instance_fault_from_exc(fake_context, uuid, mox.IsA(exception.NoValidHost), mox.IgnoreArg()) db.instance_update_and_get_original(fake_context, uuid, {'vm_state': vm_states.ERROR, 'task_state': None}).AndReturn(({}, {})) self.mox.ReplayAll() sched.schedule_run_instance( fake_context, request_spec, None, None, None, None, {}) def test_run_instance_non_admin(self): self.was_admin = False def fake_get(context, *args, **kwargs): # make sure this is called with admin context, even though # we're using user context below self.was_admin = context.is_admin return {} sched = fakes.FakeFilterScheduler() self.stubs.Set(sched.host_manager, 'get_all_host_states', fake_get) fake_context = context.RequestContext('user', 'project') uuid = 'fake-uuid1' request_spec = {'instance_type': {'memory_mb': 1, 'local_gb': 1}, 'instance_properties': {'project_id': 1}, 'instance_uuids': [uuid]} self.mox.StubOutWithMock(compute_utils, 'add_instance_fault_from_exc') self.mox.StubOutWithMock(db, 'instance_update_and_get_original') compute_utils.add_instance_fault_from_exc(fake_context, uuid, mox.IsA(exception.NoValidHost), mox.IgnoreArg()) db.instance_update_and_get_original(fake_context, uuid, {'vm_state': vm_states.ERROR, 'task_state': None}).AndReturn(({}, {})) self.mox.ReplayAll() sched.schedule_run_instance( fake_context, request_spec, None, None, None, None, {}) self.assertTrue(self.was_admin) def test_schedule_bad_topic(self): """Parameter checking.""" sched = fakes.FakeFilterScheduler() fake_context = context.RequestContext('user', 'project') self.assertRaises(NotImplementedError, sched._schedule, fake_context, "foo", {}, {}) def test_scheduler_includes_launch_index(self): ctxt = "fake-context" fake_kwargs = {'fake_kwarg1': 'fake_value1', 'fake_kwarg2': 'fake_value2'} instance_opts = {'fake_opt1': 'meow'} request_spec = {'instance_uuids': ['fake-uuid1', 'fake-uuid2'], 'instance_properties': instance_opts} instance1 = {'uuid': 'fake-uuid1'} instance2 = {'uuid': 'fake-uuid2'} def _has_launch_index(expected_index): """Return a function that verifies the expected index.""" def _check_launch_index(value): if 'instance_properties' in value: if 'launch_index' in value['instance_properties']: index = value['instance_properties']['launch_index'] if index == expected_index: return True return False return _check_launch_index class ContextFake(object): def elevated(self): return ctxt context_fake = ContextFake() self.mox.StubOutWithMock(self.driver, '_schedule') self.mox.StubOutWithMock(self.driver, '_provision_resource') self.driver._schedule(context_fake, 'compute', request_spec, {}, ['fake-uuid1', 'fake-uuid2'] ).AndReturn(['host1', 'host2']) # instance 1 self.driver._provision_resource( ctxt, 'host1', mox.Func(_has_launch_index(0)), {}, None, None, None, None, instance_uuid='fake-uuid1').AndReturn(instance1) # instance 2 self.driver._provision_resource( ctxt, 'host2', mox.Func(_has_launch_index(1)), {}, None, None, None, None, instance_uuid='fake-uuid2').AndReturn(instance2) self.mox.ReplayAll() self.driver.schedule_run_instance(context_fake, request_spec, None, None, None, None, {}) def test_schedule_happy_day(self): """Make sure there's nothing glaringly wrong with _schedule() by doing a happy day pass through.""" self.next_weight = 1.0 def _fake_weighted_sum(functions, hosts, options): self.next_weight += 2.0 host_state = hosts[0] return least_cost.WeightedHost(self.next_weight, host_state=host_state) sched = fakes.FakeFilterScheduler() fake_context = context.RequestContext('user', 'project', is_admin=True) self.stubs.Set(sched.host_manager, 'filter_hosts', fake_filter_hosts) self.stubs.Set(least_cost, 'weighted_sum', _fake_weighted_sum) fakes.mox_host_manager_db_calls(self.mox, fake_context) request_spec = {'num_instances': 10, 'instance_type': {'memory_mb': 512, 'root_gb': 512, 'ephemeral_gb': 0, 'vcpus': 1}, 'instance_properties': {'project_id': 1, 'root_gb': 512, 'memory_mb': 512, 'ephemeral_gb': 0, 'vcpus': 1}} self.mox.ReplayAll() weighted_hosts = sched._schedule(fake_context, 'compute', request_spec, {}) self.assertEquals(len(weighted_hosts), 10) for weighted_host in weighted_hosts: self.assertTrue(weighted_host.host_state is not None) def test_schedule_prep_resize_doesnt_update_host(self): fake_context = context.RequestContext('user', 'project', is_admin=True) sched = fakes.FakeFilterScheduler() def _return_hosts(*args, **kwargs): host_state = host_manager.HostState('host2', 'compute') return [least_cost.WeightedHost(1.0, host_state=host_state)] self.stubs.Set(sched, '_schedule', _return_hosts) info = {'called': 0} def _fake_instance_update_db(*args, **kwargs): # This should not be called info['called'] = 1 self.stubs.Set(driver, 'instance_update_db', _fake_instance_update_db) instance = {'uuid': 'fake-uuid', 'host': 'host1'} sched.schedule_prep_resize(fake_context, {}, {}, {}, instance, {}, None) self.assertEqual(info['called'], 0) def test_get_cost_functions(self): fixture = fakes.FakeFilterScheduler() fns = fixture.get_cost_functions() self.assertEquals(len(fns), 1) weight, fn = fns[0] self.assertEquals(weight, -1.0) hostinfo = host_manager.HostState('host', 'compute') hostinfo.update_from_compute_node(dict(memory_mb=1000, local_gb=0, vcpus=1, disk_available_least=1000, free_disk_mb=1000, free_ram_mb=872, vcpus_used=0, local_gb_used=0, updated_at=None)) self.assertEquals(872, fn(hostinfo, {})) def test_max_attempts(self): self.flags(scheduler_max_attempts=4) sched = fakes.FakeFilterScheduler() self.assertEqual(4, sched._max_attempts()) def test_invalid_max_attempts(self): self.flags(scheduler_max_attempts=0) sched = fakes.FakeFilterScheduler() self.assertRaises(exception.NovaException, sched._max_attempts) def test_retry_disabled(self): """Retry info should not get populated when re-scheduling is off""" self.flags(scheduler_max_attempts=1) sched = fakes.FakeFilterScheduler() instance_properties = {} request_spec = dict(instance_properties=instance_properties) filter_properties = {} sched._schedule(self.context, 'compute', request_spec, filter_properties=filter_properties) # should not have retry info in the populated filter properties: self.assertFalse("retry" in filter_properties) def test_retry_attempt_one(self): """Test retry logic on initial scheduling attempt""" self.flags(scheduler_max_attempts=2) sched = fakes.FakeFilterScheduler() instance_properties = {} request_spec = dict(instance_properties=instance_properties) filter_properties = {} sched._schedule(self.context, 'compute', request_spec, filter_properties=filter_properties) num_attempts = filter_properties['retry']['num_attempts'] self.assertEqual(1, num_attempts) def test_retry_attempt_two(self): """Test retry logic when re-scheduling""" self.flags(scheduler_max_attempts=2) sched = fakes.FakeFilterScheduler() instance_properties = {} request_spec = dict(instance_properties=instance_properties) retry = dict(num_attempts=1) filter_properties = dict(retry=retry) sched._schedule(self.context, 'compute', request_spec, filter_properties=filter_properties) num_attempts = filter_properties['retry']['num_attempts'] self.assertEqual(2, num_attempts) def test_retry_exceeded_max_attempts(self): """Test for necessary explosion when max retries is exceeded""" self.flags(scheduler_max_attempts=2) sched = fakes.FakeFilterScheduler() instance_properties = {} request_spec = dict(instance_properties=instance_properties) retry = dict(num_attempts=2) filter_properties = dict(retry=retry) self.assertRaises(exception.NoValidHost, sched._schedule, self.context, 'compute', request_spec, filter_properties=filter_properties) def test_add_retry_host(self): retry = dict(num_attempts=1, hosts=[]) filter_properties = dict(retry=retry) host = "fakehost" sched = fakes.FakeFilterScheduler() sched._add_retry_host(filter_properties, host) hosts = filter_properties['retry']['hosts'] self.assertEqual(1, len(hosts)) self.assertEqual(host, hosts[0])
{ "content_hash": "cc4ea87b0fd9964d7e85d471281a09b1", "timestamp": "", "source": "github", "line_count": 300, "max_line_length": 79, "avg_line_length": 38.943333333333335, "alnum_prop": 0.5850380895317984, "repo_name": "paulmathews/nova", "id": "12369ec10f8169b424a68a5b89c16d3301fd6ee1", "size": "12312", "binary": false, "copies": "1", "ref": "refs/heads/stable/folsom", "path": "nova/tests/scheduler/test_filter_scheduler.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "16002" }, { "name": "JavaScript", "bytes": "7403" }, { "name": "Python", "bytes": "7293434" }, { "name": "Shell", "bytes": "16910" } ], "symlink_target": "" }
'''OpenGL extension ARB.framebuffer_object_DEPRECATED Automatically generated by the get_gl_extensions script, do not edit! ''' from OpenGL import platform, constants, constant, arrays from OpenGL import extensions from OpenGL.GL import glget import ctypes EXTENSION_NAME = 'GL_ARB_framebuffer_object' _DEPRECATED = True GL_INDEX = constant.Constant( 'GL_INDEX', 0x8222 ) GL_TEXTURE_LUMINANCE_TYPE = constant.Constant( 'GL_TEXTURE_LUMINANCE_TYPE', 0x8C14 ) GL_TEXTURE_INTENSITY_TYPE = constant.Constant( 'GL_TEXTURE_INTENSITY_TYPE', 0x8C15 ) def glInitFramebufferObjectDeprecatedARB(): '''Return boolean indicating whether this extension is available''' return extensions.hasGLExtension( EXTENSION_NAME )
{ "content_hash": "6806adea817a5deb49f6831d57a5b072", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 84, "avg_line_length": 39.77777777777778, "alnum_prop": 0.7863128491620112, "repo_name": "Universal-Model-Converter/UMC3.0a", "id": "0c9cc1a622839e7531d5d4387db3eeb3c9fed9b9", "size": "716", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "data/Python/x86/Lib/site-packages/OpenGL/raw/GL/ARB/framebuffer_object_DEPRECATED.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "226" }, { "name": "C", "bytes": "1082640" }, { "name": "C#", "bytes": "8440" }, { "name": "C++", "bytes": "3621086" }, { "name": "CSS", "bytes": "6226" }, { "name": "F#", "bytes": "2310" }, { "name": "FORTRAN", "bytes": "7795" }, { "name": "Forth", "bytes": "506" }, { "name": "GLSL", "bytes": "1040" }, { "name": "Groff", "bytes": "5943" }, { "name": "HTML", "bytes": "1196266" }, { "name": "Java", "bytes": "5793" }, { "name": "Makefile", "bytes": "1109" }, { "name": "Mask", "bytes": "969" }, { "name": "Matlab", "bytes": "4346" }, { "name": "Python", "bytes": "33351557" }, { "name": "R", "bytes": "1370" }, { "name": "Shell", "bytes": "6931" }, { "name": "Tcl", "bytes": "2084458" }, { "name": "Visual Basic", "bytes": "481" } ], "symlink_target": "" }
from PyQt4 import QtGui from PyQt4 import QtCore class BrowserModel(QtGui.QAbstractItemModel) class Main(QtGui.QTreeView): def __init__(self): QtGui.QTreeView.__init__(self) model = QtGui.QFileSystemModel() model.setRootPath( QtCore.QDir.currentPath() ) self.setModel(model) QtCore.QObject.connect(self.selectionModel(), QtCore.SIGNAL('selectionChanged(QItemSelection, QItemSelection)'), self.test) @QtCore.pyqtSlot("QItemSelection, QItemSelection") def test(self, selected, deselected): print("hello!") print(selected) print(deselected) if __name__ == '__main__': import sys app = QtGui.QApplication(sys.argv) w = Main() w.show() sys.exit(app.exec_())
{ "content_hash": "c266accf7bc410d200c2ac5b6bc8c5b2", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 127, "avg_line_length": 25.03448275862069, "alnum_prop": 0.6859504132231405, "repo_name": "Schizo/MediaBrowser", "id": "7a3c3d40cdddb1629346387363f772ca54f241af", "size": "726", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "python/Temp/sandboxTreeView.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "23" }, { "name": "Python", "bytes": "94708" } ], "symlink_target": "" }
from distutils.core import setup, Extension import os if 'BASE_TOOLS_PATH' not in os.environ: raise "Please define BASE_TOOLS_PATH to the root of base tools tree" BaseToolsDir = os.environ['BASE_TOOLS_PATH'] setup( name="EfiCompressor", version="0.01", ext_modules=[ Extension( 'EfiCompressor', sources=[ os.path.join(BaseToolsDir, 'Source', 'C', 'Common', 'Decompress.c'), 'EfiCompressor.c' ], include_dirs=[ os.path.join(BaseToolsDir, 'Source', 'C', 'Include'), os.path.join(BaseToolsDir, 'Source', 'C', 'Include', 'Ia32'), os.path.join(BaseToolsDir, 'Source', 'C', 'Common') ], ) ], )
{ "content_hash": "c234b772312828950605fc04c0b09e46", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 84, "avg_line_length": 30.192307692307693, "alnum_prop": 0.5286624203821656, "repo_name": "egraba/vbox_openbsd", "id": "89ac07b91f8fb098b4a1021e7d94b3a276072f59", "size": "1362", "binary": false, "copies": "12", "ref": "refs/heads/master", "path": "VirtualBox-5.0.0/src/VBox/Devices/EFI/Firmware/BaseTools/Source/C/PyEfiCompressor/setup.py", "mode": "33261", "license": "mit", "language": [ { "name": "Ada", "bytes": "88714" }, { "name": "Assembly", "bytes": "4303680" }, { "name": "AutoIt", "bytes": "2187" }, { "name": "Batchfile", "bytes": "95534" }, { "name": "C", "bytes": "192632221" }, { "name": "C#", "bytes": "64255" }, { "name": "C++", "bytes": "83842667" }, { "name": "CLIPS", "bytes": "5291" }, { "name": "CMake", "bytes": "6041" }, { "name": "CSS", "bytes": "26756" }, { "name": "D", "bytes": "41844" }, { "name": "DIGITAL Command Language", "bytes": "56579" }, { "name": "DTrace", "bytes": "1466646" }, { "name": "GAP", "bytes": "350327" }, { "name": "Groff", "bytes": "298540" }, { "name": "HTML", "bytes": "467691" }, { "name": "IDL", "bytes": "106734" }, { "name": "Java", "bytes": "261605" }, { "name": "JavaScript", "bytes": "80927" }, { "name": "Lex", "bytes": "25122" }, { "name": "Logos", "bytes": "4941" }, { "name": "Makefile", "bytes": "426902" }, { "name": "Module Management System", "bytes": "2707" }, { "name": "NSIS", "bytes": "177212" }, { "name": "Objective-C", "bytes": "5619792" }, { "name": "Objective-C++", "bytes": "81554" }, { "name": "PHP", "bytes": "58585" }, { "name": "Pascal", "bytes": "69941" }, { "name": "Perl", "bytes": "240063" }, { "name": "PowerShell", "bytes": "10664" }, { "name": "Python", "bytes": "9094160" }, { "name": "QMake", "bytes": "3055" }, { "name": "R", "bytes": "21094" }, { "name": "SAS", "bytes": "1847" }, { "name": "Shell", "bytes": "1460572" }, { "name": "SourcePawn", "bytes": "4139" }, { "name": "TypeScript", "bytes": "142342" }, { "name": "Visual Basic", "bytes": "7161" }, { "name": "XSLT", "bytes": "1034475" }, { "name": "Yacc", "bytes": "22312" } ], "symlink_target": "" }
from __future__ import unicode_literals from django.apps import AppConfig class LinksConfig(AppConfig): name = 'links'
{ "content_hash": "635329fb29d13b04a2d4c1c5566ce5f5", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 39, "avg_line_length": 18, "alnum_prop": 0.746031746031746, "repo_name": "moshthepitt/product.co.ke", "id": "ec972f87e26c93b2b6b93fbb5fe1c4c46add1023", "size": "126", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "links/apps.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "54411" }, { "name": "HTML", "bytes": "39651" }, { "name": "JavaScript", "bytes": "849" }, { "name": "Python", "bytes": "26102" } ], "symlink_target": "" }
"""Discover devices that implement the Ikea Tradfri platform.""" from . import MDNSDiscoverable # pylint: disable=too-few-public-methods class Discoverable(MDNSDiscoverable): """Add support for discovering Ikea Tradfri devices.""" def __init__(self, nd): """Initialize the Cast discovery.""" super(Discoverable, self).__init__(nd, '_coap._udp.local.')
{ "content_hash": "b2b3205fbe27030e2499765f581b6780", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 67, "avg_line_length": 34.45454545454545, "alnum_prop": 0.6886543535620053, "repo_name": "jules185/IoT_Hackathon", "id": "ea6ded53dc6d3d75395955b99a65f71a293c6629", "size": "379", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": ".homeassistant/deps/netdisco/discoverables/ikea_tradfri.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "12396" }, { "name": "HTML", "bytes": "1557" }, { "name": "JavaScript", "bytes": "2843" }, { "name": "Python", "bytes": "8347316" } ], "symlink_target": "" }
import Queue from threading import Thread from diamond_game import Conf class Event(object): """Generic event class, all Events should extend this class""" def __init__(self, name): self.name = name class TickEvent(Event): def __init__(self): Event.__init__(self, "Tick Event") class QuitEvent(Event): def __init__(self): Event.__init__(self, "Quit Event") class MenuPrevEvent(Event): def __init__(self): Event.__init__(self, "Select Previous Menu Entry Event") class MenuNextEvent(Event): def __init__(self): Event.__init__(self, "Select Next Menu Entry Event") class MenuPressEvent(Event): def __init__(self): Event.__init__(self, "Menu Press Event") class MouseClickEvent(Event): def __init__(self, pos): Event.__init__(self, "Mouse Clicked Event: " + str(pos)) self.position = pos class MouseMotionEvent(Event): def __init__(self, pos): Event.__init__(self, "Mouse Moved Event: " + str(pos)) self.position = pos class MenuSelectEvent(Event): def __init__(self, val): Event.__init__(self, "Menu Select Event: " + str(val)) self.value = val class MenuUnSelectEvent(Event): def __init__(self, val): Event.__init__(self, "Menu Un Select Event: " + str(val)) self.value = val class ButtonHoverEvent(Event): def __init__(self, val): Event.__init__(self, "Hover Event: " + str(val)) self.value = val class SwitchScreenEvent(Event): def __init__(self, val): Event.__init__(self, "Switch Screen Event: " + Conf.debug_dict.get(val)) self.value = val class BoardCreatedEvent(Event): def __init__(self, val, dimention): Event.__init__(self, "Board Create Event: " + str(val) + " " + str(dimention)) self.value = val self.dimention = dimention class PiecesCreatedEvent(Event): def __init__(self, val): Event.__init__(self, "Pieces Create Event: " + str(val)) self.value = val class SubModulesLoadedEvent(Event): def __init__(self, module, sub_module): Event.__init__(self, "SubModulesLoadedEvent: " + Conf.debug_dict.get(module) + ": " + Conf.debug_dict.get(sub_module)) self.module = module self.sub_module = sub_module class GameObjectClickEvent(Event): def __init__(self, typ, val): Event.__init__(self, "GameObjectClickEvent: " + str(typ) + " " + str(val)) self.typ = typ self.value = val class HintsCreatedEvent(Event): def __init__(self, val): Event.__init__(self, "HintsCreatedEvent: " + str(val)) self.value = val class HintsDestroyedEvent(Event): def __init__(self): Event.__init__(self, "HintsDestroyedEvent") class PieceSelectedEvent(Event): """Has uid piece attribute """ def __init__(self, uid): Event.__init__(self, "PieceSelectedEvent: " + str(uid)) self.value = uid class PieceDeSelectedEvent(Event): """Has uid piece attribute """ def __init__(self, uid): Event.__init__(self, "PieceDeSelectedEvent: " + str(uid)) self.value = uid class PieceMoveEvent(Event): def __init__(self, start, end): Event.__init__(self, "PieceMoveEvent: " + str(start) + " " + str(end)) self.start = start self.end = end class CreateAvailableLocs(Event): def __init__(self, locs): Event.__init__(self, "CreateAvailableLocs: " + str(locs)) self.locs = locs class RemoveAvailableLocs(Event): def __init__(self, locs): Event.__init__(self, "RemoveAvailableLocs: " + str(locs)) self.locs = locs class SoundPlayEvent(Event): def __init__(self, sound_name): Event.__init__(self, "SoundPlayEvent: " + str(sound_name)) self.sound_name = sound_name class AIMakeMoveEvent(Event): def __init__(self, data): Event.__init__(self, "AiMakeMoveEvent: " + str(data)) self.data = data class AIMovedEvent(Event): def __init__(self, data): Event.__init__(self, "AIMovedEvent: " + str(data)) self.data = data class OptionsClickEvent(Event): def __init__(self, val): Event.__init__(self, "OptionsClick: " + str(val)) self.value = val class OptionButtonStateChangeEvent(Event): def __init__(self, an_id, val): Event.__init__(self, "OptionButtonStateChange: " + str(val)) self.an_id = an_id self.value = val class EventManager(object): """Class to manage all of the events generated in the Game""" def __init__(self): # A dict in which items get deleted if either # the key or the value of the item is garbage collected. self.model_event_queue = Queue.Queue(0) self.view_event_queue = Queue.Queue(0) self.controller_event_queue = Queue.Queue(0) self.sound_event_queue = Queue.Queue(0) self.ai_event_queue = Queue.Queue(0) # locks to queue access self.model_locked = 0 self.view_locked = 0 self.controller_locked = 0 def post(self, event, destination): """ Method that allows to pass events to corresponding parts of the program events are not posted if the resource is locked """ if destination == Conf.ALL: # Only switch and quit events are send to all self.view_event_queue.put(event) self.model_event_queue.put(event) self.controller_event_queue.put(event) self.sound_event_queue.put(event) self.ai_event_queue.put(event) elif destination == Conf.MODEL: if not self.model_locked: self.model_event_queue.put(event) elif destination == Conf.VIEW: if not self.view_locked: self.view_event_queue.put(event) elif destination == Conf.CONTROLLER: if not self.controller_locked: self.controller_event_queue.put(event) elif destination == Conf.SOUND: self.sound_event_queue.put(event) elif destination == Conf.AI: self.ai_event_queue.put(event) if Conf.DEBUG: self.debug(event, destination) def manage_lock(self, thread_to_lock, action): """ Method that allows to optionally allow or disallow adding events to queues. If queue is locked for access then events will miss out """ if thread_to_lock == Conf.MODEL: self.model_locked = action elif thread_to_lock == Conf.VIEW: self.view_locked = action elif thread_to_lock == Conf.CONTROLLER: self.controller_locked = action def get_next_model_event(self): """ :return: :rtype : Event """ if not self.model_event_queue.empty(): return self.model_event_queue.get() def get_next_view_event(self): """ :return: :rtype : Event """ if not self.view_event_queue.empty(): return self.view_event_queue.get() def get_next_controller_event(self): """ :return: :rtype : Event """ if not self.controller_event_queue.empty(): return self.controller_event_queue.get() def get_next_sound_event(self): """ :return: :rtype : Event """ if not self.sound_event_queue.empty(): return self.sound_event_queue.get() def get_next_ai_event(self): """ :return: :rtype : Event """ if not self.ai_event_queue.empty(): return self.ai_event_queue.get() @staticmethod def debug(event, destination): if not isinstance(event, TickEvent): print '[' + event.name + '] send to [' + Conf.debug_dict.get(destination) + ']' class MVCObject(Thread): def __init__(self, ev_manager, name): Thread.__init__(self) self.thread_name = name self.id = 0 self.event_manager = ev_manager self.sub_modules = [] self.sub_classes = {} def does_handle_event(self, event): print 'In ' + self.thread_name + ' does_handle_event method is not implemented' def handle_event(self, event): print 'In ' + self.thread_name + ' handle_event method is not implemented' def handle_py_game_event(self, event): print 'In ' + self.thread_name + ' handle_py_game_event method is not implemented' def post(self, event, destination): self.event_manager.post(event, destination) def run(self): print 'In ' + self.thread_name + ' run method is not implemented' def switch_sub_modules(self, key): if not self.sub_classes.has_key(key): raise NotImplementedError self.sub_modules = [] for a_class in self.sub_classes[key]: new_module = a_class(self.event_manager) self.sub_modules.append(new_module) self.post(SubModulesLoadedEvent(self.id, key), Conf.ALL) if __name__ == "__main__": raise Exception("Unexpected")
{ "content_hash": "e9696371aae472f705fc4d7d699d7335", "timestamp": "", "source": "github", "line_count": 317, "max_line_length": 91, "avg_line_length": 29.0788643533123, "alnum_prop": 0.5834237361683663, "repo_name": "batousik/Python2-Diamond", "id": "d38fd78ddeefca993c4f1289bc37a129a0ce83d1", "size": "9218", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "diamond_game/game_manager.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "113209" }, { "name": "TeX", "bytes": "2526" } ], "symlink_target": "" }
from argparse import ArgumentParser, FileType import time import warnings import os, sys, io import signal class ArgParser(object): def __init__(self, description, version): self.__description = description self.__version = version self.__parser = None self.__initialized() @property def description(self): return self.__description @property def version(self): return self.__version @property def parser(self): return self.__parser def print_help(self): self.__parser.print_help() def args(self): return self.__parser.parse_args() def __initialized(self): parser = ArgumentParser(description=self.description) parser.add_argument('--version', '-v', action='version', version='%(prog)s ' + self.version) subparsers = parser.add_subparsers(help='sub-command help', dest='subparser_name') list_parser = subparsers.add_parser('list', help='list help') list_parser.add_argument('--user', '-u', action='store', metavar='USER', help='github your account name') list_parser.add_argument('--auth-token', '-T', action='store', metavar='AUTH_TOKEN', help='your github api access token, if you want private gist') list_parser.add_argument('--number', '-n', action='store_true', help='number of your gists') list_parser.add_argument('--no-headers', action='store_true', help='print no header line at all') list_parser.add_argument('--verbose', action='store_true', help='verbose output') show_parser = subparsers.add_parser('show', help='show help') show_parser.add_argument('--auth-token', '-T', action='store', metavar='AUTH_TOKEN', help='your github api access token, if you want private gist') show_parser.add_argument('--id', '-I', action='store', required=True, metavar='ID', help='gist id') show_parser.add_argument('--verbose', action='store_true', help='verbose output') fetch_parser = subparsers.add_parser('fetch', help='fetch help') fetch_parser.add_argument('--auth-token', '-T', action='store', metavar='AUTH_TOKEN', help='your github api access token, if you want private gist') fetch_parser.add_argument('--id', '-I', action='store', required=True, metavar='ID', help='gist id') fetch_parser.add_argument('--download-dir', '-d', action='store', metavar='DOWNLOAD_DIR', help='download directory') fetch_parser.add_argument('--type', '-t', action='store', default="git", metavar='DOWNLOAD_TYPE', choices=['git', 'tarball', 'zip'], help='gistfetch download type(default:git. other type are tarball and zip)') fetch_parser.add_argument('--verbose', action='store_true', help='verbose output') post_parser = subparsers.add_parser('post', help='post help') post_parser.add_argument('--auth-token', '-T', action='store', metavar='AUTH_TOKEN', help='your github api access token') post_parser.add_argument('--name', '-n', action='store', metavar='FILE_NAME', help='gist file name') post_parser.add_argument('--description', '-d', action='store', metavar='DESCRIPTION', help='gist file description') post_parser.add_argument('--private', '-p', action='store_true', help='private gist') post_parser.add_argument('--verbose', action='store_true', help='verbose output') post_parser.add_argument('infile', type=FileType("r"), nargs="*", default=sys.stdin, metavar='INFILE', help='post target file or stdin data') update_parser = subparsers.add_parser('update', help='update help') update_parser.add_argument('--auth-token', '-T', action='store', metavar='AUTH_TOKEN', help='your github api access token') update_parser.add_argument('--id', '-I', action='store', required=True, metavar='ID', help='gist id') update_parser.add_argument('--name', '-n', action='store', metavar='FILE_NAME', help='gist file name') update_parser.add_argument('--description', '-d', action='store', metavar='DESCRIPTION', help='gist file description') update_parser.add_argument('--private', '-p', action='store_true', help='private gist') update_parser.add_argument('--verbose', action='store_true', help='verbose output') update_parser.add_argument('infile', type=FileType("r"), nargs="*", default=sys.stdin, metavar='INFILE', help='update target file or stdin data') delete_parser = subparsers.add_parser('delete', help='delete help') delete_parser.add_argument('--auth-token', '-T', action='store', metavar='AUTH_TOKEN', help='your github api access token') delete_parser.add_argument('--id', '-I', action='store', required=True, metavar='ID', help='gist id') delete_parser.add_argument('--verbose', action='store_true', help='verbose output') #show_from_name_parser = subparsers.add_parser('show_from_name', help='show_from_name help') #show_from_name_parser.add_argument('--user', '-u', action='store', metavar='USER', help='github your account name') #show_from_name_parser.add_argument('--auth-token', '-T', action='store', metavar='AUTH_TOKEN', help='your github api access token, if you want private gist') #show_from_name_parser.add_argument('--name', '-n', action='store', required=True, metavar='FILE_NAME', help='gist file name') #show_from_name_parser.add_argument('--verbose', action='store_true', help='verbose output') # #fetch_from_name_parser = subparsers.add_parser('fetch_from_name', help='fetch_from_name help') #fetch_from_name_parser.add_argument('--user', '-u', action='store', metavar='USER', help='github your account name') #fetch_from_name_parser.add_argument('--auth-token', '-T', action='store', metavar='AUTH_TOKEN', help='your github api access token, if you want private gist') #fetch_from_name_parser.add_argument('--name', '-n', action='store', required=True, metavar='FILE_NAME', help='gist file name') #fetch_from_name_parser.add_argument('--output', '-o', type=FileType('w'), metavar='FILE_NAME', help='write to FILE instead of stdout') #fetch_from_name_parser.add_argument('--remote-name', '-O', action='store_true', help='write output to a file named as the remote file') #fetch_from_name_parser.add_argument('--add-executable', '-x', action='store_true', help='add executable mode. enable --output or --remote-name option') #fetch_from_name_parser.add_argument('--verbose', action='store_true', help='verbose output') #args = parser.parse_args() self.__parser = parser
{ "content_hash": "963c4bb65352058fcd3b8f6a2f29270c", "timestamp": "", "source": "github", "line_count": 101, "max_line_length": 217, "avg_line_length": 65.9009900990099, "alnum_prop": 0.6544471153846154, "repo_name": "holly/gistcli", "id": "f9625a8878d9b1eb5411e1357b4ee19334739a72", "size": "6704", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "lib/gistcli/argparser.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "25007" }, { "name": "Shell", "bytes": "157" } ], "symlink_target": "" }
""" #;+ #; NAME: #; utils #; Version 1.0 #; #; PURPOSE: #; Plotting utilities #; 24-Nov-2014 by JXP #;- #;------------------------------------------------------------------------------ """ from __future__ import print_function, absolute_import, division, unicode_literals import numpy as np import pdb #def set_fontsize def set_fontsize(ax,fsz): ''' Generate a Table of columns and so on Restrict to those systems where flg_clm > 0 Parameters ---------- ax : Matplotlib ax class fsz : float Font size ''' for item in ([ax.title, ax.xaxis.label, ax.yaxis.label] + ax.get_xticklabels() + ax.get_yticklabels()): item.set_fontsize(fsz) # def whisker_box(ax,xval,yval,per=0.5,color='gray',alpha=0.2): ''' Plot a simple 2D 'whisker' box for a set of points Parameters ---------- ax : Matplotlib ax class xval: array x values yval: array y values ''' from xastropy import stats as xstat xper = xstat.basic.perc(xval, per=per) yper = xstat.basic.perc(yval, per=per) #pdb.set_trace() ax.fill_between(xper, yper[0], np.array((yper[1],yper[1])), color=color, alpha=alpha) # def plt_arrows(ax,xval,yval, color='black', up=False, csz=1.5, alen=1.): ''' Plot arrows (upper or lower) on a given axis Parameters ---------- ax : Matplotlib ax class xval : float or array yval : float or array up: Bool (False) Arrow up or down? alen: float (1.5) Length of arrow csz: float (1.5) Cap size ''' ax.errorbar(xval, yval, yerr=alen, ecolor=color, lolims=(up is True), uplims=(up is False), fmt='none', capsize=csz)
{ "content_hash": "9aed6ab34ebefa1a99e61f44f5160fca", "timestamp": "", "source": "github", "line_count": 75, "max_line_length": 82, "avg_line_length": 23.24, "alnum_prop": 0.5559380378657487, "repo_name": "nhmc/xastropy", "id": "c1457e834dbdd22aa62477343a06e1c9a8065c6d", "size": "1743", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "xastropy/plotting/utils.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "OpenEdge ABL", "bytes": "144038" }, { "name": "Python", "bytes": "1007319" } ], "symlink_target": "" }
"""The go plugin can be used for go projects using `go get`. This plugin uses the common plugin keywords, for more information check the 'plugins' topic. This plugin uses the common plugin keywords as well as those for "sources". For more information check the 'plugins' topic for the former and the 'sources' topic for the latter. Additionally, this plugin uses the following plugin-specific keywords: - go-packages: (list of strings) Go packages to fetch, these must be a "main" package. Dependencies are pulled in automatically by `go get`. Packages that are not "main" will not cause an error, but would not be useful either. - go-importpath: (string) This entry tells the checked out `source` to live within a certain path within `GOPATH`. This is not needed and does not affect `go-packages`. """ import logging import os import shutil import snapcraft logger = logging.getLogger(__name__) class GoSrcPlugin(snapcraft.BasePlugin): @classmethod def schema(cls): schema = super().schema() schema['properties']['go-packages'] = { 'type': 'array', 'minitems': 1, 'uniqueItems': True, 'items': { 'type': 'string', }, 'default': [], } schema['properties']['go-importpath'] = { 'type': 'string', 'default': '' } if 'required' in schema: del schema['required'] # Inform Snapcraft of the properties associated with pulling. If these # change in the YAML Snapcraft will consider the pull step dirty. schema['pull-properties'].append('go-packages') # Inform Snapcraft of the properties associated with building. If these # change in the YAML Snapcraft will consider the build step dirty. schema['build-properties'].extend(['source', 'go-packages']) return schema def __init__(self, name, options, project): super().__init__(name, options, project) self.build_packages.append('golang-go') self.build_packages.append('make') self._gopath_src = os.path.join(self.sourcedir, 'src') self._gopath_bin = os.path.join(self.sourcedir, 'bin') self._gopath_pkg = os.path.join(self.sourcedir, 'pkg') def _build_environment(self): env = os.environ.copy() env['GOPATH'] = self.sourcedir env['CGO_CFLAGS'] = ' '.join( ['-I{0}/include', '-I{0}/usr/include', '-I{0}/include/{1}', '-I{0}/usr/include/{1}']).format( self.project.stage_dir, self.project.arch_triplet) env['CGO_LDFLAGS'] = ' '.join( ['-L{0}/lib', '-L{0}/usr/lib', '-L{0}/lib/{1}', '-L{0}/usr/lib/{1}']).format( self.project.stage_dir, self.project.arch_triplet) return env def build(self): self._run(["make"]) install_bin_path = os.path.join(self.installdir, 'bin') os.makedirs(install_bin_path, exist_ok=True) os.makedirs(self._gopath_bin, exist_ok=True) for binary in os.listdir(os.path.join(self._gopath_bin)): binary_path = os.path.join(self._gopath_bin, binary) shutil.copy2(binary_path, install_bin_path) def clean_build(self): super().clean_build() if os.path.isdir(self._gopath_bin): shutil.rmtree(self._gopath_bin) if os.path.isdir(self._gopath_pkg): shutil.rmtree(self._gopath_pkg) def _run(self, cmd, **kwargs): return self.run(cmd, cwd=self.sourcedir, env=self._build_environment())
{ "content_hash": "5241ac53075304b1d78482c6d3ea9411", "timestamp": "", "source": "github", "line_count": 103, "max_line_length": 79, "avg_line_length": 35.55339805825243, "alnum_prop": 0.6004915346805024, "repo_name": "dz0ny/champ", "id": "e4bdf2f64dd2a93e9bbfd7b3f916ff06381f202a", "size": "4329", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packaging/snap-x86/champ/parts/plugins/x-gosrc.py", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "47712" }, { "name": "Makefile", "bytes": "1288" }, { "name": "Python", "bytes": "6520" }, { "name": "Shell", "bytes": "1269" } ], "symlink_target": "" }
from functools import reduce # from . import ast class Document(object): """Base class for documents to be pretty printed.""" pass class Text(Document): """Textual element in a document.""" def __init__(self, text): self._text = text def __str__(self): return "Text('%s')" % self._text def width(self): return len(self._text) empty = Text('') class Cond(Document): """Emit either left or tail, newline, right depending on line width""" def __init__(self, left, right, tail=''): self._left = left self._right = right self._tail = tail def __str__(self): return "Cond('%s','%s','%s')" % (self._left, self._right, self._tail) def width(self): return len(self._left) br = Cond(' ', '', ' \\') # Python backslash dot = Cond('.', '.', ' \\') # Python backslash class Concat(Document): """Concatenate two documents.""" def __init__(self, *args): self._docs = args def __str__(self): return "Concat(%s)" % ",".join([str(doc) for doc in self._docs]) def width(self): from operator import add return reduce(add, [doc.width() for doc in self._docs]) class Group(Document): """Specify unit whose linebreaks are interpeted consistently.""" def __init__(self, child): self._child = child def __str__(self): return "Group(%s)" % str(self._child) def width(self): return self._child.width() class Nest(Document): """Concatenate N documents with consistent indentation.""" def __init__(self, *args): self._docs = args def __str__(self): return "Nest(%s)" % ",".join([str(doc) for doc in self._docs]) def width(self): from operator import add return reduce(add, [doc.width() for doc in self._docs]) def CommaSep(*args): if len(args) == 0: return empty docs = [args[0]] for subdoc in args[1:]: docs.append(Text(',')) docs.append(br) docs.append(subdoc) return Nest(*docs) def ArgList(*args): return Concat(Text('('), CommaSep(*args), Text(')')) def DotList(*args): initial = args[0] docs = [] for subdoc in args[1:]: docs.append(dot) docs.append(subdoc) if len(docs) > 1: docs[0] = Text('.') # prevent breaking before first dot return Concat(initial, Nest(*docs)) def Call(name, *args): return Concat(Text(name), ArgList(*args)) class TerriblePrettyPrinter(object): """A terrible, inefficient pretty printer for comparison.""" def __init__(self, width): self._width = width def render(self, document): return self._format(False, self._width, 0, document)[0] def _format(self, hasFit, widthLeft, nestIndent, document): if isinstance(document, Text): return (document._text, widthLeft - len(document._text)) elif isinstance(document, Cond) and hasFit: return (document._left, widthLeft - len(document._left)) elif isinstance(document, Cond) and not hasFit: return ('%s\n%s%s' % (document._tail, ' ' * nestIndent, document._right), self._width - nestIndent - len(document._right)) elif isinstance(document, Concat): width = widthLeft s = "" for subelement in document._docs: s1, width = self._format(hasFit, width, nestIndent, subelement) s += s1 return (s, width) elif isinstance(document, Group): newFit = hasFit or document._child.width() <= widthLeft return self._format(newFit, widthLeft, nestIndent, document._child) elif isinstance(document, Nest): currentPos = self._width - widthLeft s = "" for subelement in document._docs: newFit = hasFit or subelement.width() <= widthLeft s1, widthLeft = self._format(newFit, widthLeft, currentPos, subelement) s += s1 return (s, widthLeft) else: raise RuntimeError("invalid argument %s" % document) doc1 = Group(Concat(Text("A"), br, Group(Concat(Text("B"), br, Text("C"))))) doc2 = DotList(Text('r'), Call('expr', Text('5')), Call('add', DotList(Text('r'), Call('expr', Text('7')), Call('frob'))), Call('mul', DotList(Text('r'), Call('expr', Text('17'))), Call('mul', DotList(Text('r'), Call('expr', Text('17')))), Call('mul', DotList(Text('r'), Call('expr', Text('17')))))) # print(TerriblePrettyPrinter(5).render(doc1)) # print(TerriblePrettyPrinter(3).render(doc1)) # print(TerriblePrettyPrinter(1).render(doc1)) # print(TerriblePrettyPrinter(5).render(doc2)) # print(TerriblePrettyPrinter(10).render(doc2)) # print(TerriblePrettyPrinter(80).render(doc2)) class Streamer(object): pass class TE(Streamer): def __init__(self, string, hpos=None): self._string = string self._hpos = hpos def __str__(self): if self._hpos is None: return "TE('%s')" % self._string else: return "TE(%d,'%s')" % (self._hpos, self._string) class CD(Streamer): def __init__(self, left, right, tail, hpos=None): self._left = left self._right = right self._tail = tail self._hpos = hpos def __str__(self): if self._hpos is None: return "CD('%s','%s','%s')" % (self._left, self._right, self._tail) else: return "CD('%s','%s','%s',%d)" % (self._left, self._right, self._tail, self._hpos) class NBeg(Streamer): def __init__(self, hpos=None): self._hpos = hpos def __str__(self): if self._hpos is None: return "NBeg" else: return "NBeg(%d)" % self._hpos class NEnd(Streamer): def __init__(self, hpos=None): self._hpos = hpos def __str__(self): if self._hpos is None: return "NEnd" else: return "NEnd(%d)" % self._hpos class GBeg(Streamer): def __init__(self, hpos=None): self._hpos = hpos def __str__(self): if self._hpos is None: return "GBeg" else: return "GBeg(%d)" % self._hpos class GEnd(Streamer): def __init__(self, hpos=None): self._hpos = hpos def __str__(self): if self._hpos is None: return "GEnd" else: return "GEnd(%d)" % self._hpos def genStream(document): stack = [document] while len(stack) > 0: top = stack.pop() if isinstance(top, Text): yield TE(top._text) elif isinstance(top, Cond): yield CD(top._left, top._right, top._tail) elif isinstance(top, Concat): newdocs = list(top._docs) newdocs.reverse() stack.extend(newdocs) elif isinstance(top, Group): yield GBeg() stack.append(GEnd()) stack.append(top._child) elif isinstance(top, Nest): yield NBeg() yield GBeg() stack.append(NEnd()) stack.append(GEnd()) newdocs = list(top._docs) newdocs.reverse() stack.extend(newdocs) elif isinstance(top, GEnd): yield top elif isinstance(top, NEnd): yield top else: raise RuntimeError("invalid thing seen %s" % top) # for elt in genStream(doc2): # print("> %s" % elt) def annotateStream(stream): pos = 0 for element in stream: if isinstance(element, TE): pos += len(element._string) yield TE(element._string, pos) elif isinstance(element, CD): pos += len(element._left) yield CD(element._left, element._right, element._tail, pos) elif isinstance(element, GBeg): yield GBeg(pos) elif isinstance(element, GEnd): yield GEnd(pos) elif isinstance(element, NBeg): yield NBeg(pos) elif isinstance(element, NEnd): yield NEnd(pos) # for elt in annotateStream(genStream(doc2)): # print(">> %s" % elt) def trackActualPosition(stream): lookahead = [] for element in stream: if isinstance(element, GBeg): lookahead.append([]) elif isinstance(element, GEnd): b = lookahead.pop() if len(lookahead) == 0: # topmost group, simple case yield GBeg(element._hpos) for subelt in b: yield subelt yield element else: lookahead[-1].append(GBeg(element._hpos)) lookahead[-1].extend(b) lookahead[-1].append(element) elif len(lookahead) == 0: yield element else: lookahead[-1].append(element) # for elt in trackActualPosition(annotateStream(genStream(doc2))): # print(">! %s" % elt) # Kiselyov adds a pruning step; this is overly complicated, useless in our # environment, and requires that we guarantee that all documents have nonzero # length, which I'm not prepared to do. So we use trackActualPosition instead. def format(width, stream): fittingElements = 0 rightEdge = width hpos = 0 result = "" indent = [0] for element in stream: if isinstance(element, TE): result += element._string hpos += len(element._string) elif isinstance(element, CD): indentation = indent[-1] if fittingElements == 0: result += "%s\n%s%s" % (element._tail, ' ' * indentation, element._right) fittingElements = 0 hpos = indentation + len(element._right) rightEdge = (width - hpos) + element._hpos else: result += element._left hpos += len(element._left) elif isinstance(element, GBeg): if fittingElements != 0 or element._hpos <= rightEdge: fittingElements += 1 else: fittingElements = 0 elif isinstance(element, GEnd): fittingElements = max(fittingElements - 1, 0) elif isinstance(element, NBeg): indent.append(hpos) elif isinstance(element, NEnd): indent.pop() return result def pprint(width, document): return format(width, trackActualPosition(annotateStream(genStream(document)))) print(" " * 4 + "|") print(pprint(5, doc2)) print("-" * 20) print(" " * 39 + "|") print(pprint(40, doc2)) print("-" * 20) print(" " * 79 + "|") print(pprint(80, doc2))
{ "content_hash": "d0335b0b0921fba38aec58eac8b74495", "timestamp": "", "source": "github", "line_count": 379, "max_line_length": 79, "avg_line_length": 29.102902374670183, "alnum_prop": 0.5364460562103355, "repo_name": "robertjpayne/rethinkdb", "id": "40eac338b01e68502fe07597063727a7da4d1531", "size": "11079", "binary": false, "copies": "48", "ref": "refs/heads/next", "path": "scripts/pprint_sandbox.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "AppleScript", "bytes": "2597" }, { "name": "C", "bytes": "80175" }, { "name": "C++", "bytes": "8609176" }, { "name": "CSS", "bytes": "403688" }, { "name": "CoffeeScript", "bytes": "539374" }, { "name": "HTML", "bytes": "75496" }, { "name": "Haskell", "bytes": "13234" }, { "name": "Java", "bytes": "1889375" }, { "name": "JavaScript", "bytes": "672427" }, { "name": "Makefile", "bytes": "67067" }, { "name": "Nginx", "bytes": "728" }, { "name": "Perl", "bytes": "6368" }, { "name": "Protocol Buffer", "bytes": "42521" }, { "name": "Python", "bytes": "4453630" }, { "name": "Roff", "bytes": "572" }, { "name": "Ruby", "bytes": "144432" }, { "name": "Shell", "bytes": "61859" }, { "name": "XSLT", "bytes": "11895" } ], "symlink_target": "" }
import json import logging import sys from logging.handlers import RotatingFileHandler from os.path import join, exists from shutil import rmtree def get_domain(): from .core.models import Configuration return Configuration.get('domain', 'company.com') def get_menu_links(): from .core.models import Configuration return [link for link in Configuration.menu_links()] def get_auto_cleanup_run_days(): from .core.models import Configuration from .settings import logger key = 'auto_cleanup_run_after_days' value = Configuration.get(key, 90) try: return int(value) except ValueError: logger.exception('config key: {} should be integer!'.format(key)) return 90 def cleanup_run_media(run_id): from .settings import MEDIA_ROOT, logger run_media_dir = join(MEDIA_ROOT, 'runs/{}'.format(run_id)) if exists(run_media_dir): try: rmtree(run_media_dir) except: logger.exception('failed to cleanup run media <{}>'.format(run_id)) def read_document(name): from .settings import SETTINGS_DIR doc_path = join(SETTINGS_DIR, 'static/docs', name + '.md') if exists(doc_path): with open(doc_path) as f: return f.read() else: return 'not found.' def setup_logger(log_dir=None, debug=False): logger = logging.getLogger('testcube') formatter = logging.Formatter('%(asctime)s %(levelname)-8s: %(message)s') console_handler = logging.StreamHandler(sys.stdout) console_handler.formatter = formatter logger.addHandler(console_handler) if log_dir: filename = join(log_dir, 'testcube.log') if debug: # use single file when debug file_handler = logging.FileHandler(filename) file_handler.setFormatter(formatter) else: file_handler = RotatingFileHandler(filename=filename, maxBytes=10 * 1024 * 1024, backupCount=5) logger.addHandler(file_handler) if debug: logger.setLevel(logging.DEBUG) else: logger.setLevel(logging.INFO) return logger def append_json(origin_txt, field, value): obj = to_json(origin_txt) if field in obj: obj[field] += '|*|' + value else: obj[field] = value return json.dumps(obj) def to_json(data_text): try: return json.loads(data_text) except: from testcube.settings import logger logger.exception('Cannot parse to json: {}'.format(data_text)) return {} def object_to_dict(obj): return {k: v for k, v in obj.__dict__.items() if not k.startswith('_')} def error_detail(e): return '{}: {}'.format(type(e).__name__, e)
{ "content_hash": "ced11d78856a6692b72465bdec38f439", "timestamp": "", "source": "github", "line_count": 107, "max_line_length": 79, "avg_line_length": 26.093457943925234, "alnum_prop": 0.620702005730659, "repo_name": "tobyqin/testcube", "id": "7537b9ec16d62414687af0b7e2d3bf2bcab5b466", "size": "2792", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "testcube/utils.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "262" }, { "name": "CSS", "bytes": "180930" }, { "name": "Dockerfile", "bytes": "302" }, { "name": "HTML", "bytes": "30516" }, { "name": "JavaScript", "bytes": "328876" }, { "name": "Python", "bytes": "126583" }, { "name": "Shell", "bytes": "78" } ], "symlink_target": "" }
import logging import smtplib from email.mime.text import MIMEText LOG = logging.getLogger('root') class notify(object): def email(self, to_list, message, subject="Notification", from_address="nobody@nowhere", smtp_server="pobox1663.lanl.gov"): LOG.debug("Sending an email.") email = MIMEText(message) email['Subject'] = "[hypnotoad] " + str(subject) email['From'] = from_address email['To'] = ', '.join(to_list) s = smtplib.SMTP(smtp_server) s.sendmail(from_address, to_list, email.as_string()) s.quit() # EOF
{ "content_hash": "a646d57562722ddd42c1a508edd48d1d", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 127, "avg_line_length": 24.5, "alnum_prop": 0.6292517006802721, "repo_name": "hpc/hypnotoad", "id": "fcde56790a3c4755ce3c80cccf5747b290d5c626", "size": "635", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "hypnotoad/core/notify.py", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "143307" } ], "symlink_target": "" }
"""This script is meant to be run on a Swarming slave.""" import os import sys def main(): print('Hello world: ' + sys.argv[1]) if len(sys.argv) == 3: # Write a file in ${ISOLATED_OUTDIR}. with open(os.path.join(sys.argv[2], 'happiness.txt'), 'wb') as f: f.write( 'is where you look %d/%d' % ( int(os.environ['GTEST_SHARD_INDEX']), int(os.environ['GTEST_TOTAL_SHARDS']))) return 0 if __name__ == '__main__': sys.exit(main())
{ "content_hash": "15d9902181f639667effbe50130075b8", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 69, "avg_line_length": 24.3, "alnum_prop": 0.5637860082304527, "repo_name": "Teamxrtc/webrtc-streaming-node", "id": "d8a49d7ed00aaea2cf770a70710073f6a4fde791", "size": "686", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "third_party/webrtc/src/chromium/src/tools/swarming_client/example/payload/hello_world.py", "mode": "33261", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "44" }, { "name": "C++", "bytes": "221840" }, { "name": "HTML", "bytes": "2383" }, { "name": "JavaScript", "bytes": "37396" }, { "name": "Python", "bytes": "2860" }, { "name": "Shell", "bytes": "104" } ], "symlink_target": "" }
import logging import unittest from ipaddress import IPv6Network import config import thread_cert # Test description: # This test verifies bi-directional connectivity between Thread end device # and infra host. # # Topology: # ----------------(eth)-------------------- # | | # BR (Leader) HOST # | # ROUTER # BR = 1 ROUTER = 2 HOST = 3 # The two prefixes are set small enough that a random-generated OMR prefix is # very likely greater than them. So that the BR will remove the random-generated one. ON_MESH_PREFIX1 = "fd00:00:00:01::/64" ON_MESH_PREFIX2 = "fd00:00:00:02::/64" class SingleBorderRouter(thread_cert.TestCase): USE_MESSAGE_FACTORY = False TOPOLOGY = { BR: { 'name': 'BR', 'allowlist': [ROUTER], 'is_otbr': True, 'version': '1.2', }, ROUTER: { 'name': 'Router', 'allowlist': [BR], 'version': '1.2', }, HOST: { 'name': 'Host', 'is_host': True }, } def test(self): br = self.nodes[BR] router = self.nodes[ROUTER] host = self.nodes[HOST] host.start(start_radvd=False) self.simulator.go(5) br.start() self.simulator.go(config.LEADER_STARTUP_DELAY) self.assertEqual('leader', br.get_state()) router.start() self.simulator.go(config.ROUTER_STARTUP_DELAY) self.assertEqual('router', router.get_state()) # # Case 1. There is no OMR prefix or on-link prefix. # self.simulator.go(10) self.collect_ipaddrs() logging.info("BR addrs: %r", br.get_addrs()) logging.info("ROUTER addrs: %r", router.get_addrs()) logging.info("HOST addrs: %r", host.get_addrs()) self.assertEqual(len(br.get_netdata_omr_prefixes()), 1) self.assertEqual(len(router.get_netdata_omr_prefixes()), 1) self.assertEqual(len(br.get_netdata_non_nat64_prefixes()), 1) self.assertEqual(len(router.get_netdata_non_nat64_prefixes()), 1) omr_prefix = br.get_br_omr_prefix() on_link_prefix = br.get_br_on_link_prefix() self.assertEqual(len(br.get_ip6_address(config.ADDRESS_TYPE.OMR)), 1) self.assertEqual(len(router.get_ip6_address(config.ADDRESS_TYPE.OMR)), 1) self.assertEqual(len(host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_ULA)), 1) br1_omr_address = br.get_ip6_address(config.ADDRESS_TYPE.OMR)[0] router1_omr_address = router.get_ip6_address(config.ADDRESS_TYPE.OMR)[0] host_ula_address = host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_ULA)[0] # Router1 can ping to/from the Host on infra link. self.assertTrue(router.ping(host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_ULA)[0])) self.assertTrue(host.ping(router.get_ip6_address(config.ADDRESS_TYPE.OMR)[0], backbone=True)) # # Case 2. User adds smaller on-mesh prefix. # 1. Should deregister our local OMR prefix. # 2. Should re-register our local OMR prefix when user prefix # is removed. # br.add_prefix(ON_MESH_PREFIX1) br.add_prefix(ON_MESH_PREFIX2) br.register_netdata() self.simulator.go(10) self.collect_ipaddrs() logging.info("BR addrs: %r", br.get_addrs()) logging.info("ROUTER addrs: %r", router.get_addrs()) logging.info("HOST addrs: %r", host.get_addrs()) self.assertGreaterEqual(len(host.get_addrs()), 2) self.assertEqual(len(br.get_netdata_omr_prefixes()), 2) self.assertEqual(len(router.get_netdata_omr_prefixes()), 2) self.assertEqual(len(br.get_netdata_non_nat64_prefixes()), 1) self.assertEqual(len(router.get_netdata_non_nat64_prefixes()), 1) self.assertEqual(len(br.get_ip6_address(config.ADDRESS_TYPE.OMR)), 2) self.assertEqual(len(router.get_ip6_address(config.ADDRESS_TYPE.OMR)), 2) self.assertEqual(len(host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_ULA)), 1) # Router1 can ping to/from the Host on infra link. self.assertTrue(router.ping(host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_ULA)[0])) self.assertTrue(host.ping(router.get_ip6_address(config.ADDRESS_TYPE.OMR)[0], backbone=True)) self.assertTrue(host.ping(router.get_ip6_address(config.ADDRESS_TYPE.OMR)[1], backbone=True)) # Remove user prefixes, should re-register local OMR prefix. br.remove_prefix(ON_MESH_PREFIX1) br.remove_prefix(ON_MESH_PREFIX2) br.register_netdata() self.simulator.go(10) self.collect_ipaddrs() logging.info("BR addrs: %r", br.get_addrs()) logging.info("ROUTER addrs: %r", router.get_addrs()) logging.info("HOST addrs: %r", host.get_addrs()) self.assertEqual(len(br.get_netdata_omr_prefixes()), 1) self.assertEqual(len(router.get_netdata_omr_prefixes()), 1) self.assertEqual(len(br.get_netdata_non_nat64_prefixes()), 1) self.assertEqual(len(router.get_netdata_non_nat64_prefixes()), 1) # The same local OMR and on-link prefix should be re-register. self.assertEqual(br.get_netdata_omr_prefixes(), [omr_prefix]) self.assertEqual(router.get_netdata_omr_prefixes(), [omr_prefix]) self.assertEqual(br.get_netdata_non_nat64_prefixes(), [on_link_prefix]) self.assertEqual(router.get_netdata_non_nat64_prefixes(), [on_link_prefix]) self.assertEqual(len(br.get_ip6_address(config.ADDRESS_TYPE.OMR)), 1) self.assertEqual(len(router.get_ip6_address(config.ADDRESS_TYPE.OMR)), 1) self.assertEqual(len(host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_ULA)), 1) self.assertEqual(br.get_ip6_address(config.ADDRESS_TYPE.OMR), [br1_omr_address]) self.assertEqual(router.get_ip6_address(config.ADDRESS_TYPE.OMR), [router1_omr_address]) self.assertEqual(host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_ULA), [host_ula_address]) # Router1 can ping to/from the Host on infra link. self.assertTrue(router.ping(host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_ULA)[0])) self.assertTrue(host.ping(router.get_ip6_address(config.ADDRESS_TYPE.OMR)[0], backbone=True)) # # Case 3. OMR and on-link prefixes should be removed when Border Routing is # explicitly disabled and added when Border Routing is enabled again. # br.disable_br() self.simulator.go(10) self.collect_ipaddrs() logging.info("BR addrs: %r", br.get_addrs()) logging.info("ROUTER addrs: %r", router.get_addrs()) logging.info("HOST addrs: %r", host.get_addrs()) self.assertEqual(len(br.get_prefixes()), 0) self.assertEqual(len(router.get_prefixes()), 0) self.assertEqual(len(br.get_routes()), 0) self.assertEqual(len(router.get_routes()), 0) self.assertEqual(len(br.get_ip6_address(config.ADDRESS_TYPE.OMR)), 0) self.assertEqual(len(router.get_ip6_address(config.ADDRESS_TYPE.OMR)), 0) br.enable_br() # It takes around 10 seconds to start sending RA messages. self.simulator.go(config.BORDER_ROUTER_STARTUP_DELAY) self.collect_ipaddrs() logging.info("BR addrs: %r", br.get_addrs()) logging.info("ROUTER addrs: %r", router.get_addrs()) logging.info("HOST addrs: %r", host.get_addrs()) self.assertEqual(len(br.get_netdata_omr_prefixes()), 1) self.assertEqual(len(router.get_netdata_omr_prefixes()), 1) self.assertEqual(len(br.get_netdata_non_nat64_prefixes()), 1) self.assertEqual(len(router.get_netdata_non_nat64_prefixes()), 1) # The same local OMR and on-link prefix should be re-registered. self.assertEqual(br.get_netdata_omr_prefixes(), [omr_prefix]) self.assertEqual(router.get_netdata_omr_prefixes(), [omr_prefix]) self.assertEqual(br.get_netdata_non_nat64_prefixes(), [on_link_prefix]) self.assertEqual(router.get_netdata_non_nat64_prefixes(), [on_link_prefix]) self.assertEqual(len(br.get_ip6_address(config.ADDRESS_TYPE.OMR)), 1) self.assertEqual(len(router.get_ip6_address(config.ADDRESS_TYPE.OMR)), 1) self.assertEqual(len(host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_ULA)), 1) self.assertEqual(br.get_ip6_address(config.ADDRESS_TYPE.OMR), [br1_omr_address]) self.assertEqual(router.get_ip6_address(config.ADDRESS_TYPE.OMR), [router1_omr_address]) self.assertEqual(host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_ULA), [host_ula_address]) # Router1 can ping to/from the Host on infra link. self.assertTrue(router.ping(host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_ULA)[0])) self.assertTrue(host.ping(router.get_ip6_address(config.ADDRESS_TYPE.OMR)[0], backbone=True)) # # Case 4. The Routing Manager should be stopped if the infra interface went down. # br.disable_ether() self.simulator.go(10) self.collect_ipaddrs() logging.info("BR addrs: %r", br.get_addrs()) logging.info("ROUTER addrs: %r", router.get_addrs()) logging.info("HOST addrs: %r", host.get_addrs()) self.assertEqual(len(br.get_prefixes()), 0) self.assertEqual(len(router.get_prefixes()), 0) self.assertEqual(len(br.get_routes()), 0) self.assertEqual(len(router.get_routes()), 0) self.assertEqual(len(br.get_ip6_address(config.ADDRESS_TYPE.OMR)), 0) self.assertEqual(len(router.get_ip6_address(config.ADDRESS_TYPE.OMR)), 0) br.enable_ether() # The routing manager may fail to send RS and will wait for 4 seconds # before retrying. self.simulator.go(40) self.collect_ipaddrs() logging.info("BR addrs: %r", br.get_addrs()) logging.info("ROUTER addrs: %r", router.get_addrs()) logging.info("HOST addrs: %r", host.get_addrs()) self.assertEqual(len(br.get_netdata_omr_prefixes()), 1) self.assertEqual(len(router.get_netdata_omr_prefixes()), 1) self.assertEqual(len(br.get_netdata_non_nat64_prefixes()), 1) self.assertEqual(len(router.get_netdata_non_nat64_prefixes()), 1) # The same local OMR and on-link prefix should be re-registered. self.assertEqual(br.get_netdata_omr_prefixes(), [omr_prefix]) self.assertEqual(router.get_netdata_omr_prefixes(), [omr_prefix]) self.assertEqual(br.get_netdata_non_nat64_prefixes(), [on_link_prefix]) self.assertEqual(router.get_netdata_non_nat64_prefixes(), [on_link_prefix]) self.assertEqual(len(br.get_ip6_address(config.ADDRESS_TYPE.OMR)), 1) self.assertEqual(len(router.get_ip6_address(config.ADDRESS_TYPE.OMR)), 1) self.assertEqual(len(host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_ULA)), 1) self.assertEqual(br.get_ip6_address(config.ADDRESS_TYPE.OMR), [br1_omr_address]) self.assertEqual(router.get_ip6_address(config.ADDRESS_TYPE.OMR), [router1_omr_address]) self.assertEqual(host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_ULA), [host_ula_address]) # Router1 can ping to/from the Host on infra link. self.assertTrue(host.ping(router.get_ip6_address(config.ADDRESS_TYPE.OMR)[0], backbone=True)) self.assertTrue(router.ping(host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_ULA)[0])) # # Case 5. Test if the linux host is still reachable if rejoin the network. # host.disable_ether() self.simulator.go(10) host.enable_ether() self.simulator.go(10) self.assertTrue(router.ping(host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_ULA)[0])) self.assertTrue(host.ping(router.get_ip6_address(config.ADDRESS_TYPE.OMR)[0], backbone=True)) # # Case 6. Test if the Border Router will remove the on-link prefix when # another RA daemon is started on the same infra interface. # br.start_radvd_service(prefix=config.ONLINK_GUA_PREFIX, slaac=True) self.simulator.go(5) self.assertEqual(len(br.get_netdata_non_nat64_prefixes()), 2) self.assertEqual(len(router.get_netdata_non_nat64_prefixes()), 2) self.assertTrue(router.ping(host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_GUA)[0])) self.assertTrue(router.ping(host.get_ip6_address(config.ADDRESS_TYPE.ONLINK_ULA)[0])) self.assertTrue(host.ping(router.get_ip6_address(config.ADDRESS_TYPE.OMR)[0], backbone=True)) # # Case 7. Test if Border Router changes on-link prefix when # Extended PAN ID changes. # prefixA = br.get_br_on_link_prefix() router.commissioner_start() self.simulator.go(5) router.send_mgmt_active_set( active_timestamp=100, extended_panid='0001020304050607', ) self.simulator.go(10) prefixB = br.get_br_on_link_prefix() self.assertNotEqual(IPv6Network(prefixA), IPv6Network(prefixB)) if __name__ == '__main__': unittest.main()
{ "content_hash": "17b1c8c06fd71e8a4aa604abae3f82ef", "timestamp": "", "source": "github", "line_count": 323, "max_line_length": 101, "avg_line_length": 41.23839009287926, "alnum_prop": 0.6360360360360361, "repo_name": "openthread/openthread", "id": "1028629dfdc12a18e61b8d30856307d52faac8d5", "size": "14924", "binary": false, "copies": "3", "ref": "refs/heads/main", "path": "tests/scripts/thread-cert/border_router/test_single_border_router.py", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "2610" }, { "name": "C", "bytes": "1602099" }, { "name": "C++", "bytes": "8403018" }, { "name": "CMake", "bytes": "110320" }, { "name": "Dockerfile", "bytes": "10426" }, { "name": "M4", "bytes": "32369" }, { "name": "Makefile", "bytes": "192544" }, { "name": "Python", "bytes": "4630721" }, { "name": "Shell", "bytes": "165349" } ], "symlink_target": "" }
""" :Author Patrik Valkovic :Created 01.08.2017 08:46 :Licence MIT Part of grammpy """ from .CannotConvertException import CannotConvertException class NotASingleSymbolException(CannotConvertException): """ More symbols defined at the place where one symbol is expected """ def __init__(self, symbols): self.symbols = symbols
{ "content_hash": "9633d38a88dce92770b187d7a31103da", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 66, "avg_line_length": 21.823529411764707, "alnum_prop": 0.6900269541778976, "repo_name": "PatrikValkovic/grammpy", "id": "e783ae8cc1f5238dabc23821d6221621f286d98c", "size": "394", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "grammpy/exceptions/NotASingleSymbolException.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "604926" } ], "symlink_target": "" }
import queue import random import time from collections import defaultdict import simplejson as json from google.protobuf.json_format import MessageToJson from pyqrllib.pyqrllib import bin2hstr, hstr2bin from twisted.internet import reactor from twisted.internet.protocol import ServerFactory from qrl.core import config, logger, helper from qrl.core.helper import json_bytestream_bk, json_bytestream from qrl.core.p2pprotocol import P2PProtocol from qrl.core.qrlnode import QRLNode from qrl.crypto.misc import sha256 from qrl.generated import qrl_pb2 class P2PFactory(ServerFactory): protocol = P2PProtocol def __init__(self, chain, nodeState, node: QRLNode, pos=None): # FIXME: Constructor signature is not consistent with other factory classes self.master_mr = None self.pos = None self.chain = chain self.nodeState = nodeState self.stake = config.user.enable_auto_staking # default to mining off as the wallet functions are not that responsive at present with it enabled.. self.peers_blockheight = {} self.target_retry = defaultdict(int) self.target_peers = {} self.fork_target_peers = {} self.connections = 0 self.buffer = '' self.sync = 0 self.partial_sync = [0, 0] self.long_gap_block = 0 self.mining = 0 self.newblock = 0 self.exit = 0 self.genesis = 0 self.missed_block = 0 self.requested = [0, 0] self.ip_geotag = 1 # to be disabled in main release as reveals IP.. self.last_reveal_one = None self.last_reveal_two = None self.last_reveal_three = None self.peer_connections = [] self.node = node self.txn_processor_running = False self.bkmr_blocknumber = 0 # Blocknumber for which bkmr is being tracked self.bkmr_priorityq = queue.PriorityQueue() # Scheduled and cancel the call, just to initialize with IDelayedCall self.bkmr_processor = reactor.callLater(1, self.setPOS, pos=None) self.bkmr_processor.cancel() # factory network functions def setPOS(self, pos): self.pos = pos self.master_mr = self.pos.master_mr def RFM(self, data): """ Request Full Message This function request for the full message against, the Message Receipt received. :return: """ # FIXME: Again, breaking encasulation # FIXME: Huge amount of lookups in dictionaries msg_hash = data.hash if msg_hash in self.master_mr.hash_msg: if msg_hash in self.master_mr.requested_hash: del self.master_mr.requested_hash[msg_hash] return peers_list = self.master_mr.requested_hash[msg_hash].peers_connection_list message_request = self.master_mr.requested_hash[msg_hash] for peer in peers_list: if peer in message_request.already_requested_peers: continue message_request.already_requested_peers.append(peer) peer.transport.write(peer.wrap_message('SFM', MessageToJson(data))) call_later_obj = reactor.callLater(config.dev.message_receipt_timeout, self.RFM, data) message_request.callLater = call_later_obj return # If execution reach to this line, then it means no peer was able to provide # Full message for this hash thus the hash has to be deleted. # Moreover, negative points could be added to the peers, for this behavior if msg_hash in self.master_mr.requested_hash: del self.master_mr.requested_hash[msg_hash] def select_best_bkmr(self): block_chain_buffer = self.chain.block_chain_buffer blocknumber = self.bkmr_blocknumber try: score, hash = self.bkmr_priorityq.get_nowait() if blocknumber <= block_chain_buffer.height(): oldscore = block_chain_buffer.get_block_n_score(blocknumber) if score > oldscore: del self.bkmr_priorityq self.bkmr_priorityq = queue.PriorityQueue() return data = qrl_pb2.MR() data.hash = hash data.type = 'BK' self.RFM(data) self.bkmr_processor = reactor.callLater(5, self.select_best_bkmr) except queue.Empty: return except Exception as e: logger.error('select_best_bkmr Unexpected Exception') logger.error('%s', e) def connect_peers(self): """ Will connect to all known peers. This is typically the entry point It does result in: - connectionMade in each protocol (session) - :py:meth:startedConnecting - :py:meth:clientConnectionFailed - :py:meth:clientConnectionLost :return: :rtype: None """ logger.info('<<<Reconnecting to peer list: %s', self.node.peer_addresses) for peer_address in self.node.peer_addresses: # FIXME: Refactor search found = False for peer_conn in self.peer_connections: if peer_address == peer_conn.transport.getPeer().host: found = True break if found: continue reactor.connectTCP(peer_address, 9000, self) def get_block_a_to_b(self, a, b): logger.info('<<<Requested blocks: %s to %s from peers..', a, b) l = list(range(a, b)) for peer in self.peer_connections: if len(l) > 0: peer.transport.write(self.protocol.wrap_message('BN', str(l.pop(0)))) else: return def get_block_n_random_peer(self, n): logger.info('<<<Requested block: %s from random peer.', n) random.choice(self.peer_connections).get_block_n(n) return def get_block_n(self, n): logger.info('<<<Requested block: %s from peers.', n) for peer in self.peer_connections: peer.transport.write(self.protocol.wrap_message('BN', str(n))) return def get_m_blockheight_from_random_peer(self): logger.info('<<<Requested blockheight from random peer.') random.choice(self.peer_connections).get_m_blockheight_from_connection() return def get_blockheight_map_from_peers(self): logger.info('<<<Requested blockheight_map from peers.') for peer in self.peer_connections: peer.transport.write(self.protocol.wrap_message('BM')) return def get_m_blockheight_from_peers(self): for peer in self.peer_connections: peer.get_m_blockheight_from_connection() return def send_m_blockheight_to_peers(self): logger.info('<<<Sending blockheight to peers.') for peer in self.peer_connections: peer.send_m_blockheight_to_peer() return def send_st_to_peers(self, st): logger.info('<<<Transmitting ST: %s', st.epoch) self.register_and_broadcast('ST', st.get_message_hash(), st.to_json()) return def send_tx_to_peers(self, tx): logger.info('<<<Transmitting TX: %s', bin2hstr(tx.txhash)) self.register_and_broadcast('TX', tx.get_message_hash(), tx.to_json()) return def send_reboot(self, json_hash): logger.info('<<<Transmitting Reboot Command') for peer in self.peer_connections: peer.transport.write(self.protocol.wrap_message('reboot', json_hash)) return def ip_geotag_peers(self): logger.info('<<<IP geotag broadcast') for peer in self.peer_connections: peer.transport.write(self.protocol.wrap_message('IP')) return def ping_peers(self): logger.info('<<<Transmitting network PING') self.chain.last_ping = time.time() for peer in self.peer_connections: peer.transport.write(self.protocol.wrap_message('PING')) return # send POS block to peers.. def send_stake_block(self, block_obj): logger.info('<<<Transmitting POS created block %s %s', str(block_obj.blockheader.blocknumber), block_obj.blockheader.headerhash) for peer in self.peer_connections: peer.transport.write(self.protocol.wrap_message('S4', json_bytestream(block_obj))) return # send/relay block to peers def send_block_to_peers(self, block): # logger.info('<<<Transmitting block: ', block.blockheader.headerhash) data = qrl_pb2.MR() k = qrl_pb2.Transaction() data.stake_selector = block.transactions[0].addr_from data.block_number = block.blockheader.blocknumber data.prev_headerhash = bytes(block.blockheader.prev_blockheaderhash) if block.blockheader.blocknumber > 1: data.reveal_hash = block.blockheader.reveal_hash self.register_and_broadcast('BK', block.blockheader.headerhash, block.to_json(), data) return def register_and_broadcast(self, msg_type, msg_hash: bytes, msg_json, data=None): # FIXME: Try to keep parameters in the same order (consistency) self.master_mr.register(msg_hash, msg_json, msg_type) # FIXME: Clean if not data: data = qrl_pb2.MR() data.hash = msg_hash data.type = msg_type self.broadcast(msg_hash, msg_type, data) def broadcast(self, msg_hash: bytes, msg_type, data=None): # Move to factory """ Broadcast This function sends the Message Receipt to all connected peers. :return: """ ignore_peers = [] if msg_hash in self.master_mr.requested_hash: ignore_peers = self.master_mr.requested_hash[msg_hash].peers_connection_list if not data: data = qrl_pb2.MR() data.hash = msg_hash data.type = msg_type for peer in self.peer_connections: if peer in ignore_peers: continue peer.transport.write(self.protocol.wrap_message('MR', MessageToJson(data))) # request transaction_pool from peers def get_tx_pool_from_peers(self): logger.info('<<<Requesting TX pool from peers..') for peer in self.peer_connections: peer.transport.write(self.protocol.wrap_message('RT')) return # connection functions def reset_processor_flag(self, _): self.txn_processor_running = False def reset_processor_flag_with_err(self, msg): logger.error('Exception in txn task') logger.error('%s', msg) self.txn_processor_running = False # Event handlers # noinspection PyMethodMayBeStatic def clientConnectionLost(self, connector, reason): logger.debug('connection lost: %s', reason) # TODO: Reconnect has been disabled # connector.connect() # noinspection PyMethodMayBeStatic def clientConnectionFailed(self, connector, reason): logger.debug('connection failed: %s', reason) # noinspection PyMethodMayBeStatic def startedConnecting(self, connector): logger.debug('Started connecting: %s', connector)
{ "content_hash": "f9259754ad95856750781c983b5ee620", "timestamp": "", "source": "github", "line_count": 317, "max_line_length": 154, "avg_line_length": 36.1608832807571, "alnum_prop": 0.6099624880048853, "repo_name": "elliottdehn/QRL", "id": "727816cf5c939a0c43203f1afdd4e3935bec8612", "size": "11478", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "qrl/core/p2pfactory.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "2276" }, { "name": "HTML", "bytes": "20501" }, { "name": "JavaScript", "bytes": "22142" }, { "name": "Python", "bytes": "431741" }, { "name": "Shell", "bytes": "1096" } ], "symlink_target": "" }
""" Template MainWindowBase.py """ #Author: Miguel Molero <miguel.molero@gmail.com> import sys import os from PyQt5.QtCore import * from PyQt5.QtGui import * from PyQt5.QtWidgets import * from PyQt5.QtCore import pyqtSignal as Signal import markdown2 import yaml import pprint #own components from pcloudpy.gui.resources_rc import * #from pcloudpy.gui.graphics.QVTKWidget import QVTKWidget from pcloudpy.gui.AppObject import AppObject from pcloudpy.gui.utils.qhelpers import * from pcloudpy.gui.components.ViewWidget import ViewWidget from pcloudpy.gui.components.TabViewWidget import TabViewWidget from pcloudpy.gui.components.ToolboxesWidget import ToolBoxesWidget from pcloudpy.gui.components.DatasetsWidget import DatasetsWidget from pcloudpy.gui.components.ObjectInspectorWidget import ObjectInspectorWidget from pcloudpy.gui.components.FilterWidget import FilterWidget #from shell.PythonConsole import PythonConsole #from shell.IPythonConsole import IPythonConsole #from shell.CodeEdit import CodeEdit NAME = "pcloudpy" class Info(object): version = "0.10" date = "27-10-2015" class MainWindowBase(QMainWindow): """ Base Class for the MainWindow Object. This class should inherit its attributes and methods to a MainWindow Class """ def __init__(self, parent = None): super(MainWindowBase, self).__init__(parent) self.setLocale((QLocale(QLocale.English, QLocale.UnitedStates))) self._software_name = NAME self.App = AppObject() self.init() self.create_menus() self.create_toolbars() self.setup_docks() self.setup_graphicsview() self.setup_statusbar() self.setup_connections() self.init_settings() self.init_toolboxes() QTimer.singleShot(0,self.load_initial_file) @property def software_name(self): return self._software_name @software_name.setter def software_name(self, name): self._software_name = name def init(self): self.Info = Info() self.dirty = False self.reset = False self.filename = None self.recent_files = [] self.dir_path = os.getcwd() self.setGeometry(100,100,900,600) self.setMinimumSize(400,400) self.setMaximumSize(2000,1500) self.setWindowFlags(self.windowFlags()) self.setWindowTitle(self.software_name) #Put here your init code def set_title(self, fname=None): title = os.path.basename(fname) self.setWindowTitle("%s:%s"%(self.softwareName,title)) def load_initial_file(self): settings = QSettings() fname = settings.value("LastFile") if fname and QFile.exists(fname): self.load_file(fname) def load_file(self, fname=None): if fname is None: action = self.sender() if isinstance(action, QAction): fname = action.data() if not self.ok_to_Continue(): return else: return if fname: self.filename = None self.add_recent_file(fname) self.filename = fname self.dirty = False self.set_title(fname) #Add More actions # # def add_recent_file(self, fname): if fname is None: return if not self.recentFiles.count(fname): self.recentFiles.insert(0,fname) while len(self.recentFiles)>9: self.recentFiles.pop() def create_menus(self): self.menubar = self.menuBar() file_menu = self.menubar.addMenu(self.tr('&File')) help_menu = self.menubar.addMenu(self.tr("&Help")) file_open_action = createAction(self, "&Open Dataset[s]", self.file_open) file_open_action.setIcon(self.style().standardIcon(QStyle.SP_DirIcon)) help_about_action = createAction(self, "&About %s"%self._software_name, self.help_about, icon="pcloudpy.png") addActions(file_menu, (file_open_action,)) addActions(help_menu, (help_about_action,)) def setup_connections(self): #Main Window self.workspaceLineEdit.textEdited.connect(self.editWorkSpace) #self.code_edit.codeRequested.connect(self.console_widget.execute_code) def setup_docks(self): #Toolboxes self.toolboxes_widget = ToolBoxesWidget() self.toolboxes_dockwidget = QDockWidget(self.tr("Toolboxes")) self.toolboxes_dockwidget.setObjectName("Toolboxes-Dock") self.toolboxes_dockwidget.setWidget(self.toolboxes_widget) self.toolboxes_dockwidget.setAllowedAreas(Qt.RightDockWidgetArea) self.addDockWidget(Qt.RightDockWidgetArea, self.toolboxes_dockwidget) #Datasets self.datasets_widget = DatasetsWidget() self.datasets_dockwidget = QDockWidget(self.tr("Datasets")) self.datasets_dockwidget.setObjectName("Datasets-Dock") self.datasets_dockwidget.setWidget(self.datasets_widget) self.datasets_dockwidget.setAllowedAreas(Qt.LeftDockWidgetArea) self.addDockWidget(Qt.LeftDockWidgetArea, self.datasets_dockwidget) #Object Inspector self.object_inspector_widget = ObjectInspectorWidget() self.object_inspector_dockwidget = QDockWidget(self.tr("Object Inspector")) self.object_inspector_dockwidget.setObjectName("Object-Inspector-Dock") self.object_inspector_dockwidget.setWidget(self.object_inspector_widget) self.object_inspector_dockwidget.setAllowedAreas(Qt.LeftDockWidgetArea) self.addDockWidget(Qt.LeftDockWidgetArea, self.object_inspector_dockwidget) #Filter Widget self.filter_widget = FilterWidget() self.filter_widget_dockwidget = QDockWidget(self.tr("Filter Setup")) self.filter_widget_dockwidget.setObjectName("Filter-Setup-Dock") self.filter_widget_dockwidget.setWidget(self.filter_widget) self.filter_widget_dockwidget.setAllowedAreas(Qt.RightDockWidgetArea) self.addDockWidget(Qt.RightDockWidgetArea, self.filter_widget_dockwidget) #Console self.tab_console = QTabWidget() #self.console_widget = IPythonConsole(self, self.App) #self.code_edit = CodeEdit() #self.tab_console.addTab(self.console_widget, "Console") #self.tab_console.addTab(self.code_edit, "Editor") #self.console_widget_dockwidget = QDockWidget(self.tr("IPython")) #self.console_widget_dockwidget.setObjectName("Console-Dock") #self.console_widget_dockwidget.setWidget(self.tab_console) #self.console_widget_dockwidget.setAllowedAreas(Qt.BottomDockWidgetArea) #self.addDockWidget(Qt.BottomDockWidgetArea, self.console_widget_dockwidget) def create_toolbars(self): self.actionOpen_WorkSpace = createAction(self,"Set Workspace", self.setWorkSpace) self.actionOpen_WorkSpace.setIcon(self.style().standardIcon(QStyle.SP_DirIcon)) self.first_toolbar = QToolBar(self) self.first_toolbar.setObjectName("Workspace Toolbar") self.first_toolbar.setAllowedAreas(Qt.TopToolBarArea | Qt.BottomToolBarArea) self.workspaceLineEdit = QLineEdit() self.workspaceLineEdit.setMinimumWidth(200) self.first_toolbar.addWidget(QLabel("Workspace Dir")) self.first_toolbar.addWidget(self.workspaceLineEdit) self.first_toolbar.addAction(self.actionOpen_WorkSpace) self.addToolBar(self.first_toolbar) if self.dir_path is None: self.dir_path = os.getcwd() self.workspaceLineEdit.setText(self.dir_path) self.addToolBarBreak() def setup_graphicsview(self): self.tab_view = TabViewWidget(self) view = ViewWidget() self.tab_view.addTab(view, "Layout #1") self.setCentralWidget(self.tab_view) # self.datasets_widget.init_tree(view.model) def setup_statusbar(self): self.status = self.statusBar() self.status.setSizeGripEnabled(False) #Add more action def setWorkSpace(self): dir = QFileDialog.getExistingDirectory(None, self.tr("Set Workspace directory"), self.dir_path, QFileDialog.ShowDirsOnly | QFileDialog.DontResolveSymlinks) if dir: self.dir_path = dir self.workspaceLineEdit.setText(self.dir_path) def editWorkSpace(self): if os.path.isdir(self.workspaceLineEdit.text()): self.dir_path = self.workspaceLineEdit.text() def init_settings(self): settings = QSettings() self.recentFiles = settings.value("RecentFiles") size = settings.value("MainWindow/Size",QSize(900,600)) position = settings.value("MainWindow/Position",QPoint(50,50)) self.restoreState(settings.value("MainWindow/State")) self.dir_path = settings.value("DirPath") #Retrives more options if self.recentFiles is None: self.recentFiles = [] self.resize(size) self.move(position) #Add more actions self.workspaceLineEdit.setText(self.dir_path) def reset_settings(self): settings = QSettings() settings.clear() self.reset = True self.close() def init_toolboxes(self): if hasattr(sys, 'frozen'): #http://stackoverflow.com/questions/14750997/load-txt-file-from-resources-in-python fd = QFile(":/config_toolboxes.yaml") if fd.open(QIODevice.ReadOnly | QFile.Text): text = QTextStream(fd).readAll() fd.close() data = yaml.load(text) else: path = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(path,'resources', 'conf', 'config_toolboxes.yaml'), 'r') as f: # use safe_load instead load data = yaml.safe_load(f) #pp = pprint.PrettyPrinter() #pp.pprint(data) self.toolboxes_widget.init_tree(data) def ok_to_continue(self): if self.dirty: reply = QMessageBox.question(self, "%s - Unsaved Changes"%self.softwareName, "Save unsaved changes?", QMessageBox.Yes|QMessageBox.No|QMessageBox.Cancel) if reply == QMessageBox.Cancel: return False elif reply == QMessageBox.Yes: self.file_save() return True def file_new(self): pass def file_open(self): pass def file_saveAs(self): pass def file_save(self): pass def help_about(self): message = read_file(":/about.md").format(self.Info.version, self.Info.date) html = markdown2.markdown(str(message)) QMessageBox.about(self, "About %s"%NAME, html) def closeEvent(self, event): if self.reset: return if self.ok_to_continue(): settings = QSettings() filename = self.filename if self.filename is not None else None settings.setValue("LastFile", filename) recentFiles = self.recentFiles if self.recentFiles else None settings.setValue("RecentFiles", recentFiles) settings.setValue("MainWindow/Size", self.size()) settings.setValue("MainWindow/Position", self.pos()) settings.setValue("MainWindow/State", self.saveState()) settings.setValue("DirPath", self.dir_path) #Set more options else: event.ignore() if __name__=='__main__': import sys app = QApplication(sys.argv) win = MainWindowBase() win.show() app.exec_()
{ "content_hash": "49baab245bdf98442046099a4ba307be", "timestamp": "", "source": "github", "line_count": 350, "max_line_length": 163, "avg_line_length": 33.714285714285715, "alnum_prop": 0.6441525423728813, "repo_name": "mmolero/pcloudpy", "id": "d71ad7bc63d605990df62c726642a9a0dfee3e2a", "size": "11800", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pcloudpy/gui/MainWindowBase.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "807415" } ], "symlink_target": "" }
import zxlolbot class helloworld(zxlolbot.zxLoLBoT): def __init__(self, username, password, region="NA"): zxlolbot.zxLoLBoT.__init__(self, username, password, region) @zxlolbot.botcommand def hello(self, sender, args): """Replies Hello world to the sender Usage: hello Example: hello""" self.message(sender, "Hello world") if __name__ == "__main__": bot = helloworld("username", "password") bot.connect()
{ "content_hash": "e9b898f6de15c54e8916e57e886b29a0", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 62, "avg_line_length": 30, "alnum_prop": 0.6952380952380952, "repo_name": "Mathzx/zxLoLBoT", "id": "bd70e7bc0902dfb78e9024eadf4d5d9be4b8635c", "size": "420", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "examples/hello_world.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "28848" } ], "symlink_target": "" }
from holiday import * import utils import report_card import datetime import pytz import csv import cPickle as pickle import datetime import ephem # run `pip install pyephem` import os import collections import string import time import cPickle as pickle import numpy as np import pytz import datetime import string import time import numpy as np import dateutil import pytz import sys import matplotlib import heapq import datetime import math import calendar import sklearn import copy import warnings import collections import random import scipy def print_versions(): import types import sys print "Python version:\n\t", sys.version, "\n" unversioned = [] print "Modules with __version__:" for val in globals().values(): if isinstance(val, types.ModuleType): try: ver = val.__version__ print "\t%s %s" %((val.__name__ + ":").ljust(15), ver.rjust(10)) except: unversioned.append(val.__name__) print "\nModules with no __version__:" for mod in unversioned: print "\t%s" % mod print_versions()
{ "content_hash": "620b6e95cd05376559e23901678da246", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 80, "avg_line_length": 21, "alnum_prop": 0.658008658008658, "repo_name": "dssg/energywise", "id": "cffa34a66d2940dc9327f4991eabfffd7609a06f", "size": "1155", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Code/versions.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "73702" } ], "symlink_target": "" }
import multiprocessing import os import random import select import socket import time from time import perf_counter as clock import numpy as np import tables as tb # create a PyTables file with a single int64 array with the specified number of # elements def create_file(array_size): array = np.ones(array_size, dtype='i8') with tb.open_file('test.h5', 'w') as fobj: array = fobj.create_array('/', 'test', array) print('file created, size: {} MB'.format(array.size_on_disk / 1e6)) # process to receive an array using a multiprocessing.Pipe connection class PipeReceive(multiprocessing.Process): def __init__(self, receiver_pipe, result_send): super().__init__() self.receiver_pipe = receiver_pipe self.result_send = result_send def run(self): # block until something is received on the pipe array = self.receiver_pipe.recv() recv_timestamp = clock() # perform an operation on the received array array += 1 finish_timestamp = clock() assert(np.all(array == 2)) # send the measured timestamps back to the originating process self.result_send.send((recv_timestamp, finish_timestamp)) def read_and_send_pipe(send_type, array_size): # set up Pipe objects to send the actual array to the other process # and receive the timing results from the other process array_recv, array_send = multiprocessing.Pipe(False) result_recv, result_send = multiprocessing.Pipe(False) # start the other process and pause to allow it to start up recv_process = PipeReceive(array_recv, result_send) recv_process.start() time.sleep(0.15) with tb.open_file('test.h5', 'r') as fobj: array = fobj.get_node('/', 'test') start_timestamp = clock() # read an array from the PyTables file and send it to the other process output = array.read(0, array_size, 1) array_send.send(output) assert(np.all(output + 1 == 2)) # receive the timestamps from the other process recv_timestamp, finish_timestamp = result_recv.recv() print_results(send_type, start_timestamp, recv_timestamp, finish_timestamp) recv_process.join() # process to receive an array using a shared memory mapped file # for real use, this would require creating some protocol to specify the # array's data type and shape class MemmapReceive(multiprocessing.Process): def __init__(self, path_recv, result_send): super().__init__() self.path_recv = path_recv self.result_send = result_send def run(self): # block until the memmap file path is received from the other process path = self.path_recv.recv() # create a memmap array using the received file path array = np.memmap(path, 'i8', 'r+') recv_timestamp = clock() # perform an operation on the array array += 1 finish_timestamp = clock() assert(np.all(array == 2)) # send the timing results back to the other process self.result_send.send((recv_timestamp, finish_timestamp)) def read_and_send_memmap(send_type, array_size): # create a multiprocessing Pipe that will be used to send the memmap # file path to the receiving process path_recv, path_send = multiprocessing.Pipe(False) result_recv, result_send = multiprocessing.Pipe(False) # start the receiving process and pause to allow it to start up recv_process = MemmapReceive(path_recv, result_send) recv_process.start() time.sleep(0.15) with tb.open_file('test.h5', 'r') as fobj: array = fobj.get_node('/', 'test') start_timestamp = clock() # memmap a file as a NumPy array in 'overwrite' mode output = np.memmap('/tmp/array1', 'i8', 'w+', shape=(array_size, )) # read an array from a PyTables file into the memmory mapped array array.read(0, array_size, 1, out=output) # use a multiprocessing.Pipe to send the file's path to the receiving # process path_send.send('/tmp/array1') # receive the timestamps from the other process recv_timestamp, finish_timestamp = result_recv.recv() # because 'output' is shared between processes, all elements should now # be equal to 2 assert(np.all(output == 2)) print_results(send_type, start_timestamp, recv_timestamp, finish_timestamp) recv_process.join() # process to receive an array using a socket # for real use, this would require creating some protocol to specify the # array's data type and shape class SocketReceive(multiprocessing.Process): def __init__(self, socket_family, address, result_send, array_nbytes): super().__init__() self.socket_family = socket_family self.address = address self.result_send = result_send self.array_nbytes = array_nbytes def run(self): # create the socket, listen for a connection and use select to block # until a connection is made sock = socket.socket(self.socket_family, socket.SOCK_STREAM) sock.bind(self.address) sock.listen(1) readable, _, _ = select.select([sock], [], []) # accept the connection and read the sent data into a bytearray connection = sock.accept()[0] recv_buffer = bytearray(self.array_nbytes) view = memoryview(recv_buffer) bytes_recv = 0 while bytes_recv < self.array_nbytes: bytes_recv += connection.recv_into(view[bytes_recv:]) # convert the bytearray into a NumPy array array = np.frombuffer(recv_buffer, dtype='i8') recv_timestamp = clock() # perform an operation on the received array array += 1 finish_timestamp = clock() assert(np.all(array == 2)) # send the timestamps back to the originating process self.result_send.send((recv_timestamp, finish_timestamp)) connection.close() sock.close() def unix_socket_address(): # create a Unix domain address in the abstract namespace # this will only work on Linux return b'\x00' + os.urandom(5) def ipv4_socket_address(): # create an IPv4 socket address return ('127.0.0.1', random.randint(9000, 10_000)) def read_and_send_socket(send_type, array_size, array_bytes, address_func, socket_family): address = address_func() # start the receiving process and pause to allow it to start up result_recv, result_send = multiprocessing.Pipe(False) recv_process = SocketReceive(socket_family, address, result_send, array_bytes) recv_process.start() time.sleep(0.15) with tb.open_file('test.h5', 'r') as fobj: array = fobj.get_node('/', 'test') start_timestamp = clock() # connect to the receiving process' socket sock = socket.socket(socket_family, socket.SOCK_STREAM) sock.connect(address) # read the array from the PyTables file and send its # data buffer to the receiving process output = array.read(0, array_size, 1) sock.send(output.data) assert(np.all(output + 1 == 2)) # receive the timestamps from the other process recv_timestamp, finish_timestamp = result_recv.recv() sock.close() print_results(send_type, start_timestamp, recv_timestamp, finish_timestamp) recv_process.join() def print_results(send_type, start_timestamp, recv_timestamp, finish_timestamp): msg = 'type: {0}\t receive: {1:5.5f}, add:{2:5.5f}, total: {3:5.5f}' print(msg.format(send_type, recv_timestamp - start_timestamp, finish_timestamp - recv_timestamp, finish_timestamp - start_timestamp)) if __name__ == '__main__': random.seed(os.urandom(2)) array_num_bytes = [10**5, 10**6, 10**7, 10**8] for array_bytes in array_num_bytes: array_size = array_bytes // 8 create_file(array_size) read_and_send_pipe('multiproc.Pipe', array_size) read_and_send_memmap('memmap ', array_size) # comment out this line to run on an OS other than Linux read_and_send_socket('Unix socket', array_size, array_bytes, unix_socket_address, socket.AF_UNIX) read_and_send_socket('IPv4 socket', array_size, array_bytes, ipv4_socket_address, socket.AF_INET) print()
{ "content_hash": "ec24b7789304a532373e652a4f483335", "timestamp": "", "source": "github", "line_count": 218, "max_line_length": 79, "avg_line_length": 39.091743119266056, "alnum_prop": 0.6406946726120629, "repo_name": "PyTables/PyTables", "id": "be7220263aebf8f0cf9faca60c4d2d594697cd2f", "size": "9169", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "examples/multiprocess_access_benchmarks.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "165578" }, { "name": "CMake", "bytes": "2417" }, { "name": "Cython", "bytes": "283042" }, { "name": "Gnuplot", "bytes": "2104" }, { "name": "Makefile", "bytes": "2489" }, { "name": "Python", "bytes": "3119836" }, { "name": "Shell", "bytes": "19408" } ], "symlink_target": "" }
""" configuration.py Created by Thomas Mangin on 2009-08-25. Copyright (c) 2009-2013 Exa Networks. All rights reserved. """ import os import sys import stat import time import socket from pprint import pformat from copy import deepcopy from struct import pack,unpack from exabgp.util.ip import isipv4 from exabgp.configuration.environment import environment from exabgp.protocol.family import AFI,SAFI,known_families from exabgp.bgp.neighbor import Neighbor from exabgp.protocol.ip.inet import Inet,inet,pton from exabgp.bgp.message.direction import OUT from exabgp.bgp.message.open.asn import ASN from exabgp.bgp.message.open.holdtime import HoldTime from exabgp.bgp.message.open.routerid import RouterID from exabgp.bgp.message.update.nlri.prefix import Prefix from exabgp.bgp.message.update.nlri.bgp import NLRI,PathInfo,Labels,RouteDistinguisher from exabgp.bgp.message.update.nlri.flow import BinaryOperator,NumericOperator,FlowNLRI,Flow4Source,Flow4Destination,Flow6Source,Flow6Destination,FlowSourcePort,FlowDestinationPort,FlowAnyPort,FlowIPProtocol,FlowNextHeader,FlowTCPFlag,FlowFragment,FlowPacketLength,FlowICMPType,FlowICMPCode,FlowDSCP,FlowTrafficClass,FlowFlowLabel from exabgp.bgp.message.update.attribute.id import AttributeID from exabgp.bgp.message.update.attribute.origin import Origin from exabgp.bgp.message.update.attribute.nexthop import cachedNextHop from exabgp.bgp.message.update.attribute.aspath import ASPath from exabgp.bgp.message.update.attribute.med import MED from exabgp.bgp.message.update.attribute.localpref import LocalPreference from exabgp.bgp.message.update.attribute.atomicaggregate import AtomicAggregate from exabgp.bgp.message.update.attribute.aggregator import Aggregator from exabgp.bgp.message.update.attribute.communities import Community,cachedCommunity,Communities,ECommunity,ECommunities,to_ExtendedCommunity,to_FlowTrafficRate,to_FlowRedirectVRFASN,to_FlowRedirectVRFIP,to_FlowRedirect,to_FlowTrafficMark,to_FlowTrafficAction from exabgp.bgp.message.update.attribute.originatorid import OriginatorID from exabgp.bgp.message.update.attribute.clusterlist import ClusterList from exabgp.bgp.message.update.attribute.aigp import AIGP from exabgp.bgp.message.update.attribute.unknown import UnknownAttribute from exabgp.bgp.message.operational import MAX_ADVISORY,Advisory,Query,Response from exabgp.bgp.message.update.attributes import Attributes from exabgp.rib.change import Change from exabgp.bgp.message.refresh import RouteRefresh from exabgp.logger import Logger # Duck class, faking part of the Attribute interface # We add this to routes when when need o split a route in smaller route # The value stored is the longer netmask we want to use # As this is not a real BGP attribute this stays in the configuration file class Split (int): ID = AttributeID.INTERNAL_SPLIT MULTIPLE = False class Watchdog (str): ID = AttributeID.INTERNAL_WATCHDOG MULTIPLE = False class Withdrawn (object): ID = AttributeID.INTERNAL_WITHDRAW MULTIPLE = False # Take an integer an created it networked packed representation for the right family (ipv4/ipv6) def pack_int (afi,integer,mask): return ''.join([chr((integer>>(offset*8)) & 0xff) for offset in range(Inet.length[afi]-1,-1,-1)]) class Configuration (object): TTL_SECURITY = 255 # ' hold-time 180;\n' \ _str_bad_flow = "you tried to filter a flow using an invalid port for a component .." _str_route_error = \ 'community, extended-communities and as-path can take a single community as parameter.\n' \ 'only next-hop is mandatory\n' \ '\n' \ 'syntax:\n' \ 'route 10.0.0.1/22 {\n' \ ' path-information 0.0.0.1;\n' \ ' route-distinguisher|rd 255.255.255.255:65535|65535:65536|65536:65535' \ ' next-hop 192.0.1.254;\n' \ ' origin IGP|EGP|INCOMPLETE;\n' \ ' as-path [ AS-SEQUENCE-ASN1 AS-SEQUENCE-ASN2 ( AS-SET-ASN3 )] ;\n' \ ' med 100;\n' \ ' local-preference 100;\n' \ ' atomic-aggregate;\n' \ ' community [ 65000 65001 65002 ];\n' \ ' extended-community [ target:1234:5.6.7.8 target:1.2.3.4:5678 origin:1234:5.6.7.8 origin:1.2.3.4:5678 0x0002FDE800000001 ]\n' \ ' originator-id 10.0.0.10;\n' \ ' cluster-list [ 10.10.0.1 10.10.0.2 ];\n' \ ' label [ 100 200 ];\n' \ ' aggregator ( 65000:10.0.0.10 )\n' \ ' aigp 100;\n' \ ' split /24\n' \ ' watchdog watchdog-name\n' \ ' withdraw\n' \ '}\n' \ '\n' \ 'syntax:\n' \ 'route 10.0.0.1/22' \ ' path-information 0.0.0.1' \ ' route-distinguisher|rd 255.255.255.255:65535|65535:65536|65536:65535' \ ' next-hop 192.0.2.1' \ ' origin IGP|EGP|INCOMPLETE' \ ' as-path AS-SEQUENCE-ASN' \ ' med 100' \ ' local-preference 100' \ ' atomic-aggregate' \ ' community 65000' \ ' extended-community target:1234:5.6.7.8' \ ' originator-id 10.0.0.10' \ ' cluster-list 10.10.0.1' \ ' label 150' \ ' aggregator ( 65000:10.0.0.10 )' \ ' aigp 100' \ ' split /24' \ ' watchdog watchdog-name' \ ' withdraw' \ ';\n' \ _str_flow_error = \ 'syntax: flow {\n' \ ' route give-me-a-name\n' \ ' route-distinguisher|rd 255.255.255.255:65535|65535:65536|65536:65535; (optional)\n' \ ' next-hop 1.2.3.4; (to use with redirect-to-nexthop)\n' \ ' match {\n' \ ' source 10.0.0.0/24;\n' \ ' source ::1/128/0;\n' \ ' destination 10.0.1.0/24;\n' \ ' port 25;\n' \ ' source-port >1024\n' \ ' destination-port =80 =3128 >8080&<8088;\n' \ ' protocol [ udp tcp ]; (ipv4 only)\n' \ ' next-header [ udp tcp ]; (ipv6 only)\n' \ ' fragment [ not-a-fragment dont-fragment is-fragment first-fragment last-fragment ]; (ipv4 only)\n' \ ' packet-length >200&<300 >400&<500;\n' \ ' flow-label >100&<2000; (ipv6 only)\n' \ ' }\n' \ ' then {\n' \ ' discard;\n' \ ' rate-limit 9600;\n' \ ' redirect 30740:12345;\n' \ ' redirect 1.2.3.4:5678;\n' \ ' redirect 1.2.3.4;\n' \ ' redirect-next-hop;\n' \ ' copy 1.2.3.4;\n' \ ' mark 123;\n' \ ' action sample|terminal|sample-terminal;\n' \ ' }\n' \ ' }\n' \ '}\n\n' \ 'one or more match term, one action\n' \ 'fragment code is totally untested\n' \ _str_process_error = \ 'syntax: process name-of-process {\n' \ ' run /path/to/command with its args;\n' \ ' }\n\n' \ _str_family_error = \ 'syntax: family {\n' \ ' all; # default if not family block is present, announce all we know\n' \ ' minimal # use the AFI/SAFI required to announce the routes in the configuration\n' \ ' \n' \ ' ipv4 unicast;\n' \ ' ipv4 multicast;\n' \ ' ipv4 nlri-mpls;\n' \ ' ipv4 mpls-vpn;\n' \ ' ipv4 flow;\n' \ ' ipv4 flow-vpn;\n' \ ' ipv6 unicast;\n' \ ' ipv6 flow;\n' \ ' ipv6 flow-vpn;\n' \ ' }\n' _str_capa_error = \ 'syntax: capability {\n' \ ' graceful-restart <time in second>;\n' \ ' asn4 enable|disable;\n' \ ' add-path disable|send|receive|send/receive;\n' \ ' multi-session enable|disable;\n' \ ' operational enable|disable;\n' \ ' }\n' def __init__ (self,fname,text=False): self.debug = environment.settings().debug.configuration self.api_encoder = environment.settings().api.encoder self.logger = Logger() self._text = text self._fname = fname self._clear() def _clear (self): self.process = {} self.neighbor = {} self.error = '' self._neighbor = {} self._scope = [] self._location = ['root'] self._line = [] self._error = '' self._number = 1 self._flow_state = 'out' self._nexthopself = None # Public Interface def reload (self): try: return self._reload() except KeyboardInterrupt: self.error = 'configuration reload aborted by ^C or SIGINT' return False def _reload (self): if self._text: self._tokens = self._tokenise(self._fname.split('\n')) else: try: f = open(self._fname,'r') self._tokens = self._tokenise(f) f.close() except IOError,e: error = str(e) if error.count(']'): self.error = error.split(']')[1].strip() else: self.error = error if self.debug: raise return False self._clear() r = False while not self.finished(): r = self._dispatch(self._scope,'configuration',['group','neighbor'],[]) if r is False: break if r not in [True,None]: self.error = "\nsyntax error in section %s\nline %d : %s\n\n%s" % (self._location[-1],self.number(),self.line(),self._error) return False self.neighbor = self._neighbor if environment.settings().debug.route: self.decode(environment.settings().debug.route) sys.exit(0) if environment.settings().debug.selfcheck: self.selfcheck() sys.exit(0) return True def parse_api_route (self,command,peers,action): tokens = self._cleaned(command).split(' ')[1:] if len(tokens) < 4: return False if tokens[0] != 'route': return False changes = [] if 'self' in command: for peer,nexthop in peers.iteritems(): scope = [{}] self._nexthopself = nexthop if not self._single_static_route(scope,tokens[1:]): self._nexthopself = None return False for change in scope[0]['announce']: changes.append((peer,change)) self._nexthopself = None else: scope = [{}] if not self._single_static_route(scope,tokens[1:]): return False for peer in peers: for change in scope[0]['announce']: changes.append((peer,change)) if action == 'withdraw': for (peer,change) in changes: change.nlri.action = OUT.withdraw return changes def parse_api_attribute (self,command,peers,action): # This is a quick solution which does not support next-hop self attribute,nlris = command.split('nlri') route = '%s route 0.0.0.0/0 %s' % (action, ' '.join(attribute.split()[2:])) parsed = self.parse_api_route(route,peers,action) if parsed in (True,False,None): return parsed attributes = parsed[0][1].attributes nexthop = parsed[0][1].nlri.nexthop changes = [] for nlri in nlris.split(): ip,mask = nlri.split('/') change = Change(NLRI(*inet(ip),mask=int(mask),nexthop=nexthop,action=action),attributes) if action == 'withdraw': change.nlri.action = OUT.withdraw else: change.nlri.action = OUT.announce changes.append((peers.keys(),change)) return changes def parse_api_flow (self,command,action): self._tokens = self._tokenise(' '.join(self._cleaned(command).split(' ')[2:]).split('\\n')) scope = [{}] if not self._dispatch(scope,'flow',['route',],[]): return False if not self._check_flow_route(scope): return False changes = scope[0]['announce'] if action == 'withdraw': for change in changes: change.nlri.action = OUT.withdraw return changes def parse_api_refresh (self,command): tokens = self._cleaned(command).split(' ')[2:] if len(tokens) != 2: return False afi = AFI.value(tokens.pop(0)) safi = SAFI.value(tokens.pop(0)) if afi is None or safi is None: return False return RouteRefresh(afi,safi) # operational def parse_api_operational (self,command): tokens = self._cleaned(command).split(' ',2) scope = [{}] if len(tokens) != 3: return False operational = tokens[0].lower() what = tokens[1].lower() if operational != 'operational': return False if what == 'asm': if not self._single_operational(Advisory.ASM,scope,['afi','safi','advisory'],tokens[2]): return False elif what == 'adm': if not self._single_operational(Advisory.ADM,scope,['afi','safi','advisory'],tokens[2]): return False elif what == 'rpcq': if not self._single_operational(Query.RPCQ,scope,['afi','safi','sequence'],tokens[2]): return False elif what == 'rpcp': if not self._single_operational(Response.RPCP,scope,['afi','safi','sequence','rxc','txc'],tokens[2]): return False elif what == 'apcq': if not self._single_operational(Query.APCQ,scope,['afi','safi','sequence'],tokens[2]): return False elif what == 'apcp': if not self._single_operational(Response.APCP,scope,['afi','safi','sequence','counter'],tokens[2]): return False elif what == 'lpcq': if not self._single_operational(Query.LPCQ,scope,['afi','safi','sequence'],tokens[2]): return False elif what == 'lpcp': if not self._single_operational(Response.LPCP,scope,['afi','safi','sequence','counter'],tokens[2]): return False else: return False operational = scope[0]['operational'][0] return operational # XXX: FIXME: move this from here to the reactor (or whatever will manage command from user later) def change_to_peers (self,change,peers): result = True for neighbor in self.neighbor: if neighbor in peers: if change.nlri.family() in self.neighbor[neighbor].families(): self.neighbor[neighbor].rib.outgoing.insert_announced(change) else: self.logger.configuration('the route family is not configured on neighbor','error') result = False return result # XXX: FIXME: move this from here to the reactor (or whatever will manage command from user later) def operational_to_peers (self,operational,peers): result = True for neighbor in self.neighbor: if neighbor in peers: if operational.family() in self.neighbor[neighbor].families(): if operational.name == 'ASM': self.neighbor[neighbor].asm[operational.family()] = operational self.neighbor[neighbor].messages.append(operational) else: self.logger.configuration('the route family is not configured on neighbor','error') result = False return result # XXX: FIXME: move this from here to the reactor (or whatever will manage command from user later) def refresh_to_peers (self,refresh,peers): result = True for neighbor in self.neighbor: if neighbor in peers: family = (refresh.afi,refresh.safi) if family in self.neighbor[neighbor].families(): self.neighbor[neighbor].refresh.append(refresh.__class__(refresh.afi,refresh.safi)) else: result = False return result # Tokenisation def _cleaned (self,line): changed_line = '#' new_line = line.strip().replace('\t',' ').replace(']',' ]').replace('[','[ ').replace(')',' )').replace('(','( ') while new_line != changed_line: changed_line = new_line new_line = new_line.replace(' ',' ') return new_line def _tokenise (self,text): r = [] config = '' for line in text: self.logger.configuration('loading | %s' % line.rstrip()) replaced = self._cleaned(line) config += line if not replaced: continue if replaced.startswith('#'): continue command = replaced[:3] if command in ('md5','asm'): string = line.strip()[3:].strip() if string[-1] == ';': string = string[:-1] r.append([command,string,';']) elif replaced[:3] == 'run': r.append([t for t in replaced[:-1].split(' ',1) if t] + [replaced[-1]]) else: r.append([t.lower() for t in replaced[:-1].split(' ') if t] + [replaced[-1]]) self.logger.config(config) return r def tokens (self): self._number += 1 self._line = self._tokens.pop(0) return self._line def number (self): return self._number def line (self): return ' '.join(self._line) def finished (self): return len(self._tokens) == 0 # Flow control ...................... # name is not used yet but will come really handy if we have name collision :D def _dispatch (self,scope,name,multi,single): try: tokens = self.tokens() except IndexError: self._error = 'configuration file incomplete (most likely missing })' if self.debug: raise return False self.logger.configuration('analysing tokens %s ' % str(tokens)) self.logger.configuration(' valid block options %s' % str(multi)) self.logger.configuration(' valid parameters %s' % str(single)) end = tokens[-1] if multi and end == '{': self._location.append(tokens[0]) return self._multi_line(scope,name,tokens[:-1],multi) if single and end == ';': return self._single_line(scope,name,tokens[:-1],single) if end == '}': if len(self._location) == 1: self._error = 'closing too many parenthesis' if self.debug: raise return False self._location.pop(-1) return None return False def _multi_line (self,scope,name,tokens,valid): command = tokens[0] if valid and command not in valid: self._error = 'option %s in not valid here' % command if self.debug: raise return False if name == 'configuration': if command == 'neighbor': if self._multi_neighbor(scope,tokens[1:]): return self._make_neighbor(scope) return False if command == 'group': if len(tokens) != 2: self._error = 'syntax: group <name> { <options> }' if self.debug: raise return False return self._multi_group(scope,tokens[1]) if name == 'group': if command == 'neighbor': if self._multi_neighbor(scope,tokens[1:]): return self._make_neighbor(scope) return False if command == 'static': return self._multi_static(scope,tokens[1:]) if command == 'flow': return self._multi_flow(scope,tokens[1:]) if command == 'process': return self._multi_process(scope,tokens[1:]) if command == 'family': return self._multi_family(scope,tokens[1:]) if command == 'capability': return self._multi_capability(scope,tokens[1:]) if command == 'operational': return self._multi_operational(scope,tokens[1:]) if name == 'neighbor': if command == 'static': return self._multi_static(scope,tokens[1:]) if command == 'flow': return self._multi_flow(scope,tokens[1:]) if command == 'process': return self._multi_process(scope,tokens[1:]) if command == 'family': return self._multi_family(scope,tokens[1:]) if command == 'capability': return self._multi_capability(scope,tokens[1:]) if command == 'operational': return self._multi_operational(scope,tokens[1:]) if name == 'static': if command == 'route': if self._multi_static_route(scope,tokens[1:]): return self._check_static_route(scope) return False if name == 'flow': if command == 'route': if self._multi_flow_route(scope,tokens[1:]): return self._check_flow_route(scope) return False if name == 'flow-route': if command == 'match': if self._multi_match(scope,tokens[1:]): return True return False if command == 'then': if self._multi_then(scope,tokens[1:]): return True return False return False def _single_line (self,scope,name,tokens,valid): command = tokens[0] if valid and command not in valid: self._error = 'invalid keyword "%s"' % command if self.debug: raise return False elif name == 'route': if command == 'origin': return self._route_origin(scope,tokens[1:]) if command == 'as-path': return self._route_aspath(scope,tokens[1:]) # For legacy with version 2.0.x if command == 'as-sequence': return self._route_aspath(scope,tokens[1:]) if command == 'med': return self._route_med(scope,tokens[1:]) if command == 'aigp': return self._route_aigp(scope,tokens[1:]) if command == 'next-hop': return self._route_next_hop(scope,tokens[1:]) if command == 'local-preference': return self._route_local_preference(scope,tokens[1:]) if command == 'atomic-aggregate': return self._route_atomic_aggregate(scope,tokens[1:]) if command == 'aggregator': return self._route_aggregator(scope,tokens[1:]) if command == 'path-information': return self._route_path_information(scope,tokens[1:]) if command == 'originator-id': return self._route_originator_id(scope,tokens[1:]) if command == 'cluster-list': return self._route_cluster_list(scope,tokens[1:]) if command == 'split': return self._route_split(scope,tokens[1:]) if command == 'label': return self._route_label(scope,tokens[1:]) if command in ('rd','route-distinguisher'): return self._route_rd(scope,tokens[1:],SAFI.mpls_vpn) if command == 'watchdog': return self._route_watchdog(scope,tokens[1:]) # withdrawn is here to not break legacy code if command in ('withdraw','withdrawn'): return self._route_withdraw(scope,tokens[1:]) if command == 'community': return self._route_community(scope,tokens[1:]) if command == 'extended-community': return self._route_extended_community(scope,tokens[1:]) if command == 'attribute': self._route_generic_attribute(scope,tokens[1:]) elif name == 'flow-route': if command in ('rd','route-distinguisher'): return self._route_rd(scope,tokens[1:],SAFI.flow_vpn) if command == 'next-hop': return self._flow_route_next_hop(scope,tokens[1:]) elif name == 'flow-match': if command == 'source': return self._flow_source(scope,tokens[1:]) if command == 'destination': return self._flow_destination(scope,tokens[1:]) if command == 'port': return self._flow_route_anyport(scope,tokens[1:]) if command == 'source-port': return self._flow_route_source_port(scope,tokens[1:]) if command == 'destination-port': return self._flow_route_destination_port(scope,tokens[1:]) if command == 'protocol': return self._flow_route_protocol(scope,tokens[1:]) if command == 'next-header': return self._flow_route_next_header(scope,tokens[1:]) if command == 'tcp-flags': return self._flow_route_tcp_flags(scope,tokens[1:]) if command == 'icmp-type': return self._flow_route_icmp_type(scope,tokens[1:]) if command == 'icmp-code': return self._flow_route_icmp_code(scope,tokens[1:]) if command == 'fragment': return self._flow_route_fragment(scope,tokens[1:]) if command == 'dscp': return self._flow_route_dscp(scope,tokens[1:]) if command == 'traffic-class': return self._flow_route_traffic_class(scope,tokens[1:]) if command == 'packet-length': return self._flow_route_packet_length(scope,tokens[1:]) if command == 'flow-label': return self._flow_route_flow_label(scope,tokens[1:]) elif name == 'flow-then': if command == 'discard': return self._flow_route_discard(scope,tokens[1:]) if command == 'rate-limit': return self._flow_route_rate_limit(scope,tokens[1:]) if command == 'redirect': return self._flow_route_redirect(scope,tokens[1:]) if command == 'redirect-to-nexthop': return self._flow_route_redirect_next_hop(scope,tokens[1:]) if command == 'copy': return self._flow_route_copy(scope,tokens[1:]) if command == 'mark': return self._flow_route_mark(scope,tokens[1:]) if command == 'action': return self._flow_route_action(scope,tokens[1:]) if command == 'community': return self._route_community(scope,tokens[1:]) if command == 'extended-community': return self._route_extended_community(scope,tokens[1:]) if name in ('neighbor','group'): if command == 'description': return self._set_description(scope,tokens[1:]) if command == 'router-id': return self._set_router_id(scope,'router-id',tokens[1:]) if command == 'local-address': return self._set_ip(scope,'local-address',tokens[1:]) if command == 'local-as': return self._set_asn(scope,'local-as',tokens[1:]) if command == 'peer-as': return self._set_asn(scope,'peer-as',tokens[1:]) if command == 'passive': return self._set_passive(scope,'passive',tokens[1:]) if command == 'hold-time': return self._set_holdtime(scope,'hold-time',tokens[1:]) if command == 'md5': return self._set_md5(scope,'md5',tokens[1:]) if command == 'ttl-security': return self._set_ttl(scope,'ttl-security',tokens[1:]) if command == 'group-updates': return self._set_group_updates(scope,'group-updates',tokens[1:]) if command == 'aigp': return self._set_boolean(scope,'aigp',tokens[1:],None) # deprecated if command == 'route-refresh': return self._set_boolean(scope,'route-refresh',tokens[1:]) if command == 'graceful-restart': return self._set_gracefulrestart(scope,'graceful-restart',tokens[1:]) if command == 'multi-session': return self._set_boolean(scope,'multi-session',tokens[1:]) if command == 'add-path': return self._set_addpath(scope,'add-path',tokens[1:]) if command == 'auto-flush': return self._set_boolean(scope,'auto-flush',tokens[1:]) if command == 'adj-rib-out': return self._set_boolean(scope,'adj-rib-out',tokens[1:]) elif name == 'family': if command == 'inet': return self._set_family_inet4(scope,tokens[1:]) if command == 'inet4': return self._set_family_inet4(scope,tokens[1:]) if command == 'inet6': return self._set_family_inet6(scope,tokens[1:]) if command == 'ipv4': return self._set_family_ipv4(scope,tokens[1:]) if command == 'ipv6': return self._set_family_ipv6(scope,tokens[1:]) if command == 'minimal': return self._set_family_minimal(scope,tokens[1:]) if command == 'all': return self._set_family_all(scope,tokens[1:]) elif name == 'capability': if command == 'route-refresh': return self._set_boolean(scope,'route-refresh',tokens[1:]) if command == 'graceful-restart': return self._set_gracefulrestart(scope,'graceful-restart',tokens[1:]) if command == 'multi-session': return self._set_boolean(scope,'multi-session',tokens[1:]) if command == 'operational': return self._set_boolean(scope,'capa-operational',tokens[1:]) if command == 'add-path': return self._set_addpath(scope,'add-path',tokens[1:]) if command == 'asn4': return self._set_asn4(scope,'asn4',tokens[1:]) if command == 'aigp': return self._set_boolean(scope,'aigp',tokens[1:],None) elif name == 'process': if command == 'run': return self._set_process_run(scope,'process-run',tokens[1:]) # legacy ... if command == 'parse-routes': self._set_process_command(scope,'neighbor-changes',tokens[1:]) self._set_process_command(scope,'receive-routes',tokens[1:]) return True # legacy ... if command == 'peer-updates': self._set_process_command(scope,'neighbor-changes',tokens[1:]) self._set_process_command(scope,'receive-routes',tokens[1:]) return True # new interface if command == 'encoder': return self._set_process_encoder(scope,'encoder',tokens[1:]) if command == 'receive-packets': return self._set_process_command(scope,'receive-packets',tokens[1:]) if command == 'send-packets': return self._set_process_command(scope,'send-packets',tokens[1:]) if command == 'receive-routes': return self._set_process_command(scope,'receive-routes',tokens[1:]) if command == 'neighbor-changes': return self._set_process_command(scope,'neighbor-changes',tokens[1:]) if command == 'receive-operational': return self._set_process_command(scope,'receive-operational',tokens[1:]) elif name == 'static': if command == 'route': return self._single_static_route(scope,tokens[1:]) elif name == 'operational': if command == 'asm': return self._single_operational_asm(scope,tokens[1]) # it does not make sense to have adm return False # Programs used to control exabgp def _multi_process (self,scope,tokens): while True: r = self._dispatch(scope,'process',[],['run','encoder','receive-packets','send-packets','receive-routes','receive-operational','neighbor-changes', 'peer-updates','parse-routes']) if r is False: return False if r is None: break name = tokens[0] if len(tokens) >= 1 else 'conf-only-%s' % str(time.time())[-6:] self.process.setdefault(name,{})['neighbor'] = scope[-1]['peer-address'] if 'peer-address' in scope[-1] else '*' run = scope[-1].pop('process-run','') if run: if len(tokens) != 1: self._error = self._str_process_error if self.debug: raise return False self.process[name]['encoder'] = scope[-1].get('encoder','') or self.api_encoder self.process[name]['run'] = run return True elif len(tokens): self._error = self._str_process_error if self.debug: raise return False def _set_process_command (self,scope,command,value): scope[-1][command] = True return True def _set_process_encoder (self,scope,command,value): if value and value[0] in ('text','json'): scope[-1][command] = value[0] return True self._error = self._str_process_error if self.debug: raise return False def _set_process_run (self,scope,command,value): line = ' '.join(value).strip() if len(line) > 2 and line[0] == line[-1] and line[0] in ['"',"'"]: line = line[1:-1] if ' ' in line: prg,args = line.split(' ',1) else: prg = line args = '' if not prg: self._error = 'prg requires the program to prg as an argument (quoted or unquoted)' if self.debug: raise return False if prg[0] != '/': if prg.startswith('etc/exabgp'): parts = prg.split('/') path = [os.environ.get('ETC','etc'),] + parts[2:] prg = os.path.join(*path) else: prg = os.path.abspath(os.path.join(os.path.dirname(self._fname),prg)) if not os.path.exists(prg): self._error = 'can not locate the the program "%s"' % prg if self.debug: raise return False # XXX: Yep, race conditions are possible, those are sanity checks not security ones ... s = os.stat(prg) if stat.S_ISDIR(s.st_mode): self._error = 'can not execute directories "%s"' % prg if self.debug: raise return False if s.st_mode & stat.S_ISUID: self._error = 'refusing to run setuid programs "%s"' % prg if self.debug: raise return False check = stat.S_IXOTH if s.st_uid == os.getuid(): check |= stat.S_IXUSR if s.st_gid == os.getgid(): check |= stat.S_IXGRP if not check & s.st_mode: self._error = 'exabgp will not be able to run this program "%s"' % prg if self.debug: raise return False if args: scope[-1][command] = [prg] + args.split(' ') else: scope[-1][command] = [prg,] return True # Limit the AFI/SAFI pair announced to peers def _multi_family (self,scope,tokens): # we know all the families we should use self._family = False scope[-1]['families'] = [] while True: r = self._dispatch(scope,'family',[],['inet','inet4','inet6','ipv4','ipv6','minimal','all']) if r is False: return False if r is None: break self._family = False return True def _set_family_inet4 (self,scope,tokens): self.logger.configuration("the word inet4 is deprecated, please use ipv4 instead",'error') return self._set_family_ipv4 (scope,tokens) def _set_family_ipv4 (self,scope,tokens): if self._family: self._error = 'ipv4 can not be used with all or minimal' if self.debug: raise return False try: safi = tokens.pop(0) except IndexError: self._error = 'missing family safi' if self.debug: raise return False if safi == 'unicast': scope[-1]['families'].append((AFI(AFI.ipv4),SAFI(SAFI.unicast))) elif safi == 'multicast': scope[-1]['families'].append((AFI(AFI.ipv4),SAFI(SAFI.multicast))) elif safi == 'nlri-mpls': scope[-1]['families'].append((AFI(AFI.ipv4),SAFI(SAFI.nlri_mpls))) elif safi == 'mpls-vpn': scope[-1]['families'].append((AFI(AFI.ipv4),SAFI(SAFI.mpls_vpn))) elif safi in ('flow'): scope[-1]['families'].append((AFI(AFI.ipv4),SAFI(SAFI.flow_ip))) elif safi == 'flow-vpn': scope[-1]['families'].append((AFI(AFI.ipv4),SAFI(SAFI.flow_vpn))) else: return False return True def _set_family_inet6 (self,scope,tokens): self.logger.configuration("the word inet6 is deprecated, please use ipv6 instead",'error') return self._set_family_ipv6 (scope,tokens) def _set_family_ipv6 (self,scope,tokens): try: if self._family: self._error = 'ipv6 can not be used with all or minimal' if self.debug: raise return False safi = tokens.pop(0) if safi == 'unicast': scope[-1]['families'].append((AFI(AFI.ipv6),SAFI(SAFI.unicast))) elif safi == 'mpls-vpn': scope[-1]['families'].append((AFI(AFI.ipv6),SAFI(SAFI.mpls_vpn))) elif safi in ('flow'): scope[-1]['families'].append((AFI(AFI.ipv6),SAFI(SAFI.flow_ip))) elif safi == 'flow-vpn': scope[-1]['families'].append((AFI(AFI.ipv6),SAFI(SAFI.flow_vpn))) else: return False return True except (IndexError,ValueError): self._error = 'missing safi' if self.debug: raise return False def _set_family_minimal (self,scope,tokens): if scope[-1]['families']: self._error = 'minimal can not be used with any other options' if self.debug: raise return False scope[-1]['families'] = 'minimal' self._family = True return True def _set_family_all (self,scope,tokens): if scope[-1]['families']: self._error = 'all can not be used with any other options' if self.debug: raise return False scope[-1]['families'] = 'all' self._family = True return True # capacity def _multi_capability (self,scope,tokens): # we know all the families we should use self._capability = False while True: r = self._dispatch(scope,'capability',[],['route-refresh','graceful-restart','multi-session','operational','add-path','asn4','aigp']) if r is False: return False if r is None: break return True def _set_gracefulrestart (self,scope,command,value): if not len(value): scope[-1][command] = None return True try: # README: Should it be a subclass of int ? grace = int(value[0]) if grace < 0: raise ValueError('graceful-restart can not be negative') if grace >= pow(2,16): raise ValueError('graceful-restart must be smaller than %d' % pow(2,16)) scope[-1][command] = grace return True except ValueError: self._error = '"%s" is an invalid graceful-restart time' % ' '.join(value) if self.debug: raise return False return True def _set_addpath (self,scope,command,value): try: ap = value[0].lower() apv = 0 if ap.endswith('receive'): apv += 1 if ap.startswith('send'): apv += 2 if not apv and ap not in ('disable','disabled'): raise ValueError('invalid add-path') scope[-1][command] = apv return True except (ValueError,IndexError): self._error = '"%s" is an invalid add-path' % ' '.join(value) + '\n' + self._str_capa_error if self.debug: raise return False def _set_boolean (self,scope,command,value,default='true'): try: boolean = value[0].lower() if value else default if boolean in ('true','enable','enabled'): scope[-1][command] = True elif boolean in ('false','disable','disabled'): scope[-1][command] = False else: raise ValueError() return True except (ValueError,IndexError): self._error = 'invalid %s command (valid options are true or false)' % command if self.debug: raise return False def _set_asn4 (self,scope,command,value): try: if not value: scope[-1][command] = True return True asn4 = value[0].lower() if asn4 in ('disable','disabled'): scope[-1][command] = False return True if asn4 in ('enable','enabled'): scope[-1][command] = True return True self._error = '"%s" is an invalid asn4 parameter options are enable (default) and disable)' % ' '.join(value) return False except ValueError: self._error = '"%s" is an invalid asn4 parameter options are enable (default) and disable)' % ' '.join(value) if self.debug: raise return False # route grouping with watchdog def _route_watchdog (self,scope,tokens): try: w = tokens.pop(0) if w.lower() in ['announce','withdraw']: raise ValueError('invalid watchdog name %s' % w) except IndexError: self._error = self._str_route_error if self.debug: raise return False try: scope[-1]['announce'][-1].attributes.add(Watchdog(w)) return True except ValueError: self._error = self._str_route_error if self.debug: raise return False def _route_withdraw (self,scope,tokens): try: scope[-1]['announce'][-1].attributes.add(Withdrawn()) return True except ValueError: self._error = self._str_route_error if self.debug: raise return False # Group Neighbor def _multi_group (self,scope,address): scope.append({}) while True: r = self._dispatch(scope,'group',['static','flow','neighbor','process','family','capability','operational'],['description','router-id','local-address','local-as','peer-as','passive','hold-time','add-path','graceful-restart','md5','ttl-security','multi-session','group-updates','route-refresh','asn4','aigp','auto-flush','adj-rib-out']) if r is False: return False if r is None: scope.pop(-1) return True def _make_neighbor (self,scope): # we have local_scope[-2] as the group template and local_scope[-1] as the peer specific if len(scope) > 1: for key,content in scope[-2].iteritems(): if key not in scope[-1]: scope[-1][key] = deepcopy(content) elif key == 'announce': scope[-1][key].extend(scope[-2][key]) self.logger.configuration("\nPeer configuration complete :") for _key in scope[-1].keys(): stored = scope[-1][_key] if hasattr(stored,'__iter__'): for category in scope[-1][_key]: for _line in pformat(str(category),3,3,3).split('\n'): self.logger.configuration(" %s: %s" %(_key,_line)) else: for _line in pformat(str(stored),3,3,3).split('\n'): self.logger.configuration(" %s: %s" %(_key,_line)) self.logger.configuration("\n") neighbor = Neighbor() for local_scope in scope: v = local_scope.get('router-id','') if v: neighbor.router_id = v v = local_scope.get('peer-address','') if v: neighbor.peer_address = v v = local_scope.get('local-address','') if v: neighbor.local_address = v v = local_scope.get('local-as','') if v: neighbor.local_as = v v = local_scope.get('peer-as','') if v: neighbor.peer_as = v v = local_scope.get('passive',False) if v: neighbor.passive = v v = local_scope.get('hold-time','') if v: neighbor.hold_time = v changes = local_scope.get('announce',[]) messages = local_scope.get('operational',[]) for local_scope in (scope[0],scope[-1]): neighbor.api.receive_packets |= local_scope.get('receive-packets',False) neighbor.api.send_packets |= local_scope.get('send-packets',False) neighbor.api.receive_routes |= local_scope.get('receive-routes',False) neighbor.api.receive_operational |= local_scope.get('receive-operational',False) neighbor.api.neighbor_changes |= local_scope.get('neighbor-changes',False) if not neighbor.router_id: neighbor.router_id = neighbor.local_address local_scope = scope[-1] neighbor.description = local_scope.get('description','') neighbor.md5 = local_scope.get('md5',None) neighbor.ttl = local_scope.get('ttl-security',None) neighbor.group_updates = local_scope.get('group-updates',False) neighbor.route_refresh = local_scope.get('route-refresh',0) neighbor.graceful_restart = local_scope.get('graceful-restart',0) if neighbor.graceful_restart is None: # README: Should it be a subclass of int ? neighbor.graceful_restart = int(neighbor.hold_time) neighbor.multisession = local_scope.get('multi-session',False) neighbor.operational = local_scope.get('capa-operational',False) neighbor.add_path = local_scope.get('add-path',0) neighbor.flush = local_scope.get('auto-flush',True) neighbor.adjribout = local_scope.get('adj-rib-out',True) neighbor.asn4 = local_scope.get('asn4',True) neighbor.aigp = local_scope.get('aigp',None) if neighbor.route_refresh and not neighbor.adjribout: self._error = 'incomplete option route-refresh and no adj-rib-out' if self.debug: raise return False missing = neighbor.missing() if missing: self._error = 'incomplete neighbor, missing %s' % missing if self.debug: raise return False if neighbor.local_address.afi != neighbor.peer_address.afi: self._error = 'local-address and peer-address must be of the same family' if self.debug: raise return False if neighbor.peer_address.ip in self._neighbor: self._error = 'duplicate peer definition %s' % neighbor.peer_address.ip if self.debug: raise return False openfamilies = local_scope.get('families','everything') # announce every family we known if neighbor.multisession and openfamilies == 'everything': # announce what is needed, and no more, no need to have lots of TCP session doing nothing families = neighbor.families() elif openfamilies in ('all','everything'): families = known_families() # only announce what you have as routes elif openfamilies == 'minimal': families = neighbor.families() else: families = openfamilies # check we are not trying to announce routes without the right MP announcement for family in neighbor.families(): if family not in families: afi,safi = family self._error = 'Trying to announce a route of type %s,%s when we are not announcing the family to our peer' % (afi,safi) if self.debug: raise return False # add the families to the list of families known initial_families = list(neighbor.families()) for family in families: if family not in initial_families : # we are modifying the data used by .families() here neighbor.add_family(family) # create one neighbor object per family for multisession if neighbor.multisession: for family in neighbor.families(): # XXX: FIXME: Ok, it works but it takes LOTS of memory .. m_neighbor = deepcopy(neighbor) for f in neighbor.families(): if f == family: continue m_neighbor.rib.outgoing.remove_family(f) m_neighbor.make_rib() families = neighbor.families() for change in changes: if change.nlri.family() in families: # This add the family to neighbor.families() neighbor.rib.outgoing.insert_announced_watchdog(change) for message in messages: if message.family() in families: if message.name == 'ASM': neighbor.asm[message.family()] = message else: neighbor.messages.append(message) self._neighbor[m_neighbor.name()] = m_neighbor else: neighbor.make_rib() families = neighbor.families() for change in changes: if change.nlri.family() in families: # This add the family to neighbor.families() neighbor.rib.outgoing.insert_announced_watchdog(change) for message in messages: if message.family() in families: if message.name == 'ASM': neighbor.asm[message.family()] = message else: neighbor.messages.append(message) self._neighbor[neighbor.name()] = neighbor for line in str(neighbor).split('\n'): self.logger.configuration(line) self.logger.configuration("\n") scope.pop(-1) return True def _multi_neighbor (self,scope,tokens): if len(tokens) != 1: self._error = 'syntax: neighbor <ip> { <options> }' if self.debug: raise return False address = tokens[0] scope.append({}) try: scope[-1]['peer-address'] = Inet(*inet(address)) except (IndexError,ValueError,socket.error): self._error = '"%s" is not a valid IP address' % address if self.debug: raise return False while True: r = self._dispatch(scope,'neighbor',['static','flow','process','family','capability','operational'],['description','router-id','local-address','local-as','peer-as','passive','hold-time','add-path','graceful-restart','md5','ttl-security','multi-session','group-updates','asn4','aigp','auto-flush','adj-rib-out']) if r is False: return False if r is None: return True # Command Neighbor def _set_router_id (self,scope,command,value): try: ip = RouterID(value[0]) except (IndexError,ValueError): self._error = '"%s" is an invalid IP address' % ' '.join(value) if self.debug: raise return False scope[-1][command] = ip return True def _set_description (self,scope,tokens): text = ' '.join(tokens) if len(text) < 2 or text[0] != '"' or text[-1] != '"' or text[1:-1].count('"'): self._error = 'syntax: description "<description>"' if self.debug: raise return False scope[-1]['description'] = text[1:-1] return True # will raise ValueError if the ASN is not correct def _newASN (self,value): if value.count('.'): high,low = value.split('.',1) asn = (int(high) << 16) + int(low) else: asn = int(value) return ASN(asn) def _set_asn (self,scope,command,value): try: scope[-1][command] = self._newASN(value[0]) return True except ValueError: self._error = '"%s" is an invalid ASN' % ' '.join(value) if self.debug: raise return False def _set_ip (self,scope,command,value): try: ip = Inet(*inet(value[0])) except (IndexError,ValueError,socket.error): self._error = '"%s" is an invalid IP address' % ' '.join(value) if self.debug: raise return False scope[-1][command] = ip return True def _set_passive (self,scope,command,value): if value: self._error = '"%s" is an invalid for passive' % ' '.join(value) if self.debug: raise return False scope[-1][command] = True return True def _set_holdtime (self,scope,command,value): try: holdtime = HoldTime(value[0]) if holdtime < 3 and holdtime != 0: raise ValueError('holdtime must be zero or at least three seconds') if holdtime >= pow(2,16): raise ValueError('holdtime must be smaller than %d' % pow(2,16)) scope[-1][command] = holdtime return True except ValueError: self._error = '"%s" is an invalid hold-time' % ' '.join(value) if self.debug: raise return False def _set_md5 (self,scope,command,value): md5 = value[0] if len(md5) > 2 and md5[0] == md5[-1] and md5[0] in ['"',"'"]: md5 = md5[1:-1] if len(md5) > 80: self._error = 'md5 password must be no larger than 80 characters' if self.debug: raise return False if not md5: self._error = 'md5 requires the md5 password as an argument (quoted or unquoted). FreeBSD users should use "kernel" as the argument.' if self.debug: raise return False scope[-1][command] = md5 return True def _set_ttl (self,scope,command,value): if not len(value): scope[-1][command] = self.TTL_SECURITY return True try: # README: Should it be a subclass of int ? ttl = int(value[0]) if ttl < 0: raise ValueError('ttl-security can not be negative') if ttl >= 255: raise ValueError('ttl must be smaller than 256') scope[-1][command] = ttl return True except ValueError: self._error = '"%s" is an invalid ttl-security' % ' '.join(value) if self.debug: raise return False return True def _set_group_updates (self,scope,command,value): scope[-1][command] = True return True # Group Static ................ def _multi_static (self,scope,tokens): if len(tokens) != 0: self._error = 'syntax: static { route; route; ... }' if self.debug: raise return False while True: r = self._dispatch(scope,'static',['route',],['route',]) if r is False: return False if r is None: return True # Group Route ........ def _split_last_route (self,scope): # if the route does not need to be broken in smaller routes, return change = scope[-1]['announce'][-1] if not AttributeID.INTERNAL_SPLIT in change.attributes: return True # ignore if the request is for an aggregate, or the same size mask = change.nlri.mask split = change.attributes[AttributeID.INTERNAL_SPLIT] if mask >= split: return True # get a local copy of the route change = scope[-1]['announce'].pop(-1) # calculate the number of IP in the /<size> of the new route increment = pow(2,(len(change.nlri.packed)*8) - split) # how many new routes are we going to create from the initial one number = pow(2,split - change.nlri.mask) # convert the IP into a integer/long ip = 0 for c in change.nlri.packed: ip = ip << 8 ip += ord(c) afi = change.nlri.afi safi = change.nlri.safi # Really ugly labels = change.nlri.labels rd = change.nlri.rd path_info = change.nlri.path_info nexthop = change.nlri.nexthop change.mask = split change.nlri = None # generate the new routes for _ in range(number): # update ip to the next route, this recalculate the "ip" field of the Inet class nlri = NLRI(afi,safi,pack_int(afi,ip,split),split,nexthop,OUT.announce) nlri.labels = labels nlri.rd = rd nlri.path_info = path_info # next ip ip += increment # save route scope[-1]['announce'].append(Change(nlri,change.attributes)) return True def _insert_static_route (self,scope,tokens): try: ip = tokens.pop(0) except IndexError: self._error = self._str_route_error if self.debug: raise return False try: ip,mask = ip.split('/') mask = int(mask) except ValueError: mask = 32 try: # nexthop must be false and its str return nothing .. an empty string does that update = Change(NLRI(*inet(ip),mask=mask,nexthop=None,action=OUT.announce),Attributes()) if len(Prefix.pack(update.nlri)) != len(update.nlri): self._error = 'invalid mask for this prefix %s' % str(update.nlri) if self.debug: raise return False except ValueError: self._error = self._str_route_error if self.debug: raise return False if 'announce' not in scope[-1]: scope[-1]['announce'] = [] scope[-1]['announce'].append(update) return True def _check_static_route (self,scope): update = scope[-1]['announce'][-1] if not update.nlri.nexthop: self._error = 'syntax: route <ip>/<mask> { next-hop <ip>; }' if self.debug: raise return False return True def _multi_static_route (self,scope,tokens): if len(tokens) != 1: self._error = self._str_route_error if self.debug: raise return False if not self._insert_static_route(scope,tokens): return False while True: r = self._dispatch(scope,'route',[],['next-hop','origin','as-path','as-sequence','med','aigp','local-preference','atomic-aggregate','aggregator','path-information','community','originator-id','cluster-list','extended-community','split','label','rd','route-distinguisher','watchdog','withdraw']) if r is False: return False if r is None: return self._split_last_route(scope) def _single_static_route (self,scope,tokens): if len(tokens) <3: return False if not self._insert_static_route(scope,tokens): return False while len(tokens): command = tokens.pop(0) if command == 'withdraw': if self._route_withdraw(scope,tokens): continue return False if len(tokens) < 1: return False if command == 'next-hop': if self._route_next_hop(scope,tokens): continue return False if command == 'origin': if self._route_origin(scope,tokens): continue return False if command == 'as-path': if self._route_aspath(scope,tokens): continue return False if command == 'as-sequence': if self._route_aspath(scope,tokens): continue return False if command == 'med': if self._route_med(scope,tokens): continue return False if command == 'aigp': if self._route_aigp(scope,tokens): continue return False if command == 'local-preference': if self._route_local_preference(scope,tokens): continue return False if command == 'atomic-aggregate': if self._route_atomic_aggregate(scope,tokens): continue return False if command == 'aggregator': if self._route_aggregator(scope,tokens): continue return False if command == 'path-information': if self._route_path_information(scope,tokens): continue return False if command == 'community': if self._route_community(scope,tokens): continue return False if command == 'originator-id': if self._route_originator_id(scope,tokens): continue return False if command == 'cluster-list': if self._route_cluster_list(scope,tokens): continue return False if command == 'extended-community': if self._route_extended_community(scope,tokens): continue return False if command == 'split': if self._route_split(scope,tokens): continue return False if command == 'label': if self._route_label(scope,tokens): continue return False if command in ('rd','route-distinguisher'): if self._route_rd(scope,tokens,SAFI.mpls_vpn): continue return False if command == 'watchdog': if self._route_watchdog(scope,tokens): continue return False if command == 'attribute': if self._route_generic_attribute(scope,tokens): continue return False return False if not self._check_static_route(scope): return False return self._split_last_route(scope) # Command Route def _route_generic_attribute (self,scope,tokens): try: start = tokens.pop(0) code = tokens.pop(0).lower() flag = tokens.pop(0).lower() data = tokens.pop(0).lower() end = tokens.pop(0) if (start,end) != ('[',']'): self._error = self._str_route_error if self.debug: raise return False if not code.startswith('0x'): self._error = self._str_route_error if self.debug: raise return False code = int(code[2:],16) if not flag.startswith('0x'): self._error = self._str_route_error if self.debug: raise return False flag = int(flag[2:],16) if not data.startswith('0x'): self._error = self._str_route_error if self.debug: raise return False raw = '' for i in range(2,len(data),2): raw += chr(int(data[i:i+2],16)) for (ID,klass) in Attributes.lookup.iteritems(): if code == ID and flag == klass.FLAG: scope[-1]['announce'][-1].attributes.add(klass(raw)) return True scope[-1]['announce'][-1].attributes.add(UnknownAttribute(code,flag,raw)) return True except (IndexError,ValueError): self._error = self._str_route_error if self.debug: raise return False def _route_next_hop (self,scope,tokens): if scope[-1]['announce'][-1].attributes.has(AttributeID.NEXT_HOP): self._error = self._str_route_error if self.debug: raise return False try: # next-hop self is unsupported ip = tokens.pop(0) if ip.lower() == 'self': if 'local-address' in scope[-1]: la = scope[-1]['local-address'] elif self._nexthopself: la = self._nexthopself else: self._error = 'next-hop self can only be specified with a neighbor' if self.debug: raise ValueError(self._error) return False nh = la.pack() else: nh = pton(ip) change = scope[-1]['announce'][-1] nlri = change.nlri afi = nlri.afi safi = nlri.safi nlri.nexthop = cachedNextHop(nh) if afi == AFI.ipv4 and safi in (SAFI.unicast,SAFI.multicast): change.attributes.add(cachedNextHop(nh)) return True except: self._error = self._str_route_error if self.debug: raise return False def _route_origin (self,scope,tokens): try: data = tokens.pop(0).lower() if data == 'igp': scope[-1]['announce'][-1].attributes.add(Origin(Origin.IGP)) return True if data == 'egp': scope[-1]['announce'][-1].attributes.add(Origin(Origin.EGP)) return True if data == 'incomplete': scope[-1]['announce'][-1].attributes.add(Origin(Origin.INCOMPLETE)) return True self._error = self._str_route_error if self.debug: raise return False except IndexError: self._error = self._str_route_error if self.debug: raise return False def _route_aspath (self,scope,tokens): as_seq = [] as_set = [] asn = tokens.pop(0) try: if asn == '[': while True: try: asn = tokens.pop(0) except IndexError: self._error = self._str_route_error if self.debug: raise return False if asn == '(': while True: try: asn = tokens.pop(0) except IndexError: self._error = self._str_route_error if self.debug: raise return False if asn == ')': break as_set.append(self._newASN(asn)) if asn == ')': continue if asn == ']': break as_seq.append(self._newASN(asn)) else: as_seq.append(self._newASN(asn)) except (IndexError,ValueError): self._error = self._str_route_error if self.debug: raise return False scope[-1]['announce'][-1].attributes.add(ASPath(as_seq,as_set)) return True def _route_med (self,scope,tokens): try: scope[-1]['announce'][-1].attributes.add(MED(pack('!L',int(tokens.pop(0))))) return True except (IndexError,ValueError): self._error = self._str_route_error if self.debug: raise return False def _route_aigp (self,scope,tokens): try: scope[-1]['announce'][-1].attributes.add(AIGP('\x01\x00\x0b' + pack('!Q',int(tokens.pop(0))))) return True except (IndexError,ValueError): self._error = self._str_route_error if self.debug: raise return False def _route_local_preference (self,scope,tokens): try: scope[-1]['announce'][-1].attributes.add(LocalPreference(pack('!L',int(tokens.pop(0))))) return True except (IndexError,ValueError): self._error = self._str_route_error if self.debug: raise return False def _route_atomic_aggregate (self,scope,tokens): try: scope[-1]['announce'][-1].attributes.add(AtomicAggregate()) return True except ValueError: self._error = self._str_route_error if self.debug: raise return False def _route_aggregator (self,scope,tokens): try: if tokens: if tokens.pop(0) != '(': raise ValueError('invalid aggregator syntax') asn,address = tokens.pop(0).split(':') if tokens.pop(0) != ')': raise ValueError('invalid aggregator syntax') local_as = ASN(asn) local_address = RouterID(address) else: local_as = scope[-1]['local-as'] local_address = scope[-1]['local-address'] except (ValueError,IndexError): self._error = self._str_route_error if self.debug: raise return False except KeyError: self._error = 'local-as and/or local-address missing from neighbor/group to make aggregator' if self.debug: raise return False except ValueError: self._error = self._str_route_error if self.debug: raise return False scope[-1]['announce'][-1].attributes.add(Aggregator(local_as.pack(True)+local_address.pack())) return True def _route_path_information (self,scope,tokens): try: pi = tokens.pop(0) if pi.isdigit(): scope[-1]['announce'][-1].nlri.path_info = PathInfo(integer=int(pi)) else: scope[-1]['announce'][-1].nlri.path_info = PathInfo(ip=pi) return True except ValueError: self._error = self._str_route_error if self.debug: raise return False def _parse_community (self,scope,data): separator = data.find(':') if separator > 0: prefix = int(data[:separator]) suffix = int(data[separator+1:]) if prefix >= pow(2,16): raise ValueError('invalid community %s (prefix too large)' % data) if suffix >= pow(2,16): raise ValueError('invalid community %s (suffix too large)' % data) return cachedCommunity(pack('!L',(prefix<<16) + suffix)) elif len(data) >=2 and data[1] in 'xX': value = long(data,16) if value >= pow(2,32): raise ValueError('invalid community %s (too large)' % data) return cachedCommunity(pack('!L',value)) else: low = data.lower() if low == 'no-export': return cachedCommunity(Community.NO_EXPORT) elif low == 'no-advertise': return cachedCommunity(Community.NO_ADVERTISE) elif low == 'no-export-subconfed': return cachedCommunity(Community.NO_EXPORT_SUBCONFED) # no-peer is not a correct syntax but I am sure someone will make the mistake :) elif low == 'nopeer' or low == 'no-peer': return cachedCommunity(Community.NO_PEER) elif data.isdigit(): value = unpack('!L',data)[0] if value >= pow(2,32): raise ValueError('invalid community %s (too large)' % data) return cachedCommunity(pack('!L',value)) else: raise ValueError('invalid community name %s' % data) def _route_originator_id (self,scope,tokens): try: scope[-1]['announce'][-1].attributes.add(OriginatorID(*inet(tokens.pop(0)))) return True except: self._error = self._str_route_error if self.debug: raise return False def _route_cluster_list (self,scope,tokens): _list = '' clusterid = tokens.pop(0) try: if clusterid == '[': while True: try: clusterid = tokens.pop(0) except IndexError: self._error = self._str_route_error if self.debug: raise return False if clusterid == ']': break _list += ''.join([chr(int(_)) for _ in clusterid.split('.')]) else: _list = ''.join([chr(int(_)) for _ in clusterid.split('.')]) if not _list: raise ValueError('no cluster-id in the cluster-list') clusterlist = ClusterList(_list) except ValueError: self._error = self._str_route_error if self.debug: raise return False scope[-1]['announce'][-1].attributes.add(clusterlist) return True def _route_community (self,scope,tokens): communities = Communities() community = tokens.pop(0) try: if community == '[': while True: try: community = tokens.pop(0) except IndexError: self._error = self._str_route_error if self.debug: raise return False if community == ']': break communities.add(self._parse_community(scope,community)) else: communities.add(self._parse_community(scope,community)) except ValueError: self._error = self._str_route_error if self.debug: raise return False scope[-1]['announce'][-1].attributes.add(communities) return True def _parse_extended_community (self,scope,data): if data[:2].lower() == '0x': try: raw = '' for i in range(2,len(data),2): raw += chr(int(data[i:i+2],16)) except ValueError: raise ValueError('invalid extended community %s' % data) if len(raw) != 8: raise ValueError('invalid extended community %s' % data) return ECommunity(raw) elif data.count(':'): return to_ExtendedCommunity(data) else: raise ValueError('invalid extended community %s - lc+gc' % data) def _route_extended_community (self,scope,tokens): extended_communities = ECommunities() extended_community = tokens.pop(0) try: if extended_community == '[': while True: try: extended_community = tokens.pop(0) except IndexError: self._error = self._str_route_error if self.debug: raise return False if extended_community == ']': break extended_communities.add(self._parse_extended_community(scope,extended_community)) else: extended_communities.add(self._parse_extended_community(scope,extended_community)) except ValueError: self._error = self._str_route_error if self.debug: raise return False scope[-1]['announce'][-1].attributes.add(extended_communities) return True def _route_split (self,scope,tokens): try: size = tokens.pop(0) if not size or size[0] != '/': raise ValueError('route "as" require a CIDR') scope[-1]['announce'][-1].attributes.add(Split(int(size[1:]))) return True except ValueError: self._error = self._str_route_error if self.debug: raise return False def _route_label (self,scope,tokens): labels = [] label = tokens.pop(0) try: if label == '[': while True: try: label = tokens.pop(0) except IndexError: self._error = self._str_route_error if self.debug: raise return False if label == ']': break labels.append(int(label)) else: labels.append(int(label)) except ValueError: self._error = self._str_route_error if self.debug: raise return False nlri = scope[-1]['announce'][-1].nlri if not nlri.safi.has_label(): nlri.safi = SAFI(SAFI.nlri_mpls) nlri.labels = Labels(labels) return True def _route_rd (self,scope,tokens,safi): try: try: data = tokens.pop(0) except IndexError: self._error = self._str_route_error if self.debug: raise return False separator = data.find(':') if separator > 0: prefix = data[:separator] suffix = int(data[separator+1:]) if '.' in prefix: bytes = [chr(0),chr(1)] bytes.extend([chr(int(_)) for _ in prefix.split('.')]) bytes.extend([chr(suffix>>8),chr(suffix&0xFF)]) rd = ''.join(bytes) else: number = int(prefix) if number < pow(2,16) and suffix < pow(2,32): rd = chr(0) + chr(0) + pack('!H',number) + pack('!L',suffix) elif number < pow(2,32) and suffix < pow(2,16): rd = chr(0) + chr(2) + pack('!L',number) + pack('!H',suffix) else: raise ValueError('invalid route-distinguisher %s' % data) nlri = scope[-1]['announce'][-1].nlri # overwrite nlri-mpls nlri.safi = SAFI(safi) nlri.rd = RouteDistinguisher(rd) return True except ValueError: self._error = self._str_route_error if self.debug: raise return False # Group Flow ........ def _multi_flow (self,scope,tokens): if len(tokens) != 0: self._error = self._str_flow_error if self.debug: raise return False while True: r = self._dispatch(scope,'flow',['route',],[]) if r is False: return False if r is None: break return True def _insert_flow_route (self,scope,tokens=None): if self._flow_state != 'out': self._error = self._str_flow_error if self.debug: raise return False self._flow_state = 'match' try: attributes = Attributes() attributes[AttributeID.EXTENDED_COMMUNITY] = ECommunities() flow = Change(FlowNLRI(),attributes) except ValueError: self._error = self._str_flow_error if self.debug: raise return False if 'announce' not in scope[-1]: scope[-1]['announce'] = [] scope[-1]['announce'].append(flow) return True def _check_flow_route (self,scope): self.logger.configuration('warning: no check on flows are implemented') return True def _multi_flow_route (self,scope,tokens): if len(tokens) > 1: self._error = self._str_flow_error if self.debug: raise return False if not self._insert_flow_route(scope): return False while True: r = self._dispatch(scope,'flow-route',['match','then'],['rd','route-distinguisher','next-hop']) if r is False: return False if r is None: break if self._flow_state != 'out': self._error = self._str_flow_error if self.debug: raise return False return True # .......................................... def _multi_match (self,scope,tokens): if len(tokens) != 0: self._error = self._str_flow_error if self.debug: raise return False if self._flow_state != 'match': self._error = self._str_flow_error if self.debug: raise return False self._flow_state = 'then' while True: r = self._dispatch(scope,'flow-match',[],['source','destination','port','source-port','destination-port','protocol','next-header','tcp-flags','icmp-type','icmp-code','fragment','dscp','traffic-class','packet-length','flow-label']) if r is False: return False if r is None: break return True def _multi_then (self,scope,tokens): if len(tokens) != 0: self._error = self._str_flow_error if self.debug: raise return False if self._flow_state != 'then': self._error = self._str_flow_error if self.debug: raise return False self._flow_state = 'out' while True: r = self._dispatch(scope,'flow-then',[],['discard','rate-limit','redirect','copy','redirect-to-nexthop','mark','action','community']) if r is False: return False if r is None: break return True # Command Flow def _flow_source (self,scope,tokens): try: data = tokens.pop(0) if data.count('/') == 1: ip,netmask = data.split('/') raw = ''.join(chr(int(_)) for _ in ip.split('.')) if not scope[-1]['announce'][-1].nlri.add(Flow4Source(raw,int(netmask))): self._error = 'Flow can only have one destination' if self.debug: raise ValueError(self._error) return False else: ip,netmask,offset = data.split('/') afi,safi,raw = inet(ip) change = scope[-1]['announce'][-1] # XXX: This is ugly change.nlri.afi = AFI(AFI.ipv6) if not change.nlri.add(Flow6Source(raw,int(netmask),int(offset))): self._error = 'Flow can only have one destination' if self.debug: raise ValueError(self._error) return False return True except (IndexError,ValueError): self._error = self._str_flow_error if self.debug: raise return False def _flow_destination (self,scope,tokens): try: data = tokens.pop(0) if data.count('/') == 1: ip,netmask = data.split('/') raw = ''.join(chr(int(_)) for _ in ip.split('.')) if not scope[-1]['announce'][-1].nlri.add(Flow4Destination(raw,int(netmask))): self._error = 'Flow can only have one destination' if self.debug: raise ValueError(self._error) return False else: ip,netmask,offset = data.split('/') afi,safi,raw = inet(ip) change = scope[-1]['announce'][-1] # XXX: This is ugly change.nlri.afi = AFI(AFI.ipv6) if not change.nlri.add(Flow6Destination(raw,int(netmask),int(offset))): self._error = 'Flow can only have one destination' if self.debug: raise ValueError(self._error) return False return True except (IndexError,ValueError): self._error = self._str_flow_error if self.debug: raise return False # to parse the port configuration of flow def _operator (self,string): try: if string[0] == '=': return NumericOperator.EQ,string[1:] elif string[0] == '>': operator = NumericOperator.GT elif string[0] == '<': operator = NumericOperator.LT else: raise ValueError('Invalid operator in test %s' % string) if string[1] == '=': operator += NumericOperator.EQ return operator,string[2:] else: return operator,string[1:] except IndexError: raise('Invalid expression (too short) %s' % string) def _value (self,string): l = 0 for c in string: if c not in ['&',]: l += 1 continue break return string[:l],string[l:] # parse =80 or >80 or <25 or &>10<20 def _flow_generic_expression (self,scope,tokens,klass): try: for test in tokens: AND = BinaryOperator.NOP while test: operator,_ = self._operator(test) value,test = self._value(_) nlri = scope[-1]['announce'][-1].nlri # XXX : should do a check that the rule is valid for the family nlri.add(klass(AND|operator,klass.converter(value))) if test: if test[0] == '&': AND = BinaryOperator.AND test = test[1:] if not test: raise ValueError("Can not finish an expresion on an &") else: raise ValueError("Unknown binary operator %s" % test[0]) return True except ValueError,e: self._error = self._str_route_error + str(e) if self.debug: raise return False # parse [ content1 content2 content3 ] def _flow_generic_list (self,scope,tokens,klass): try: name = tokens.pop(0) AND = BinaryOperator.NOP if name == '[': while True: name = tokens.pop(0) if name == ']': break try: nlri = scope[-1]['announce'][-1].nlri # XXX : should do a check that the rule is valid for the family nlri.add(klass(NumericOperator.EQ|AND,klass.converter(name))) except IndexError: self._error = self._str_flow_error if self.debug: raise return False else: scope[-1]['announce'][-1].nlri.add(klass(NumericOperator.EQ|AND,klass.converter(name))) except (IndexError,ValueError): self._error = self._str_flow_error if self.debug: raise return False return True def _flow_generic_condition (self,scope,tokens,klass): if tokens[0][0] in ['=','>','<']: return self._flow_generic_expression(scope,tokens,klass) return self._flow_generic_list(scope,tokens,klass) def _flow_route_anyport (self,scope,tokens): return self._flow_generic_condition(scope,tokens,FlowAnyPort) def _flow_route_source_port (self,scope,tokens): return self._flow_generic_condition(scope,tokens,FlowSourcePort) def _flow_route_destination_port (self,scope,tokens): return self._flow_generic_condition(scope,tokens,FlowDestinationPort) def _flow_route_packet_length (self,scope,tokens): return self._flow_generic_condition(scope,tokens,FlowPacketLength) def _flow_route_tcp_flags (self,scope,tokens): return self._flow_generic_list(scope,tokens,FlowTCPFlag) def _flow_route_protocol (self,scope,tokens): return self._flow_generic_list(scope,tokens,FlowIPProtocol) def _flow_route_next_header (self,scope,tokens): return self._flow_generic_list(scope,tokens,FlowNextHeader) def _flow_route_icmp_type (self,scope,tokens): return self._flow_generic_list(scope,tokens,FlowICMPType) def _flow_route_icmp_code (self,scope,tokens): return self._flow_generic_list(scope,tokens,FlowICMPCode) def _flow_route_fragment (self,scope,tokens): return self._flow_generic_list(scope,tokens,FlowFragment) def _flow_route_dscp (self,scope,tokens): return self._flow_generic_condition(scope,tokens,FlowDSCP) def _flow_route_traffic_class (self,scope,tokens): return self._flow_generic_condition(scope,tokens,FlowTrafficClass) def _flow_route_flow_label (self,scope,tokens): return self._flow_generic_condition(scope,tokens,FlowFlowLabel) def _flow_route_next_hop (self,scope,tokens): try: change = scope[-1]['announce'][-1] if change.nlri.nexthop: self._error = self._str_flow_error if self.debug: raise return False ip = tokens.pop(0) nh = pton(ip) change.nlri.nexthop = cachedNextHop(nh) return True except (IndexError,ValueError): self._error = self._str_route_error if self.debug: raise return False def _flow_route_discard (self,scope,tokens): # README: We are setting the ASN as zero as that what Juniper (and Arbor) did when we created a local flow route try: scope[-1]['announce'][-1].attributes[AttributeID.EXTENDED_COMMUNITY].add(to_FlowTrafficRate(ASN(0),0)) return True except ValueError: self._error = self._str_route_error if self.debug: raise return False def _flow_route_rate_limit (self,scope,tokens): # README: We are setting the ASN as zero as that what Juniper (and Arbor) did when we created a local flow route try: speed = int(tokens[0]) if speed < 9600 and speed != 0: self.logger.configuration("rate-limiting flow under 9600 bytes per seconds may not work",'warning') if speed > 1000000000000: speed = 1000000000000 self.logger.configuration("rate-limiting changed for 1 000 000 000 000 bytes from %s" % tokens[0],'warning') scope[-1]['announce'][-1].attributes[AttributeID.EXTENDED_COMMUNITY].add(to_FlowTrafficRate(ASN(0),speed)) return True except ValueError: self._error = self._str_route_error if self.debug: raise return False def _flow_route_redirect (self,scope,tokens): try: if tokens[0].count(':') == 1: prefix,suffix=tokens[0].split(':',1) if prefix.count('.'): ip = prefix.split('.') if len(ip) != 4: raise ValueError('invalid IP %s' % prefix) ipn = 0 while ip: ipn <<= 8 ipn += int(ip.pop(0)) number = int(suffix) if number >= pow(2,16): raise ValueError('number is too large, max 16 bits %s' % number) scope[-1]['announce'][-1].attributes[AttributeID.EXTENDED_COMMUNITY].add(to_FlowRedirectVRFIP(ipn,number)) return True else: asn = int(prefix) route_target = int(suffix) if asn >= pow(2,16): raise ValueError('asn is a 32 bits number, it can only be 16 bit %s' % route_target) if route_target >= pow(2,32): raise ValueError('route target is a 32 bits number, value too large %s' % route_target) scope[-1]['announce'][-1].attributes[AttributeID.EXTENDED_COMMUNITY].add(to_FlowRedirectVRFASN(asn,route_target)) return True else: change = scope[-1]['announce'][-1] if change.nlri.nexthop: self._error = self._str_flow_error if self.debug: raise return False ip = tokens.pop(0) nh = pton(ip) change.nlri.nexthop = cachedNextHop(nh) change.attributes[AttributeID.EXTENDED_COMMUNITY].add(to_FlowRedirect(False)) return True except (IndexError,ValueError): self._error = self._str_route_error if self.debug: raise return False def _flow_route_redirect_next_hop (self,scope,tokens): try: change = scope[-1]['announce'][-1] if not change.nlri.nexthop: self._error = self._str_flow_error if self.debug: raise return False change.attributes[AttributeID.EXTENDED_COMMUNITY].add(to_FlowRedirect(False)) return True except (IndexError,ValueError): self._error = self._str_route_error if self.debug: raise return False def _flow_route_copy (self,scope,tokens): # README: We are setting the ASN as zero as that what Juniper (and Arbor) did when we created a local flow route try: if scope[-1]['announce'][-1].attributes.has(AttributeID.NEXT_HOP): self._error = self._str_flow_error if self.debug: raise return False ip = tokens.pop(0) nh = pton(ip) change = scope[-1]['announce'][-1] change.nlri.nexthop = cachedNextHop(nh) change.attributes[AttributeID.EXTENDED_COMMUNITY].add(to_FlowRedirect(True)) return True except (IndexError,ValueError): self._error = self._str_flow_error if self.debug: raise return False def _flow_route_mark (self,scope,tokens): try: dscp = int(tokens.pop(0)) if dscp < 0 or dscp > 0b111111: self._error = self._str_flow_error if self.debug: raise return False change = scope[-1]['announce'][-1] change.attributes[AttributeID.EXTENDED_COMMUNITY].add(to_FlowTrafficMark(dscp)) return True except (IndexError,ValueError): self._error = self._str_flow_error if self.debug: raise return False def _flow_route_action (self,scope,tokens): try: action = tokens.pop(0) sample = 'sample' in action terminal = 'terminal' in action if not sample and not terminal: self._error = self._str_flow_error if self.debug: raise return False change = scope[-1]['announce'][-1] change.attributes[AttributeID.EXTENDED_COMMUNITY].add(to_FlowTrafficAction(sample,terminal)) return True except (IndexError,ValueError): self._error = self._str_flow_error if self.debug: raise return False # Group Operational ................ def _multi_operational (self,scope,tokens): if len(tokens) != 0: self._error = 'syntax: operational { command; command; ... }' if self.debug: raise return False while True: r = self._dispatch(scope,'operational',[],['asm',]) if r is False: return False if r is None: return True def _single_operational_asm (self,scope,value): #return self._single_advisory(Advisory.ASM,scope,value) return self._single_operational(Advisory.ASM,scope,['afi','safi','advisory'],value) def _single_operational (self,klass,scope,parameters,value): def utf8 (string): return string.encode('utf-8')[1:-1] convert = { 'afi': AFI.value, 'safi': SAFI.value, 'sequence': int, 'counter': long, 'rxc': long, 'txc': long, 'advisory': utf8 } def valid (_): return True def u32 (_): return int(_) <= 0xFFFFFFFF def u64 (_): return long(_) <= 0xFFFFFFFFFFFFFFFF def advisory (_): return len(_.encode('utf-8')) <= MAX_ADVISORY + 2 # the two quotes validate = { 'afi': AFI.value, 'safi': SAFI.value, 'sequence': u32, 'txc': u64, 'rxc': u64, 'counter': u64, } number = len(parameters)*2 tokens = self._cleaned(value).split(' ',number-1) if len(tokens) != number: self._error = 'invalid operational syntax, wrong number of arguments' return False data = {} while tokens and parameters: command = tokens.pop(0).lower() value = tokens.pop(0) if command == 'router-id': if isipv4(value): data['routerid'] = RouterID(value) else: self._error = 'invalid operational value for %s' % command return False continue expected = parameters.pop(0) if command != expected: self._error = 'invalid operational syntax, unknown argument %s' % command return False if not validate.get(command,valid)(value): self._error = 'invalid operational value for %s' % command return False data[command] = convert[command](value) if tokens or parameters: self._error = 'invalid advisory syntax, missing argument(s) %s' % ', '.join(parameters) return False if 'routerid' not in data: data['routerid'] = None if 'operational' not in scope[-1]: scope[-1]['operational'] = [] # iterate on each family for the peer if multiprotocol is set. scope[-1]['operational'].append(klass(**data)) return True # .............................. def decode (self,update): # self check to see if we can decode what we encode import sys from exabgp.bgp.message.update.factory import UpdateFactory from exabgp.bgp.message.open import Open from exabgp.bgp.message.open.capability import Capabilities from exabgp.bgp.message.open.capability.negotiated import Negotiated from exabgp.bgp.message.open.capability.id import CapabilityID from exabgp.bgp.message.notification import Notify from exabgp.reactor.api.encoding import JSON self.logger._parser = True self.logger.parser('\ndecoding routes in configuration') n = self.neighbor[self.neighbor.keys()[0]] path = {} for f in known_families(): if n.add_path: path[f] = n.add_path capa = Capabilities().new(n,False) capa[CapabilityID.ADD_PATH] = path capa[CapabilityID.MULTIPROTOCOL_EXTENSIONS] = n.families() o1 = Open(4,n.local_as,str(n.local_address),capa,180) o2 = Open(4,n.peer_as,str(n.peer_address),capa,180) negotiated = Negotiated(n) negotiated.sent(o1) negotiated.received(o2) #grouped = False raw = ''.join(chr(int(_,16)) for _ in (update[i*2:(i*2)+2] for i in range(len(update)/2))) while raw: if raw.startswith('\xff'*16): kind = ord(raw[18]) size = (ord(raw[16]) << 16) + (ord(raw[17])) injected,raw = raw[19:size],raw[size:] if kind == 2: self.logger.parser('the message is an update') factory = UpdateFactory decoding = 'update' else: self.logger.parser('the message is not an update (%d) - aborting' % kind) sys.exit(1) else: self.logger.parser('header missing, assuming this message is ONE update') factory = UpdateFactory decoding = 'update' injected,raw = raw,'' try: # This does not take the BGP header - let's assume we will not break that :) update = factory(negotiated,injected) except KeyboardInterrupt: raise except Notify,e: self.logger.parser('could not parse the message') self.logger.parser(str(e)) sys.exit(1) except Exception,e: self.logger.parser('could not parse the message') self.logger.parser(str(e)) sys.exit(1) self.logger.parser('') # new line for number in range(len(update.nlris)): change = Change(update.nlris[number],update.attributes) self.logger.parser('decoded %s %s %s' % (decoding,change.nlri.action,change.extensive())) self.logger.parser('update json %s' % JSON('1.0').update(str(n.peer_address),update)) import sys sys.exit(0) # ASN4 merge test # injected = ['0x0', '0x0', '0x0', '0x2e', '0x40', '0x1', '0x1', '0x0', '0x40', '0x2', '0x8', '0x2', '0x3', '0x78', '0x14', '0xab', '0xe9', '0x5b', '0xa0', '0x40', '0x3', '0x4', '0x52', '0xdb', '0x0', '0x4f', '0xc0', '0x8', '0x8', '0x78', '0x14', '0xc9', '0x46', '0x78', '0x14', '0xfd', '0xea', '0xe0', '0x11', '0xa', '0x2', '0x2', '0x0', '0x0', '0xab', '0xe9', '0x0', '0x3', '0x5', '0x54', '0x17', '0x9f', '0x65', '0x9e', '0x15', '0x9f', '0x65', '0x80', '0x18', '0x9f', '0x65', '0x9f'] # EOR # injected = '\x00\x00\x00\x07\x90\x0f\x00\x03\x00\x02\x01' def selfcheck (self): import sys # self check to see if we can decode what we encode from exabgp.util.od import od from exabgp.bgp.message.update import Update from exabgp.bgp.message.update.factory import UpdateFactory from exabgp.bgp.message.open import Open from exabgp.bgp.message.open.capability import Capabilities from exabgp.bgp.message.open.capability.negotiated import Negotiated from exabgp.bgp.message.open.capability.id import CapabilityID from exabgp.bgp.message.notification import Notify from exabgp.rib.change import Change self.logger._parser = True self.logger.parser('\ndecoding routes in configuration') n = self.neighbor[self.neighbor.keys()[0]] path = {} for f in known_families(): if n.add_path: path[f] = n.add_path capa = Capabilities().new(n,False) capa[CapabilityID.ADD_PATH] = path capa[CapabilityID.MULTIPROTOCOL_EXTENSIONS] = n.families() o1 = Open(4,n.local_as,str(n.local_address),capa,180) o2 = Open(4,n.peer_as,str(n.peer_address),capa,180) negotiated = Negotiated(n) negotiated.sent(o1) negotiated.received(o2) #grouped = False for nei in self.neighbor.keys(): for message in self.neighbor[nei].rib.outgoing.updates(False): pass for change1 in self.neighbor[nei].rib.outgoing.sent_changes(): str1 = change1.extensive() packed = list(Update([change1.nlri],change1.attributes).messages(negotiated)) pack1 = packed[0] self.logger.parser('parsed route requires %d updates' % len(packed)) self.logger.parser('update size is %d' % len(pack1)) self.logger.parser('parsed route %s' % str1) self.logger.parser('parsed hex %s' % od(pack1)) # This does not take the BGP header - let's assume we will not break that :) try: self.logger.parser('') # new line pack1s = pack1[19:] if pack1.startswith('\xFF'*16) else pack1 update = UpdateFactory(negotiated,pack1s) change2 = Change(update.nlris[0],update.attributes) str2 = change2.extensive() pack2 = list(Update([update.nlris[0]],update.attributes).messages(negotiated))[0] self.logger.parser('recoded route %s' % str2) self.logger.parser('recoded hex %s' % od(pack2)) str1r = str1.replace(' med 100','').replace(' local-preference 100','').replace(' origin igp','') str2r = str2.replace(' med 100','').replace(' local-preference 100','').replace(' origin igp','') skip = False if str1r != str2r: if 'attribute [' in str1r and ' 0x00 ' in str1r: # we do not decode non-transitive attributes self.logger.parser('skipping string check on udpate with non-transitive attribute(s)') skip = True else: self.logger.parser('strings are different:') self.logger.parser('[%s]'%str1r) self.logger.parser('[%s]'%str2r) sys.exit(1) else: self.logger.parser('strings are fine') if skip: self.logger.parser('skipping encoding for update with non-transitive attribute(s)') elif pack1 != pack2: self.logger.parser('encoding are different') self.logger.parser('[%s]'%od(pack1)) self.logger.parser('[%s]'%od(pack2)) sys.exit(1) else: self.logger.parser('encoding is fine') self.logger.parser('----------------------------------------') except Notify,e: print 'failed due to notification' print str(e) sys.exit(1) import sys sys.exit(0)
{ "content_hash": "59daf80cbfc5e953a065240e101777ae", "timestamp": "", "source": "github", "line_count": 2682, "max_line_length": 487, "avg_line_length": 32.3214019388516, "alnum_prop": 0.6583646724961355, "repo_name": "mshahbaz/exabgp", "id": "5e66a597f94bd414da672195c91b7a38918c79dc", "size": "86704", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/exabgp/configuration/file.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "JavaScript", "bytes": "940" }, { "name": "Perl", "bytes": "1553" }, { "name": "Python", "bytes": "672789" }, { "name": "Scala", "bytes": "44" }, { "name": "Shell", "bytes": "16626" } ], "symlink_target": "" }
# Hive Netius System # Copyright (c) 2008-2020 Hive Solutions Lda. # # This file is part of Hive Netius System. # # Hive Netius System is free software: you can redistribute it and/or modify # it under the terms of the Apache License as published by the Apache # Foundation, either version 2.0 of the License, or (at your option) any # later version. # # Hive Netius System is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # Apache License for more details. # # You should have received a copy of the Apache License along with # Hive Netius System. If not, see <http://www.apache.org/licenses/>. __author__ = "João Magalhães <joamag@hive.pt>" """ The author(s) of the module """ __version__ = "1.0.0" """ The version of the module """ __revision__ = "$LastChangedRevision$" """ The revision number of the module """ __date__ = "$LastChangedDate$" """ The last change date of the module """ __copyright__ = "Copyright (c) 2008-2020 Hive Solutions Lda." """ The copyright for the module """ __license__ = "Apache License, Version 2.0" """ The license for the module """ import ssl import struct import netius.common from . import http class HTTP2Connection(http.HTTPConnection): def __init__( self, legacy = True, window = netius.common.HTTP2_WINDOW, settings = netius.common.HTTP2_SETTINGS_OPTIMAL, settings_r = netius.common.HTTP2_SETTINGS, *args, **kwargs ): http.HTTPConnection.__init__(self, *args, **kwargs) self.legacy = legacy self.settings = dict(settings) self.settings_r = dict(settings_r) self.window = window self.window_o = self.settings[netius.common.http2.SETTINGS_INITIAL_WINDOW_SIZE] self.window_l = self.window_o self.window_t = self.window_o // 2 self.preface = False self.preface_b = b"" self.frames = [] self.unavailable = {} def open(self, *args, **kwargs): http.HTTPConnection.open(self, *args, **kwargs) if not self.is_open(): return if not self.legacy: self.set_h2() def info_dict(self, full = False): info = http.HTTPConnection.info_dict(self, full = full) info.update( legacy = self.legacy, window = self.window, window_o = self.window_o, window_l = self.window_l, window_t = self.window_t, frames = len(self.frames) ) return info def flush_s(self, stream = None, callback = None): return self.send_part( b"", stream = stream, final = True, flush = True, callback = callback ) def set_h2(self): self.legacy = False if self.parser: self.parser.destroy() self.parser = netius.common.HTTP2Parser(self, store = True) self.parser.bind("on_data", self.on_data) self.parser.bind("on_header", self.on_header) self.parser.bind("on_payload", self.on_payload) self.parser.bind("on_frame", self.on_frame) self.parser.bind("on_data_h2", self.on_data_h2) self.parser.bind("on_headers_h2", self.on_headers_h2) self.parser.bind("on_rst_stream", self.on_rst_stream) self.parser.bind("on_settings", self.on_settings) self.parser.bind("on_ping", self.on_ping) self.parser.bind("on_goaway", self.on_goaway) self.parser.bind("on_window_update", self.on_window_update) self.parser.bind("on_continuation", self.on_continuation) def parse(self, data): if not self.legacy and not self.preface: data = self.parse_preface(data) if not data: return try: return self.parser.parse(data) except netius.ParserError as error: if not self.legacy: raise self.send_response( code = error.code, apply = True ) def parse_preface(self, data): """ Tries to run the parsing on the preface part of the connection establishment using the provided data note that the data is buffered in case the proper size has not been reached for proper validation. This should be the first step when trying to establish a proper HTTP 2 connection. :type data: String :param data: The data buffer that is going to be used to try to parse the connection preface. :rtype: String :return: The resulting data after the preface has been parsed, this should be empty or invalid in case no data is pending to be parsed. """ # adds the current data to the buffer of bytes pending # in the preface parsing and then verified that the proper # preface size has been reached, in case it has not returned # an invalid value immediately (no further parsing) self.preface_b += data preface_l = len(netius.common.HTTP2_PREFACE) is_size = len(self.preface_b) >= preface_l if not is_size: return None # retrieves the preface string from the buffer (according to size) # and runs the string based verification, raising an exception in # case there's a mismatch in the string validation preface = self.preface_b[:preface_l] if not preface == netius.common.HTTP2_PREFACE: raise netius.ParserError("Invalid preface") # sets the preface (parsed) flag indicating that the preface has # been parsed for the current connection self.preface = True # retrieves the extra data added to the preface buffer and then # unsets the same buffer (no more preface parsing) data = self.preface_b[preface_l:] self.preface_b = b"" # calls the proper callback for the preface sending both the current # instance and the associated parser for handling self.owner.on_preface_http2(self, self.parser) # returns the remaining data pending to be parsed so that it may be # parsed by any extra operation return data def send_plain( self, data, stream = None, final = True, delay = True, callback = None ): if self.legacy: return http.HTTPConnection.send_plain( self, data, stream = stream, final = final, delay = delay, callback = callback ) # verifies if the data should be fragmented for the provided # stream and if that's not required send the required data # straight away with any required splitting/fragmentation of it if not self.fragmentable_stream(stream, data): return self.send_data( data, stream = stream, end_stream = final, delay = delay, callback = callback ) # sends the same data but using a fragmented approach where the # data is going to be splitted according to the maximum determined # frame size, this is required to overcome limitations in the connection # that has been established with the other peer return self.send_fragmented( data, stream = stream, final = final, delay = delay, callback = callback ) def send_chunked( self, data, stream = None, final = True, delay = True, callback = None ): if self.legacy: return http.HTTPConnection.send_chunked( self, data, stream = stream, final = final, delay = delay, callback = callback ) return self.send_plain( data, stream = stream, final = final, delay = delay, callback = callback ) def send_fragmented( self, data, stream = None, final = True, delay = True, callback = None ): count = 0 fragments = self.fragment_stream(stream, data) fragments = list(fragments) fragments_l = len(fragments) for index in netius.legacy.xrange(fragments_l): is_last = index == fragments_l - 1 fragment = fragments[index] if is_last: count += self.send_data( fragment, stream = stream, end_stream = final, delay = delay, callback = callback ) else: count += self.send_data( fragment, stream = stream, end_stream = False, delay = delay ) return count def send_response( self, data = None, headers = None, version = None, code = 200, code_s = None, apply = False, stream = None, final = True, flush = True, delay = True, callback = None ): # in case the legacy mode is enabled the send response call is # forwarded to the upper layers so that it's handled properly if self.legacy: return http.HTTPConnection.send_response( self, data = data, headers = headers, version = version, code = code, code_s = code_s, apply = apply, stream = stream, final = final, flush = flush, delay = delay, callback = callback ) # retrieves the various parts that define the response # and runs a series of normalization processes to retrieve # the relevant information of the data to be sent to client data = data or b"" data = netius.legacy.bytes(data) headers = headers or dict() data_l = len(data) if data else 0 is_empty = code in (204, 304) and data_l == 0 # runs a series of verifications taking into account the type # of the method defined in the current request, for instance if # the current request is a HEAD one then no data is sent (as expected) if self.parser_ctx.method and self.parser_ctx.method.upper() == "HEAD": data = b"" # verifies if the content length header is currently present # in the provided headers and in case it's not inserts it if not "content-length" in headers and not is_empty: headers["content-length"] = str(data_l) # in case the apply flag is set the apply all operation is performed # so that a series of headers are applied to the current context # (things like the name of the server connection, etc) if apply: self.owner._apply_all(self.parser, self, headers) # sends the initial headers data (including status line), this should # trigger the initial data sent to the peer/client count = self.send_header( headers = headers, version = version, code = code, code_s = code_s, stream = stream ) # sends the part/payload information (data) to the client and optionally # flushes the current internal buffers to enforce sending of the value count += self.send_part( data, stream = stream, final = final, flush = flush, delay = delay, callback = callback ) return count def send_header( self, headers = None, version = None, code = 200, code_s = None, stream = None, final = False, delay = True, callback = None ): # in case the legacy mode is enabled the send header call is # forwarded to the upper layers so that it's handled properly if self.legacy: return http.HTTPConnection.send_header( self, headers = headers, version = version, code = code, code_s = code_s, stream = stream, delay = delay, callback = callback ) # verifies if the headers value has been provided and in case it # has not creates a new empty dictionary (runtime compatibility) headers = headers or dict() # defines the proper default base HTTP version in case it has not # been provided as part the default values version = version or "HTTP/2.0" # creates the headers base list that is going to store the various # header tuples representing the headers in canonical http2 form headers_b = [] headers_b.append((":status", str(code))) # iterates over the complete set of raw header values to normalize # them and add them to the currently defined base list for key, value in netius.legacy.iteritems(headers): key = netius.common.header_down(key) if key in ("connection", "transfer-encoding"): continue if not isinstance(value, list): value = (value,) for _value in value: headers_b.append((key, _value)) # verifies if this is considered to be the final operation in the stream # and if that's the case creates a new callback for the closing of the # stream at the end of the operation, this is required for proper collection if final: old_callback = callback def callback(connection): self.close_stream(stream, final = final) old_callback and old_callback(connection) # runs the send headers operations that should send the headers list # to the other peer and returns the number of bytes sent count = self.send_headers( headers_b, end_stream = final, stream = stream, delay = delay, callback = callback ) # "notifies" the owner of the connection that the headers have been # sent all the HTTP header information should be present self.owner.on_send_http( self.connection_ctx, self.parser_ctx, headers = headers, version = version, code = code, code_s = code_s ) # returns the final number of bytes that have been sent during the current # operation of sending headers to the other peer return count def send_part( self, data, stream = None, final = True, flush = False, delay = True, callback = None ): if self.legacy: return http.HTTPConnection.send_part( self, data, stream = stream, final = final, flush = flush, delay = delay, callback = callback ) # verifies if this is considered to be the final operation in the stream # and if that's the case creates a new callback for the closing of the # stream at the end of the operation, this is required for proper collection if final: old_callback = callback def callback(connection): self.close_stream(stream, final = final) old_callback and old_callback(connection) # verifies if the current connection/stream is flushed meaning that it requires # a final chunk of data to be sent to the peer, if that's not the case there's # no need to run the flushing as a possible empty data frame may be sent which # may cause errors to be raised from the server side flush = flush and self.is_flushed() if flush: count = self.send_base( data, stream = stream, final = False ) self.flush(stream = stream, callback = callback) else: count = self.send_base( data, stream = stream, final = final, delay = delay, callback = callback ) return count def send_frame( self, type = 0x01, flags = 0x00, payload = b"", stream = 0x00, delay = True, callback = None ): size = len(payload) size_h = size >> 16 size_l = size & 0xffff header = struct.pack("!BHBBI", size_h, size_l, type, flags, stream) message = header + payload self.owner.on_send_http2(self, self.parser, type, flags, payload, stream) return self.send(message, delay = delay, callback = callback) def send_data( self, data = b"", end_stream = True, stream = None, delay = True, callback = None ): # builds the flags byte taking into account the various # options that have been passed to the sending of data flags = 0x00 data_l = len(data) if end_stream: flags |= 0x01 # builds the callback clojure so that the connection state # is properly updated upon the sending of data callback = self._build_c(callback, stream, data_l) # verifies if the stream is available for the amount of data # that is currently being sent and if that's not the case delays # the sending of the frame to when the stream becomes available if not self.available_stream(stream, data_l): count = self.delay_frame( type = netius.common.DATA, flags = flags, payload = data, stream = stream, delay = delay, callback = callback ) self.try_unavailable(stream) return count # runs the increments remove window value, decrementing the window # by the size of the data being sent self.increment_remote(stream, data_l * -1, all = True) # runs the "proper" sending of the data frame, registering the callback # with the expected clojure count = self.send_frame( type = netius.common.DATA, flags = flags, payload = data, stream = stream, delay = delay, callback = callback ) # runs the try unavailable method to verify if the stream did became # unavailable after the sending of the data self.try_unavailable(stream) # returns the final number of bytes sent to the called method, this should # match the value of the data length return count def send_headers( self, headers = [], end_stream = False, end_headers = True, stream = None, delay = True, callback = None ): flags = 0x00 if end_stream: flags |= 0x01 if end_headers: flags |= 0x04 payload = self.parser.encoder.encode(headers) return self.send_frame( type = netius.common.HEADERS, flags = flags, payload = payload, stream = stream, delay = delay, callback = callback ) def send_rst_stream( self, error_code = 0x00, stream = None, delay = True, callback = None ): payload = struct.pack("!I", error_code) return self.send_frame( type = netius.common.RST_STREAM, payload = payload, stream = stream, delay = delay, callback = callback ) def send_settings( self, settings = (), ack = False, delay = True, callback = None ): flags = 0x00 if ack: flags |= 0x01 buffer = [] for ident, value in settings: setting_s = struct.pack("!HI", ident, value) buffer.append(setting_s) payload = b"".join(buffer) return self.send_frame( type = netius.common.SETTINGS, flags = flags, payload = payload, delay = delay, callback = callback ) def send_ping( self, opaque = b"\0\0\0\0\0\0\0\0", ack = False, delay = True, callback = None ): flags = 0x00 if ack: flags |= 0x01 return self.send_frame( type = netius.common.PING, flags = flags, payload = opaque, delay = delay, callback = callback ) def send_goaway( self, last_stream = 0x00, error_code = 0x00, message = "", close = True, delay = True, callback = None ): if close: old_callback = callback def callback(connection): self.close() old_callback and old_callback(connection) message = netius.legacy.bytes(message) payload = struct.pack("!II", last_stream, error_code) payload += message return self.send_frame( type = netius.common.GOAWAY, payload = payload, delay = delay, callback = callback ) def send_window_update( self, increment = 0, stream = None, delay = True, callback = None ): payload = struct.pack("!I", increment) return self.send_frame( type = netius.common.WINDOW_UPDATE, payload = payload, stream = stream, delay = delay, callback = callback ) def send_delta(self): delta = self.window_l -\ netius.common.HTTP2_SETTINGS[netius.common.http2.SETTINGS_INITIAL_WINDOW_SIZE] if delta == 0: return self.send_window_update(increment = delta, stream = 0x00) def delay_frame(self, *args, **kwargs): # retrieves the reference to the stream identifier for which # the frame is meant to be sent, and then uses this same value # to try to retrieve the target stream of the frame stream = kwargs["stream"] stream = self.parser._get_stream(stream) # adds the frame structure (tuple) as the structure describing # the frame to be delayed, then increments the frame counter in # the stream so that it represent a proper value self.frames.append((args, kwargs)) stream.frames += 1 # returns a zero value indicating that no bytes have been sent # "immediately" by this method return 0 def flush_frames(self, all = True): """ Runs the flush operation on the delayed/pending frames, meaning that the window/availability tests are going to be run, checking if the various streams and connection are ready for sending the frames. In case the all flag is active the complete set of frames are going to be tested for sending, this operation implies more resource usage. This method should be called after a window update frame is received so that the pending frames may be sent. :type all: bool :param all: If the complete set of frames should be tested, or if instead at the first testing fail the control flow should be returned immediately. :rtype: bool :return: If all the pending frames have been successfully flushed. """ # starts the values for both the offset value to be used in the # pop operation and the dictionary to be used in the storage of # the bitset of streams marked as started in the iteration offset = 0 starved = dict() if all else None # iterates over the complete set of frames pending to to be sent # (delayed) trying to send each of them until one fails and the # flushing operation is delayed until further requesting while offset < len(self.frames): # retrieves the reference to the current frame tuple to # be sent and retrieves the stream and payload from it frame = self.frames[offset] args, kwargs = frame stream = kwargs["stream"] payload = kwargs["payload"] payload_l = len(payload) # verifies that the stream is currently place in the list of # stream that are considered unavailable as this is a state # required for proper execution netius.verify(stream in self.unavailable) # verifies if the stream associated with the frame to be # sent is in the started map and if that's the case continue # the current loop immediately (cannot flush frame) if starved and stream in starved: offset += 1 continue # retrieves the reference to the stream object from the # identifier of the stream, this may an invalid/unset value _stream = self.parser._get_stream(stream, strict = False) # verifies if the current stream to be flushed is still # open and if that's not the case removes the frame from # the frames queue and skips the current iteration if not _stream or not _stream.is_open(): self.frames.pop(offset) if _stream: _stream.frames -= 1 continue # makes sure that the stream is currently marked as not available # this should be the state for every stream that has pending frames netius.verify(not _stream._available) # verifies if there's available "space" in the stream flow # to send the current payload and in case there's not breaks # the current loop as there's nothing else to be done, delays # pending frames for a new flush operation, note that the # return value is invalid (meaning that the stream may not # be available), a special failover operation exists if the # all flush operation is enabled in which the stream is marked # as starved and the current iteration is skipped trying to # flush frames from different streams available = self.available_stream(stream, payload_l, strict = False) if not available and not all: return False if not available and all: starved[stream] = True offset += 1 continue # removes the frame from both of the frame queues (both global # and stream) so that it is no longer going to be used for flush self.frames.pop(offset) _stream.frames -= 1 # decrements the current stream window by the size of the payload # and then runs the send frame operation for the pending frame self.increment_remote(stream, payload_l * -1, all = True) self.send_frame(*args, **kwargs) # returns the final result with a valid value meaning that all of the # flush operations have been successful (no frames pending in connection) return True if offset == 0 else False def flush_available(self): """ Runs the (became) available flush operation that tries to determine all the streams that were under the "blocked" state and became "unblocked", notifying them about that "edge" operation. This operation must be performed after any of the blocking constraints is changed (eg: connection window, stream window, etc.). """ # iterates over the complete set of streams (identifiers) that are # currently under the unavailable/blocked state, to try to determine # if they became unblocked by the "current operation" for stream in netius.legacy.keys(self.unavailable): self.try_available(stream) def set_settings(self, settings): self.settings_r.update(settings) def close_stream(self, stream, final = False, flush = False, reset = False): if not self.parser._has_stream(stream): return stream = self.parser._get_stream(stream) if not stream: return stream.end_stream_l = final stream.close(flush = flush, reset = reset) def available_stream(self, stream, length, strict = True): if self.window == 0: return False if self.window < length: return False stream = self.parser._get_stream(stream) if not stream: return True if stream.window == 0: return False if stream.window < length: return False if strict and stream.frames: return False return True def fragment_stream(self, stream, data): stream = self.parser._get_stream(stream) return stream.fragment(data) def fragmentable_stream(self, stream, data): stream = self.parser._get_stream(stream) return stream.fragmentable(data) def open_stream(self, stream): stream = self.parser._get_stream(stream, strict = False) if not stream : return False return True if stream and stream.is_open() else False def try_available(self, stream, strict = True): """ Tries to determine if the stream with the provided identifier has just became available (unblocked from blocked state), this happens when the required window value (either connection or stream is increased properly). :type stream: int :param stream: The identifier of the stream that is going to be tested from proper connection availability. :type strict: bool :param strict: If the strict mode should be used in the availability testing, this implies extra verifications. """ # verifies if the stream is currently present in the map of unavailable # or blocked streams and if that's the case returns immediately as # the connection is not blocked if not stream in self.unavailable: return # tries to retrieve the stream object reference from the identifier and # in case none is retrieved (probably stream closed) returns immediately # and removes the stream from the map of unavailability _stream = self.parser._get_stream(stream, strict = False) if not _stream: del self.unavailable[stream] return # tries to determine if the stream is available for the sending of at # least one byte and if that's not the case returns immediately, not # setting the stream as available if not self.available_stream(stream, 1, strict = strict): return # removes the stream from the map of unavailable stream and "notifies" # the stream about the state changing operation to available/unblocked del self.unavailable[stream] _stream.available() def try_unavailable(self, stream, strict = True): """ Runs the unavailability test on the stream with the provided identifier meaning that a series of validation will be performed to try to determine if for some reason is not possible to send any more data frames to the stream until some window changes. A stream that is under the unavailable state is considered "blocked". :type stream: int :param stream: The identifier of the stream that is going to be tested from proper connection unavailability. :type strict: bool :param strict: If the strict mode should be used in the availability testing, this implies extra verifications. """ # in case the stream identifier is already present in the unavailable # map it cannot be marked as unavailable again if stream in self.unavailable: return # tries to retrieve the reference to the stream object to be tested # an in case none is found (connection closed) returns immediately _stream = self.parser._get_stream(stream, strict = False) if not _stream: return # runs the proper availability verification by testing the capacity # of the stream to send one byte and in case there's capacity to send # that byte the stream is considered available or unblocked, so the # control flow must be returned (stream not marked) if self.available_stream(stream, 1, strict = strict): return # marks the stream as unavailable and "notifies" the stream object # about the changing to the unavailable/blocked state self.unavailable[stream] = True _stream.unavailable() def increment_remote(self, stream, increment, all = False): """ Increments the size of the remove window associated with the stream passed by argument by the size defined in the increment field (in bytes). If the stream is not provided or invalid the global window is updated instead of the stream one. :type stream: int :param stream: The identifier of the stream that is going to have its window incremented, or invalid if the global connection window is meant to be updated. :type increment: int :param increment: The increment in bytes for the window, this value may be negative for decrement operations. :type all: bool :param all: If all the resources (connection and stream) should be updated by the increment operation. """ if not stream or all: self.window += increment if not stream: return stream = self.parser._get_stream(stream) if not stream: return stream.remote_update(increment) def increment_local(self, stream, increment): # increments the global connection local window # by the provided value, and then verifies if the # threshold has been passed, if that's the case # the window updated frame must be sent so that # the remove end point is properly notified self.window_l += increment if self.window_l < self.window_t: self.send_window_update( increment = self.window_o - self.window_l, stream = 0x00 ) self.window_l = self.window_o # tries to retrieve the stream associates with the # provided identifier and then runs the local update # operation in it (may trigger window update flushing) stream = self.parser._get_stream(stream) if not stream: return stream.local_update(increment) def error_connection( self, last_stream = 0x00, error_code = 0x00, message = "", close = True, callback = None ): self.send_goaway( last_stream = last_stream, error_code = error_code, message = message, close = close, callback = callback ) def error_stream( self, stream, last_stream = 0x00, error_code = 0x00, message = "", close = True, callback = None ): self.send_rst_stream( error_code = error_code, stream = stream, callback = lambda c: self.error_connection( last_stream = last_stream, error_code = error_code, message = message, close = close, callback = callback ) ) def on_header(self, header): self.owner.on_header_http2(self, self.parser, header) def on_payload(self): self.owner.on_payload_http2(self, self.parser) def on_frame(self): self.owner.on_frame_http2(self, self.parser) def on_data_h2(self, stream, contents): self.increment_local( stream and stream.identifier, increment = len(contents) * -1 ) self.owner.on_data_http2(self, self.parser, stream, contents) def on_headers_h2(self, stream): self.owner.on_headers_http2(self, self.parser, stream) def on_rst_stream(self, stream, error_code): self.owner.on_rst_stream_http2(self, self.parser, stream, error_code) def on_settings(self, settings, ack): self.owner.on_settings_http2(self, self.parser, settings, ack) def on_ping(self, opaque, ack): self.owner.on_ping_http2(self, self.parser, opaque, ack) def on_goaway(self, last_stream, error_code, extra): self.owner.on_goaway_http2(self, self.parser, last_stream, error_code, extra) def on_window_update(self, stream, increment): self.increment_remote(stream and stream.identifier, increment) self.flush_frames() self.flush_available() self.owner.on_window_update_http2(self, self.parser, stream, increment) def on_continuation(self, stream): self.owner.on_continuation_http2(self, self.parser, stream) def is_throttleable(self): if self.legacy: return http.HTTPConnection.is_throttleable(self) return False @property def connection_ctx(self): if self.legacy: return super(HTTP2Connection, self).connection_ctx if not self.parser: return self if not self.parser.stream_o: return self return self.parser.stream_o @property def parser_ctx(self): if self.legacy: return super(HTTP2Connection, self).parser_ctx if not self.parser: return None if not self.parser.stream_o: return self.parser return self.parser.stream_o def _build_c(self, callback, stream, data_l): stream = self.parser._get_stream(stream, strict = False) if not stream: return callback stream.pending_s += data_l old_callback = callback def callback(connection): stream.pending_s -= data_l if not old_callback: return return old_callback(connection) return callback def _flush_plain(self, stream = None, callback = None): self.send_part(b"", stream = stream, callback = callback) def _flush_chunked(self, stream = None, callback = None): if self.legacy: return http.HTTPConnection._flush_chunked( self, stream = stream, callback = callback ) self._flush_plain(stream = stream, callback = callback) class HTTP2Server(http.HTTPServer): def __init__( self, legacy = True, safe = False, settings = netius.common.HTTP2_SETTINGS_OPTIMAL, *args, **kwargs ): self.legacy = legacy self.safe = safe self.settings = settings self.settings_t = netius.legacy.items(self.settings) self.has_h2 = self._has_h2() self.has_all_h2 = self._has_all_h2() self._protocols = [] self.safe = self.get_env("SAFE", self.safe, cast = bool) http.HTTPServer.__init__(self, *args, **kwargs) @classmethod def _has_hpack(cls): try: import hpack #@UnusedImport except ImportError: return False return True @classmethod def _has_alpn(cls): return ssl.HAS_ALPN @classmethod def _has_npn(cls): return ssl.HAS_NPN def info_dict(self, full = False): info = http.HTTPServer.info_dict(self, full = full) info.update( legacy = self.legacy, safe = self.safe, has_h2 = self.has_h2, has_all_h2 = self.has_all_h2 ) return info def get_protocols(self): if self._protocols: return self._protocols if not self.safe and self.has_h2: self._protocols.extend(["h2"]) if self.legacy: self._protocols.extend(["http/1.1", "http/1.0"]) return self._protocols def build_connection(self, socket, address, ssl = False): return HTTP2Connection( owner = self, socket = socket, address = address, ssl = ssl, encoding = self.encoding, legacy = self.legacy, settings = self.settings ) def on_exception(self, exception, connection): if hasattr(connection, "legacy") and connection.legacy: return http.HTTPServer.on_exception(self, exception, connection) if not isinstance(exception, netius.NetiusError): return http.HTTPServer.on_exception(self, exception, connection) try: self._handle_exception(exception, connection) except Exception: connection.close() def on_ssl(self, connection): http.HTTPServer.on_ssl(self, connection) if self.safe or not self.has_h2: return protocol = connection.ssl_protocol() if not protocol == "h2": return connection.set_h2() def on_serve(self): http.HTTPServer.on_serve(self) safe_s = "with" if self.safe else "without" self.info("Starting HTTP2 server %s safe mode ..." % safe_s) if not self.has_h2: self.info("No support for HTTP2 is available ...") elif not self.has_all_h2: self.info("Limited support for HTTP2 is available ...") for setting, name in netius.common.HTTP2_TUPLES: if not self.env: continue value = self.get_env(name, None, cast = int) if value == None: continue self.settings[setting] = value self.info("Setting HTTP2 setting %s with value '%d' ..." % (name, value)) self.settings_t = netius.legacy.items(self.settings) def on_preface_http2(self, connection, parser): connection.send_settings(settings = self.settings_t) connection.send_delta() def on_header_http2(self, connection, parser, header): pass def on_payload_http2(self, connection, parser): is_debug = self.is_debug() is_debug and self._log_frame(connection, parser) def on_frame_http2(self, connection, parser): pass def on_data_http2(self, connection, parser, stream, contents): pass def on_headers_http2(self, connection, parser, stream): pass def on_rst_stream_http2(self, connection, parser, stream, error_code): if not stream: return stream.end_stream = True stream.end_stream_l = True stream.close(reset = False) def on_settings_http2(self, connection, parser, settings, ack): if ack: return self.debug("Received settings %s for connection" % str(settings)) connection.set_settings(dict(settings)) connection.send_settings(ack = True) def on_ping_http2(self, connection, parser, opaque, ack): if ack: return connection.send_ping(opaque = opaque, ack = True) def on_goaway_http2(self, connection, parser, last_stream, error_code, extra): if error_code == 0x00: return self._log_error(error_code, extra) def on_window_update_http2(self, connection, parser, stream, increment): self.debug("Window updated with increment %d bytes" % increment) def on_continuation_http2(self, connection, parser, stream): pass def on_send_http2(self, connection, parser, type, flags, payload, stream): is_debug = self.is_debug() is_debug and self._log_send(connection, parser, type, flags, payload, stream) def _has_h2(self): cls = self.__class__ if not cls._has_hpack(): return False return True def _has_all_h2(self): cls = self.__class__ if not cls._has_hpack(): return False if not cls._has_alpn(): return False if not cls._has_npn(): return False return True def _handle_exception(self, exception, connection): stream = exception.get_kwarg("stream") error_code = exception.get_kwarg("error_code", 0x00) message = exception.get_kwarg("message", "") ignore = exception.get_kwarg("ignore", False) self.warning(exception) self.log_stack() if ignore: return connection.send_ping(ack = True) if stream: return connection.error_stream( stream, error_code = error_code, message = message ) return connection.error_connection( error_code = error_code, message = message ) def _log_frame(self, connection, parser): self.debug( "Received frame 0x%02x (%s) for stream %d with length %d bytes" %\ (parser.type, parser.type_s, parser.stream, parser.length) ) self._log_frame_details( parser, parser.type_s, parser.flags, parser.payload, parser.stream, False ) def _log_error(self, error_code, extra): message = netius.legacy.str(extra) self.warning( "Received error 0x%02x with message '%s'" %\ (error_code, message) ) def _log_send(self, connection, parser, type, flags, payload, stream): length = len(payload) type_s = parser.get_type_s(type) self.debug( "Sent frame 0x%02x (%s) for stream %d with length %d bytes" %\ (type, type_s, stream, length) ) self._log_frame_details(parser, type_s, flags, payload, stream, True) def _log_window(self, parser, stream, remote = False): name = "SEND" if remote else "RECV" connection = parser.connection window = connection.window if remote else connection.window_l self.debug("Connection %s window size is %d bytes" % (name, window)) stream = parser._get_stream(stream, strict = False) if not stream: return window = stream.window if remote else stream.window_l self.debug( "Stream %d (dependency = %d, weight = %d) %s window size is %d bytes" %\ (stream.identifier, stream.dependency, stream.weight, name, window) ) def _log_frame_details(self, parser, type_s, flags, payload, stream, out): type_l = type_s.lower() method_s = "_log_frame_" + type_l if not hasattr(self, method_s): return method = getattr(self, method_s) method(parser, flags, payload, stream, out) def _log_frame_flags(self, type_s, *args): flags = ", ".join(args) pluralized = "flags" if len(args) > 1 else "flag" if flags: self.debug("%s with %s %s active" % (type_s, pluralized, flags)) else: self.debug("Frame %s with no flags active" % type_s) def _log_frame_data(self, parser, flags, payload, stream, out): _stream = parser._get_stream(stream, strict = False) flags_l = self._flags_l(flags, (("END_STREAM", 0x01),)) self._log_frame_flags("DATA", *flags_l) if _stream: self.debug("Frame DATA for path '%s'" % _stream.path_s) self._log_window(parser, stream, remote = out) def _log_frame_headers(self, parser, flags, payload, stream, out): flags_l = self._flags_l( flags, ( ("END_STREAM", 0x01), ("END_HEADERS", 0x04), ("PADDED", 0x08), ("PRIORITY", 0x20) ) ) self._log_frame_flags("HEADERS", *flags_l) def _log_frame_rst_stream(self, parser, flags, payload, stream, out): error_code, = struct.unpack("!I", payload) self.debug("Frame RST_STREAM with error code %d" % error_code) def _log_frame_goaway(self, parser, flags, payload, stream, out): last_stream, error_code = struct.unpack("!II", payload[:8]) extra = payload[8:] self.debug( "Frame GOAWAY with last stream %d, error code %d and message %s" %\ (last_stream, error_code, extra) ) def _log_frame_window_update(self, parser, flags, payload, stream, out): increment, = struct.unpack("!I", payload) self.debug("Frame WINDOW_UPDATE with increment %d" % increment) self._log_window(parser, stream, remote = not out) def _flags_l(self, flags, definition): flags_l = [] for name, value in definition: valid = True if flags & value else False if not valid: continue flags_l.append(name) return flags_l
{ "content_hash": "adf9cccf5bde51286fff54f3d5ba5d2d", "timestamp": "", "source": "github", "line_count": 1360, "max_line_length": 90, "avg_line_length": 36.93382352941177, "alnum_prop": 0.5740792355166235, "repo_name": "hivesolutions/netius", "id": "25521560394c30051c50c2c7a1059b9d6a431fb2", "size": "50276", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/netius/servers/http2.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "1400497" } ], "symlink_target": "" }
import json from django import forms from django.conf import settings from django.contrib.admin.utils import ( display_for_field, flatten_fieldsets, help_text_for_field, label_for_field, lookup_field, ) from django.core.exceptions import ObjectDoesNotExist from django.db.models.fields.related import ManyToManyRel from django.forms.utils import flatatt from django.template.defaultfilters import capfirst, linebreaksbr from django.utils.html import conditional_escape, format_html from django.utils.safestring import mark_safe from django.utils.translation import gettext, gettext_lazy as _ ACTION_CHECKBOX_NAME = '_selected_action' class ActionForm(forms.Form): action = forms.ChoiceField(label=_('Action:')) select_across = forms.BooleanField( label='', required=False, initial=0, widget=forms.HiddenInput({'class': 'select-across'}), ) checkbox = forms.CheckboxInput({'class': 'action-select'}, lambda value: False) class AdminForm: def __init__(self, form, fieldsets, prepopulated_fields, readonly_fields=None, model_admin=None): self.form, self.fieldsets = form, fieldsets self.prepopulated_fields = [{ 'field': form[field_name], 'dependencies': [form[f] for f in dependencies] } for field_name, dependencies in prepopulated_fields.items()] self.model_admin = model_admin if readonly_fields is None: readonly_fields = () self.readonly_fields = readonly_fields def __iter__(self): for name, options in self.fieldsets: yield Fieldset( self.form, name, readonly_fields=self.readonly_fields, model_admin=self.model_admin, **options ) @property def errors(self): return self.form.errors @property def non_field_errors(self): return self.form.non_field_errors @property def media(self): media = self.form.media for fs in self: media = media + fs.media return media class Fieldset: def __init__(self, form, name=None, readonly_fields=(), fields=(), classes=(), description=None, model_admin=None): self.form = form self.name, self.fields = name, fields self.classes = ' '.join(classes) self.description = description self.model_admin = model_admin self.readonly_fields = readonly_fields @property def media(self): if 'collapse' in self.classes: extra = '' if settings.DEBUG else '.min' return forms.Media(js=['admin/js/collapse%s.js' % extra]) return forms.Media() def __iter__(self): for field in self.fields: yield Fieldline(self.form, field, self.readonly_fields, model_admin=self.model_admin) class Fieldline: def __init__(self, form, field, readonly_fields=None, model_admin=None): self.form = form # A django.forms.Form instance if not hasattr(field, "__iter__") or isinstance(field, str): self.fields = [field] else: self.fields = field self.has_visible_field = not all( field in self.form.fields and self.form.fields[field].widget.is_hidden for field in self.fields ) self.model_admin = model_admin if readonly_fields is None: readonly_fields = () self.readonly_fields = readonly_fields def __iter__(self): for i, field in enumerate(self.fields): if field in self.readonly_fields: yield AdminReadonlyField(self.form, field, is_first=(i == 0), model_admin=self.model_admin) else: yield AdminField(self.form, field, is_first=(i == 0)) def errors(self): return mark_safe( '\n'.join( self.form[f].errors.as_ul() for f in self.fields if f not in self.readonly_fields ).strip('\n') ) class AdminField: def __init__(self, form, field, is_first): self.field = form[field] # A django.forms.BoundField instance self.is_first = is_first # Whether this field is first on the line self.is_checkbox = isinstance(self.field.field.widget, forms.CheckboxInput) self.is_readonly = False def label_tag(self): classes = [] contents = conditional_escape(self.field.label) if self.is_checkbox: classes.append('vCheckboxLabel') if self.field.field.required: classes.append('required') if not self.is_first: classes.append('inline') attrs = {'class': ' '.join(classes)} if classes else {} # checkboxes should not have a label suffix as the checkbox appears # to the left of the label. return self.field.label_tag( contents=mark_safe(contents), attrs=attrs, label_suffix='' if self.is_checkbox else None, ) def errors(self): return mark_safe(self.field.errors.as_ul()) class AdminReadonlyField: def __init__(self, form, field, is_first, model_admin=None): # Make self.field look a little bit like a field. This means that # {{ field.name }} must be a useful class name to identify the field. # For convenience, store other field-related data here too. if callable(field): class_name = field.__name__ if field.__name__ != '<lambda>' else '' else: class_name = field if form._meta.labels and class_name in form._meta.labels: label = form._meta.labels[class_name] else: label = label_for_field(field, form._meta.model, model_admin, form=form) if form._meta.help_texts and class_name in form._meta.help_texts: help_text = form._meta.help_texts[class_name] else: help_text = help_text_for_field(class_name, form._meta.model) self.field = { 'name': class_name, 'label': label, 'help_text': help_text, 'field': field, } self.form = form self.model_admin = model_admin self.is_first = is_first self.is_checkbox = False self.is_readonly = True self.empty_value_display = model_admin.get_empty_value_display() def label_tag(self): attrs = {} if not self.is_first: attrs["class"] = "inline" label = self.field['label'] return format_html('<label{}>{}{}</label>', flatatt(attrs), capfirst(label), self.form.label_suffix) def contents(self): from django.contrib.admin.templatetags.admin_list import _boolean_icon field, obj, model_admin = self.field['field'], self.form.instance, self.model_admin try: f, attr, value = lookup_field(field, obj, model_admin) except (AttributeError, ValueError, ObjectDoesNotExist): result_repr = self.empty_value_display else: if field in self.form.fields: widget = self.form[field].field.widget # This isn't elegant but suffices for contrib.auth's # ReadOnlyPasswordHashWidget. if getattr(widget, 'read_only', False): return widget.render(field, value) if f is None: if getattr(attr, 'boolean', False): result_repr = _boolean_icon(value) else: if hasattr(value, "__html__"): result_repr = value else: result_repr = linebreaksbr(value) else: if isinstance(f.remote_field, ManyToManyRel) and value is not None: result_repr = ", ".join(map(str, value.all())) else: result_repr = display_for_field(value, f, self.empty_value_display) result_repr = linebreaksbr(result_repr) return conditional_escape(result_repr) class InlineAdminFormSet: """ A wrapper around an inline formset for use in the admin system. """ def __init__(self, inline, formset, fieldsets, prepopulated_fields=None, readonly_fields=None, model_admin=None, has_add_permission=True, has_change_permission=True, has_delete_permission=True, has_view_permission=True): self.opts = inline self.formset = formset self.fieldsets = fieldsets self.model_admin = model_admin if readonly_fields is None: readonly_fields = () self.readonly_fields = readonly_fields if prepopulated_fields is None: prepopulated_fields = {} self.prepopulated_fields = prepopulated_fields self.classes = ' '.join(inline.classes) if inline.classes else '' self.has_add_permission = has_add_permission self.has_change_permission = has_change_permission self.has_delete_permission = has_delete_permission self.has_view_permission = has_view_permission def __iter__(self): if self.has_change_permission: readonly_fields_for_editing = self.readonly_fields else: readonly_fields_for_editing = self.readonly_fields + flatten_fieldsets(self.fieldsets) for form, original in zip(self.formset.initial_forms, self.formset.get_queryset()): view_on_site_url = self.opts.get_view_on_site_url(original) yield InlineAdminForm( self.formset, form, self.fieldsets, self.prepopulated_fields, original, readonly_fields_for_editing, model_admin=self.opts, view_on_site_url=view_on_site_url, ) for form in self.formset.extra_forms: yield InlineAdminForm( self.formset, form, self.fieldsets, self.prepopulated_fields, None, self.readonly_fields, model_admin=self.opts, ) if self.has_add_permission: yield InlineAdminForm( self.formset, self.formset.empty_form, self.fieldsets, self.prepopulated_fields, None, self.readonly_fields, model_admin=self.opts, ) def fields(self): fk = getattr(self.formset, "fk", None) empty_form = self.formset.empty_form meta_labels = empty_form._meta.labels or {} meta_help_texts = empty_form._meta.help_texts or {} for i, field_name in enumerate(flatten_fieldsets(self.fieldsets)): if fk and fk.name == field_name: continue if not self.has_change_permission or field_name in self.readonly_fields: yield { 'name': field_name, 'label': meta_labels.get(field_name) or label_for_field( field_name, self.opts.model, self.opts, form=empty_form, ), 'widget': {'is_hidden': False}, 'required': False, 'help_text': meta_help_texts.get(field_name) or help_text_for_field(field_name, self.opts.model), } else: form_field = empty_form.fields[field_name] label = form_field.label if label is None: label = label_for_field(field_name, self.opts.model, self.opts, form=empty_form) yield { 'name': field_name, 'label': label, 'widget': form_field.widget, 'required': form_field.required, 'help_text': form_field.help_text, } def inline_formset_data(self): verbose_name = self.opts.verbose_name return json.dumps({ 'name': '#%s' % self.formset.prefix, 'options': { 'prefix': self.formset.prefix, 'addText': gettext('Add another %(verbose_name)s') % { 'verbose_name': capfirst(verbose_name), }, 'deleteText': gettext('Remove'), } }) @property def forms(self): return self.formset.forms @property def non_form_errors(self): return self.formset.non_form_errors @property def media(self): media = self.opts.media + self.formset.media for fs in self: media = media + fs.media return media class InlineAdminForm(AdminForm): """ A wrapper around an inline form for use in the admin system. """ def __init__(self, formset, form, fieldsets, prepopulated_fields, original, readonly_fields=None, model_admin=None, view_on_site_url=None): self.formset = formset self.model_admin = model_admin self.original = original self.show_url = original and view_on_site_url is not None self.absolute_url = view_on_site_url super().__init__(form, fieldsets, prepopulated_fields, readonly_fields, model_admin) def __iter__(self): for name, options in self.fieldsets: yield InlineFieldset( self.formset, self.form, name, self.readonly_fields, model_admin=self.model_admin, **options ) def needs_explicit_pk_field(self): return ( # Auto fields are editable, so check for auto or non-editable pk. self.form._meta.model._meta.auto_field or not self.form._meta.model._meta.pk.editable or # Also search any parents for an auto field. (The pk info is # propagated to child models so that does not need to be checked # in parents.) any(parent._meta.auto_field or not parent._meta.model._meta.pk.editable for parent in self.form._meta.model._meta.get_parent_list()) ) def pk_field(self): return AdminField(self.form, self.formset._pk_field.name, False) def fk_field(self): fk = getattr(self.formset, "fk", None) if fk: return AdminField(self.form, fk.name, False) else: return "" def deletion_field(self): from django.forms.formsets import DELETION_FIELD_NAME return AdminField(self.form, DELETION_FIELD_NAME, False) def ordering_field(self): from django.forms.formsets import ORDERING_FIELD_NAME return AdminField(self.form, ORDERING_FIELD_NAME, False) class InlineFieldset(Fieldset): def __init__(self, formset, *args, **kwargs): self.formset = formset super().__init__(*args, **kwargs) def __iter__(self): fk = getattr(self.formset, "fk", None) for field in self.fields: if not fk or fk.name != field: yield Fieldline(self.form, field, self.readonly_fields, model_admin=self.model_admin) class AdminErrorList(forms.utils.ErrorList): """Store errors for the form/formsets in an add/change view.""" def __init__(self, form, inline_formsets): super().__init__() if form.is_bound: self.extend(form.errors.values()) for inline_formset in inline_formsets: self.extend(inline_formset.non_form_errors()) for errors_in_inline_form in inline_formset.errors: self.extend(errors_in_inline_form.values())
{ "content_hash": "2019094f69f61f8926f290de766a62d9", "timestamp": "", "source": "github", "line_count": 407, "max_line_length": 117, "avg_line_length": 38.21375921375921, "alnum_prop": 0.5843245676075355, "repo_name": "simonw/django", "id": "0337c50010257f8a081fa5e0352efd8888619403", "size": "15553", "binary": false, "copies": "14", "ref": "refs/heads/master", "path": "django/contrib/admin/helpers.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "85351" }, { "name": "HTML", "bytes": "227641" }, { "name": "JavaScript", "bytes": "258434" }, { "name": "Makefile", "bytes": "125" }, { "name": "Python", "bytes": "13501540" }, { "name": "Shell", "bytes": "809" }, { "name": "Smarty", "bytes": "142" } ], "symlink_target": "" }
from django.contrib import messages from django.utils.translation import ugettext from registration import signals from registration.backends.simple import SimpleBackend from userprofile import utils class SimpleBackend(SimpleBackend): def get_form_class(self, request): return utils.get_profile_model().registration_form def post_registration_redirect(self, request, user): """ After registration, redirect to the next or otherwise user's absolute url. """ return (self._get_redirect_url(request), (), {}) def _get_redirect_url(self, request): """ Next gathered from session, then GET, then POST, then users absolute url. """ if 'next' in request.session: next_url = request.session['next'] del request.session['next'] elif 'next' in request.GET: next_url = request.GET.get('next') elif 'next' in request.POST: next_url = request.POST.get('next') else: next_url = request.user.get_absolute_url() if not next_url: next_url = '/' return next_url def user_registered(sender, user, request, *args, **kwargs): profile = user.profile # Build from from post form = utils.get_profile_model().registration_form(request.POST) # Username causes clean to fail, remove it. del form.fields['username'] form.full_clean() # Assign cleaned values to user or profile objects. for field, value in form.cleaned_data.items(): if hasattr(user, field): setattr(user, field, value) if hasattr(profile, field): setattr(profile, field, value) user.save() profile.save() msg = ugettext("You have signed up successfully.") messages.success(request, msg, fail_silently=True) signals.user_registered.connect(user_registered)
{ "content_hash": "25a11e2c083c567af087d13de82c9284", "timestamp": "", "source": "github", "line_count": 61, "max_line_length": 68, "avg_line_length": 31.34426229508197, "alnum_prop": 0.6365062761506276, "repo_name": "praekelt/django-userprofile", "id": "03dc26bbf63eb08eb9b0b48c322de3c2a672d943", "size": "1912", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "userprofile/backends/simple/__init__.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "HTML", "bytes": "258" }, { "name": "Python", "bytes": "17725" } ], "symlink_target": "" }
"""Support for Z-Wave cover devices.""" from __future__ import annotations from typing import Any, cast from zwave_js_server.client import Client as ZwaveClient from zwave_js_server.const import TARGET_STATE_PROPERTY, TARGET_VALUE_PROPERTY from zwave_js_server.const.command_class.barrier_operator import BarrierState from zwave_js_server.const.command_class.multilevel_switch import ( COVER_CLOSE_PROPERTY, COVER_DOWN_PROPERTY, COVER_OFF_PROPERTY, COVER_ON_PROPERTY, COVER_OPEN_PROPERTY, COVER_UP_PROPERTY, ) from zwave_js_server.model.driver import Driver from zwave_js_server.model.value import Value as ZwaveValue from homeassistant.components.cover import ( ATTR_POSITION, ATTR_TILT_POSITION, DOMAIN as COVER_DOMAIN, CoverDeviceClass, CoverEntity, CoverEntityFeature, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DATA_CLIENT, DOMAIN from .discovery import ZwaveDiscoveryInfo from .discovery_data_template import CoverTiltDataTemplate from .entity import ZWaveBaseEntity PARALLEL_UPDATES = 0 async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Z-Wave Cover from Config Entry.""" client: ZwaveClient = hass.data[DOMAIN][config_entry.entry_id][DATA_CLIENT] @callback def async_add_cover(info: ZwaveDiscoveryInfo) -> None: """Add Z-Wave cover.""" driver = client.driver assert driver is not None # Driver is ready before platforms are loaded. entities: list[ZWaveBaseEntity] = [] if info.platform_hint == "motorized_barrier": entities.append(ZwaveMotorizedBarrier(config_entry, driver, info)) elif info.platform_hint == "window_shutter_tilt": entities.append(ZWaveTiltCover(config_entry, driver, info)) else: entities.append(ZWaveCover(config_entry, driver, info)) async_add_entities(entities) config_entry.async_on_unload( async_dispatcher_connect( hass, f"{DOMAIN}_{config_entry.entry_id}_add_{COVER_DOMAIN}", async_add_cover, ) ) def percent_to_zwave_position(value: int) -> int: """Convert position in 0-100 scale to 0-99 scale. `value` -- (int) Position byte value from 0-100. """ if value > 0: return max(1, round((value / 100) * 99)) return 0 def percent_to_zwave_tilt(value: int) -> int: """Convert position in 0-100 scale to 0-99 scale. `value` -- (int) Position byte value from 0-100. """ if value > 0: return round((value / 100) * 99) return 0 def zwave_tilt_to_percent(value: int) -> int: """Convert 0-99 scale to position in 0-100 scale. `value` -- (int) Position byte value from 0-99. """ if value > 0: return round((value / 99) * 100) return 0 class ZWaveCover(ZWaveBaseEntity, CoverEntity): """Representation of a Z-Wave Cover device.""" def __init__( self, config_entry: ConfigEntry, driver: Driver, info: ZwaveDiscoveryInfo, ) -> None: """Initialize a ZWaveCover entity.""" super().__init__(config_entry, driver, info) # Entity class attributes self._attr_device_class = CoverDeviceClass.WINDOW if self.info.platform_hint in ("window_shutter", "window_shutter_tilt"): self._attr_device_class = CoverDeviceClass.SHUTTER if self.info.platform_hint == "window_blind": self._attr_device_class = CoverDeviceClass.BLIND @property def is_closed(self) -> bool | None: """Return true if cover is closed.""" if self.info.primary_value.value is None: # guard missing value return None return bool(self.info.primary_value.value == 0) @property def current_cover_position(self) -> int | None: """Return the current position of cover where 0 means closed and 100 is fully open.""" if self.info.primary_value.value is None: # guard missing value return None return round((self.info.primary_value.value / 99) * 100) async def async_set_cover_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" target_value = self.get_zwave_value(TARGET_VALUE_PROPERTY) assert target_value is not None await self.info.node.async_set_value( target_value, percent_to_zwave_position(kwargs[ATTR_POSITION]) ) async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" target_value = self.get_zwave_value(TARGET_VALUE_PROPERTY) assert target_value is not None await self.info.node.async_set_value(target_value, 99) async def async_close_cover(self, **kwargs: Any) -> None: """Close cover.""" target_value = self.get_zwave_value(TARGET_VALUE_PROPERTY) assert target_value is not None await self.info.node.async_set_value(target_value, 0) async def async_stop_cover(self, **kwargs: Any) -> None: """Stop cover.""" open_value = ( self.get_zwave_value(COVER_OPEN_PROPERTY) or self.get_zwave_value(COVER_UP_PROPERTY) or self.get_zwave_value(COVER_ON_PROPERTY) ) if open_value: # Stop the cover if it's opening await self.info.node.async_set_value(open_value, False) close_value = ( self.get_zwave_value(COVER_CLOSE_PROPERTY) or self.get_zwave_value(COVER_DOWN_PROPERTY) or self.get_zwave_value(COVER_OFF_PROPERTY) ) if close_value: # Stop the cover if it's closing await self.info.node.async_set_value(close_value, False) class ZWaveTiltCover(ZWaveCover): """Representation of a Z-Wave Cover device with tilt.""" _attr_supported_features = ( CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE | CoverEntityFeature.STOP | CoverEntityFeature.SET_POSITION | CoverEntityFeature.OPEN_TILT | CoverEntityFeature.CLOSE_TILT | CoverEntityFeature.SET_TILT_POSITION ) def __init__( self, config_entry: ConfigEntry, driver: Driver, info: ZwaveDiscoveryInfo, ) -> None: """Initialize a ZWaveCover entity.""" super().__init__(config_entry, driver, info) self.data_template = cast( CoverTiltDataTemplate, self.info.platform_data_template ) @property def current_cover_tilt_position(self) -> int | None: """Return current position of cover tilt. None is unknown, 0 is closed, 100 is fully open. """ value = self.data_template.current_tilt_value(self.info.platform_data) if value is None or value.value is None: return None return zwave_tilt_to_percent(int(value.value)) async def async_set_cover_tilt_position(self, **kwargs: Any) -> None: """Move the cover tilt to a specific position.""" tilt_value = self.data_template.current_tilt_value(self.info.platform_data) if tilt_value: await self.info.node.async_set_value( tilt_value, percent_to_zwave_tilt(kwargs[ATTR_TILT_POSITION]), ) async def async_open_cover_tilt(self, **kwargs: Any) -> None: """Open the cover tilt.""" await self.async_set_cover_tilt_position(tilt_position=100) async def async_close_cover_tilt(self, **kwargs: Any) -> None: """Close the cover tilt.""" await self.async_set_cover_tilt_position(tilt_position=0) class ZwaveMotorizedBarrier(ZWaveBaseEntity, CoverEntity): """Representation of a Z-Wave motorized barrier device.""" _attr_supported_features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE _attr_device_class = CoverDeviceClass.GARAGE def __init__( self, config_entry: ConfigEntry, driver: Driver, info: ZwaveDiscoveryInfo, ) -> None: """Initialize a ZwaveMotorizedBarrier entity.""" super().__init__(config_entry, driver, info) # TARGET_STATE_PROPERTY is required in the discovery schema. self._target_state = cast( ZwaveValue, self.get_zwave_value(TARGET_STATE_PROPERTY, add_to_watched_value_ids=False), ) @property def is_opening(self) -> bool | None: """Return if the cover is opening or not.""" if self.info.primary_value.value is None: return None return bool(self.info.primary_value.value == BarrierState.OPENING) @property def is_closing(self) -> bool | None: """Return if the cover is closing or not.""" if self.info.primary_value.value is None: return None return bool(self.info.primary_value.value == BarrierState.CLOSING) @property def is_closed(self) -> bool | None: """Return if the cover is closed or not.""" if self.info.primary_value.value is None: return None # If a barrier is in the stopped state, the only way to proceed is by # issuing an open cover command. Return None in this case which # produces an unknown state and allows it to be resolved with an open # command. if self.info.primary_value.value == BarrierState.STOPPED: return None return bool(self.info.primary_value.value == BarrierState.CLOSED) async def async_open_cover(self, **kwargs: Any) -> None: """Open the garage door.""" await self.info.node.async_set_value(self._target_state, BarrierState.OPEN) async def async_close_cover(self, **kwargs: Any) -> None: """Close the garage door.""" await self.info.node.async_set_value(self._target_state, BarrierState.CLOSED)
{ "content_hash": "ad747fea659f3a23de5d1c599ff910aa", "timestamp": "", "source": "github", "line_count": 286, "max_line_length": 94, "avg_line_length": 35.50699300699301, "alnum_prop": 0.6415558838010832, "repo_name": "w1ll1am23/home-assistant", "id": "b3f3aeaf1c04814dffee2a1a48654d1098946828", "size": "10155", "binary": false, "copies": "2", "ref": "refs/heads/dev", "path": "homeassistant/components/zwave_js/cover.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "2963" }, { "name": "PLSQL", "bytes": "840" }, { "name": "Python", "bytes": "52277012" }, { "name": "Shell", "bytes": "6252" } ], "symlink_target": "" }
from urlparse import urljoin from urllib import urlencode, urlopen from django.db import models from django.conf import settings from django.core.exceptions import ObjectDoesNotExist from cas.exceptions import CasTicketException, CasConfigException # Ed Crewe - add in signals to delete old tickets from django.db.models.signals import post_save from datetime import datetime class Tgt(models.Model): username = models.CharField(max_length = 255, unique = True) tgt = models.CharField(max_length = 255) def get_proxy_ticket_for(self, service): """Verifies CAS 2.0+ XML-based authentication ticket. Returns username on success and None on failure. """ if not settings.CAS_PROXY_CALLBACK: raise CasConfigException("No proxy callback set in settings") try: from xml.etree import ElementTree except ImportError: from elementtree import ElementTree params = {'pgt': self.tgt, 'targetService': service} url = (urljoin(settings.CAS_SERVER_URL, 'proxy') + '?' + urlencode(params)) page = urlopen(url) try: response = page.read() tree = ElementTree.fromstring(response) if tree[0].tag.endswith('proxySuccess'): return tree[0][0].text else: raise CasTicketException('Failed to get proxy ticket: %s' % \ tree[0].text.strip()) finally: page.close() class PgtIOU(models.Model): """ Proxy granting ticket and IOU """ pgtIou = models.CharField(max_length = 255, unique = True) tgt = models.CharField(max_length = 255) created = models.DateTimeField(auto_now = True) def get_tgt_for(user): if not settings.CAS_PROXY_CALLBACK: raise CasConfigException("No proxy callback set in settings") try: return Tgt.objects.get(username = user.username) except ObjectDoesNotExist: raise CasTicketException("no ticket found for user " + user.username) def delete_old_tickets(**kwargs): """ Delete tickets if they are over 2 days old kwargs = ['raw', 'signal', 'instance', 'sender', 'created'] """ sender = kwargs.get('sender', None) now = datetime.now() expire = datetime(now.year, now.month, now.day - 2) sender.objects.filter(created__lt=expire).delete() post_save.connect(delete_old_tickets, sender=PgtIOU)
{ "content_hash": "0955105513fc22595a60cf56c510c7ac", "timestamp": "", "source": "github", "line_count": 75, "max_line_length": 77, "avg_line_length": 32.89333333333333, "alnum_prop": 0.6445074989866234, "repo_name": "divio/django-cas", "id": "480c8bb187eee3aeae1de19fd6720c238260625f", "size": "2467", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "cas/models.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "36092" } ], "symlink_target": "" }
"""Example for aiohttp.web websocket server """ import asyncio import os from aiohttp.web import (Application, Response, WebSocketResponse, WSMsgType, run_app) WS_FILE = os.path.join(os.path.dirname(__file__), 'websocket.html') async def wshandler(request): resp = WebSocketResponse() ok, protocol = resp.can_prepare(request) if not ok: with open(WS_FILE, 'rb') as fp: return Response(body=fp.read(), content_type='text/html') await resp.prepare(request) try: print('Someone joined.') for ws in request.app['sockets']: ws.send_str('Someone joined') request.app['sockets'].append(resp) async for msg in resp: if msg.type == WSMsgType.TEXT: for ws in request.app['sockets']: if ws is not resp: ws.send_str(msg.data) else: return resp return resp finally: request.app['sockets'].remove(resp) print('Someone disconnected.') for ws in request.app['sockets']: ws.send_str('Someone disconnected.') async def on_shutdown(app): for ws in app['sockets']: await ws.close() async def init(loop): app = Application(loop=loop) app['sockets'] = [] app.router.add_get('/', wshandler) app.on_shutdown.append(on_shutdown) return app loop = asyncio.get_event_loop() app = loop.run_until_complete(init(loop)) run_app(app)
{ "content_hash": "ae61f0bc5914af1047cd931d294f3f31", "timestamp": "", "source": "github", "line_count": 59, "max_line_length": 77, "avg_line_length": 25.52542372881356, "alnum_prop": 0.5876494023904383, "repo_name": "esaezgil/aiohttp", "id": "6456230dc2d060b067f3ad49941db54b83dd9378", "size": "1529", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "examples/web_ws.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "838" }, { "name": "CSS", "bytes": "112" }, { "name": "HTML", "bytes": "4885" }, { "name": "Makefile", "bytes": "3073" }, { "name": "PLpgSQL", "bytes": "765" }, { "name": "Python", "bytes": "1076160" }, { "name": "Shell", "bytes": "2298" } ], "symlink_target": "" }
import argparse import socket import sys from socket import gethostbyaddr, gethostbyname_ex DESCRIPTION = \ "Queries for known hostnames/IP addresses of the given\ hostname/IP argument(s)\ " class memoize(object): """Decorator that caches a function's return value each time it is called. If called later with the same arguments, the cached value is returned, and not re-evaluated. """ # http://wiki.python.org/moin/PythonDecoratorLibrary#Memoize def __init__(self, func): self.func = func self.cache = {} def __call__(self, *args): try: return self.cache[args] except KeyError: value = self.func(*args) self.cache[args] = value return value except TypeError: # uncachable -- for instance, passing a list as an argument. # Better to not cache than to blow up entirely. return self.func(*args) def __repr__(self): """Return the function's docstring.""" return self.func.__doc__ def __get__(self, obj, objtype): """Support instance methods.""" return functools.partial(self.__call__, obj) def _is_ip(addr): """Attempt to identify if @addr is a valid IP4/6 address.""" try: socket.inet_pton(socket.AF_INET6, addr) return True except: try: socket.inet_pton(socket.AF_INET, addr) return True except: return False @memoize def _related_names(addr): fn = socket.gethostbyaddr if _is_ip(addr) else socket.gethostbyname_ex try: (cname, aliases, ips) = fn(addr) aliases.append(cname) aliases.extend(ips) return set(aliases) except (socket.herror, socket.gaierror): # herror raised for certain IP addresses that don't have rDNS # gaierror raised for .arpa addresses return set() def names_for_servers(addr_list, show_arpa=False): result = set() for addr in addr_list: result.update(server_names(addr, show_arpa)) return result def server_names(addr, show_arpa=False): """Obtain all names that are associated with a given address. Keyword arguments: addr -- the address to explore (as IP or hostname) show_arpa -- include .arpa addresses (boolean, defaults to False) Returns a set containing the explored IPs and hostnames. """ names = set([addr]) # track if new entries need to be looked at for related names new_entries = True while new_entries: aliases = set() for ent in names: aliases.update(_related_names(ent)) old_size = len(names) names.update(aliases) new_size = len(names) new_entries = old_size != new_size if not show_arpa: names = set([n for n in names if not n.endswith('.arpa')]) return names def main(argv): # setup and parse arguments parser = argparse.ArgumentParser(description=DESCRIPTION) parser.add_argument('-a', '--arpa', action='store_true', help='Display .arpa addresses') parser.add_argument('addresses', metavar='ADDRESS', type=str, nargs='+', help='an address to lookup') # create namespace with args if len(argv) == 0: parser.print_help() sys.exit(0) ns = parser.parse_args(args=argv) for addr in ns.addresses: names = server_names(addr, show_arpa=ns.arpa) sys.stdout.write('%s is known as:\n\n' % addr) for name in names: sys.stdout.write('\t%s\n' % name) sys.stdout.write('\n\n') if __name__ == '__main__': main(sys.argv[1:])
{ "content_hash": "0c0ad1e324f8acb201576f7e8bb7f277", "timestamp": "", "source": "github", "line_count": 131, "max_line_length": 78, "avg_line_length": 28.251908396946565, "alnum_prop": 0.6052418265333693, "repo_name": "axtl/allookup", "id": "163744306482deab6be3400fa877f051addd4117", "size": "4876", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "allookup.py", "mode": "33261", "license": "mit", "language": [ { "name": "Python", "bytes": "4876" } ], "symlink_target": "" }
from __future__ import absolute_import import sys from unittest import TestCase as BaseTestCase from uitools import trampoline from uitools.qt import QtCore, QtGui, Qt from uitools.trampoline import bounce, sleep, qpath from mayatools.test import requires_maya try: from maya import cmds, mel except ImportError: class Stub(object): cmds = None mel = None utils = None standalone = None maya = Stub() sys.modules['maya'] = maya sys.modules['maya.cmds'] = None sys.modules['maya.mel'] = None sys.modules['maya.utils'] = None sys.modules['maya.standalone'] = None cmds = Stub() has_maya = False else: has_maya = True class TestCase(BaseTestCase): @requires_maya def setUp(self): cmds.file(new=True, force=True)
{ "content_hash": "915406ded7871daf630fd42f69a81071", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 51, "avg_line_length": 21.102564102564102, "alnum_prop": 0.6537059538274606, "repo_name": "westernx/mayatools", "id": "ecb1eb94e4ba8fe8d67d3dcfd758936bf3af5ff5", "size": "823", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/common.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C++", "bytes": "23494" }, { "name": "Makefile", "bytes": "2255" }, { "name": "Python", "bytes": "233847" } ], "symlink_target": "" }
""" Constants useful across modules. """ import pandas as pd # Record some data from the GTFS reference at # https://developers.google.com/transit/gtfs/reference/ columns = ["table", "table_required", "column", "column_required", "dtype"] rows = [ ["agency", True, "agency_id", False, "str"], ["agency", True, "agency_name", True, "str"], ["agency", True, "agency_url", True, "str"], ["agency", True, "agency_timezone", True, "str"], ["agency", True, "agency_lang", False, "str"], ["agency", True, "agency_phone", False, "str"], ["agency", True, "agency_fare_url", False, "str"], ["agency", True, "agency_email", False, "str"], ["calendar", False, "service_id", True, "str"], ["calendar", False, "monday", True, "int"], ["calendar", False, "tuesday", True, "int"], ["calendar", False, "wednesday", True, "int"], ["calendar", False, "thursday", True, "int"], ["calendar", False, "friday", True, "int"], ["calendar", False, "saturday", True, "int"], ["calendar", False, "sunday", True, "int"], ["calendar", False, "start_date", True, "str"], ["calendar", False, "end_date", True, "str"], ["calendar_dates", False, "service_id", True, "str"], ["calendar_dates", False, "date", True, "str"], ["calendar_dates", False, "exception_type", True, "int"], ["fare_attributes", False, "fare_id", True, "str"], ["fare_attributes", False, "price", True, "float"], ["fare_attributes", False, "currency_type", True, "str"], ["fare_attributes", False, "payment_method", True, "int"], ["fare_attributes", False, "transfers", True, "int"], ["fare_attributes", False, "transfer_duration", False, "int"], ["fare_rules", False, "fare_id", True, "str"], ["fare_rules", False, "route_id", False, "str"], ["fare_rules", False, "origin_id", False, "str"], ["fare_rules", False, "destination_id", False, "str"], ["fare_rules", False, "contains_id", False, "str"], ["feed_info", False, "feed_publisher_name", True, "str"], ["feed_info", False, "feed_publisher_url", True, "str"], ["feed_info", False, "feed_lang", True, "str"], ["feed_info", False, "feed_start_date", False, "str"], ["feed_info", False, "feed_end_date", False, "str"], ["feed_info", False, "feed_version", False, "str"], ["frequencies", False, "trip_id", True, "str"], ["frequencies", False, "start_time", True, "str"], ["frequencies", False, "end_time", True, "str"], ["frequencies", False, "headway_secs", True, "int"], ["frequencies", False, "exact_times", False, "int"], ["routes", True, "route_id", True, "str"], ["routes", True, "agency_id", False, "str"], ["routes", True, "route_short_name", True, "str"], ["routes", True, "route_long_name", True, "str"], ["routes", True, "route_desc", False, "str"], ["routes", True, "route_type", True, "int"], ["routes", True, "route_url", False, "str"], ["routes", True, "route_color", False, "str"], ["routes", True, "route_text_color", False, "str"], ["shapes", False, "shape_id", True, "str"], ["shapes", False, "shape_pt_lat", True, "float"], ["shapes", False, "shape_pt_lon", True, "float"], ["shapes", False, "shape_pt_sequence", True, "int"], ["shapes", False, "shape_dist_traveled", False, "float"], ["stops", True, "stop_id", True, "str"], ["stops", True, "stop_code", False, "str"], ["stops", True, "stop_name", True, "str"], ["stops", True, "stop_desc", False, "str"], ["stops", True, "stop_lat", True, "float"], ["stops", True, "stop_lon", True, "float"], ["stops", True, "zone_id", False, "str"], ["stops", True, "stop_url", False, "str"], ["stops", True, "location_type", False, "int"], ["stops", True, "parent_station", False, "str"], ["stops", True, "stop_timezone", False, "str"], ["stops", True, "wheelchair_boarding", False, "int"], ["stop_times", True, "trip_id", True, "str"], ["stop_times", True, "arrival_time", True, "str"], ["stop_times", True, "departure_time", True, "str"], ["stop_times", True, "stop_id", True, "str"], ["stop_times", True, "stop_sequence", True, "int"], ["stop_times", True, "stop_headsign", False, "str"], ["stop_times", True, "pickup_type", False, "int"], ["stop_times", True, "drop_off_type", False, "int"], ["stop_times", True, "shape_dist_traveled", False, "float"], ["stop_times", True, "timepoint", False, "int"], ["transfers", False, "from_stop_id", True, "str"], ["transfers", False, "to_stop_id", True, "str"], ["transfers", False, "transfer_type", True, "int"], ["transfers", False, "min_transfer_time", False, "int"], ["trips", True, "route_id", True, "str"], ["trips", True, "service_id", True, "str"], ["trips", True, "trip_id", True, "str"], ["trips", True, "trip_headsign", False, "str"], ["trips", True, "trip_short_name", False, "str"], ["trips", True, "direction_id", False, "int"], ["trips", True, "block_id", False, "str"], ["trips", True, "shape_id", False, "str"], ["trips", True, "wheelchair_accessible", False, "int"], ["trips", True, "bikes_allowed", False, "int"], ] GTFS_REF = pd.DataFrame(rows, columns=columns) #: Columns that must be formatted as integers when outputting GTFS INT_COLS = GTFS_REF.loc[GTFS_REF["dtype"] == "int", "column"].values.tolist() #: Columns that must be read as strings by Pandas STR_COLS = GTFS_REF.loc[GTFS_REF["dtype"] == "str", "column"].values.tolist() DTYPE = {col: str for col in STR_COLS} #: Valid distance units DIST_UNITS = ["ft", "mi", "m", "km"] #: Primary feed attributes FEED_ATTRS_1 = [ "agency", "calendar", "calendar_dates", "fare_attributes", "fare_rules", "feed_info", "frequencies", "routes", "shapes", "stops", "stop_times", "trips", "transfers", "dist_units", ] #: Secondary feed attributes; derived from primary ones FEED_ATTRS_2 = ["_trips_i", "_calendar_i", "_calendar_dates_g"] #: FEED_ATTRS = FEED_ATTRS_1 + FEED_ATTRS_2 #: WGS84 coordinate reference system for Geopandas WGS84 = {"init": "epsg:4326"} #: Colorbrewer 8-class Set2 colors COLORS_SET2 = [ "#66c2a5", "#fc8d62", "#8da0cb", "#e78ac3", "#a6d854", "#ffd92f", "#e5c494", "#b3b3b3", ]
{ "content_hash": "2e6324bd749a524863cd3f35206c7c0d", "timestamp": "", "source": "github", "line_count": 155, "max_line_length": 77, "avg_line_length": 40.825806451612905, "alnum_prop": 0.5756953223767383, "repo_name": "araichev/gtfstk", "id": "1a9d3b8a0b1dca017bfcb0d7edd84affb508953f", "size": "6328", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "gtfstk/constants.py", "mode": "33188", "license": "mit", "language": [ { "name": "Jupyter Notebook", "bytes": "372162" }, { "name": "Python", "bytes": "266045" } ], "symlink_target": "" }
from __future__ import annotations from collections.abc import Callable, Iterable, Iterator, Mapping from typing import AnyStr, BinaryIO, IO, Optional, TypeVar, overload import xml.etree.ElementTree as ET from xml.sax.saxutils import escape, quoteattr from .util import itemize T = TypeVar("T") @overload def load_xml(fp: IO) -> dict[str, str]: ... @overload def load_xml(fp: IO, object_pairs_hook: type[T]) -> T: ... @overload def load_xml(fp: IO, object_pairs_hook: Callable[[Iterator[tuple[str, str]]], T]) -> T: ... def load_xml(fp, object_pairs_hook=dict): # type: ignore[no-untyped-def] r""" Parse the contents of the file-like object ``fp`` as an XML properties file and return a `dict` of the key-value pairs. Beyond basic XML well-formedness, `load_xml` only checks that the root element is named "``properties``" and that all of its ``<entry>`` children have ``key`` attributes. No further validation is performed; if any ``<entry>``\s happen to contain nested tags, the behavior is undefined. By default, the key-value pairs extracted from ``fp`` are combined into a `dict` with later occurrences of a key overriding previous occurrences of the same key. To change this behavior, pass a callable as the ``object_pairs_hook`` argument; it will be called with one argument, a generator of ``(key, value)`` pairs representing the key-value entries in ``fp`` (including duplicates) in order of occurrence. `load_xml` will then return the value returned by ``object_pairs_hook``. :param IO fp: the file from which to read the XML properties document :param callable object_pairs_hook: class or function for combining the key-value pairs :rtype: `dict` or the return value of ``object_pairs_hook`` :raises ValueError: if the root of the XML tree is not a ``<properties>`` tag or an ``<entry>`` element is missing a ``key`` attribute """ tree = ET.parse(fp) return object_pairs_hook(_fromXML(tree.getroot())) @overload def loads_xml(s: AnyStr) -> dict[str, str]: ... @overload def loads_xml(fp: IO, object_pairs_hook: type[T]) -> T: ... @overload def loads_xml( s: AnyStr, object_pairs_hook: Callable[[Iterator[tuple[str, str]]], T] ) -> T: ... def loads_xml(s, object_pairs_hook=dict): # type: ignore[no-untyped-def] r""" Parse the contents of the string ``s`` as an XML properties document and return a `dict` of the key-value pairs. Beyond basic XML well-formedness, `loads_xml` only checks that the root element is named "``properties``" and that all of its ``<entry>`` children have ``key`` attributes. No further validation is performed; if any ``<entry>``\s happen to contain nested tags, the behavior is undefined. By default, the key-value pairs extracted from ``s`` are combined into a `dict` with later occurrences of a key overriding previous occurrences of the same key. To change this behavior, pass a callable as the ``object_pairs_hook`` argument; it will be called with one argument, a generator of ``(key, value)`` pairs representing the key-value entries in ``s`` (including duplicates) in order of occurrence. `loads_xml` will then return the value returned by ``object_pairs_hook``. :param Union[str,bytes] s: the string from which to read the XML properties document :param callable object_pairs_hook: class or function for combining the key-value pairs :rtype: `dict` or the return value of ``object_pairs_hook`` :raises ValueError: if the root of the XML tree is not a ``<properties>`` tag or an ``<entry>`` element is missing a ``key`` attribute """ elem = ET.fromstring(s) return object_pairs_hook(_fromXML(elem)) def _fromXML(root: ET.Element) -> Iterator[tuple[str, str]]: if root.tag != "properties": raise ValueError("XML tree is not rooted at <properties>") for entry in root.findall("entry"): key = entry.get("key") if key is None: raise ValueError('<entry> is missing "key" attribute') yield (key, entry.text or "") def dump_xml( props: Mapping[str, str] | Iterable[tuple[str, str]], fp: BinaryIO, comment: Optional[str] = None, encoding: str = "UTF-8", sort_keys: bool = False, ) -> None: """ Write a series ``props`` of key-value pairs to a binary filehandle ``fp`` in the format of an XML properties file. The file will include both an XML declaration and a doctype declaration. :param props: A mapping or iterable of ``(key, value)`` pairs to write to ``fp``. All keys and values in ``props`` must be `str` values. If ``sort_keys`` is `False`, the entries are output in iteration order. :param BinaryIO fp: a file-like object to write the values of ``props`` to :param Optional[str] comment: if non-`None`, ``comment`` will be output as a ``<comment>`` element before the ``<entry>`` elements :param str encoding: the name of the encoding to use for the XML document (also included in the XML declaration) :param bool sort_keys: if true, the elements of ``props`` are sorted lexicographically by key in the output :return: `None` """ # This gives type errors <https://github.com/python/typeshed/issues/4793>: # fptxt = codecs.lookup(encoding).streamwriter(fp, errors='xmlcharrefreplace') # print('<?xml version="1.0" encoding={0} standalone="no"?>' # .format(quoteattr(encoding)), file=fptxt) # for s in _stream_xml(props, comment, sort_keys): # print(s, file=fptxt) fp.write( '<?xml version="1.0" encoding={0} standalone="no"?>\n'.format( quoteattr(encoding) ).encode(encoding, "xmlcharrefreplace") ) for s in _stream_xml(props, comment, sort_keys): fp.write((s + "\n").encode(encoding, "xmlcharrefreplace")) def dumps_xml( props: Mapping[str, str] | Iterable[tuple[str, str]], comment: Optional[str] = None, sort_keys: bool = False, ) -> str: """ Convert a series ``props`` of key-value pairs to a `str` containing an XML properties document. The document will include a doctype declaration but not an XML declaration. :param props: A mapping or iterable of ``(key, value)`` pairs to serialize. All keys and values in ``props`` must be `str` values. If ``sort_keys`` is `False`, the entries are output in iteration order. :param Optional[str] comment: if non-`None`, ``comment`` will be output as a ``<comment>`` element before the ``<entry>`` elements :param bool sort_keys: if true, the elements of ``props`` are sorted lexicographically by key in the output :rtype: str """ return "".join(s + "\n" for s in _stream_xml(props, comment, sort_keys)) def _stream_xml( props: Mapping[str, str] | Iterable[tuple[str, str]], comment: Optional[str] = None, sort_keys: bool = False, ) -> Iterator[str]: yield '<!DOCTYPE properties SYSTEM "http://java.sun.com/dtd/properties.dtd">' yield "<properties>" if comment is not None: yield "<comment>" + escape(comment) + "</comment>" for k, v in itemize(props, sort_keys=sort_keys): yield "<entry key={0}>{1}</entry>".format(quoteattr(k), escape(v)) yield "</properties>"
{ "content_hash": "e17952252587e719343fd2d956fabba2", "timestamp": "", "source": "github", "line_count": 184, "max_line_length": 87, "avg_line_length": 40.22826086956522, "alnum_prop": 0.6590110780870035, "repo_name": "jwodder/javaproperties", "id": "17f6eda5c6f760cac59eaa86193cde52ae558d86", "size": "7402", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/javaproperties/xmlprops.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "178800" } ], "symlink_target": "" }
import sys, paramiko, time, re, signal, os, getpass, argparse from subprocess import Popen, PIPE from ethip import ethip def stop(signal, frame): print exit(1) def host_exec(command, channel): if channel.recv_ready(): channel.recv(2048) channel.send('%s\n' % command) result = '' while not prompt in result: time.sleep(.1) if channel.recv_ready(): result += channel.recv(2048) return '\n'.join(result.split('\n')[1:-1]).strip() if __name__ == "__main__": parser_epilog = ("Example:\n\n" "%s root 10.5.42.3 10.5.42.255 10.5.45.255\n\n" "This command will establish an ssh session with the host 10.5.42.3 with the username root, then search for vmware VMs running on the host and search for their ip addresses via mac address in the subnets 10.5.42.x and 10.5.45.x. It then prompts to connect to one of the VMs. It attempts to start the VM if not running." % sys.argv[0]) parser = argparse.ArgumentParser(description="Remotely start vmware virtual machines and locate their ip address", epilog=parser_epilog, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument("username", help="The username for logging into the vm host") parser.add_argument("hostname", help="The hostname or ip address of the vm host") parser.add_argument("subnets", nargs='+', help="The subnets to search for the VMs ip addresses") parser.add_argument("--searchpath", help="Specify the search path for vmx files, default is the signed in user's homedir", action="append") args = parser.parse_args() user = args.username host = args.hostname subnets = args.subnets searchpaths = ['$HOME'] if args.searchpath: searchpaths = args.searchpath signal.signal(signal.SIGINT, stop) ssh = paramiko.SSHClient() ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) ssh.connect(host, username=user, password=getpass.getpass('Password: ')) channel = ssh.invoke_shell() time.sleep(.3) channel.recv(2048) channel.send('PS1=\'PROMPT>>\'\n') time.sleep(.3) channel.recv(2048) prompt = 'PROMPT>>' ether = {} for path in searchpaths: vmxs = host_exec('find %s -name *.vmx' % path, channel).split('\n') for vmx in vmxs: try: mac = host_exec('cat "%s" | grep "ethernet0.generatedAddress ="' % vmx.strip(), channel).strip().split()[-1][1:-1] ether[mac.lower()] = [vmx.strip().split('/')[-1].strip('.vmx'), None, vmx.strip()] except: continue ips = [(ether[key] + [key]) for key in ether.iterkeys()] hostname = host_exec('hostname', channel).strip().split('.')[0] print "\n\t0: %s%s" % (("%s (VMWare host)" % hostname).ljust(40), host) count = 1 for machine in ips: print "\t%d: %s%s" % (count, machine[0].ljust(40), machine[1] or machine[3]) count += 1 try: selection = int(raw_input('\nWhich machine would you like to connect to? '))-1 ips[selection] except: exit(0) # Connect to the host machine? if selection == -1: print '# ssh %s@%s' % (user, host) Popen(['/usr/bin/ssh', '%s@%s' % (user, host)]).wait() exit(0) # Make sure the machine is on print 'Turning on the machine...' error = host_exec('vmrun start "%s" nogui' % ips[selection][2], channel) if error and 'The file is already in use' not in error: print error if not ips[selection][1]: print 'Discovering IP address...' for subnet in subnets: resp = ethip.getip(ips[selection][3], subnet) if resp: ips[selection][1] = resp break if ips[selection][1]: print '# ssh %s@%s' % (user, ips[selection][1]) Popen(['/usr/bin/ssh', '%s@%s' % (user, ips[selection][1])]).wait() choice = raw_input('\nSuspend the machine (y/N)? ') + ' ' if choice.lower()[0] == 'y': print 'Suspending the machine...' host_exec('nohup vmrun suspend "%s" &' % ips[selection][2], channel) else: print 'IP address could not be found'
{ "content_hash": "07ae6dc232efc90db1e8585caaaaebe4", "timestamp": "", "source": "github", "line_count": 104, "max_line_length": 338, "avg_line_length": 40.27884615384615, "alnum_prop": 0.6068274051086178, "repo_name": "DavidMulder/vmsh", "id": "03879ece1cf901ebf8452c27f497e8565b544bc3", "size": "4207", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vmsh.py", "mode": "33261", "license": "mit", "language": [ { "name": "Python", "bytes": "4207" } ], "symlink_target": "" }
""" Processing tools for main content. """ from urllib.parse import urlunsplit, urlparse from collections import namedtuple from bs4 import BeautifulSoup # Compatible with tuples form urllib.parse URL = namedtuple('URL', 'scheme netloc path query fragment') URL.__new__.__defaults__ = ('http', '', '', '', '', '') def clean_discourse_html(html, base_url): base_url_parts = urlparse(base_url) soup = BeautifulSoup(html, 'html.parser') for link_tag in soup.find_all('a'): try: link_tag['href'] = make_absolute_link(link_tag['href'], base_url_parts) except KeyError: print('Missing href key in HTML cleanup; skipping') for link_tag in soup.find_all('img'): try: link_tag['src'] = make_absolute_link(link_tag['src'], base_url_parts) except KeyError: print('Missing src key in HTML cleanup; skipping') return soup.prettify() def make_absolute_link(discourse_link, base_url_parts): """Make any link from Discourse into an Absolute URL. Discourse provides link with a '//' prefix (e.g. //community.lsst.org/path) or relative, e.g. (/users/jsick). """ url_parts = urlparse(discourse_link, scheme='http') # reconstruct URL if len(url_parts.netloc) == 0: # relative (local) url; add the scheme and netloc return urlunsplit(URL(scheme=url_parts.scheme, netloc=base_url_parts.netloc, path=url_parts.path)) else: # treat as a global URL # this roundtrip process ensures the scheme is present return urlunsplit(URL(scheme=url_parts.scheme, netloc=url_parts.netloc, path=url_parts.path))
{ "content_hash": "14856f72a47771904f8c2cc9a287b9c4", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 67, "avg_line_length": 34.10909090909091, "alnum_prop": 0.5772921108742004, "repo_name": "lsst-sqre/community_mailbot", "id": "efaa59ae843840c840521fa590c6fef309ab2ecd", "size": "1894", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "community_mailbot/contentpipe.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "22249" } ], "symlink_target": "" }
from tastypie import fields from spa.api.v1.BaseResource import BaseResource from spa.models.release import ReleaseAudio class ReleaseAudioResource(BaseResource): release = fields.ToOneField('spa.api.v1.ReleaseResource.ReleaseResource', 'release') class Meta: queryset = ReleaseAudio.objects.all() resource_name = 'audio' filtering = { "release": ('exact',), } def dehydrate(self, bundle): bundle.data['waveform_url'] = bundle.obj.get_waveform_url() return bundle
{ "content_hash": "60016a7aa064dd011af3bd96f4cc847a", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 88, "avg_line_length": 32.8235294117647, "alnum_prop": 0.6523297491039427, "repo_name": "fergalmoran/dss", "id": "55f25abbaba790dacfee798acb0cae7dee627944", "size": "558", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spa/api/v1/ReleaseAudioResource.py", "mode": "33261", "license": "bsd-2-clause", "language": [ { "name": "CSS", "bytes": "1335630" }, { "name": "CoffeeScript", "bytes": "91082" }, { "name": "JavaScript", "bytes": "3576558" }, { "name": "Python", "bytes": "1543569" } ], "symlink_target": "" }
from unittest import TestCase, mock from airflow.providers.google.marketing_platform.sensors.search_ads import GoogleSearchAdsReportSensor API_VERSION = "api_version" GCP_CONN_ID = "google_cloud_default" class TestSearchAdsReportSensor(TestCase): @mock.patch( "airflow.providers.google.marketing_platform.sensors." "search_ads.GoogleSearchAdsHook" ) @mock.patch( "airflow.providers.google.marketing_platform.sensors." "search_ads.BaseSensorOperator" ) def test_poke(self, mock_base_op, hook_mock): report_id = "REPORT_ID" op = GoogleSearchAdsReportSensor( report_id=report_id, api_version=API_VERSION, task_id="test_task" ) op.poke(context=None) hook_mock.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, delegate_to=None, api_version=API_VERSION ) hook_mock.return_value.get.assert_called_once_with(report_id=report_id)
{ "content_hash": "495facd3ac7ceef3aedd2ddaa14f65c1", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 102, "avg_line_length": 35.48148148148148, "alnum_prop": 0.6837160751565762, "repo_name": "mtagle/airflow", "id": "898b8945023e8d42e936fe16d8ae7bd15051129e", "size": "1745", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "tests/providers/google/marketing_platform/sensors/test_search_ads.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "13715" }, { "name": "Dockerfile", "bytes": "17280" }, { "name": "HTML", "bytes": "148492" }, { "name": "JavaScript", "bytes": "25360" }, { "name": "Jupyter Notebook", "bytes": "2933" }, { "name": "Mako", "bytes": "1339" }, { "name": "Python", "bytes": "10006634" }, { "name": "Shell", "bytes": "217011" }, { "name": "TSQL", "bytes": "879" } ], "symlink_target": "" }
""" (c) Copyright 2016 Hewlett-Packard Enterprise Development Company, L.P. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import sys import falcon from oslo_config import cfg from paste import deploy from paste import urlmap import pkg_resources from freezer_api.cmd import api from freezer_api.common import config CONF = cfg.CONF # Define the minimum version of falcon at which we can use the "new" invocation # style for middleware (aka v1), i.e. the "middleware" named argument for # falcon.API. FALCON_MINVERSION_MIDDLEWARE = pkg_resources.parse_version('0.2.0b1') def root_app_factory(loader, global_conf, **local_conf): """Allows freezer to launch multiple applications at a time. It will allow freezer to manage multiple versions. """ if not CONF.enable_v1_api and '/v1' in local_conf: del local_conf['/v1'] if not CONF.enable_v2_api and '/v2' in local_conf: del local_conf['/v2'] return urlmap.urlmap_factory(loader, global_conf, **local_conf) def freezer_appv1_factory(global_conf, **local_conf): current_version = pkg_resources.parse_version( falcon.__version__ if hasattr(falcon, '__version__') else falcon.version) # Check the currently installed version of falcon in order to invoke it # correctly. if current_version < FALCON_MINVERSION_MIDDLEWARE: return api.build_app_v0() else: return api.build_app_v1() def freezer_appv2_factory(global_conf, **local_conf): return api.build_app_v2() def initialize_app(conf=None, name='main'): """ initializing app for paste to deploy it """ # register and parse arguments config.parse_args(args=sys.argv[1:]) # register logging opts config.setup_logging() # locate and load paste file conf = config.find_paste_config() app = deploy.loadapp('config:%s' % conf, name=name) return app
{ "content_hash": "d7929aea017143c350dc517e2ce18b21", "timestamp": "", "source": "github", "line_count": 73, "max_line_length": 79, "avg_line_length": 32.794520547945204, "alnum_prop": 0.70843776106934, "repo_name": "szaher/freezer-api", "id": "95df53deaa05af5bf832a8d6708fe0b7e393cc66", "size": "2394", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "freezer_api/service.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "1942" }, { "name": "PHP", "bytes": "20318" }, { "name": "Python", "bytes": "424120" }, { "name": "Shell", "bytes": "13072" } ], "symlink_target": "" }
import unittest from unittest import mock from template import * class TestTemplate(unittest.TestCase): def test_create_concret_class(self): obj = FireFighter() self.assertIsInstance(obj,FireFighter) obj = Lumberjack() self.assertIsInstance(obj,Lumberjack) obj = Postman() self.assertIsInstance(obj,Postman) obj = Manager() self.assertIsInstance(obj,Manager) def test_FireFighter(self): obj = FireFighter() salidaEsperada = ['Despertando','Comiendo el desayuno','Yendo al trabajo', 'Soy un bombero','Regresando del trabajo','Momento chill','Mimir'] with mock.patch('sys.stdout') as fake_stdout: obj.DailyToutine() fake_stdout.assert_has_calls([mock.call.write(salidaEsperada[0]), mock.call.write('\n'),mock.call.write(salidaEsperada[1]),mock.call.write('\n'), mock.call.write(salidaEsperada[2]),mock.call.write('\n'), mock.call.write(salidaEsperada[3]),mock.call.write('\n'), mock.call.write(salidaEsperada[4]),mock.call.write('\n'), mock.call.write(salidaEsperada[5]),mock.call.write('\n'), mock.call.write(salidaEsperada[6]),mock.call.write('\n')]) def test_Lumberjack(self): obj = Lumberjack() salidaEsperada = ['Despertando','Comiendo el desayuno','Yendo al trabajo', 'Soy un leñador','Regresando del trabajo','Momento chill','Mimir'] with mock.patch('sys.stdout') as fake_stdout: obj.DailyToutine() fake_stdout.assert_has_calls([mock.call.write(salidaEsperada[0]), mock.call.write('\n'),mock.call.write(salidaEsperada[1]),mock.call.write('\n'), mock.call.write(salidaEsperada[2]),mock.call.write('\n'), mock.call.write(salidaEsperada[3]),mock.call.write('\n'), mock.call.write(salidaEsperada[4]),mock.call.write('\n'), mock.call.write(salidaEsperada[5]),mock.call.write('\n'), mock.call.write(salidaEsperada[6]),mock.call.write('\n')]) def test_Postman(self): obj = Postman() salidaEsperada = ['Despertando','Comiendo el desayuno','Yendo al trabajo', 'Soy un cartero','Regresando del trabajo','Momento chill','Mimir'] with mock.patch('sys.stdout') as fake_stdout: obj.DailyToutine() fake_stdout.assert_has_calls([mock.call.write(salidaEsperada[0]), mock.call.write('\n'),mock.call.write(salidaEsperada[1]),mock.call.write('\n'), mock.call.write(salidaEsperada[2]),mock.call.write('\n'), mock.call.write(salidaEsperada[3]),mock.call.write('\n'), mock.call.write(salidaEsperada[4]),mock.call.write('\n'), mock.call.write(salidaEsperada[5]),mock.call.write('\n'), mock.call.write(salidaEsperada[6]),mock.call.write('\n')]) def test_Manager(self): obj = Manager() salidaEsperada = ['Despertando','Comiendo el desayuno','Yendo al trabajo', 'Soy un gerente','Regresando del trabajo','Viendo una serie','Mimir'] with mock.patch('sys.stdout') as fake_stdout: obj.DailyToutine() fake_stdout.assert_has_calls([mock.call.write(salidaEsperada[0]), mock.call.write('\n'),mock.call.write(salidaEsperada[1]),mock.call.write('\n'), mock.call.write(salidaEsperada[2]),mock.call.write('\n'), mock.call.write(salidaEsperada[3]),mock.call.write('\n'), mock.call.write(salidaEsperada[4]),mock.call.write('\n'), mock.call.write(salidaEsperada[5]),mock.call.write('\n'), mock.call.write(salidaEsperada[6]),mock.call.write('\n')]) if __name__ == '__main__': unittest.main()
{ "content_hash": "b12c91d388c3fe81ee68990cedcc42b5", "timestamp": "", "source": "github", "line_count": 81, "max_line_length": 87, "avg_line_length": 45.28395061728395, "alnum_prop": 0.6357688113413305, "repo_name": "AnhellO/DAS_Sistemas", "id": "c700f379b1c1bc9d963b46d0e6be1398efea9ed1", "size": "3669", "binary": false, "copies": "1", "ref": "refs/heads/ene-jun-2022", "path": "Ene-Jun-2022/juan-alejandro-calzoncit-rodriguez/Practica-6/template_test.py", "mode": "33188", "license": "mit", "language": [ { "name": "Dockerfile", "bytes": "8515" }, { "name": "Go", "bytes": "25845" }, { "name": "HTML", "bytes": "36671" }, { "name": "Python", "bytes": "716604" } ], "symlink_target": "" }
from sklearn2sql_heroku.tests.regression import generic as reg_gen reg_gen.test_model("LGBMRegressor" , "RandomReg_10" , "duckdb")
{ "content_hash": "eed55b0ffb8dd24eb557b731f1c69d67", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 66, "avg_line_length": 33.25, "alnum_prop": 0.7669172932330827, "repo_name": "antoinecarme/sklearn2sql_heroku", "id": "df86cd2215559cf75e39c570ca7c5cdbce1cdefc", "size": "133", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/regression/RandomReg_10/ws_RandomReg_10_LGBMRegressor_duckdb_code_gen.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Makefile", "bytes": "507043" }, { "name": "Procfile", "bytes": "37" }, { "name": "Python", "bytes": "1021137" }, { "name": "R", "bytes": "2521" } ], "symlink_target": "" }
"""The task-based multi-process processing analysis engine.""" import collections import os import time from plaso.containers import counts from plaso.containers import events from plaso.containers import reports from plaso.containers import tasks from plaso.engine import processing_status from plaso.lib import definitions from plaso.lib import errors from plaso.multi_process import analysis_process from plaso.multi_process import logger from plaso.multi_process import merge_helpers from plaso.multi_process import plaso_queue from plaso.multi_process import task_engine from plaso.multi_process import zeromq_queue class AnalysisMultiProcessEngine(task_engine.TaskMultiProcessEngine): """Task-based multi-process analysis engine. This class contains functionality to: * monitor and manage analysis tasks; * merge results returned by analysis worker processes. """ # pylint: disable=abstract-method _CONTAINER_TYPE_ANALYSIS_REPORT = reports.AnalysisReport.CONTAINER_TYPE _CONTAINER_TYPE_EVENT_TAG = events.EventTag.CONTAINER_TYPE _PROCESS_JOIN_TIMEOUT = 5.0 _QUEUE_TIMEOUT = 10 * 60 def __init__(self, worker_memory_limit=None, worker_timeout=None): """Initializes a task-based multi-process analysis engine. Args: worker_memory_limit (Optional[int]): maximum amount of memory a worker is allowed to consume, where None represents the default memory limit and 0 represents no limit. worker_timeout (Optional[float]): number of minutes before a worker process that is not providing status updates is considered inactive, where None or 0.0 represents the default timeout. """ if worker_memory_limit is None: worker_memory_limit = definitions.DEFAULT_WORKER_MEMORY_LIMIT super(AnalysisMultiProcessEngine, self).__init__() self._analysis_plugins = {} self._completed_analysis_processes = set() self._data_location = None self._event_filter_expression = None self._event_labels_counter = None self._event_queues = {} self._events_status = processing_status.EventsStatus() self._knowledge_base = None self._memory_profiler = None self._merge_task = None self._number_of_consumed_analysis_reports = 0 self._number_of_consumed_events = 0 self._number_of_consumed_event_tags = 0 self._number_of_consumed_sources = 0 self._number_of_produced_analysis_reports = 0 self._number_of_produced_events = 0 self._number_of_produced_event_tags = 0 self._number_of_produced_sources = 0 self._processing_profiler = None self._serializers_profiler = None self._session = None self._status = definitions.STATUS_INDICATOR_IDLE self._status_update_callback = None self._worker_memory_limit = worker_memory_limit self._worker_timeout = worker_timeout or definitions.DEFAULT_WORKER_TIMEOUT def _AnalyzeEvents(self, storage_writer, analysis_plugins, event_filter=None): """Analyzes events in a Plaso storage. Args: storage_writer (StorageWriter): storage writer. analysis_plugins (dict[str, AnalysisPlugin]): analysis plugins that should be run and their names. event_filter (Optional[EventObjectFilter]): event filter. Returns: collections.Counter: counter containing information about the events processed and filtered. Raises: RuntimeError: if a non-recoverable situation is encountered. """ self._status = definitions.STATUS_INDICATOR_RUNNING self._number_of_consumed_analysis_reports = 0 self._number_of_consumed_events = 0 self._number_of_consumed_event_tags = 0 self._number_of_consumed_sources = 0 self._number_of_produced_analysis_reports = 0 self._number_of_produced_events = 0 self._number_of_produced_event_tags = 0 self._number_of_produced_sources = 0 number_of_filtered_events = 0 logger.debug('Processing events.') filter_limit = getattr(event_filter, 'limit', None) for event in storage_writer.GetSortedEvents(): event_data_identifier = event.GetEventDataIdentifier() event_data = storage_writer.GetAttributeContainerByIdentifier( events.EventData.CONTAINER_TYPE, event_data_identifier) event_data_stream_identifier = event_data.GetEventDataStreamIdentifier() if event_data_stream_identifier: event_data_stream = storage_writer.GetAttributeContainerByIdentifier( events.EventDataStream.CONTAINER_TYPE, event_data_stream_identifier) else: event_data_stream = None event_identifier = event.GetIdentifier() event_tag = storage_writer.GetEventTagByEventIdentifer(event_identifier) if event_filter: filter_match = event_filter.Match( event, event_data, event_data_stream, event_tag) else: filter_match = None # pylint: disable=singleton-comparison if filter_match == False: number_of_filtered_events += 1 continue for event_queue in self._event_queues.values(): # TODO: Check for premature exit of analysis plugins. event_queue.PushItem((event, event_data, event_data_stream)) self._number_of_consumed_events += 1 if (event_filter and filter_limit and filter_limit == self._number_of_consumed_events): break logger.debug('Finished pushing events to analysis plugins.') # Signal that we have finished adding events. for event_queue in self._event_queues.values(): event_queue.PushItem(plaso_queue.QueueAbort(), block=False) logger.debug('Processing analysis plugin results.') # TODO: use a task based approach. plugin_names = list(analysis_plugins.keys()) while plugin_names: for plugin_name in list(plugin_names): if self._abort: break # TODO: temporary solution. task = tasks.Task() task.storage_format = definitions.STORAGE_FORMAT_SQLITE task.identifier = plugin_name merge_ready = self._CheckTaskReadyForMerge( definitions.STORAGE_FORMAT_SQLITE, task) if merge_ready: self._PrepareMergeTaskStorage(definitions.STORAGE_FORMAT_SQLITE, task) self._status = definitions.STATUS_INDICATOR_MERGING event_queue = self._event_queues[plugin_name] del self._event_queues[plugin_name] event_queue.Close() task_storage_reader = self._GetMergeTaskStorage( definitions.STORAGE_FORMAT_SQLITE, task) try: merge_helper = merge_helpers.AnalysisTaskMergeHelper( task_storage_reader, task.identifier) logger.debug('Starting merge of task: {0:s}'.format( merge_helper.task_identifier)) number_of_containers = self._MergeAttributeContainers( storage_writer, merge_helper) logger.debug('Merged {0:d} containers of task: {1:s}'.format( number_of_containers, merge_helper.task_identifier)) finally: task_storage_reader.Close() self._RemoveMergeTaskStorage( definitions.STORAGE_FORMAT_SQLITE, task) self._status = definitions.STATUS_INDICATOR_RUNNING # TODO: temporary solution. plugin_names.remove(plugin_name) events_counter = collections.Counter() events_counter['Events filtered'] = number_of_filtered_events events_counter['Events processed'] = self._number_of_consumed_events return events_counter def _CheckStatusAnalysisProcess(self, pid): """Checks the status of an analysis process. Args: pid (int): process ID (PID) of a registered analysis process. Raises: KeyError: if the process is not registered with the engine. """ # TODO: Refactor this method, simplify and separate concerns (monitoring # vs management). self._RaiseIfNotRegistered(pid) if pid in self._completed_analysis_processes: status_indicator = definitions.STATUS_INDICATOR_COMPLETED process_status = { 'processing_status': status_indicator} used_memory = 0 else: process = self._processes_per_pid[pid] process_status = self._QueryProcessStatus(process) if process_status is None: process_is_alive = False else: process_is_alive = True process_information = self._process_information_per_pid[pid] used_memory = process_information.GetUsedMemory() or 0 if self._worker_memory_limit and used_memory > self._worker_memory_limit: logger.warning(( 'Process: {0:s} (PID: {1:d}) killed because it exceeded the ' 'memory limit: {2:d}.').format( process.name, pid, self._worker_memory_limit)) self._KillProcess(pid) if isinstance(process_status, dict): self._rpc_errors_per_pid[pid] = 0 status_indicator = process_status.get('processing_status', None) if status_indicator == definitions.STATUS_INDICATOR_COMPLETED: self._completed_analysis_processes.add(pid) else: rpc_errors = self._rpc_errors_per_pid.get(pid, 0) + 1 self._rpc_errors_per_pid[pid] = rpc_errors if rpc_errors > self._MAXIMUM_RPC_ERRORS: process_is_alive = False if process_is_alive: rpc_port = process.rpc_port.value logger.warning(( 'Unable to retrieve process: {0:s} (PID: {1:d}) status via ' 'RPC socket: http://localhost:{2:d}').format( process.name, pid, rpc_port)) processing_status_string = 'RPC error' status_indicator = definitions.STATUS_INDICATOR_RUNNING else: processing_status_string = 'killed' status_indicator = definitions.STATUS_INDICATOR_KILLED process_status = { 'processing_status': processing_status_string} self._UpdateProcessingStatus(pid, process_status, used_memory) if status_indicator in definitions.ERROR_STATUS_INDICATORS: logger.error(( 'Process {0:s} (PID: {1:d}) is not functioning correctly. ' 'Status code: {2!s}.').format( process.name, pid, status_indicator)) self._TerminateProcessByPid(pid) def _MergeAttributeContainers(self, storage_writer, merge_helper): """Merges attribute containers from a task store into the storage writer. Args: storage_writer (StorageWriter): storage writer. merge_helper (AnalysisTaskMergeHelper): helper to merge attribute containers. Returns: int: number of containers merged. """ number_of_containers = 0 container = merge_helper.GetAttributeContainer() while container: number_of_containers += 1 if container.CONTAINER_TYPE == self._CONTAINER_TYPE_EVENT_TAG: storage_writer.AddOrUpdateEventTag(container) else: storage_writer.AddAttributeContainer(container) if container.CONTAINER_TYPE == self._CONTAINER_TYPE_ANALYSIS_REPORT: self._number_of_produced_analysis_reports += 1 elif container.CONTAINER_TYPE == self._CONTAINER_TYPE_EVENT_TAG: self._number_of_produced_event_tags += 1 for label in container.labels: self._event_labels_counter[label] += 1 self._event_labels_counter['total'] += 1 container = merge_helper.GetAttributeContainer() return number_of_containers def _StartAnalysisProcesses(self, analysis_plugins): """Starts the analysis processes. Args: analysis_plugins (dict[str, AnalysisPlugin]): analysis plugins that should be run and their names. """ logger.info('Starting analysis plugins.') for analysis_plugin in analysis_plugins.values(): self._analysis_plugins[analysis_plugin.NAME] = analysis_plugin process = self._StartWorkerProcess(analysis_plugin.NAME) if not process: logger.error('Unable to create analysis process: {0:s}'.format( analysis_plugin.NAME)) logger.info('Analysis plugins running') def _StartWorkerProcess(self, process_name): """Creates, starts, monitors and registers a worker process. Args: process_name (str): process name. Returns: MultiProcessWorkerProcess: extraction worker process or None on error. """ analysis_plugin = self._analysis_plugins.get(process_name, None) if not analysis_plugin: logger.error('Missing analysis plugin: {0:s}'.format(process_name)) return None queue_name = '{0:s} output event queue'.format(process_name) output_event_queue = zeromq_queue.ZeroMQPushBindQueue( name=queue_name, timeout_seconds=self._QUEUE_TIMEOUT) # Open the queue so it can bind to a random port, and we can get the # port number to use in the input queue. output_event_queue.Open() self._event_queues[process_name] = output_event_queue queue_name = '{0:s} input event queue'.format(process_name) input_event_queue = zeromq_queue.ZeroMQPullConnectQueue( name=queue_name, delay_open=True, port=output_event_queue.port, timeout_seconds=self._QUEUE_TIMEOUT) process = analysis_process.AnalysisProcess( input_event_queue, self._knowledge_base, self._session, analysis_plugin, self._processing_configuration, data_location=self._data_location, event_filter_expression=self._event_filter_expression, name=process_name) process.start() logger.info('Started analysis plugin: {0:s} (PID: {1:d}).'.format( process_name, process.pid)) try: self._StartMonitoringProcess(process) except (IOError, KeyError) as exception: logger.error(( 'Unable to monitor analysis plugin: {0:s} (PID: {1:d}) ' 'with error: {2!s}').format(process_name, process.pid, exception)) process.terminate() return None self._RegisterProcess(process) return process def _StopAnalysisProcesses(self, abort=False): """Stops the analysis processes. Args: abort (bool): True to indicated the stop is issued on abort. """ logger.debug('Stopping analysis processes.') self._StopMonitoringProcesses() if abort: # Signal all the processes to abort. self._AbortTerminate() # Wake the processes to make sure that they are not blocking # waiting for the queue new items. for event_queue in self._event_queues.values(): event_queue.PushItem(plaso_queue.QueueAbort(), block=False) # Try waiting for the processes to exit normally. self._AbortJoin(timeout=self._PROCESS_JOIN_TIMEOUT) for event_queue in self._event_queues.values(): event_queue.Close(abort=abort) if abort: # Kill any remaining processes. self._AbortKill() else: # Check if the processes are still alive and terminate them if necessary. self._AbortTerminate() self._AbortJoin(timeout=self._PROCESS_JOIN_TIMEOUT) for event_queue in self._event_queues.values(): event_queue.Close(abort=True) def _UpdateForemanProcessStatus(self): """Update the foreman process status.""" used_memory = self._process_information.GetUsedMemory() or 0 display_name = getattr(self._merge_task, 'identifier', '') self._processing_status.UpdateForemanStatus( self._name, self._status, self._pid, used_memory, display_name, self._number_of_consumed_sources, self._number_of_produced_sources, self._number_of_consumed_events, self._number_of_produced_events, self._number_of_consumed_event_tags, self._number_of_produced_event_tags, self._number_of_consumed_analysis_reports, self._number_of_produced_analysis_reports) self._processing_status.UpdateEventsStatus(self._events_status) def _UpdateProcessingStatus(self, pid, process_status, used_memory): """Updates the processing status. Args: pid (int): process identifier (PID) of the worker process. process_status (dict[str, object]): status values received from the worker process. used_memory (int): size of used memory in bytes. Raises: KeyError: if the process is not registered with the engine. """ self._RaiseIfNotRegistered(pid) if not process_status: return process = self._processes_per_pid[pid] status_indicator = process_status.get('processing_status', None) self._RaiseIfNotMonitored(pid) display_name = process_status.get('display_name', '') number_of_consumed_event_tags = process_status.get( 'number_of_consumed_event_tags', None) number_of_produced_event_tags = process_status.get( 'number_of_produced_event_tags', None) number_of_consumed_events = process_status.get( 'number_of_consumed_events', None) number_of_produced_events = process_status.get( 'number_of_produced_events', None) number_of_consumed_reports = process_status.get( 'number_of_consumed_reports', None) number_of_produced_reports = process_status.get( 'number_of_produced_reports', None) number_of_consumed_sources = process_status.get( 'number_of_consumed_sources', None) number_of_produced_sources = process_status.get( 'number_of_produced_sources', None) if status_indicator != definitions.STATUS_INDICATOR_IDLE: last_activity_timestamp = process_status.get( 'last_activity_timestamp', 0.0) if last_activity_timestamp: last_activity_timestamp += self._worker_timeout current_timestamp = time.time() if current_timestamp > last_activity_timestamp: logger.error(( 'Process {0:s} (PID: {1:d}) has not reported activity within ' 'the timeout period.').format(process.name, pid)) status_indicator = definitions.STATUS_INDICATOR_NOT_RESPONDING self._processing_status.UpdateWorkerStatus( process.name, status_indicator, pid, used_memory, display_name, number_of_consumed_sources, number_of_produced_sources, number_of_consumed_events, number_of_produced_events, number_of_consumed_event_tags, number_of_produced_event_tags, number_of_consumed_reports, number_of_produced_reports) def _UpdateStatus(self): """Update the status.""" # Make a local copy of the PIDs in case the dict is changed by # the main thread. for pid in list(self._process_information_per_pid.keys()): self._CheckStatusAnalysisProcess(pid) self._UpdateForemanProcessStatus() if self._status_update_callback: self._status_update_callback(self._processing_status) # pylint: disable=too-many-arguments def AnalyzeEvents( self, session, knowledge_base_object, storage_writer, data_location, analysis_plugins, processing_configuration, event_filter=None, event_filter_expression=None, status_update_callback=None, storage_file_path=None): """Analyzes events in a Plaso storage. Args: session (Session): session in which the events are analyzed. knowledge_base_object (KnowledgeBase): contains information from the source data needed for processing. storage_writer (StorageWriter): storage writer. data_location (str): path to the location that data files should be loaded from. analysis_plugins (dict[str, AnalysisPlugin]): analysis plugins that should be run and their names. processing_configuration (ProcessingConfiguration): processing configuration. event_filter (Optional[EventObjectFilter]): event filter. event_filter_expression (Optional[str]): event filter expression. status_update_callback (Optional[function]): callback function for status updates. storage_file_path (Optional[str]): path to the session storage file. Returns: ProcessingStatus: processing status. Raises: KeyboardInterrupt: if a keyboard interrupt was raised. ValueError: if analysis plugins are missing. """ if not analysis_plugins: raise ValueError('Missing analysis plugins') abort_kill = False keyboard_interrupt = False queue_full = False self._analysis_plugins = {} self._data_location = data_location self._event_filter_expression = event_filter_expression self._events_status = processing_status.EventsStatus() self._knowledge_base = knowledge_base_object self._processing_configuration = processing_configuration self._session = session self._status_update_callback = status_update_callback self._storage_file_path = storage_file_path stored_event_labels_counter = {} if storage_writer.HasAttributeContainers('event_label_count'): stored_event_labels_counter = { event_label_count.label: event_label_count for event_label_count in storage_writer.GetAttributeContainers( 'event_label_count')} self._event_labels_counter = collections.Counter() if storage_writer.HasAttributeContainers('parser_count'): parsers_counter = { parser_count.name: parser_count.number_of_events for parser_count in storage_writer.GetAttributeContainers( 'parser_count')} total_number_of_events = parsers_counter['total'] else: total_number_of_events = 0 for stored_session in storage_writer.GetSessions(): total_number_of_events += stored_session.parsers_counter['total'] self._events_status.total_number_of_events = total_number_of_events # Set up the storage writer before the analysis processes. self._StartTaskStorage(definitions.STORAGE_FORMAT_SQLITE) self._StartAnalysisProcesses(analysis_plugins) self._StartProfiling(self._processing_configuration.profiling) # Start the status update thread after open of the storage writer # so we don't have to clean up the thread if the open fails. self._StartStatusUpdateThread() try: self._AnalyzeEvents( storage_writer, analysis_plugins, event_filter=event_filter) for key, value in self._event_labels_counter.items(): event_label_count = stored_event_labels_counter.get(key, None) if event_label_count: event_label_count.number_of_events += value storage_writer.UpdateAttributeContainer(event_label_count) else: event_label_count = counts.EventLabelCount( label=key, number_of_events=value) storage_writer.AddAttributeContainer(event_label_count) self._status = definitions.STATUS_INDICATOR_FINALIZING except errors.QueueFull: queue_full = True self._abort = True except KeyboardInterrupt: keyboard_interrupt = True self._abort = True finally: self._processing_status.aborted = self._abort session.aborted = self._abort # Stop the status update thread after close of the storage writer # so we include the storage sync to disk in the status updates. self._StopStatusUpdateThread() self._StopProfiling() # Update the status view one last time before the analysis processses are # stopped. self._UpdateStatus() if queue_full: # TODO: handle abort on queue full more elegant. abort_kill = True else: try: self._StopAnalysisProcesses(abort=self._abort) except KeyboardInterrupt: keyboard_interrupt = True abort_kill = True if abort_kill: self._AbortKill() # The abort can leave the main process unresponsive # due to incorrectly finalized IPC. self._KillProcess(os.getpid()) try: self._StopTaskStorage( definitions.STORAGE_FORMAT_SQLITE, session.identifier, abort=self._abort) except (IOError, OSError) as exception: logger.error('Unable to stop task storage with error: {0!s}'.format( exception)) if self._abort: logger.debug('Analysis aborted.') else: logger.debug('Analysis completed.') # Update the status view one last time. self._UpdateStatus() # Reset values. self._analysis_plugins = {} self._data_location = None self._event_filter_expression = None self._knowledge_base = None self._processing_configuration = None self._session = None self._status_update_callback = None self._storage_file_path = None if keyboard_interrupt: raise KeyboardInterrupt return self._processing_status
{ "content_hash": "6946dbe31a6fa0802a85547570fd3fc4", "timestamp": "", "source": "github", "line_count": 695, "max_line_length": 80, "avg_line_length": 35.35107913669065, "alnum_prop": 0.6785786967316537, "repo_name": "log2timeline/plaso", "id": "f83e24ed622b928622db7d84ac2146657a4fbc52", "size": "24593", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "plaso/multi_process/analysis_engine.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "4301" }, { "name": "Makefile", "bytes": "122" }, { "name": "PowerShell", "bytes": "1305" }, { "name": "Python", "bytes": "5345186" }, { "name": "Shell", "bytes": "27279" }, { "name": "YARA", "bytes": "507" } ], "symlink_target": "" }
import threading import logging import random import time from termcolor import colored from concolica import emulator from concolica import interlocked from concolica import serialisation from concolica.vulnerabilities import * import smt.bitvector as bv _log = logging.getLogger('concolica') max_threads = 24 active_threads = threading.BoundedSemaphore(max_threads) available_states = threading.Semaphore(0) exit_event = threading.Event() def run_single_threaded(initial_states, x86_64, scoring_function=None, culling_function=None): states = list(initial_states) while len(states) > 0: try: s = states.pop() ns = emulator.single_step(s, x86_64) for n in ns: if culling_function is not None: if culling_function(n): continue if scoring_function is not None: n.score = scoring_function(n) states.append(n) states.sort(key=lambda x: x.score) else: states.append(n) except StateException, v: v.state.log.vulnerability(v) yield v def run_threaded(initial_states, x86_64, scoring_function=None, culling_function=None): global active_threads global available_states states = interlocked.List(initial_states) for s in initial_states: available_states.release() def run(states): while not exit_event.is_set(): active_threads.acquire() while available_states.acquire(blocking=False): try: s = states.pop() ns = emulator.single_step(s, x86_64) for n in ns: if culling_function is not None: if culling_function(n): continue if scoring_function is not None: n.score = scoring_function(n) states.append(n) states.sort(key=lambda x: x.score) else: states.append(n) available_states.release() except StateException, v: v.state.log.vulnerability(v) v.state.log.debug('saving vuln state {}'.format(v.state.id)) serialisation.save('vuln_state_{}'.format(v.state.id), v) data = '' if isinstance(v, ArbitraryRead): v.state.solver.add(v.address == bv.Constant(v.address.size, 0xc01db33f)) s = v.state m = v.state.solver.model() for i in range(0, 0x4000): name = 'ttf_{:x}'.format(i) if name in m: data += chr(m[name].value) else: data += '#' print colored(data, 'white', 'on_red', attrs=['bold']) print data.encode('hex') with open('font_{}.ttf'.format(v.state.id), 'wb') as tmp: tmp.write(data) active_threads.release() time.sleep(1.0) workers = [] for i in range(0, max_threads): t = threading.Thread(target=run, args=(states,)) t.start() workers.append(t) all_idle_count = 0 try: while not exit_event.is_set(): time.sleep(1) # shut up pycharm I know I am a bad man idle_count = active_threads._Semaphore__value _log.debug('idle threads: {}'.format(idle_count)) if idle_count == max_threads: all_idle_count += 1 if all_idle_count == 3: exit_event.set() else: all_idle_count = 0 except KeyboardInterrupt: exit_event.set() for t in workers: t.join()
{ "content_hash": "e7e259e2514d30c622c1a37cba2a7bba", "timestamp": "", "source": "github", "line_count": 131, "max_line_length": 96, "avg_line_length": 31.061068702290076, "alnum_prop": 0.5001228803145736, "repo_name": "c01db33f/concolica", "id": "2644430f8f25b091c04a6860d1439c1003071456", "size": "4726", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "threaded.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "130307" } ], "symlink_target": "" }
import redis import random import string forSelect = string.ascii_letters + string.digits def generate_code(count, length): for x in range(count): Re = "" for y in range(length): Re += random.choice(forSelect) yield Re def save_code(): r = redis.Redis(host='127.0.0.1', port='6379', password='linyii') codes = generate_code(200, 20) p = r.pipeline() for code in codes: p.sadd('code', code) p.execute() return r.scard('code') if __name__ == '__main__': save_code()
{ "content_hash": "197234ca7f6d647211d402a6790f5181", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 69, "avg_line_length": 20.25925925925926, "alnum_prop": 0.5850091407678245, "repo_name": "Yrthgze/prueba-sourcetree2", "id": "bb6f4480a9bffb217e1a143e750b44b4199238ed", "size": "696", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Lyndon1994/0003.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "3948" }, { "name": "C++", "bytes": "5518" }, { "name": "CSS", "bytes": "3474" }, { "name": "HTML", "bytes": "1101085" }, { "name": "Java", "bytes": "141" }, { "name": "JavaScript", "bytes": "5282" }, { "name": "Jupyter Notebook", "bytes": "324817" }, { "name": "Mako", "bytes": "412" }, { "name": "Python", "bytes": "535355" } ], "symlink_target": "" }
from unittest import skipIf, skipUnless from django.contrib.gis.db.models import fields from django.contrib.gis.geos import MultiPolygon, Polygon from django.core.exceptions import ImproperlyConfigured from django.db import connection, migrations, models from django.db.migrations.migration import Migration from django.db.migrations.state import ProjectState from django.test import ( TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature, ) from ..utils import mysql, oracle, spatialite try: GeometryColumns = connection.ops.geometry_columns() HAS_GEOMETRY_COLUMNS = True except NotImplementedError: HAS_GEOMETRY_COLUMNS = False class OperationTestCase(TransactionTestCase): available_apps = ['gis_tests.gis_migrations'] def tearDown(self): # Delete table after testing if hasattr(self, 'current_state'): self.apply_operations('gis', self.current_state, [migrations.DeleteModel('Neighborhood')]) super().tearDown() @property def has_spatial_indexes(self): if mysql: with connection.cursor() as cursor: return connection.introspection.supports_spatial_index(cursor, 'gis_neighborhood') return True def get_table_description(self, table): with connection.cursor() as cursor: return connection.introspection.get_table_description(cursor, table) def assertColumnExists(self, table, column): self.assertIn(column, [c.name for c in self.get_table_description(table)]) def assertColumnNotExists(self, table, column): self.assertNotIn(column, [c.name for c in self.get_table_description(table)]) def apply_operations(self, app_label, project_state, operations): migration = Migration('name', app_label) migration.operations = operations with connection.schema_editor() as editor: return migration.apply(project_state, editor) def set_up_test_model(self, force_raster_creation=False): test_fields = [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=100, unique=True)), ('geom', fields.MultiPolygonField(srid=4326)) ] if connection.features.supports_raster or force_raster_creation: test_fields += [('rast', fields.RasterField(srid=4326, null=True))] operations = [migrations.CreateModel('Neighborhood', test_fields)] self.current_state = self.apply_operations('gis', ProjectState(), operations) def assertGeometryColumnsCount(self, expected_count): self.assertEqual( GeometryColumns.objects.filter(**{ '%s__iexact' % GeometryColumns.table_name_col(): 'gis_neighborhood', }).count(), expected_count ) def assertSpatialIndexExists(self, table, column, raster=False): with connection.cursor() as cursor: constraints = connection.introspection.get_constraints(cursor, table) if raster: self.assertTrue(any( 'st_convexhull(%s)' % column in c['definition'] for c in constraints.values() if c['definition'] is not None )) else: self.assertIn([column], [c['columns'] for c in constraints.values()]) def alter_gis_model(self, migration_class, model_name, field_name, blank=False, field_class=None, field_class_kwargs=None): args = [model_name, field_name] if field_class: field_class_kwargs = field_class_kwargs or {'srid': 4326, 'blank': blank} args.append(field_class(**field_class_kwargs)) operation = migration_class(*args) old_state = self.current_state.clone() operation.state_forwards('gis', self.current_state) with connection.schema_editor() as editor: operation.database_forwards('gis', editor, old_state, self.current_state) class OperationTests(OperationTestCase): def setUp(self): super().setUp() self.set_up_test_model() def test_add_geom_field(self): """ Test the AddField operation with a geometry-enabled column. """ self.alter_gis_model(migrations.AddField, 'Neighborhood', 'path', False, fields.LineStringField) self.assertColumnExists('gis_neighborhood', 'path') # Test GeometryColumns when available if HAS_GEOMETRY_COLUMNS: self.assertGeometryColumnsCount(2) # Test spatial indices when available if self.has_spatial_indexes: self.assertSpatialIndexExists('gis_neighborhood', 'path') @skipUnless(HAS_GEOMETRY_COLUMNS, "Backend doesn't support GeometryColumns.") def test_geom_col_name(self): self.assertEqual( GeometryColumns.geom_col_name(), 'column_name' if oracle else 'f_geometry_column', ) @skipUnlessDBFeature('supports_raster') def test_add_raster_field(self): """ Test the AddField operation with a raster-enabled column. """ self.alter_gis_model(migrations.AddField, 'Neighborhood', 'heatmap', False, fields.RasterField) self.assertColumnExists('gis_neighborhood', 'heatmap') # Test spatial indices when available if self.has_spatial_indexes: self.assertSpatialIndexExists('gis_neighborhood', 'heatmap', raster=True) def test_add_blank_geom_field(self): """ Should be able to add a GeometryField with blank=True. """ self.alter_gis_model(migrations.AddField, 'Neighborhood', 'path', True, fields.LineStringField) self.assertColumnExists('gis_neighborhood', 'path') # Test GeometryColumns when available if HAS_GEOMETRY_COLUMNS: self.assertGeometryColumnsCount(2) # Test spatial indices when available if self.has_spatial_indexes: self.assertSpatialIndexExists('gis_neighborhood', 'path') @skipUnlessDBFeature('supports_raster') def test_add_blank_raster_field(self): """ Should be able to add a RasterField with blank=True. """ self.alter_gis_model(migrations.AddField, 'Neighborhood', 'heatmap', True, fields.RasterField) self.assertColumnExists('gis_neighborhood', 'heatmap') # Test spatial indices when available if self.has_spatial_indexes: self.assertSpatialIndexExists('gis_neighborhood', 'heatmap', raster=True) def test_remove_geom_field(self): """ Test the RemoveField operation with a geometry-enabled column. """ self.alter_gis_model(migrations.RemoveField, 'Neighborhood', 'geom') self.assertColumnNotExists('gis_neighborhood', 'geom') # Test GeometryColumns when available if HAS_GEOMETRY_COLUMNS: self.assertGeometryColumnsCount(0) @skipUnlessDBFeature('supports_raster') def test_remove_raster_field(self): """ Test the RemoveField operation with a raster-enabled column. """ self.alter_gis_model(migrations.RemoveField, 'Neighborhood', 'rast') self.assertColumnNotExists('gis_neighborhood', 'rast') def test_create_model_spatial_index(self): if not self.has_spatial_indexes: self.skipTest('No support for Spatial indexes') self.assertSpatialIndexExists('gis_neighborhood', 'geom') if connection.features.supports_raster: self.assertSpatialIndexExists('gis_neighborhood', 'rast', raster=True) @skipUnlessDBFeature("supports_3d_storage") @skipIf(spatialite, "Django currently doesn't support altering Spatialite geometry fields") def test_alter_geom_field_dim(self): Neighborhood = self.current_state.apps.get_model('gis', 'Neighborhood') p1 = Polygon(((0, 0), (0, 1), (1, 1), (1, 0), (0, 0))) Neighborhood.objects.create(name='TestDim', geom=MultiPolygon(p1, p1)) # Add 3rd dimension. self.alter_gis_model( migrations.AlterField, 'Neighborhood', 'geom', False, fields.MultiPolygonField, field_class_kwargs={'srid': 4326, 'dim': 3} ) self.assertTrue(Neighborhood.objects.first().geom.hasz) # Rewind to 2 dimensions. self.alter_gis_model( migrations.AlterField, 'Neighborhood', 'geom', False, fields.MultiPolygonField, field_class_kwargs={'srid': 4326, 'dim': 2} ) self.assertFalse(Neighborhood.objects.first().geom.hasz) @skipIfDBFeature('supports_raster') class NoRasterSupportTests(OperationTestCase): def test_create_raster_model_on_db_without_raster_support(self): msg = 'Raster fields require backends with raster support.' with self.assertRaisesMessage(ImproperlyConfigured, msg): self.set_up_test_model(force_raster_creation=True) def test_add_raster_field_on_db_without_raster_support(self): msg = 'Raster fields require backends with raster support.' with self.assertRaisesMessage(ImproperlyConfigured, msg): self.set_up_test_model() self.alter_gis_model( migrations.AddField, 'Neighborhood', 'heatmap', False, fields.RasterField )
{ "content_hash": "f94037d4d6dedca8936e9b78db1349e6", "timestamp": "", "source": "github", "line_count": 227, "max_line_length": 104, "avg_line_length": 40.99559471365639, "alnum_prop": 0.657317859445519, "repo_name": "theo-l/django", "id": "22bda55302696169a964bbe1dc03805bdce63eb2", "size": "9306", "binary": false, "copies": "9", "ref": "refs/heads/master", "path": "tests/gis_tests/gis_migrations/test_operations.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "54515" }, { "name": "HTML", "bytes": "172728" }, { "name": "JavaScript", "bytes": "247742" }, { "name": "Makefile", "bytes": "125" }, { "name": "Python", "bytes": "11279991" }, { "name": "Shell", "bytes": "809" }, { "name": "Smarty", "bytes": "130" } ], "symlink_target": "" }