Dataset schema (39 columns per row; ⌀ marks columns that contain nulls):

| column | dtype | range |
|---|---|---|
| hexsha | stringlengths | 40 – 40 |
| size | int64 | 5 – 2.06M |
| ext | stringclasses | 10 values |
| lang | stringclasses | 1 value |
| max_stars_repo_path | stringlengths | 3 – 248 |
| max_stars_repo_name | stringlengths | 5 – 125 |
| max_stars_repo_head_hexsha | stringlengths | 40 – 78 |
| max_stars_repo_licenses | listlengths | 1 – 10 |
| max_stars_count | int64 | 1 – 191k ⌀ |
| max_stars_repo_stars_event_min_datetime | stringlengths | 24 – 24 ⌀ |
| max_stars_repo_stars_event_max_datetime | stringlengths | 24 – 24 ⌀ |
| max_issues_repo_path | stringlengths | 3 – 248 |
| max_issues_repo_name | stringlengths | 5 – 125 |
| max_issues_repo_head_hexsha | stringlengths | 40 – 78 |
| max_issues_repo_licenses | listlengths | 1 – 10 |
| max_issues_count | int64 | 1 – 67k ⌀ |
| max_issues_repo_issues_event_min_datetime | stringlengths | 24 – 24 ⌀ |
| max_issues_repo_issues_event_max_datetime | stringlengths | 24 – 24 ⌀ |
| max_forks_repo_path | stringlengths | 3 – 248 |
| max_forks_repo_name | stringlengths | 5 – 125 |
| max_forks_repo_head_hexsha | stringlengths | 40 – 78 |
| max_forks_repo_licenses | listlengths | 1 – 10 |
| max_forks_count | int64 | 1 – 105k ⌀ |
| max_forks_repo_forks_event_min_datetime | stringlengths | 24 – 24 ⌀ |
| max_forks_repo_forks_event_max_datetime | stringlengths | 24 – 24 ⌀ |
| content | stringlengths | 5 – 2.06M |
| avg_line_length | float64 | 1 – 1.02M |
| max_line_length | int64 | 3 – 1.03M |
| alphanum_fraction | float64 | 0 – 1 |
| count_classes | int64 | 0 – 1.6M |
| score_classes | float64 | 0 – 1 |
| count_generators | int64 | 0 – 651k |
| score_generators | float64 | 0 – 1 |
| count_decorators | int64 | 0 – 990k |
| score_decorators | float64 | 0 – 1 |
| count_async_functions | int64 | 0 – 235k |
| score_async_functions | float64 | 0 – 1 |
| count_documentation | int64 | 0 – 1.04M |
| score_documentation | float64 | 0 – 1 |
hexsha: 7fcf8c04bfee9a81a78aefffecb7fb16cd7ee1e5 | size: 19,028 | ext: py | lang: Python
max_stars / max_issues / max_forks repo: nakamura196/tei @ 7aa62bc0603bbff03f96a3dbaad82d8feb6126ba | path: suiko/createDiff.py | licenses: ["Apache-2.0"]
max_stars_count / max_issues_count / max_forks_count: null | all star/issue/fork event datetimes: null
content:
# -*- coding: utf-8 -*-
import difflib
import xml.etree.ElementTree as ET
tmp_path = "data/template.xml"
prefix = ".//{http://www.tei-c.org/ns/1.0}"
xml = ".//{http://www.w3.org/XML/1998/namespace}"
tree = ET.parse(tmp_path)
ET.register_namespace('', "http://www.tei-c.org/ns/1.0")
root = tree.getroot()
body = root.find(prefix + "body")
p = ET.Element("{http://www.tei-c.org/ns/1.0}p")
body.append(p)
a = "鍾伯敬先生批評水滸傳卷之二第二囘王教頭私走延安府九紋龍大閙史家村詩曰千古幽扄一旦開天罡地煞出泉臺自來無事多生事本爲禳災却惹灾社稷從今雲擾擾后戈到處閙垓垓高俅奸侫𨿽堪恨洪信從今釀禍胎話說當時住持眞人對洪太尉說道太尉不知此殿中當𥘉是祖老天師洞玄眞人傳下法符囑付道此殿内鎭鎻着三十六貟天罡星七十二座地煞星共是一百單八箇魔君在𥚃面上立石碑鑿着龍章鳳篆天符鎭住在此若還放他出世必惱下方生靈如今太尉走了怎生是好他日必爲後患洪太尉聽罷渾身冷汗捉顫不住急急收拾行李引了從人下山囘京眞人并道衆送官巳罷自囘宫内修整殿宇竪立石碑不在話下再說洪太尉在路上分付從人教把走妖魔一節休說與外人知道恐天子知而見責於路無話星夜囘至京師進得汴梁城聞人所說天師在東京禁院做了七晝夜好事普施符籙禳救災病瘟疫盡消軍民安泰天師辤朝乘鶴駕雲自囘龍虎山去了洪太尉次日早朝見了天子奏說天師乘鶴駕雲先到京師臣等驛站而來𦂯得到此仁宗准奏賞賜洪信復還舊職亦不在話下後來仁宗天子在位共四十二年晏駕無有太子傳位濮安懿王𠃔讓之子太祖皇帝的孫立帝號曰英宗在位四年傳位與太子神宗天子在位一十八年傳位與太子哲宗皇帝登基那時天下𥁞皆太平四方無事且說東京開封府汴梁宣武軍一箇浮浪破落戸子弟姓高排行第二自小不成家業只好刺鎗使棒最是踢得好脚氣毬京師人口順不呌高二却都呌他做高毬後來𤼵跡便將氣毬那字去了毛傍添作立人便攺作姓高名俅這人吹彈歌舞刺鎗使棒相撲頑耍頗能詩書詞賦若論仁義禮智信行忠良即是不㑹只在東京城𥚃城外㨍閑因㨍了一箇生鉄王員外兒子使錢每日三瓦兩舍風花雪月被他父親開封府裡告了一𥿄文狀府尹把高俅斷了四十春杖迭配出界𤼵放東京城裡人民不許容他在家宿食高俅無計柰何只得來淮西臨淮州投逩一箇開賭坊的閒漢柳大郞名喚柳世權他平生專好惜客養閒人招納四方干隔澇漢子高俅投托得柳大郞家一住三年後來哲宗天子因拜南郊感得風調雨順放𡩖恩大赦天下那高俅在臨淮州因得了赦宥罪犯思鄉要囘東京這柳世權却和東京城裏金梁橋下開生薬鋪的董將士是親叔寫了一封書札收拾些人事盤纏賫𤼵高俅囘東京投逩董將士家過活當時高俅辭了柳大郞背上包褁離了臨淮州迤𨓦囘到東京竟來金梁橋下董生薬家下了這封書董將士一見高俅看了柳世權來書自肚裡𪨆思道這高俅我家如何安着得他若是箇志誠老實的人可以容他在家出入也教孩兒們學些好他却是箇㨍閑的破落戸没信行的人亦且當𥘉有過犯來被開封府斷配出境的人倘或留住在家中倒惹得孩兒們不學好了待他不收留又撇不過柳大郞面皮當時只得權且歡天喜地相留在家宿歇每日酒食管待住了十數日董將士思量出一個緣由將出一套衣服寫了一封書簡對高俅說道小人家下螢火之光照人不亮恐後悞了足下我轉薦足下與小蘇學士處乆後也得箇出身足下意内如何高俅大喜謝了董將士董將士使箇人將着書簡引領高俅竟到學士府内門吏轉報小蘇學士出來見了高俅看罷來書知道高俅原是㨍閑浮浪的人心下想道我這里如何安着得他不如做箇人情薦他去駙馬王晉卿府𥚃做箇親隨人都喚他做小王都太尉便喜歡這樣的人當時囘了董將士書札留高俅在府裏住了一夜次日寫了一封書呈使箇幹人送高俅去那小王都太尉處這太尉廼是哲宗皇帝妹夫神宗皇帝的駙馬他喜愛風流人物正用這樣的人一見小蘇學士差人馳書送這高俅來拜見了便喜隨卽冩囘書收留高俅在府内做箇親隨自此高俅遭際在王都尉府中出入如同家人一般自古道口逺日疏日近日親忽一日小王都太尉慶誕生辰分付府中安排筵宴專靖小舅端王這端王乃是神宗天子第十一子哲宗皇帝御弟見掌東駕排號九大王是箇聰明俊俏人物這浮浪子弟門風早閒之事無一般不曉無一般不㑹更無盤不愛更兼琴棋書𦘕儒釋道教無所不通踢毬打彈品竹調𢇁吹彈歌舞自不必說當日王都尉府中准備筵宴水陸俱偹但見香焚寳鼎花挿金瓶仙音院競奏新聲教坊司頻逞妙藝水晶壼内盡都是紫府瓊浆琥珀盃中滿泛着瑶池玉液玳瑁盤堆仙桃異果玻瓈碗供熊掌駝蹄鱗鱗膾切銀絲細細茶烹玉蕊紅裙舞女盡隨着象板鸞簘翠袖歌姬簇捧定龍笙鳳管兩行珠翠立堦前一𣲖笙歌臨座上且說這端王來王都尉府中赴宴都尉設席請端王居中坐定太尉對席相陪酒進數盃食供兩套那端王起身浄手偶來書院𥚃少歇猛見書案上一對兒羊脂玉碾成的鎭紙獅子極是做得好細巧玲瓏端王拿起獅子不落手看了一囘道好王都尉見端王心愛便說道再有一個玉龍筆架也是這個匠人一手做的𨚫不在手頭明日取來一併相送端王大喜道深謝厚意想那筆架必是更妙王都尉道明日取出來送至宫中便見端王又謝了兩個依舊入席飲宴至暮盡醉方散端王相别囘宫去了次日小王都太尉取出玉龍筆架和兩個鎭𥿄玉獅子着一個小金盒子盛了用黄羅包袱包了冩了一封書呈却使高俅送去高俅領了王都尉鈞㫖將着兩般玉玩器懐中揣了書呈逕投端王宫中來把門官吏轉報與院公沒多時院公出來問你是那個府𥚃來的人高俅施禮罷荅道小人是王駙馬府中特送玉玩器來進大王院公道殿下在庭心衷和小黄門踢氣毬你自過去高俅道相煩引進院公引到庭前高俅看時見端王頭戴軟紗唐巾身穿紫綉龍袍腰繋文武雙穗絛把綉龍袍前襟拽札起揣在縧兒邊足穿一雙嵌金線飛鳳靴三五個小黄門相伴着蹴氣毬高俅不敢過去衝撞立在從人背後伺候也是高俅合當𤼵跡時運到來那箇氣毬騰地起來端王接個不着向人叢𥚃直滚到高俅身邊那髙俅見氣毬來也是一時的胆量使个鸳鸯拐踢还端王端王見了大喜便問道你是甚人高俅向前跪下道小的是王都尉親隨受東人使令賫送兩般玉玩器來進獻大王有書呈在此拜上端王聽罷笑道姐夫直如此掛心高俅取出書呈進上端王開盒子看了玩器都遍與堂候官收了去那端王且不理玉玩器下落𨚫先問高俅道你原來㑹踢氣毬你喚做甚麽高俅叉手跪覆道小的呌做高俅胡踢得幾脚端王道好你便下塲來踢一囘要高俅拜道小的是何等樣人敢與恩王下脚端王道這是齊雲社名爲天下圓但踢何傷高俅再拜道怎敢三囘五次告辭端王定要他踢高俅只得叩頭謝罪觧膝下場𦂯踢幾脚端王喝采高俅只得把平生本事都使出來奉呈端王那身分模樣這氣毬一似鰾膠粘在身上的端王大喜那里肯放高俅囘府去就留在宫中過了一夜次日排個筵㑹專請王都尉宫中赴宴却說王都尉當日晚不見高俅囘來正疑思間只見次日門子報道九大王差人來傳令旨請太尉到宫中赴宴王都尉出來見了幹人看了令㫖隨卽上馬來到九大王府前下馬入宫來見了端王端王大喜稱謝兩盤玉玩器入席飮宴間端王說道這高毬踢得兩脚好氣毬孤欲索此人做親隨如何王都尉荅道殿下既用此人就留在宫中伏侍殿下端王歡喜執盃相謝二人又閒話一囘至晚席散王都尉自囘駙馬府去不在話下且說端王自從索得高俅做伴之後就留在宫中宿食高俅自此遭除端王每日跟着寸步不離却在宫中未及兩箇月哲宗皇帝宴駕無有太子文武百官商議冊立端王爲天子立帝號曰徽宗便是玉清教主微妙道君皇帝登基之後一向無事忽一日與高俅道朕欲要擡舉你但有邊功方可陞遷先教樞密院與你入名只是做隨駕遷轉的人後來没半年之間直擡舉高俅做到殿帥府太尉□事且說高俅得□□□師府太尉選揀吉日良辰去殿師府裏到任所有一應合属公吏衙將都軍禁軍馬步人等盡來𠫵拜各呈手本開報花名高殿帥一一㸃過於内只欠一名八十萬禁軍教頭王進半月之前巳有病狀在官患病未痊不曽入衙門管事高殿帥大怒喝道胡說既有手本呈來𨚫不是那厮抗拒官府搪塞下官此人卽係推病在家快與我拿來隨卽差人到王進家來捉拿王進且說這王進却無妻子止有一箇老母年巳六旬之上牌頭與教頭王進說道如今高殿帥新來上任㸃你不着軍正司禀說染患在家見有病患狀在官高殿帥焦燥那里肻信定要拿你只道是教頭詐病在家教頭只得去走一遭若還不去定連累衆人小人也有罪犯王進聽罷只得捱着病來進得殿帥府前叅見太尉拜了四拜躬身唱箇喏起來立在一邊高俅道你那厮便是都軍教頭王昇的兒子王進禀道小人便是高俅喝道這厮你爺是街市上使花棒賣藥的你省的甚麽武藝前官没眼叅你做個教頭如何敢小覷我不伏俺㸃視你托誰的勢要推病在家安閒快樂王進告道小人怎敢其實患病未痊高太尉罵道賊配軍你既害病如何來得王進又告道太尉呼喚安敢不來高殿帥大怒喝令左右教拿下王進加力與我打這厮衆多牙將都是和王進好的只得與軍正司同告道今日是太尉上任好日頭權免此人這一次高太尉喝道你這賊配軍且看衆將之面饒恕你今日之犯明日却和你理㑹王進謝罪罷起來擡頭看了認得是高俅出得衙門歎口氣道俺的性命今畨難保了俺道是甚麽高殿帥却原來正是東京㨍閑的圓社高二比先時曾學使棒被我父親一棒打翻三四箇月將息不起有此之仇他今日𤼵跡得做殿帥府太尉正待要報仇我不想正属他管自古道不怕官只怕管俺如何與他爭得怎生奈何是好囘到家中悶悶不巳對娘說知此事母子二人抱頭而哭娘道我兒三十六着走爲上着只恐没處走王進道母親說得是兒子𪨆思也是這般計較只有延安府老种經畧相公鎭守邊庭他手下軍官多有曾到京師愛兒子使鎗棒的極多何不逃去投奔他們那里是用人去處足可安身立命兒娘兩個商議定了其母又道我兒和你要私走只恐門前兩個牌軍是殿帥府撥來伏侍你的他若得知須走不脫王進道不妨母親放心兒子自有道理措置他當下日晚未昏王進先呌張牌入來分付道你先吃了些晚飯我使你一處去幹事
張牌道教頭使小人那里去王進道我因前日病患許下酸棗門外岳廟𥚃香願明日早要去燒炷頭香你可今晚先去分付廟祝教他來日早開些廟門等我來燒炷頭香就要三牲獻劉李王你就廟𥚃歇了等我張牌應先吃了晚飯呌了安置望廟中去了當夜子母二人收拾了行李衣服細軟銀兩做一担兒打挾了又装兩個料袋袱駝拴在馬上等到五更天色未明王進呌起李牌分付道你與我將這些銀兩去岳廟𥚃和張牌買個三牲煮熟在那里等候我買些紙燭隨後便來李牌將銀子望廟中去了王進自去備了馬牽出後槽將料袋袱駝搭上把索子拴縛牢了牽在後門外扶娘上了馬家中粗重都弃了鎻上前後門挑了担兒跟在馬後趂五更天色未明乘勢出了西華門取路望延安府來且說兩個牌軍買了福物煮熟在廟等到巳牌也不見來李牌心焦走囘到家中𪨆時見鎻了門兩頭無路𪨆了半日並無有人曾見看看待晚岳廟𥚃張牌疑忌一直逩囘家來又和李牌𪨆了一黄昏看看黑了兩個見他當夜不歸又不見了他老娘次日兩個牌軍又去他親戚之家訪問亦無𪨆𠙚兩個恐怕連累只得去殿帥府首告王敎頭棄家在逃子母不知去向高太尉見告了大怒道賊配軍在逃看那厮待走那里去隨即押下文書行開諸州各府捉拿逃軍王進二人首告免其罪責不在話下且說王教頭母子二人自離了東京在路免不得飢飡渴飮夜住曉行在路上一月有餘忽一日天色將晚王進挑着擔兒跟在娘的馬後口裡與母親說道天可憐見慚愧了我子母兩個脫了這天羅地網之厄此去延安府不遠了高太尉便要差人拿我也拿不着了子母兩箇歡喜在路上不覺錯過了宿頭走了這一晚不遇着一處村坊那里去投宿是好正没理會處只見遠遠地林子𥚃閃出一道燈光來王進看了道好了遮莫去那里陪個小心借宿一宵明日早行當時轉入林子裡來看時却是一所大莊院一週遭都是土墙墙外却有二三百株大柳𣗳看那莊院但見前通官道後靠溪岡一週遭楊柳綠陰濃四下里喬松青似染草堂高起盡按五運山莊亭舘低軒直造𠋣山臨水轉屋𧢲牛羊滿地打麥場鵝鴨成羣出園廣野負傭莊客有千人家眷軒昂女使兄童難計數正是家有餘粮鷄犬飽戸多書籍子孫賢當時王教頭來到莊前敲門多時只見一個莊客出來王進放下担兒與他施禮莊客道來俺莊上有甚事王進荅道實不相瞞小人子母二人貪行了些路程錯過了宿店來到這里前不巴村後不巴店欲投貴莊借宿一宵明日早行依例拜納房金萬望周全方便莊客道既是如此且等一等待我去問莊主太公肻時但歇不妨王進又道大哥方便莊客入去多時出來說道莊主太公教你兩個入來王進請娘下了馬王進挑着担兒就牽了馬隨莊客到𥚃面打麥場上歇下担兒把馬拴在柳𣗳上子母兩個直到草堂上來見太公那太公年近六旬之上鬚髮皆白頭戴遮塵暖帽身穿直縫𡩖𥘎腰繋皂𢇁縧足穿熟皮靴王進見了便拜太公連忙道客人休拜且請起來你們是行路的人辛苦風霜且坐一坐王進母子兩個敘禮罷都坐定太公問道你們是那里來如何昏晚到此王進荅道小人姓張原是京師人今來消折了本錢無可營用要去延安府投逩親眷不想今日路上貪行了些程途錯過了宿店欲投貴莊假宿一宵來日早行房金依例拜納太公道不妨如今世上人那箇頂着房屋走俚你母子二位敢未打火呌莊客安排飯來没多時就𠫇上放開條卓子莊客托出一桶盤四樣菜蔬一盤牛肉鋪放卓子上先盪酒來下太公道村落中無甚相待休得見怪王進起身謝道小人子母無做相擾得𫏂厚意此恩難報太公道休這般說且請吃酒一靣勸了五七盃酒搬出飯來二人吃了收拾碗碟太公起身引王進子母到客房中安歇王進告道小人母親騎的頭口相煩寄養草料望乞應付一𤼵拜還太公道這個亦不妨我家也有頭口騾馬教莊客牽去後槽一𤼵喂養草料亦不用憂心王進謝了挑那担兒到客房𥚃來莊客㸃上燈火一靣提湯來洗了脚太公自囘𥚃面去了王進子母二人謝了莊客掩上房門收拾歇息次日睡到天曉不見起來莊主太公來到客房前過聼得王進子母在房中聲喚太公問道客官天曉好起了王進聼得慌忙出房來見太公施禮說道小人起多時了夜來多多攪擾甚是不當太公問道誰人如此聲喚王進道實不敢瞞太公說老母鞍馬勞倦昨夜心疼病發太公道既然如此客人休要煩惱教你老母且在老夫莊上住幾日我有個醫心疼的方呌莊客去縣𥚃撮藥來與你老母親吃教他放心慢慢地將息王進謝了話休絮繁自此王進子母兩個在太公莊上服藥住了五七日覺道母親病患痊了王進收拾要行當日因來後槽看馬只見空地上一箇後生脫膊着剌着一身靑龍銀盤也似一個面皮約有十八九歲拿條棒在那里使王進看了半晌不覺笑口道這棒也使得好了只是有破綻嬴不得眞好漢那後生聽得大怒喝道你是甚麽人敢來笑話我的本事俺經了七八個有名的師父我不信倒不如你你敢和我扠一扠麽說猶未了太公到來喝那後生不得無禮那後生道𡬡耐這厮笑話我的棒法太公道客人莫不會使鎗棒王進道頗曉得些敢問長上這後生是宅上的誰太公道是老漢的兒子王進道既然是宅内小官人若愛學時小人㸃撥他端正如何太公道恁地時十分好便教那後生來拜師父那後生那里肻拜心中越怒道阿爹休聽這厮胡說若吃他贏得我這條棒時我便拜他爲師王進道小官人若是不當村時較量一棒耍子那後生就空地當中把一條棒使得風車兒似轉向王進道你來你來怕的不筭好漢王進只是笑不肻動手太公道客官既是肻教小頑時使一棒何妨王進笑道恐衝撞了令郞時須不好看太公道這個不妨若是打折了手脚也是他自作自受王進道恕無禮去鎗架上拿了一條棒在手𥚃來到空地上使箇旗鼓那後生看了一看拿條棒滚將入來逕逩王進王進托地拖了棒便走那後生掄着棒又赶入來王進囘身把棒望空地里劈將下來那後生見棒劈來用棒來隔王進却不打下來將棒一掣却望後生懐𥚃直搠將來只一繳那後生的棒丟在一邊撲地望後倒了王進連𢗅撇下棒向前扶住道休恠休恠那後生爬將起來便去傍邉掇條凳子納王進坐便拜道我枉自經了許多師家原來不值半分師父没奈何只得請教王進道我子母二人連日在此攪擾宅上無恩可報當以効力太公大喜呌那後生穿了衣裳一同來後堂坐下呌莊客殺一個羊安排了酒食果品之類就請王進的母親一同赴席四個人坐定一面把盞太公起身勸了一盃酒說道師父如此高強必是個教頭小兒有眼不識㤗山王進笑道奸不厮欺俏不厮瞞小人不姓張俺是東京八十萬禁軍教頭王進的便是這鎗棒終日搏弄爲因新任一個高太尉原被先父打翻今做殿帥府太尉懐挾舊仇要奈何王進小人不合属他所管和他爭不得只得子母二人逃上延安府去投托老种經畧相公處勾當不想來到這里得遇長上父子二位如此看待又蒙救了老母病患連日管顧甚是不當既然令郞肯學時小人一力奉教只是令郞學的都是花棒只好看上陣無用小人從新㸃撥他太公見說了便道我𧠇可知輸了快來再拜師父那後生又拜了王進太公道教頭在上老漢祖居在這華陰縣界前面便是少華山這村便喚做史家村村中緫有三四百家都姓史老漢的兒子從小不務農業只愛刺鎗使棒母親說他不得嘔氣死了老漢只得隨他性子不知使了多少錢財投師父教他又請高手匠人與他刺了這身花綉肩臂胸膛總有九條龍滿縣人口順都呌他做九紋龍史進教頭今日既到這里一𤼵成全了他亦好老漢自當重重酧謝王進大喜道太公放心既然如此說時小人一發教了令郞方去自當日爲始吃了酒食留住王教頭子母二人在莊上史進每日求王教頭㸃撥十八般武藝一一從頭指教那十八般武藝矛鎚弓弩銃鞭簡劒鏈撾斧鉞并戈㦸牌棒與鎗杈話說這史進每日在莊上管待王教頭母子二人指教武藝史太公自去華陰縣中承當里正不在話下不覺荏苒光陰早過半年之上正是窓外日光彈指過席間花影坐前移一盃未進笙歌送堦下辰牌又報時前後得半年之上史進把這十八般武藝從新學得十分精熟多得王進盡心指教㸃撥得件件都有奥妙王進見他學得精熟了自思在此𨿽好只是不了一日想起來相辭要上延安府去史進那里肻放說道師父只在此間過了小弟奉養你母子二人以終天年多少是好王進道賢弟多蒙你好心在此十分之好只恐高太尉追捕到來負累了你恐教賢弟亦遭縲絏之厄不當穩便以此兩難我一心要去延安府投着在老种經畧處勾當那里是鎭守邉庭用人之際足可安身立命史進并太公苦留不住只得安排一個筵席送行托出一盤兩個叚子一百兩花銀謝師次日王進收拾了擔兒備了馬子母二人相辭史太公史進請娘乘了馬望延安府路途進發史進呌莊客挑了担兒親送十里之程中心難捨史進當時拜别了師父洒淚分手和莊客自囘王教頭依舊自挑了担兒跟着馬和娘兩個自取関西路里去了話中不說王進去投軍役只說史進囘到莊上每日只是打熬氣力亦且壯年又没老小半夜三更起來演習武藝白日𥚃只在莊後射弓走馬不到半載之間史進父親太公𣑱患病症數日不起史進使人逺近請醫士看治不能痊可嗚呼哀哉太公殁了史進一面備棺椁衣殮請僧修設好事追齋理七薦史太公又請道士建立齋醮超度生天整做了十數壇好事功果道場選了吉日良時出䘮安塟滿村中三四百史家莊戸都來送䘮掛孝理𣩵在村西山上祖墳内了史進家自此無人管業史進又不肯務農只要𪨆人使家生較量鎗棒自史太公死後又早過了三四個月日時當六月中旬炎天正𤍠那一日史進無可消遣捉箇交床坐在打麥場邊柳陰樹下乘凉對面松林透過風來史進喝采道好凉風正乘凉俚只見一個人採頭採腦在那里張望史進喝道作怪誰在那里張俺莊上史進跳起身來轉過樹背後打一看時認得是獵戸摽兎李吉史進喝道李吉張我莊内做甚麽莫不來相脚頭李吉向前聲喏道大郞小人要𪨆莊上矮丘乙郞吃碗酒因見大郞在此乘凉不敢過來衝撞史進道我且問
你往常時你只是擔些野味來我莊上賣我又不㑹虧了你如何一向不將來賣與我敢是欺負我没錢李吉荅道小人怎敢一向没有野味以此不敢來史進道胡說偌大一箇少華山恁地廣濶不信没有箇獐兒兎兒李吉道大郎原來不知如今近日上面添了一夥強人札下箇山寨在上面聚集着五七百箇小嘍囉有百十疋好馬爲頭那個大王喚做神機軍師朱武第二箇喚做跳澗虎陳逹第三箇喚做白花蛇楊春這三箇爲頭打家刼舍華陰縣𥚃不敢捉他出三千貫賞錢召人拿他誰敢上去惹他因此上小人們不敢上山打捕野味那討來賣史進道我也聽得說有強人不想那厮們如此大弄必然要惱人李吉你今後有野味時𪨆些來李吉唱箇喏自去了史進歸到𠫇前𪨆思這厮們大弄必要來𧂭惱村坊既然如此便呌莊客揀兩頭肥水牛來殺了莊内自有造下的好酒先燒了一陌順溜𥿄便呌莊客去請這當村𥚃三四百史家莊戸都到家中草堂上序齒坐下教莊客一面把盞勸酒史進對衆人說道我聽得少華山上有三箇強人聚集着五七百小嘍囉打家劫舍這厮們既然大弄必然早晚要來俺村中囉唕我今特請你衆人來啇議倘若那厮們來時各家准備我莊上打起梆子你衆人可各執鎗棒前來救應你各家有事亦是如此遍相救護共保村坊如若強人自來都是我來理㑹衆人道我等村農只靠大郞做主梆子嚮時誰敢不來當晚衆人謝酒各自分付囘家准備器械自此史進修整門戸墻垣安排莊院拴束衣甲整頓刀馬隄防賊㓂不在話下且說少華山寨中三個頭領坐定商議爲頭的神機軍師朱武𨿽無本事廣有謀畧朱武當與陳逹楊春說道如今我聽知華隂縣𥚃出三千貫賞錢召人捉我們誠恐來時要與他厮殺只是山寨錢粮欠少如何不去刼擄些來以供山寨之用聚積些粮食在寨𥚃防備官軍來時好和他打熬跳澗虎陳逹道說得是如今便去華陰縣𥚃先問他借粮看他如何白花蛇楊春道不要華隂縣去只去蒲城縣萬無一失陳逹道蒲城縣人户稀少錢粮不多不如只打華陰縣那里人民豊富錢粮廣有楊春道哥哥不知若去打華陰縣時須從史家村過那箇九紋龍史進是箇大虫不可去撩撥他他如何肯放我們過去陳逹道兄弟好懦弱一箇村坊過去不得怎地敢抵敵官軍楊春道哥哥不可小覷了他那人端的了得朱武道我也曾聞他十分英雄說這人真有本事兄弟休去罷陳逹呌將起來說道你兩個閉了烏嘴長别人志氣滅自巳威風也只是一箇人須不三頭六臂我不信喝呌小嘍囉快脩我的馬來如今便去先打史家莊後取華陰縣朱武楊春再三諌勸陳逹那里肻聽隨卽披掛上馬㸃了一百四五十小嘍囉嗚鑼擂鼓下山望史家村去了且說史進正在莊内整製刀馬只見莊客報知此事史進聼得就莊上敲起梆子來那莊前莊後莊東莊西三四百史家莊戸聽得梆子嚮都拖鎗拽棒聚取三四百人一齊都到史家莊上看了史進頭戴一字巾身披朱紅甲上穿靑錦襖下着抹綠靴腰繋皮𦞂膊前後鉄𢲅心一張弓一壼箭手𥚃拿一把三尖兩刃四竅八環刀莊客牽過那疋火炭赤馬史進上了馬綽了刀前面擺着三四十壯健的莊客後面列着八九十村蠢的鄉夫各史家莊戸都跟在後頭一齊納喊直到村北路口擺開却早望見來軍但見紅旗閃閃赤幟翩翩小嘍囉亂搠叉鎗莾撞漢齊擔刀斧頭巾歪整渾如三月桃花衲襖𦂳拴却似九秋落葉箇箇圎睜横死眼人人輙起夜叉心那少華山陳逹引了人馬飛逩到山坡下便將小嘍囉擺開史進看時見陳逹頭戴乾紅凹面巾身披裹金生鉄甲上穿一領紅衲襖脚穿一對吊墩靴腰繋七尺攅線搭膊坐騎一疋高頭白馬手中横着丈八㸃鋼矛小嘍囉兩勢下納喊二貟將就馬上相見陳逹在馬上看着史進欠身施禮史進喝道汝等殺人放火打家刼舍犯着迷天大罪都是該死的人你也須有耳朶好大胆直來太歲頭上動土陳逹在馬上荅道俺山寨𥚃欠少些粮食欲徃華阴縣借粮經由貴莊假一條路並不敢動一根草可放我們過去囘來自當拜謝史進道胡說俺家見當里正正要來拿你這夥賊今日到來經由我村中過却不拿你到放你過去本縣知道須連累于我陳逹道四海之内皆兄弟也相煩借一條路史進道甚麽閑話我便肻時有一個不肻你問得他肻便去陳逹道好漢教我問誰史進道你問得我手𥚃這口刀肯便放你去陳逹大怒道赶人不要赶上休得要逞精神史進也怒掄手中刀驟坐下馬來战陳逹陳逹也拍馬挺鎗來迎史進兩個交馬但見一來一往一上一下一來一往有如深水戲珠龍一上一下却似半岩爭食虎左盤右旋好似張飛敵吕布前廻後轉渾如敬德戰秦瓊九紋龍忿怒三尖刀只望頂門飛跳澗虎生嗔丈八矛不離心坎刺好手中間逞好手紅心𥚃面奪紅心史進陳逹兩個𩰖了多時只見戰馬咆哮踢起手中軍器鎗刀來往各防架隔遮攔兩個𩰖到間深𥚃史進賣個破綻讓陳逹把鎗望心窩𥚃搠來史進却把腰一閃陳逹和鎗攧入懐𥚃來史進輕舒猿臂疑紅狼腰只一挾把陳逹輕輕摘離了嵌花鞍疑疑揪住了線𦞂膊丟在馬前受降那疋𢧐馬撥風也似去了史進呌莊客將陳逹綁縛了衆人把小嘍囉一赶都走了史進囘到莊上將陳逹綁在庭心内柱上等待一𤼵拿了那兩個賊首一併解官請賞且把酒來賞了衆人教權且散衆人喝采不枉了史大郞如此豪傑休說衆人歡喜飲酒却說朱武楊春兩個正在寨𥚃猜疑捉模不定且教小嘍囉再去探聽消息只見囘去的人牽着空馬逩到山前只呌道苦也陳家哥哥不聽二位哥哥所說送了性命朱武問其緣故小嘍囉備說交𨦟一節怎當史進英勇朱武道我的言語不聼果有此禍楊春道我們盡數都去和他死併如何朱武道亦是不可他尚自輸了你如何併得他過我有一條苦計若救他不得我和你都休楊春問道如何苦計朱武付耳低言說道只除恁地楊春道好計我和你便去事不冝遲再說史進正在莊上忿怒未消只見莊客飛報道山寨𥚃朱武楊春自來了史進道這厮合休我教他兩箇一𤼵解官快牽過馬來一面打起梆子衆人早都到來史進上了馬正待出莊們只見朱武楊春步行巳到莊前兩個雙雙跪下擎着兩眼淚史進下馬來喝道你兩個跪下如何說朱武哭道小人等三箇累被官司逼迫不得巳上山落草當𥘉𤼵願道不求同日生只願同日死𨿽不及関張劉備的義氣其心則同今日小弟陳逹不聽好言誤犯虎威巳被英雄擒捉在貴莊無計懇求今來一逕就死望英雄將我三人一𤼵解官請賞誓不皺眉我等就英雄手内請死並無怨心史進聽了𪨆思道他們直恁義氣我若拿他去解官請賞時反敎天下好漢們耻笑我不英雄自古道大虫不吃伏肉史進便道你兩個且跟我進來朱武楊春並無惧怯隨了史進直到後𠫇前跪下又教史進綁縛史進三囘五次呌起來那兩個那里肻起來惺惺惜惺惺好漢識好漢史進道你們既然如此義氣深重我若送了你們不是好漢我放陳逹還你如何朱武道休得連累了英雄不當𥡷便𡩬可把我們去解官請賞史進道如何使得你肻吃我酒食麽朱武道一死尚然不惧何况酒肉乎當時史進大喜解放陳逹就後𠫊上座置酒設席管待三人朱武楊春陳逹拜謝大恩酒至數盃少添春色酒罷三人謝了史進囘山去了史進送出莊門自囘莊上却說朱武等三人歸到寨中坐下朱武道我們不是這條苦計怎得性命在此𨿽然救了一人𨚫也難得史進爲義氣上放了我們過幾日俻些禮物送去謝他救命之恩話休絮繁過了十數日朱武等三人收拾得三十兩蒜條金使兩個小嘍羅趂月黑夜送去史家莊上當夜𥘉更時分小嘍囉敲門莊客報知史進史進火急披衣來到門前問小嘍囉有甚話說小嘍囉道三個頭領再三拜覆特地使小校送些薄禮酧謝大郎不殺之恩不要推欲望乞笑留取出金子逓與史進𥘉時推却次後𪨆思道既然送來囘禮可酧受了金子呌莊客置酒管待小校吃了半夜酒把些零碎銀兩賞了小校囘山去了又過半月有餘朱武等三人在寨中商議擄掠得一串好大珠子又使小嘍囉連夜送來史家莊上史進受了不在話下又過了半月史進𪨆思道也難得這三個敬重我我也俻些禮物囘奉他次日呌莊客𪨆個裁縫自去縣𥚃買了三匹紅錦裁成三領錦襖子又㨂肥羊煮了三箇將大盒子盛了委兩個莊客去送史進莊上有個爲頭的莊客王四此人頗能荅應官府口舌利便滿莊人都呌他做王伯當史進教他同一個得力莊客挑了盒担直送到山下小嘍囉問了備細引到山寨𥚃見了朱武等三個頭領大喜受了錦襖子并肥羊酒禮把十兩銀子賞了莊客每人吃了十數碗酒下山囘歸莊内見了史進說道山上頭領多多上覆史進自此常常與朱武等三人往來不時間只是王四去山寨𥚃送物事不則一日寨𥚃頭領也頻頻地使人送金銀來與史進荏苒光陰時遇八月中秋到來史進要和三人說話約至十五夜來莊上賞月飮酒先使莊客王四賫一封請書直去少華山上請朱武陳逹楊春來莊上赴席王四馳書逕到山寨𥚃見了三位頭領下了來書朱武看了大喜三個應𠃔隨即寫封囘書賞了王四五兩銀子吃了十來碗酒王四下得山來正撞着如常送物事來的小嘍囉一把抱住那里肯放又拖去山路邉村酒店裡吃了十數碗酒王四相别了囘莊一面走着被山風一吹酒却湧上來浪浪蹌蹌一步一攧走不得十里之路見座林子逩到𥚃面望着那綠茸茸莎草地上撲地倒了原來摽兎李吉正在那山坡下張兔兒認得是史家莊上王四赶入林子𥚃來扶他那里扶得動只見王四𦞂膊裏突出銀子來李吉𪨆思道這厮醉了那里討得許多何不拿他些也是天罡星合當聚㑹自然生出機㑹來李吉解那𦞂膊望地下只一抖那封囘書和銀子都抖出來李吉拿起頗識幾字將書拆開看時見上面寫着少華山朱武陳逹楊春中間多有兼文带武的言語𨚫不識得只認得三箇名字李吉道我做獵戸幾時能勾𤼵跡筭命道我今年有大財却在這里華陰縣𥚃見出三千貫賞錢捕捉他三箇賊人尀耐史進那厮前日我去他莊上𪨆矮丘乙郞他道我來相脚頭躧盤你原來倒和賊人來往銀子并書都拿去了望華隂縣𥚃來出首却說莊客王四一覺直睡到二更方醒覺來看見月光微微照在身上王四吃了一驚跳將
起來却見四邊都是松樹便去腰裡摸時𦞂膊和書都不見了四下里𪨆時只見空𦞂膊在莎草地上王四只管呌苦𪨆思道銀子不打𦂳這封囘書却怎生好正不知被甚人拿去了眉頭一縱計上心來自道若囘去莊上說脱了囘書大郞必然焦燥定是赶我出去不知只說不曾有囘書那里查照計較定了飛也似取路歸來莊上却好五更天氣史進見王四囘來問道你如何方𦂯歸來王四道托主人福廕寨中三箇頭領都不肻放留住王四吃了半夜酒因此囘來遲了史進又問曾有囘書麽王四道三箇頭領要寫囘書却是小人道三位頭領既然凖來赴席何必囘書小人又有盃酒路上恐有些失支脫節不是耍處史進聽了大喜說道不枉了諸人呌做賽伯當眞箇了得王四應道小人怎敢差遲路上不曾住脚一直逩囘莊上史進道既然如此敎人去縣𥚃買些果品案酒伺候不覺中秋節至是日晴明得好史進當日分付家中莊客宰了一腔大羊殺了百十箇鷄鵝准俻下酒食筵宴看看天色晚來怎見得好個中秋但見午夜𥘉長黄昏巳半一輪月掛如銀水盤如晝賞翫正冝人清影十分圓滿桂花玉兎交馨簾櫳高捲金盃頻勸酒歡笑賀昇平年年當此節酩酊醉醺醺莫辭終夕飮銀漢露華新且說少華山上朱武陳逹楊春三箇頭領分付小嘍囉看守寨栅只帶三五箇做伴將了朴刀各跨口腰刀不騎鞍馬步行下山逕來到史家莊上史進接着各叙禮罷請入後園莊内巳安排下筵宴史進請三位頭領上坐史進對席相陪便呌莊客把前後莊門拴了一面飮酒莊内莊客輪流把盏一邊割羊勸酒酒至數盃却早東邉推起那輪明月但見桂花離海嶠雲葉散天衢彩霞照萬里如銀素魄映千山似水一輪𠁊塏能分宇宙澄清四海團圝射映乾坤皎潔影橫曠野驚獨宿之烏鴉光射平湖照雙栖之鴻鴈氷輪展出三千里玉兎平吞四百州史進正和三箇頭領在後園飲酒賞翫中秋叙說舊話新言只聽得墻外一聲喊起火把亂明史進大驚跳起身來分付三位賢友且坐待我去看喝呌莊客不要開門掇條梯子上墻打一看時只見是華陰縣縣尉在馬上引着兩箇都頭带着三四百土兵圍住莊院史進和三箇頭領没做理會外面火把光中照見鋼叉朴刀五股叉留客住擺得似麻林一般兩箇都頭口𥚃呌道不要走了強賊不是這夥人來捉史進并三個頭領有分教史進先殺了一兩個人結識了十數個好漢大閙動河北直使天罡地煞一齊相㑹直教蘆花深處屯兵士荷葉陰中治戰舡畢竟史進與三個頭領怎地脫身且聼下囘分解總評神機軍師朱武妙計只是投降若非史進英雄拿去解官請賞何如或曰此正見神機處何𤼵一笑鍾伯敬先生批評水滸傳卷之二(終)"
b = "第二囘王教頭私走延安府九紋龍大閙史家村千古幽扃一旦開天罡地殺出星臺自來無事多生事本爲禳災𨚫惹災社覆從今雲擾擾兵戈到处閙垓垓高俅奸侫眞堪恨洪信從今釀禍胎當時道官對洪太尉說是老祖天師洞玄眞人鎭鎖着三十六員天罡星七十二座地殺星共一百單八個魔君上立石碑刻著龍章鳳篆天符鎭住若放他出世必害下方生靈如今放脫怎生是好太尉听了大驚收拾同衆人囘京在路分付衆人把走妖魔事情休説恐天子知而見責囘至京師听知天師在東京做了七晝夜好事普施符籙瘟疫寧息軍民安痊天師囘龍虎山去了太尉次日朝見天子奏說天師駕雲已先到京師臣從馹傳而來靣君復命仁宗准奏賞賜洪信仁宗在位四十二年傳位英宗英宗在位四年傳位神宗神宗在位一十八年傳位哲宗天下太平四方無事且說東京開封府汴梁宣武軍一個浮浪子弟姓高名俅自㓜好使鎗棒相拍頑耍踢得好氣毬京師人都呌他做高毬後來發跡將氣毬毬字去了毛傍添作立人名爲高俅在東京城𥚃因幫生鉄王員外兒子賭錢被他父親告了府尹把高俅㫁了四十杖迭配出界不許東京城𥚃人民容藏高俅無計只得投奔淮州柳世雄家三年後宋哲宗因拜南郊大赦天下高俅要囘東京這柳世雄𨚫和東京城𥚃金梁橋下開生藥鋪董將士是親寫了封書與高俅去投董將士家過活高俅辞了柳大郎囘東京至董家呈上世雄的書董將士看畢尋思□高俅是個浪子着留了他必定教壞吾兒待不收留又負了柳大郎靣情只得权且留下一日将士思計對高俅曰弟欲留公在家恐悞了足下我轉薦足下與蘇學士處乆後必得出身足下如何高俅大喜董將士卽使人持書引高俅到學士府學士出堂見了高俅看了來書心下想道不如薦他去鬪馬王晉卿府𥚃做個親隨人次日脩書一封使人送高俅去王都太尉處這太尉乃是哲宗的妹夫神宗皇帝的駙馬他喜愛風流人物當時駙馬見蘇學士這人送高俅來拜見了卽隨寫囘書收留高俅做個親隨人忽一日王太尉慶生辰安排酒宴專請小𠢎端王這端王乃神宗第十一子哲宗御弟掌東駕排號九大王是聰明俊俏人物更兼琴棋書畫踢球打彈品竹調絲无有不能當日王都尉府中在偹筵宴但見香焚寶鼎花揷金瓶仙音院競奏新聲教坊司頻逞妙藝水晶壺內尽都是紫府瓊漿琥珀盃中滿泛着瑤池玉液玳瑁盤堆着仙桃異果玻璃碗俱是熊掌駝蹄鱗鱗膾切銀絲細細茶烹玉蕋紅裙舞女尽随着象板鸞簫翠袖歌姬簇捧定龍笙鳳管兩行珠翠立階前一派笙歌臨座上端王來都尉府中赴宴酒進數盃端王起身淨手來書院𥚃見案上一對羊脂玉碾成的鎭紙獅子極做得細巧端王拿起看了一會曰好王都尉見端王心愛便說曰再有一個玉龍筆架也是那匠人做的明日一併相送端王大喜称謝依舊入席至暮方散次日王都尉取出玉龍筆架鎭紙玉獅子使高俅送投端王府中來院公出見引到庭前高俅看見端王頭戴軟紗唐巾身穿紫綉袍腰繋王帶足穿嵌金線靴與三五個小黃門相伴踢氣球高俅立在從人背後伺侯也是高俅合當發跡那個氣球直滚到高俅身邉那高俅見氣球來到身邉便使個鴛鴦拐踢还端王端王大喜問曰你是甚麽人高俅跪下曰小人是王都尉親随使令賫送兩般玉玩噐献上大王有書在上端王看了玩噐卽令收了便問高俅你原來會踢氣球喚作甚名高俅跪荅小人名喚高俅這氣球胡乱踢得幾腳端王曰你便踢一囘高俅拜曰小的是何䓁樣人敢與大王下腳端王曰這是齊雲社名為天下圓但踢何妨高俅叩頭觧膝上塲𦂯踢幾腳端王喝采高俅把平生本事都使出來那氣毬一似鰾膠粘在身上端王大喜留住高俅次日設宴請王都尉赴宴王都尉見了令旨隨卽來到宮中端王先謝玉玩噐後入席飮宴間端王曰這高俅踢得兩腳好氣毬孤欲用此人就當伏侍端王執盃相謝至脫筵罷王都尉自囘端王自得高俅未及兩月哲宗来有太子文武商議共立端王嗣哲宗之位爲天子號曰徽宗皇帝登基之後擡舉高俅做到殿帥府太尉之聀高俅卽選吉日到任所有一應牙將都軍禁軍馬步兵䓁都來𠫭拜只欠一名乃八十萬禁軍教頭王進軍政司稟曰半月之前已有病狀不曾入衙高俅怒曰此人推病在家隨卽差人拿王進且說這王進止有老母无妻子牌軍來拿王進只得捱病入府𠫭見拜了高俅曰你是都軍教頭王昇的兒子王進稟曰小人便是高俅喝曰你是街市上使花棒賣营藥的你如何敢不伏我點視詐病在家王進告曰小人怎敢是寔患病高俅罵曰你既患病如何來得唱令左右拿下王進與我重打衆牙將皆稟曰今日是老爺上任好日权免這次高太尉喝曰且看衆將之靣饒你明日理會王進起來認得是高俅出衙門咲曰只道是甚麽高殿帥原來是東京幫閑的圓社的高二先時曾學使棒被我父親一棒打番他今日要報前仇囘到家中對娘說知此事母子抱頭而哭王進曰兒子尋思不如逃去延安府老种經畧相公名下投他方可安身母曰門前兩個牌軍是殿帥撥來的他若得知便走不脫王進曰不妨兒子自有道理當晚對兩個牌軍說我因前日患病在酸棗門外岳庙𥚃許下香愿明日要去燒香你今晚去買三牲先去对他說知二人先領命去了當夜子母收拾行李出了西華門望延安而去且說兩個牌軍買了福物在庙中䓁到次日巳牌不見來二人心焦走囘見鎖了門直尋到晚不見踪跡兩人恐怕連累及巳卽投殿師府中首先說王進棄家逃走不知去向高太尉大怒卽押文書行關各州府捉拿不題且說王進子母自離東京在路月餘一日天晚不斍路過宿店捱到一處是一所大庄王進到庄前敲門有一庄客出來王進施礼曰小人母子貪行些路錯過客店來投貴庄借宿明早便行庄客入報出來言曰太公教你兩人進去王進同母入到草堂見太公各叙礼畢太公問曰客官貴處因甚昏晚到此王進曰小人姓張原是京師人要去延安府投奔親眷太公曰既如此但村中无甚相待休得見怪王進謝曰多蒙厚意无恩可報晚飯畢太公引王進子母到客房安歇王進曰小人的馬相煩寄养一発还錢太公曰我家也有頭口呌庄客牽去後槽喂养王進謝了各自安歇次日大明王進收拾要行來後槽看馬只見空地上有一個後生脫膊刺着一身青龍拿一條棍在那里使王進咲曰只有些破綻那後生听得喝道你是甚人敢咲我的本事俺曾經七八个明師倒不如你麽說犹未了太公來到喝那後生不得无礼那後生曰闘耐這廝咲我的棍法太公曰客官莫會使棒王進曰畧曉得些敢問這後生是誰太公曰是老漢的兒子進曰既然是小官人小人點撥他端正如何太公曰恁的極好便喚那後生來呌師父後生曰爹爹休聽這厮胡說他若贏得我一棍我便拜他爲師王進曰小官人若不相信請較量一棒耍那後生拿一條棒使得似風車兒樣轉呌王進曰你來你來王進只是咲不肯動手太公曰客官既肯見教小頑使一棒何妨王進咲曰只恐冲撞了令郎太公道這個不妨客官只管上塲王進曰恕罪了拿一條棒在手使個旗皷势那後生輪棒滾將過來王進托地拖了棒便走那後生又趕入來王進囘身舉棒望空劈將下來那後生用棒來隔王進𨚫不打下來提棒望後生懷𥚃只一鈢那後生的棒丟在一邉撲地倒了王進連忙進前扶住曰休怪休怪那後生扒將起來便拜曰俺自經了許多教師不如客官願請賜教王進曰俺母子在此多擾當效力報恩太公大喜教庄客安排酒食就請王進的母親一同赴席太公曰師父如此高强必然是個教頭小兒有眼不識泰山王進曰寔不相瞞小人不姓張乃是東京八十萬禁軍教頭王進便是爲因新任高太尉原被先父打畨今做殿帥府太尉怀挾旧仇因此母子二人迯上延安府老种經畧相公処勾當不想得遇太公如此看待若令郎肯學小人顧奉教太公□老漢祖居華隂縣界內前靣便是少華山這村喚做史家莊老漢這個兒子自㓜不務農業只愛刺鎗使棒母親說他不得嘔氣死了老漢只得隨他性子不知去了多少錢財投師這身花綉刺有九條龍人都呌他做九紋龍史進教頭既到這里望乞賜教自當重謝王進曰既然如此必當奉命自此留住王進母子在庄上每日教史進點撥他十八般武藝矛錘弓弩銃鞭簡劍鏈撾斧鉞并戈戟牌棒與鎗爬𨚫說史進留王進指教武藝不覺半年王進把十八般兵噐教得史進精熟王進相辞要行史進曰師父只在我家我奉養師父母子以終天年王進曰雖蒙好意只恐高太尉知道連累不便史進太公苦留不住設宴送行托出一盤緞子百兩花銀謝師次日王進收拾望延安府去了史進送了一程囘庄每日演習武藝當時六月炎天史進坐在柳隂樹下乘涼見一獵戶呌做摽兎李吉行過史進問曰你往常挑野味在我庄上來賣這一向爲何不來李吉曰小人不說大郎不知近日少華山上添了一夥强人聚有七百餘人爲頭的大王喚做神机軍師朱武第二個喚做跳澗虎陳達第三個喚作白花蛇楊春官兵不敢促他小人因此不敢上山打獵那討野味史進听了尋思這賊終乆來我庄上便教庄客殺牛聚集四百餘庄人飮酒對衆人曰我今听得少華山上有一夥強人恐早晚間要來我村中打我特請你衆人商議他倘若來我村中時你們各執鎗棒前來救應一家有事各家救護衆人曰我們村農只靠大郎作主梆子响時誰敢不來當日衆人囘家准偹噐械不題𨚫說少華山神机軍師朱武廣有智畧一日與陳達楊春計議曰我听知華隂縣𥚃出三千貫賞錢招人來捉我們軍兵來時要與他們厮殺目今山寨缺少錢粮如之奈何陳達曰便去革隂縣𥚃借粮看他如何楊春曰不要去華隂縣只去浦城縣万无一失陳達曰浦城縣錢粮稀少只去打華隂縣錢粮更多楊春曰若去打華隂縣時須從史家村過聞知九紋龍史進有万人之敵他如何肯放我們過去陳達曰量一村坊過去不得尚敢抵敵官軍長他人之志氣滅自巳的威風遂點嘍啰披掛下山去了史進正在庄上整頓弓馬只見庄客報說賊到史進呌敲起梆子那四百庄人都到史進頭戴一字巾身穿硃紅甲前後鉄掩心一張弓一壺箭手提一把三尖刃騎一⽦火炭赤馬庄人随後吶喊直到庄前排開陣势見陳達頭頂
乾紅盔身披鏤金甲坐下一⽦高鞍馬手牛點鋼鎗二將相見陳達馬上欠身施礼史進喝曰汝等強盜敢來太𡻕頭上動土陳達曰因我山寨欠缺錢粮欲往華隂縣借粮經由貴村借路過去不敢動你一根草囘日重謝史進曰我家正當甲長放你過去本縣知道必連累我陳達曰四海之內皆兄弟也借路一過不妨史進不允陳達大怒挺鎗刺來史進拍馬來迎二人閗了五十合史進使個破綻讓陳達一鎗望心窩𥚃搠來史進𨚫把腰一閃陳達和鎗撲入怀𥚃史進輕舒猿臂只一挾把陳達捉過馬來衆嘍囉都走了史進囘到庄上將陳達綁在柱上偹酒來賞了衆人俱各准偹𨚫說朱武楊春正在寨中嘍啰報說二頭領被捉去了朱武嘆曰不听吾言果有此禍楊春曰奈何朱武曰我有一條計可以救他楊春曰有何計朱武附耳低言春曰好計和你便去史進正在庄上庄客來報曰少華山朱武楊春都來了史進便提刀上馬正出庄門只見朱武楊春都到䨇䨇跪下史進喝曰你二人跪下如何朱武哭曰小人三個因被官司累次逼迫不得巳上山落草三人當初發願不願同生只求同死雖不及關張劉偹其心則同今陳達誤犯被促我三人義不貪生特來請死大郎將我三人觧官請賞誓不皱眉史進听了他們如此義氣我若拿他觧官反被天下好漢耻咲便曰你二人跟我進來朱武楊春随了史進直到厛前跪下又請綁縛史進曰惺惺惜惺惺好漢惜好漢你們既如此義氣我若送了你們不是好漢放陳達还你如何朱武曰休得連累了將軍寧可將我們觧官史進曰不可卽令入了陳達就置酒疑待三人飮罷拜辞史進三人囘到寨中朱武曰雖然是計亦難得史進好意我們須要報謝随卽收拾得三十條金使両個嘍囉趂月送與史進嘍羅到史進庄內將金献上告達三人酧謝不殺之恩史進受了金子教庄客將酒相待囘山半月朱武䓁擄得一出大珠子又使嘍囉送來史進又受了尋思難得這三個敬重我也討些礼囘奉他次日教三個裁縫做了三件錦襖殺了一腔肥羊令庄客送至山寨見了一個頭領朱武䓁大喜收了礼物欵待來人白金五両庄客拜別囘來史進自此與朱武往來荏苒光陰將近八月中秋要請三人至十五日夜來庄上賞月先令庄客王四送書去請三位頭領看書大喜卽冩下囘書賞銀下山遇着嘍囉又拖去酒店中吃了數碗相別囘程走不到十里酒𨚫湧上來便醉倒了那摽兎李吉正在山坡下來認得是史家庄的王四逕來扶他見王四𦞂膊𥚃突出銀子來李吉尋思這厮醉了這銀子何不拿他的去李吉觧下𦞂膊一抖那封囘書和銀子都抖出來李吉將書拆開見書上冩着少華山朱武三人名字李吉曰闘耐史進原來與強盜來往把書望華隂縣出首去了王四睡到三更方醒看見月光跳將起來四邉都是松樹忙去腰間摸時𦞂膊并書都不見了哭曰銀子不打𦂳失了這封書如何是好心生一計只說不曽有回書來到庄上史進問曰你往何方𦂯囘來王四曰托主人福蔭寨中頭領留我吃了半夜酒因此回遲史進又問曰曽有回書否王四曰他要脩囘書是小人說若拿囘書恐路上不便史進大喜排起筵宴伺侯朱武三人分付嘍囉看守寨門只帶三五個作伴各藏短刀下山來到庄上史進接着各敘礼畢請入後園分賓主坐定令庄客把前後庄門拴了一靣飮酒酒至數杯只見東邉推起那輪明月但見秋夜初長黃錯巳半一輪月掛如銀氷盤如昼翫正空人淸影十分圓滿桂花玉兎交馨簾籠高捲金盃頻觀酒觀咲賀昇平當此節酩酊醉燻燻莫辭終夕醉銀漢露華新且說史進正和三人飮酒只聽得牆外𠴁起火把亂明三人大驚史進曰三位休慌待我去看掇條梯子傍墻一看只見縣尉在馬上引兩個都頭領四百士兵圍住庄院都頭大呌不要走了強盜這夥人來捉進進直使大罡地殺一齊相會正是芦花深處藏將士荷葉隂中聚𢧐舡畢竟史進與三個頭領怎的脫身且聽下囘分觧"
id_a = "A006267-002"
id_b = "A006371-002"
title_a = "鍾伯敬先生批評水滸伝一百巻一百回 第二回"
title_b = "新刻全像忠義水滸誌伝二十五巻一百十五回 第二回"
sourceDesc = root.find(prefix + "sourceDesc")
listWit = ET.Element("{http://www.tei-c.org/ns/1.0}listWit")
sourceDesc.append(listWit)
witness = ET.Element("{http://www.tei-c.org/ns/1.0}witness")
listWit.append(witness)
witness.set("xml:id", id_a)
witness.text = title_a
witness = ET.Element("{http://www.tei-c.org/ns/1.0}witness")
listWit.append(witness)
witness.set("xml:id", id_b)
witness.text = title_b
teiHeader = root.find(prefix + "teiHeader")
encodingDesc = ET.Element("{http://www.tei-c.org/ns/1.0}encodingDesc")
teiHeader.append(encodingDesc)
variantEncoding = ET.Element("{http://www.tei-c.org/ns/1.0}variantEncoding")
encodingDesc.append(variantEncoding)
variantEncoding.set("method", "parallel-segmentation")
variantEncoding.set("location", "internal")
s = difflib.SequenceMatcher(None, a, b)
old_ele = p
for tag, i1, i2, j1, j2 in s.get_opcodes():
if tag == "delete":
app = ET.Element("{http://www.tei-c.org/ns/1.0}app")
p.append(app)
rdg = ET.Element("{http://www.tei-c.org/ns/1.0}rdg")
app.append(rdg)
rdg.set("wit", "#"+id_a)
rdg.text = a[i1:i2]
old_ele = app
elif tag == "insert":
app = ET.Element("{http://www.tei-c.org/ns/1.0}app")
p.append(app)
rdg = ET.Element("{http://www.tei-c.org/ns/1.0}rdg")
app.append(rdg)
rdg.set("wit", "#"+id_b)
rdg.text = b[j1:j2]
old_ele = app
elif tag == "replace":
app = ET.Element("{http://www.tei-c.org/ns/1.0}app")
p.append(app)
rdg = ET.Element("{http://www.tei-c.org/ns/1.0}rdg")
app.append(rdg)
rdg.set("wit", "#"+id_a)
rdg.text = a[i1:i2]
rdg = ET.Element("{http://www.tei-c.org/ns/1.0}rdg")
app.append(rdg)
rdg.set("wit", "#"+id_b)
rdg.text = b[j1:j2]
old_ele = app
elif tag == "equal":
old_ele.tail = a[i1:i2]
else:
print(tag)
tree.write("data/diff.xml", encoding="utf-8")
avg_line_length: 184.737864 | max_line_length: 11,603 | alphanum_fraction: 0.947393
count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 50,656 | score_documentation: 0.968566
hexsha: 7fd026487b4ed720e388b3ddeb8812e59526c4f0 | size: 6,342 | ext: py | lang: Python
max_stars / max_issues / max_forks repo: WamBamBoozle/pants @ 98cadfa1a5d337146903eb66548cfe955f2627b3 | path: tests/python/pants_test/pants_run_integration_test.py | licenses: ["Apache-2.0"]
max_stars_count / max_issues_count / max_forks_count: null | all star/issue/fork event datetimes: null
content:
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import subprocess
import unittest
from collections import namedtuple
from operator import eq, ne
from pants.base.build_environment import get_buildroot
from pants.fs.archive import ZIP
from pants.util.contextutil import temporary_dir
from pants.util.dirutil import safe_mkdir, safe_open
PantsResult = namedtuple('PantsResult', ['command', 'returncode', 'stdout_data', 'stderr_data'])
class PantsRunIntegrationTest(unittest.TestCase):
"""A base class useful for integration tests for targets in the same repo."""
PANTS_SUCCESS_CODE = 0
PANTS_SCRIPT_NAME = 'pants'
@classmethod
def has_python_version(cls, version):
"""Returns true if the current system has the specified version of python.
:param version: A python version string, such as 2.6, 3.
"""
try:
subprocess.call(['python%s' % version, '-V'])
return True
except OSError:
return False
def workdir_root(self):
# We can hard-code '.pants.d' here because we know that will always be its value
# in the pantsbuild/pants repo (e.g., that's what we .gitignore in that repo).
# Grabbing the pants_workdir config would require this pants's config object,
# which we don't have a reference to here.
root = os.path.join(get_buildroot(), '.pants.d', 'tmp')
safe_mkdir(root)
return root
def run_pants_with_workdir(self, command, workdir, config=None, stdin_data=None, extra_env=None,
**kwargs):
config = config.copy() if config else {}
# We add workdir to the DEFAULT section, and also ensure that it's emitted first.
default_section = config.pop('DEFAULT', {})
default_section['pants_workdir'] = '%s' % workdir
ini = ''
for section, section_config in [('DEFAULT', default_section)] + config.items():
ini += '\n[%s]\n' % section
for key, val in section_config.items():
ini += '%s: %s\n' % (key, val)
ini_file_name = os.path.join(workdir, 'pants.ini')
with safe_open(ini_file_name, mode='w') as fp:
fp.write(ini)
env = os.environ.copy()
env.update(extra_env or {})
pants_script = os.path.join(get_buildroot(), self.PANTS_SCRIPT_NAME)
pants_command = [pants_script,
'--kill-nailguns',
'--no-pantsrc',
'--config-override={0}'.format(ini_file_name),
'--print-exception-stacktrace'] + command
proc = subprocess.Popen(pants_command, env=env, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
(stdout_data, stderr_data) = proc.communicate(stdin_data)
return PantsResult(pants_command, proc.returncode, stdout_data.decode("utf-8"),
stderr_data.decode("utf-8"))
def run_pants(self, command, config=None, stdin_data=None, extra_env=None, **kwargs):
"""Runs pants in a subprocess.
:param list command: A list of command line arguments coming after `./pants`.
:param config: Optional data for a generated ini file. A map of <section-name> ->
map of key -> value. If order in the ini file matters, this should be an OrderedDict.
:param kwargs: Extra keyword args to pass to `subprocess.Popen`.
:returns a tuple (returncode, stdout_data, stderr_data).
"""
with temporary_dir(root_dir=self.workdir_root()) as workdir:
return self.run_pants_with_workdir(command, workdir, config, stdin_data, extra_env, **kwargs)
def bundle_and_run(self, target, bundle_name, args=None):
"""Creates the bundle with pants, then does java -jar {bundle_name}.jar to execute the bundle.
:param target: target name to compile
:param bundle_name: resulting bundle filename (minus .jar extension)
:param args: optional arguments to pass to executable
:return: stdout as a string on success, raises an Exception on error
"""
pants_run = self.run_pants(['bundle', '--archive=zip', target])
self.assert_success(pants_run)
# TODO(John Sirois): We need a zip here to suck in external library classpath elements
# pointed to by symlinks in the run_pants ephemeral tmpdir. Switch run_pants to be a
# contextmanager that yields its results while the tmpdir workdir is still active and change
# this test back to using an un-archived bundle.
with temporary_dir() as workdir:
ZIP.extract('dist/{bundle_name}.zip'.format(bundle_name=bundle_name), workdir)
optional_args = []
if args:
optional_args = args
java_run = subprocess.Popen(['java',
'-jar',
'{bundle_name}.jar'.format(bundle_name=bundle_name)]
+ optional_args,
stdout=subprocess.PIPE,
cwd=workdir)
stdout, _ = java_run.communicate()
java_returncode = java_run.returncode
self.assertEquals(java_returncode, 0)
return stdout
def assert_success(self, pants_run, msg=None):
self.assert_result(pants_run, self.PANTS_SUCCESS_CODE, expected=True, msg=msg)
def assert_failure(self, pants_run, msg=None):
self.assert_result(pants_run, self.PANTS_SUCCESS_CODE, expected=False, msg=msg)
def assert_result(self, pants_run, value, expected=True, msg=None):
check, assertion = (eq, self.assertEqual) if expected else (ne, self.assertNotEqual)
if check(pants_run.returncode, value):
return
details = [msg] if msg else []
details.append(' '.join(pants_run.command))
details.append('returncode: {returncode}'.format(returncode=pants_run.returncode))
def indent(content):
return '\n\t'.join(content.splitlines())
if pants_run.stdout_data:
details.append('stdout:\n\t{stdout}'.format(stdout=indent(pants_run.stdout_data)))
if pants_run.stderr_data:
details.append('stderr:\n\t{stderr}'.format(stderr=indent(pants_run.stderr_data)))
error_msg = '\n'.join(details)
assertion(value, pants_run.returncode, error_msg)
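A brief usage sketch, not part of the original file: a hypothetical subclass exercising the helpers above, where the 'goals' goal name is only illustrative:

class ExampleIntegrationTest(PantsRunIntegrationTest):
  def test_goals_smoke(self):
    # Runs `./pants goals` against a temporary workdir and expects a zero exit code.
    pants_run = self.run_pants(['goals'])
    self.assert_success(pants_run)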
avg_line_length: 41.45098 | max_line_length: 99 | alphanum_fraction: 0.677231
count_classes: 5,642 | score_classes: 0.889625 | count_generators: 0 | score_generators: 0 | count_decorators: 319 | score_decorators: 0.0503 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 2,235 | score_documentation: 0.352412
hexsha: 7fd3371311cc6675c8548300ec8d2acf6af4b1ea | size: 2,036 | ext: py | lang: Python
max_stars / max_issues / max_forks repo: George-Okumu/IReporter-Django @ 5962984ce0069cdf048dbf91686377568a7cf55b | path: ireporterApp/migrations/0001_initial.py | licenses: ["MIT"]
max_stars_count: null | max_issues_count: 1 (2021-10-06T20:15:11.000Z – 2021-10-06T20:15:11.000Z) | max_forks_count: null
content:
# Generated by Django 3.2.8 on 2021-10-13 16:04
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import ireporterApp.models
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='RedFlag',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=43)),
('description', models.TextField(max_length=100)),
('status', models.CharField(default='received', max_length=20)),
('redFlag_image', models.ImageField(blank=True, null=True, upload_to=ireporterApp.models.project_upload)),
('redFlag_video', models.CharField(blank=True, max_length=20, null=True)),
('redFlag_location', models.CharField(blank=True, max_length=20, null=True)),
('created_at', models.DateTimeField(auto_now=True)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='redflag', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Intervention',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('subject', models.TextField(max_length=200)),
('description', models.TextField()),
('location', models.TextField(max_length=90)),
('upload_image', models.ImageField(null=True, upload_to='')),
('video', models.CharField(blank=True, max_length=20, null=True)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='intervention', to=settings.AUTH_USER_MODEL)),
],
),
]
avg_line_length: 45.244444 | max_line_length: 147 | alphanum_fraction: 0.623281
count_classes: 1,850 | score_classes: 0.908644 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 274 | score_documentation: 0.134578
hexsha: 7fd3a16afbbba984f178b48eb62fc0be86afc9a5 | size: 1,090 | ext: py | lang: Python
max_stars / max_issues / max_forks repo: JuergenSimon/gcspypi @ 80ac843e6702161915db45949c470d749aaabfda | path: ethronsoft/gcspypi/parsers/list.py | licenses: ["BSD-2-Clause"]
max_stars_count / max_issues_count / max_forks_count: null | all star/issue/fork event datetimes: null
content:
from ethronsoft.gcspypi.package.package_manager import PackageManager
from ethronsoft.gcspypi.utilities.console import Console
from ethronsoft.gcspypi.parsers.commons import init_repository
def handle_(config, data):
with Console(verbose=config.get("verbose", False), exit_on_error=True) as c:
repo = init_repository(c, config["repository"])
pkg_mgr = PackageManager(repo, console=c, installer=None, is_python3 = config.get("python3", False))
for path in sorted(pkg_mgr.list_items(data["package"], from_cache=True)):
c.output(path.split("/")[-1])
class ListParser(object):
def __init__(self, subparsers):
self.name = "list"
list_parser = subparsers.add_parser(self.name,
description="""Displays all versions of a certain package
or all content of the repository if package name is omitted""")
list_parser.add_argument("package", nargs="?", default="", help="Package Name")
def handle(self, config, data):
handle_(config, data)
avg_line_length: 49.545455 | max_line_length: 108 | alphanum_fraction: 0.665138
count_classes: 501 | score_classes: 0.459633 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 224 | score_documentation: 0.205505
hexsha: 7fd55e4cd2783cbb99a566e8a1ee6ac0b5a0d931 | size: 18,880 | ext: py | lang: Python
max_stars / max_issues / max_forks repo: slmjy/oci-ansible-modules @ 4713699064f4244b4554b5b2f97b5e5443fa2d6e | path: library/oci_dhcp_options.py | licenses: ["Apache-2.0"]
max_stars_count: 106 (2018-06-29T16:38:56.000Z – 2022-02-16T16:38:56.000Z) | max_issues_count: 122 (2018-09-11T12:49:39.000Z – 2021-05-01T04:54:22.000Z) | max_forks_count: 78 (2018-07-04T05:48:54.000Z – 2022-03-09T06:33:12.000Z)
content:
#!/usr/bin/python
# Copyright (c) 2017, 2018, 2019, Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_dhcp_options
short_description: Create, update and delete OCI Dhcp Options
description:
- Creates OCI Dhcp Options
- Update OCI Dhcp Options, if present, with a new display name
- Update OCI Dhcp Options, if present, by appending new options to existing options
- Update OCI Dhcp Options, if present, by purging existing options and replacing them with
specified ones
- Delete OCI Dhcp Options, if present.
version_added: "2.5"
options:
compartment_id:
description: Identifier of the compartment under which this
Dhcp Options would be created. Mandatory for create
operation.Optional for delete and update. Mutually exclusive
with dhcp_id.
required: false
vcn_id:
description: Identifier of the Virtual Cloud Network to which the
Dhcp Options should be attached. Mandatory for create
operation. Optional for delete and update. Mutually exclusive
with dhcp_id.
required: false
dhcp_id:
description: Identifier of the Dhcp Options. Mandatory for delete and update.
required: false
aliases: ['id']
display_name:
description: Name of the Dhcp Options. A user friendly name. Does not have to be unique,
and could be changed. If not specified, a default name would be provided.
required: false
aliases: ['name']
options:
description: A set of DHCP options. Mandatory for create and update.
required: false
suboptions:
type:
description: The specific DHCP option.
required: true
choices: ['DomainNameServer', 'SearchDomain']
server_type:
description: Applicable only for the I(type='DomainNameServer').Describes the
type of the server.
required: true
choices: ['VcnLocalPlusInternet', 'CustomDnsServer']
custom_dns_servers:
description: Applicable only for the I(type='DomainNameServer') and I(server_type='CustomDnsServer').
Maximum three DNS server ips are allowed as part of this option.
required: false
search_domain_names:
description: Applicable only for the I(type='SearchDomain').A single search domain name
according to RFC 952 and RFC 1123. Do not include this option with an empty
list of search domain names, or with an empty string as the value for any search
domain name.
required: true
purge_dhcp_options:
description: Purge existing Dhcp Options which are not present in the provided
Dhcp Options. If I(purge_dhcp_options=no), provided options would be
appended to existing options. I(purge_dhcp_options) and I(delete_dhcp_options)
are mutually exclusive.
required: false
default: 'yes'
type: bool
delete_dhcp_options:
description: Delete existing Dhcp Options which are present in the Dhcp Options provided by
I(options). If I(delete_dhcp_options=yes), options provided by I(options) would be
deleted from existing options, if they are part of existing dhcp options.
If they are not part of existing dhcp options, they will be ignored.
I(delete_dhcp_options) and I(purge_dhcp_options) are mutually exclusive.
required: false
default: 'no'
type: bool
state:
description: Create,update or delete Dhcp Options. For I(state=present), if it
does not exist, it gets created. If it exists, it gets updated.
required: false
default: 'present'
choices: ['present','absent']
author:
- "Debayan Gupta(@debayan_gupta)"
extends_documentation_fragment: [ oracle, oracle_creatable_resource, oracle_wait_options, oracle_tags ]
"""
EXAMPLES = """
#Note: These examples do not set authentication details.
#Create/update Dhcp Options
- name: Create Dhcp options
oci_dhcp_options:
compartment_id: 'ocid1.compartment..xdsc'
name: 'ansible_dhcp_options'
vcn_id: 'ocid1.vcn..aaaa'
options:
- type: 'DomainNameServer'
server_type: 'VcnLocalPlusInternet'
custom_dns_servers: []
- type: 'SearchDomain'
search_domain_names: ['ansibletestvcn.oraclevcn.com']
freeform_tags:
region: 'east'
defined_tags:
features:
capacity: 'medium'
state: 'present'
# Update Dhcp Options by appending new options
- name: Update Dhcp Options by appending new options
oci_dhcp_options:
id: 'ocid1.dhcpoptions.oc1.aaa'
purge_dhcp_options: 'no'
options:
- type: 'DomainNameServer'
server_type: 'CustomDnsServer'
custom_dns_servers: ['10.0.0.8']
- type: 'SearchDomain'
search_domain_names: ['ansibletestvcn.oraclevcn.com']
state: 'present'
# Update Dhcp Options by purging existing options
- name: Update Dhcp Options by purging existing options
oci_dhcp_options:
dhcp_id: 'ocid1.dhcpoptions.oc1.aaa'
options:
- type: 'DomainNameServer'
server_type: 'CustomDnsServer'
custom_dns_servers: ['10.0.0.8', '10.0.0.10', '10.0.0.12']
- type: 'SearchDomain'
search_domain_names: ['ansibletestvcn.oraclevcn.com']
state: 'present'
# Update Dhcp Options by deleting existing options
- name: Update Dhcp Options by deleting existing options
oci_dhcp_options:
dhcp_id: 'ocid1.dhcpoptions.oc1.aaa'
options:
- type: 'DomainNameServer'
server_type: 'CustomDnsServer'
custom_dns_servers: ['10.0.0.8', '10.0.0.10', '10.0.0.12']
delete_dhcp_options: 'yes'
state: 'present'
#Delete Dhcp Options
- name: Delete Dhcp Options
oci_dhcp_options:
dhcp_id: 'ocid1.dhcpoptions..xdsc'
state: 'absent'
"""
RETURN = """
dhcp_options:
description: Attributes of the created/updated Dhcp Options.
For delete, deleted Dhcp Options description will
be returned.
returned: success
type: complex
contains:
compartment_id:
description: The identifier of the compartment containing the Dhcp Options
returned: always
type: string
sample: ocid1.compartment.oc1.xzvf..oifds
display_name:
description: Name assigned to the Dhcp Options during creation
returned: always
type: string
sample: ansible_dhcp_options
id:
description: Identifier of the Dhcp Options
returned: always
type: string
sample: ocid1.dhcpoptions.oc1.axdf
vcn_id:
description: Identifier of the Virtual Cloud Network to which the
Dhcp Options is attached.
returned: always
type: string
sample: ocid1.vcn..ixcd
lifecycle_state:
description: The current state of the Dhcp Options
returned: always
type: string
sample: AVAILABLE
options:
description: A list of dhcp options.
returned: always
type: list
sample: [{"custom_dns_servers": [],"server_type": "CustomDnsServer","type": "DomainNameServer"},
{"search_domain_names": ["myansiblevcn.oraclevcn.com"],"type": "SearchDomain"}]
time_created:
description: Date and time when the Dhcp Options was created, in
the format defined by RFC3339
returned: always
type: datetime
sample: 2016-08-25T21:10:29.600Z
sample: {
"compartment_id":"ocid1.compartment.oc1..xxxxxEXAMPLExxxxx",
"freeform_tags":{"region":"east"},
"defined_tags":{"features":{"capacity":"medium"}},
"display_name":"ansible_dhcp_options",
"id":"ocid1.dhcpoptions.oc1.phx.xxxxxEXAMPLExxxxx",
"lifecycle_state":"AVAILABLE",
"options":[
{
"custom_dns_servers":[],
"server_type":"VcnLocalPlusInternet",
"type":"DomainNameServer"
},
{
"search_domain_names":["ansibletestvcn.oraclevcn.com"],
"type":"SearchDomain"
},
{
"custom_dns_servers":["10.0.0.8"],
"server_type":"CustomDnsServer",
"type":"DomainNameServer"
}
],
"time_created":"2017-11-26T16:41:06.996000+00:00",
"vcn_id":"ocid1.vcn.oc1.phx.xxxxxEXAMPLExxxxx"
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.oracle import oci_utils
try:
from oci.core import VirtualNetworkClient
from oci.exceptions import ServiceError, MaximumWaitTimeExceeded, ClientError
from oci.util import to_dict
from oci.core.models import (
CreateDhcpDetails,
DhcpDnsOption,
UpdateDhcpDetails,
DhcpSearchDomainOption,
)
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
def create_or_update_dhcp_options(virtual_network_client, module):
result = dict(changed=False, dhcp_options="")
dhcp_id = module.params.get("dhcp_id")
exclude_attributes = {"display_name": True}
try:
if dhcp_id:
existing_dhcp_options = oci_utils.get_existing_resource(
virtual_network_client.get_dhcp_options, module, dhcp_id=dhcp_id
)
result = update_dhcp_options(
virtual_network_client, existing_dhcp_options, module
)
else:
result = oci_utils.check_and_create_resource(
resource_type="dhcp_options",
create_fn=create_dhcp_options,
kwargs_create={
"virtual_network_client": virtual_network_client,
"module": module,
},
list_fn=virtual_network_client.list_dhcp_options,
kwargs_list={
"compartment_id": module.params.get("compartment_id"),
"vcn_id": module.params.get("vcn_id"),
},
module=module,
exclude_attributes=exclude_attributes,
model=CreateDhcpDetails(),
)
except ServiceError as ex:
module.fail_json(msg=ex.message)
except MaximumWaitTimeExceeded as ex:
module.fail_json(msg=str(ex))
except ClientError as ex:
module.fail_json(msg=ex.args[0])
return result
def create_dhcp_options(virtual_network_client, module):
options = get_options_objects(module.params["options"])
create_dhcp_details = CreateDhcpDetails()
for attribute in create_dhcp_details.attribute_map:
create_dhcp_details.__setattr__(attribute, module.params.get(attribute))
create_dhcp_details.options = options
result = oci_utils.create_and_wait(
resource_type="dhcp_options",
create_fn=virtual_network_client.create_dhcp_options,
kwargs_create={"create_dhcp_details": create_dhcp_details},
client=virtual_network_client,
get_fn=virtual_network_client.get_dhcp_options,
get_param="dhcp_id",
module=module,
)
return result
def update_dhcp_options(virtual_network_client, existing_dhcp_options, module):
if existing_dhcp_options is None:
raise ClientError(
Exception(
"No Dhcp Options with id "
+ module.params.get("dhcp_id")
+ " is found for update"
)
)
result = dict(dhcp_options=to_dict(existing_dhcp_options), changed=False)
name_tag_changed = False
options_changed = False
input_options = module.params.get("options")
update_dhcp_details = UpdateDhcpDetails()
existing_options = existing_dhcp_options.options
attributes_to_compare = ["display_name", "freeform_tags", "defined_tags"]
for attribute in attributes_to_compare:
name_tag_changed = oci_utils.check_and_update_attributes(
update_dhcp_details,
attribute,
module.params.get(attribute),
getattr(existing_dhcp_options, attribute),
name_tag_changed,
)
if input_options is not None:
if input_options:
options, options_changed = oci_utils.get_component_list_difference(
get_options_objects(input_options),
get_hashed_options(existing_options),
module.params.get("purge_dhcp_options"),
module.params.get("delete_dhcp_options"),
)
if options_changed:
update_dhcp_details.options = options
else:
update_dhcp_details.options = existing_options
if name_tag_changed or options_changed:
result = oci_utils.update_and_wait(
resource_type="dhcp_options",
update_fn=virtual_network_client.update_dhcp_options,
kwargs_update={
"dhcp_id": existing_dhcp_options.id,
"update_dhcp_details": update_dhcp_details,
},
client=virtual_network_client,
get_fn=virtual_network_client.get_dhcp_options,
get_param="dhcp_id",
module=module,
)
return result
def get_hashed_options(options):
hashed_options = []
if options is None:
return hashed_options
for option in options:
dhcp_option = None
if option.type == "DomainNameServer":
dhcp_option = oci_utils.create_hashed_instance(DhcpDnsOption)
dhcp_option.type = "DomainNameServer"
server_type = option.server_type
dhcp_option.server_type = server_type
if server_type == "CustomDnsServer":
dhcp_option.custom_dns_servers = option.custom_dns_servers
else:
dhcp_option.custom_dns_servers = []
elif option.type == "SearchDomain":
dhcp_option = oci_utils.create_hashed_instance(DhcpSearchDomainOption)
dhcp_option.type = "SearchDomain"
dhcp_option.search_domain_names = option.search_domain_names
hashed_options.append(dhcp_option)
return hashed_options
def get_options_objects(options):
dhcp_options = []
for option in options:
dhcp_option = None
if option["type"] == "DomainNameServer":
dhcp_option = oci_utils.create_hashed_instance(DhcpDnsOption)
dhcp_option.type = "DomainNameServer"
server_type = option["server_type"]
dhcp_option.server_type = server_type
if server_type == "CustomDnsServer":
dhcp_option.custom_dns_servers = option.get("custom_dns_servers", None)
else:
dhcp_option.custom_dns_servers = []
elif option["type"] == "SearchDomain":
dhcp_option = oci_utils.create_hashed_instance(DhcpSearchDomainOption)
dhcp_option.type = "SearchDomain"
search_domain_names = option["search_domain_names"]
if search_domain_names:
dhcp_option.search_domain_names = option["search_domain_names"]
else:
raise ClientError("search_domain_names field should not be empty")
dhcp_options.append(dhcp_option)
return dhcp_options
def delete_dhcp_options(virtual_network_client, module):
return oci_utils.delete_and_wait(
resource_type="dhcp_options",
client=virtual_network_client,
get_fn=virtual_network_client.get_dhcp_options,
kwargs_get={"dhcp_id": module.params["dhcp_id"]},
delete_fn=virtual_network_client.delete_dhcp_options,
kwargs_delete={"dhcp_id": module.params["dhcp_id"]},
module=module,
)
def main():
module_args = oci_utils.get_taggable_arg_spec(
supports_create=True, supports_wait=True
)
module_args.update(
dict(
compartment_id=dict(type="str", required=False),
display_name=dict(type="str", required=False, aliases=["name"]),
vcn_id=dict(type="str", required=False),
dhcp_id=dict(type="str", required=False, aliases=["id"]),
state=dict(
type="str",
required=False,
default="present",
choices=["present", "absent"],
),
options=dict(type=list, required=False),
purge_dhcp_options=dict(type="bool", required=False, default=True),
delete_dhcp_options=dict(type="bool", required=False, default=False),
)
)
module = AnsibleModule(
argument_spec=module_args,
mutually_exclusive=[["purge_dhcp_options", "delete_dhcp_options"]],
)
if not HAS_OCI_PY_SDK:
module.fail_json(msg="oci python sdk required for this module")
virtual_network_client = oci_utils.create_service_client(
module, VirtualNetworkClient
)
state = module.params["state"]
if state == "present":
result = create_or_update_dhcp_options(virtual_network_client, module)
elif state == "absent":
result = delete_dhcp_options(virtual_network_client, module)
module.exit_json(**result)
if __name__ == "__main__":
main()
avg_line_length: 39.251559 | max_line_length: 117 | alphanum_fraction: 0.610911
count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 10,867 | score_documentation: 0.575583
hexsha: 7fd5f598f64deef95bef0002d5fa94fb5341e2f5 | size: 4,066 | ext: py | lang: Python
max_stars / max_issues / max_forks repo: otakucode/canistream @ 42682b05eeaf98d6bd13125508c871a5cc5cb885 | path: canistream.py | licenses: ["MIT"]
max_stars_count / max_issues_count / max_forks_count: null | all star/issue/fork event datetimes: null
content:
#!/usr/bin/env python3
__author__ = 'otakucode'
import argparse
import sys
from urllib import parse
from bs4 import BeautifulSoup
import requests
def get_title(search_type, search_title):
search_encoded = parse.quote(search_title)
page = requests.get('http://www.canistream.it/search/{0}/{1}'.format(search_type, search_encoded),
headers={'referer' : 'http://www.canistream.it/',
'Content-Type' : 'application/x-www-form-urlencoded',
'User-Agent' : 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:37.0) Gecko/20100101 Firefox/37.0'})
soup = BeautifulSoup(page.content)
searchresult = soup.find(class_="search-result row")
if searchresult is None:
return None
movie_id = searchresult['rel']
proper_title = searchresult['data1']
return (movie_id, proper_title)
def query_availability(movie_id, availability_type):
results = requests.get('http://www.canistream.it/services/query',
headers={'referer' : 'http://www.canistream.it/',
'User-Agent' : 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:37.0) Gecko/20100101 Firefox/37.0'},
params={'movieId' : movie_id,
'attributes' : 1,
'mediaType' : availability_type})
return results.json()
def get_availability(movie_id, verbose, omits):
all_queries = ['Streaming', 'Rental', 'Purchase', 'xfinity']
queries = []
for query in all_queries:
if omits is None or query not in omits:
queries.append(query)
availability = ''
firstone = True
for q in queries:
result = query_availability(movie_id, q.lower())
if result:
if verbose:
availability += "\n" + q + ": "
else:
if firstone == True:
firstone = False
else:
availability += ', '
services = []
for key in result.keys():
services.append(result[key]['friendlyName'])
if key == 'apple_itunes_purchase':
# Fix bug in canistream.it which lists wrong friendlyName for iTunes purchases
services[-1] = 'Apple iTunes Purchase'
if result[key]['price'] != 0:
services[-1] += ' (${0})'.format(result[key]['price'])
availability += ', '.join(services)
return availability
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Search www.canistream.it for movie availability.')
parser.add_argument('movie', metavar='Title', type=str, help='title to search for')
#parser.add_argument('--tv', help='search for TV show instead of movie')
parser.add_argument('--verbose', '-v', action='store_true')
parser.add_argument('--no-streaming', action='append_const', const='streaming', dest='omits', help='do not search for streaming availability')
parser.add_argument('--no-rental', action='append_const', const='rental', dest='omits', help='do not search for rental availability')
parser.add_argument('--no-purchase', action='append_const', const='purchase', dest='omits', help='do not search for purchase availability')
parser.add_argument('--no-xfinity', action='append_const', const='xfinity', dest='omits', help='do not search for xfinity availability')
args = parser.parse_args()
print("Searching...", end='')
sys.stdout.flush()
movie = get_title('movie', args.movie)
if movie is None:
print("\rNo titles matching '{0}' found.".format(args.movie))
sys.exit()
(movie_id, proper_title) = movie
results = get_availability(movie_id, args.verbose, args.omits)
if len(results) == 0:
print('\r"{0}" is not currently available.'.format(proper_title))
else:
print('\r"{0}" is available via: '.format(proper_title), end='')
print(results)
avg_line_length: 37.302752 | max_line_length: 146 | alphanum_fraction: 0.599361
count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 1,357 | score_documentation: 0.333743
hexsha: 7fd64d5d9687aeafd41778f375063551f567e46f | size: 67 | ext: py | lang: Python
max_stars / max_issues / max_forks repo: liangleslie/core @ cc807b4d597daaaadc92df4a93c6e30da4f570c6 | path: homeassistant/components/hardware/const.py | licenses: ["Apache-2.0"]
max_stars_count: 30,023 (2016-04-13T10:17:53.000Z – 2020-03-02T12:56:31.000Z) | max_issues_count: 24,710 (2016-04-13T08:27:26.000Z – 2020-03-02T12:59:13.000Z) | max_forks_count: 11,956 (2016-04-13T18:42:31.000Z – 2020-03-02T09:32:12.000Z)
content:
"""Constants for the Hardware integration."""
DOMAIN = "hardware"
avg_line_length: 16.75 | max_line_length: 45 | alphanum_fraction: 0.716418
count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 55 | score_documentation: 0.820896
hexsha: 7fd65ccdc6806c6f41c85d9bb13f95232c26ec00 | size: 2,646 | ext: py | lang: Python
max_stars / max_issues / max_forks repo: shellyln/red-agate @ 71847872caded631b4783f3baaf5a3e2a0b495a0 | path: packages/_debug_app/app.py | licenses: ["0BSD"]
max_stars_count: 14 (2017-12-03T15:57:17.000Z – 2021-07-11T12:57:24.000Z) | max_issues_count: 10 (2020-02-25T08:20:38.000Z – 2020-09-03T22:00:18.000Z) | max_forks_count: 4 (2018-03-30T16:09:44.000Z – 2022-01-03T19:26:16.000Z)
content:
#!/usr/bin/env python3
import json
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../red-agate/')
# pylint: disable=import-error, wrong-import-position
from redagate_lambda import call, LambdaInternalErrorException
# pylint: enable=import-error, wrong-import-position
if __name__ == '__main__':
from flask import Flask, abort, Response
app = Flask(__name__)
@app.errorhandler(LambdaInternalErrorException)
def internal_error_handler(e):
return 'Internal Server Error', 500
@app.route('/')
def run_report():
with open('./src/reports/barcode.data.json') as f:
event = json.loads(f.read())
event['eventName'] = '/'
return call(command=["node", "dist/app.js"], event=event)
@app.route('/pdf')
def run_pdf_report():
with open('./src/reports/barcode.data.json') as f:
event = json.loads(f.read())
event['eventName'] = '/pdf'
return Response(response=call(command=["node", "dist/app.js"], event=event), mimetype="application/pdf")
@app.route('/billing')
def run_billing_report():
with open('./src/reports/barcode.data.json') as f:
event = json.loads(f.read())
event['eventName'] = '/billing'
return call(command=["node", "dist/app.js"], event=event)
@app.route('/kanban')
def run_kanban_report():
with open('./src/reports/barcode.data.json') as f:
event = json.loads(f.read())
event['eventName'] = '/kanban'
return call(command=["node", "dist/app.js"], event=event)
@app.route('/fba-a4')
def run_fba_report():
with open('./src/reports/barcode.data.json') as f:
event = json.loads(f.read())
event['eventName'] = '/fba-a4'
return call(command=["node", "dist/app.js"], event=event)
@app.route('/barcode')
def run_barcode_test_report():
with open('./src/reports/barcode.data.json') as f:
event = json.loads(f.read())
event['eventName'] = '/barcode-test'
return call(command=["node", "dist/app.js"], event=event)
port = int(os.environ['PORT']) if os.environ.get('PORT') is not None else None
# To debug with VSCode, set debug=True, use_debugger=False, use_reloader=False.
# debug - whether to enable debug mode and catch exceptions.
# use_debugger - whether to use the internal Flask debugger.
# use_reloader - whether to reload and fork the process on exception.
app.run(debug=True, use_debugger=False, use_reloader=False, port=port)
avg_line_length: 37.8 | max_line_length: 116 | alphanum_fraction: 0.620937
count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 1,736 | score_decorators: 0.656085 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 963 | score_documentation: 0.363946
hexsha: 7fd69c3d5f382287835cb80d361531b2ea2f11db | size: 1,290 | ext: py | lang: Python
max_stars / max_issues / max_forks repo: teklager/djangocms-cascade @ adc461f7054c6c0f88bc732aefd03b157df2f514 | path: cmsplugin_cascade/migrations/0009_cascadepage.py | licenses: ["MIT"]
max_stars_count: 139 (2015-01-08T22:27:06.000Z – 2021-08-19T03:36:58.000Z) | max_issues_count: 286 (2015-01-02T14:15:14.000Z – 2022-03-22T11:00:12.000Z) | max_forks_count: 91 (2015-01-16T15:06:23.000Z – 2022-03-23T23:36:54.000Z)
content:
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('cms', '0013_urlconfrevision'),
('cmsplugin_cascade', '0008_sortableinlinecascadeelement'),
]
operations = [
migrations.CreateModel(
name='CascadePage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('settings', models.JSONField(blank=True, default={}, help_text='User editable settings for this page.')),
('glossary', models.JSONField(blank=True, default={}, help_text='Store for arbitrary page data.')),
('extended_object', models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, to='cms.Page')),
('public_extension', models.OneToOneField(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='draft_extension', to='cmsplugin_cascade.CascadePage')),
],
options={
'db_table': 'cmsplugin_cascade_page',
'verbose_name': 'Cascade Page Settings',
'verbose_name_plural': 'Cascade Page Settings',
},
),
]
avg_line_length: 44.482759 | max_line_length: 199 | alphanum_fraction: 0.628682
count_classes: 1,213 | score_classes: 0.94031 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 401 | score_documentation: 0.310853
hexsha: 7fd7f3a7ab836b1162a754535f994bd325636a89 | size: 1,679 | ext: py | lang: Python
max_stars / max_issues / max_forks repo: ly3too/chinese-poetry @ 47362e5b7bf3976c986765eb8eb9b82e771e0771 | path: es_import_poet.py | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: 1 (2020-11-27T06:49:33.000Z – 2020-11-27T06:49:33.000Z)
content:
from elasticsearch_dsl import *
import os
from glob import glob
import json
import re
from . import to_zh_cn
class Poet(Document):
dynasty = Text()
author = Text()
title = Text(analyzer='jieba_index', search_analyzer='jieba_search')
paragraphs = Text(analyzer='jieba_index', search_analyzer='jieba_search')
class Index:
name = 'poetry_shi'
settings = {
"number_of_shards": 3,
"number_of_replicas": 1
}
class Author(Document):
name = Text()
desc = Text(analyzer='jieba_index', search_analyzer='jieba_search')
class Index:
name = 'author'
settings = {
"number_of_shards": 3,
"number_of_replicas": 1
}
def do_es_import():
"""
import data from current dir
"""
Poet.init()
Author.init()
patt = re.compile(r'^[a-zA-Z]+\.([a-zA-Z]+)\.')
cur_dir = os.path.dirname((os.path.abspath(__file__)))
data_files = glob("{}/json/poet.*.json".format(cur_dir))
for file in data_files:
with open(file, 'r') as f:
data = json.load(f)
dynasty = patt.findall(os.path.basename(file))[0]
for item in data:
item["dynasty"] = dynasty
one = Poet(**to_zh_cn(item))
one.save()
data_files = glob("{}/json/authors.*.json".format(cur_dir))
for file in data_files:
with open(file, 'r') as f:
data = json.load(f)
dynasty = patt.findall(os.path.basename(file))[0]
for item in data:
item["dynasty"] = dynasty
one = Author(**to_zh_cn(item))
one.save()
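Poet.init(), Author.init() and the save() calls above all rely on elasticsearch_dsl having a default connection registered (and on the jieba analyzers being installed in the cluster). A minimal setup sketch; the localhost URL and timeout are assumptions, not taken from the repository:

# Sketch: register a default connection before running the import.
from elasticsearch_dsl import connections
connections.create_connection(hosts=["localhost:9200"], timeout=30)
# do_es_import()   # then creates both indices and saves every poem/author document one by one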
| 27.080645
| 77
| 0.561048
| 618
| 0.368076
| 0
| 0
| 0
| 0
| 0
| 0
| 318
| 0.189398
|
7fd89c7d4555eeef4b73bc37f963bc2cf833445b
| 5,785
|
py
|
Python
|
pyenvgui/gui/components/_version_management.py
|
ulacioh/pyenv-gui
|
03c3b102d78b474f103f7e828533a684f3e87ff6
|
[
"BSD-3-Clause"
] | 2
|
2020-05-18T04:37:37.000Z
|
2020-06-01T03:33:48.000Z
|
pyenvgui/gui/components/_version_management.py
|
ulacioh/pyenv-manager
|
03c3b102d78b474f103f7e828533a684f3e87ff6
|
[
"BSD-3-Clause"
] | null | null | null |
pyenvgui/gui/components/_version_management.py
|
ulacioh/pyenv-manager
|
03c3b102d78b474f103f7e828533a684f3e87ff6
|
[
"BSD-3-Clause"
] | null | null | null |
from threading import Thread
import tkinter as tk
from tkinter import ttk, messagebox
from . import pyenv
from ._custom_widgets import Treeview
class VersionManagementFrame(ttk.Frame):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Installed versions options
self.installed_options = ttk.LabelFrame(
self, text='Installed versions'
)
self.tree = Treeview(self.installed_options)
self.tree.grid(column=0, row=0, columnspan=2)
self.tree.insert_items(
list(pyenv.installed) + ['system'],
highlight=pyenv.global_version[0]
)
ttk.Button(
self.installed_options,
text='Set as global',
command=lambda: self._set_as_global(
self.tree.selected_item
)
).grid(column=0, row=1, sticky="nsew")
ttk.Button(
self.installed_options, text='Uninstall',
command=lambda: self._uninstall(
self.tree.selected_item
)
).grid(column=1, row=1, sticky="nsew")
self.installed_options.grid(column=0, row=0)
# Install a new version options
self.install_options = ttk.LabelFrame(
self, text='Install a new version'
)
self.versions_combo = ttk.Combobox(
self.install_options, state="readonly"
)
self.versions_combo.set(' Select a version...')
self.versions_combo['values'] = pyenv.available
self.versions_combo.grid(column=0, row=1, sticky="nsew", padx=5)
self.install_button = ttk.Button(
self.install_options,
text='Install',
command=lambda: Thread(
target=self._start_installation,
args=(self.versions_combo.get(),)
).start()
)
self.install_button.grid(column=1, row=1, sticky="nsew", padx=5)
tk.Label(
self.install_options, text='Logs:'
).grid(column=0, row=2, pady=(5, 1))
self.terminal_output = tk.Text(
self.install_options,
font=("Helvetica", 8),
height=17,
width=70
)
self.terminal_output.grid(column=0, row=3, columnspan=2)
self.terminal_output.grid_propagate(False)
self.install_options.grid(column=1, row=0, sticky='ns')
def _set_as_global(self, version):
version = version['text'][2:]
if version and version != pyenv.global_version[0]:
            # The outer check already guarantees this version differs from the
            # current global one, so no extra identity comparison is needed.
            if version == 'system':
                del pyenv.global_version
            else:
                pyenv.global_version = [version]
self.tree.clean()
self.tree.insert_items(
list(pyenv.installed) + ['system'],
highlight=version
)
def _uninstall(self, version):
version = version['text'][2:]
if version:
if version == 'system':
messagebox.showerror(
title='Error',
message='You can not uninstall the system version from here.'
)
else:
op = messagebox.askquestion(
title='Uninstall',
message=f'Are you sure you want to uninstall version {version}?'
)
if op == 'yes':
pyenv.uninstall(version)
if version == pyenv.global_version[0]:
del pyenv.global_version
self.tree.clean()
self.tree.insert_items(
list(pyenv.installed) + ['system'],
highlight=pyenv.global_version[0]
)
messagebox.showinfo(
title='Uninstall',
message=f'{version} version has been uninstalled.'
)
def _start_installation(self, version):
if version and version != ' Select a version...':
self.install_button.config(state='disable')
ps = None
if version in pyenv.installed:
op = messagebox.askquestion(
title='Install',
message=f'Version {version} already exists.\nDo you want to continue?'
)
if op == 'yes':
ps = pyenv.install(version, verbose=True, force=True)
else:
ps = pyenv.install(version, verbose=True)
if ps:
self.terminal_output.delete('1.0', tk.END)
                while ps.poll() is None:
output = ps.stdout.readline().decode()
if output:
self.terminal_output.insert(tk.END, output)
self.terminal_output.see(tk.END)
if ps.returncode:
messagebox.showerror(
title='Install',
message='Something went wrong during the installation.'
)
else:
messagebox.showinfo(
title='Install',
message=f'Python {version} was installed successfully.'
)
self.tree.clean()
self.tree.insert_items(
list(pyenv.installed) + ['system'],
highlight=pyenv.global_version[0]
)
self.install_button.config(state='normal')
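VersionManagementFrame is an ordinary ttk.Frame, so it can be dropped into any Tk container. A minimal embedding sketch (it assumes the package's pyenv helper imports cleanly, since the frame queries it at construction time):

# Sketch: host the frame in a bare Tk window.
import tkinter as tk
root = tk.Tk()
root.title("pyenv GUI")
VersionManagementFrame(root).grid(padx=10, pady=10)
root.mainloop()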
| 33.247126
| 94
| 0.498876
| 5,636
| 0.974244
| 0
| 0
| 0
| 0
| 0
| 0
| 700
| 0.121003
|
7fd923442eb40472eb4d67dc3ec8b44e90f1f18e
| 636
|
py
|
Python
|
Week11/765.py
|
bobsingh149/LeetCode
|
293ed4931960bf5b9a3d5c4331ba4dfddccfcd55
|
[
"MIT"
] | 101
|
2021-02-26T14:32:37.000Z
|
2022-03-16T18:46:37.000Z
|
Week11/765.py
|
bobsingh149/LeetCode
|
293ed4931960bf5b9a3d5c4331ba4dfddccfcd55
|
[
"MIT"
] | null | null | null |
Week11/765.py
|
bobsingh149/LeetCode
|
293ed4931960bf5b9a3d5c4331ba4dfddccfcd55
|
[
"MIT"
] | 30
|
2021-03-09T05:16:48.000Z
|
2022-03-16T21:16:33.000Z
|
from typing import List
class Solution:
    def minSwapsCouples(self, row: List[int]) -> int:
        # Union-find: persons 2k and 2k+1 start in the same set (their couple);
        # each sofa (row[i], row[i+1]) then unions the two couples sitting on it.
        parent = [i for i in range(len(row))]
        for i in range(1, len(row), 2):
            parent[i] -= 1                # every odd person points at their partner
        def findpath(u, parent):
            if parent[u] != u:
                parent[u] = findpath(parent[u], parent)   # path compression
            return parent[u]
        for i in range(0, len(row), 2):
            u_parent = findpath(row[i], parent)
            v_parent = findpath(row[i + 1], parent)
            parent[u_parent] = v_parent
        # A connected group of k couples needs k - 1 swaps, so the answer is
        # (number of couples) minus (number of connected groups).
        return (len(row) // 2) - sum([1 for i in range(0, len(row), 2) if parent[i] == parent[i + 1] == i])
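A small worked example of the union-find above: with row = [0, 2, 1, 3] the two sofas mix couples (0, 1) and (2, 3) into one group, so one swap is needed.

# Sketch: two couples in one connected group -> 2 couples - 1 group = 1 swap.
print(Solution().minSwapsCouples([0, 2, 1, 3]))   # 1
print(Solution().minSwapsCouples([3, 2, 0, 1]))   # 0 (both sofas already seat a couple)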
| 33.473684
| 96
| 0.496855
| 626
| 0.984277
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
7fda7ecbf9da0226a54341ecb40e210f62c31957
| 1,951
|
py
|
Python
|
proj/python/Test/dictStock.py
|
jumib/BlackTensor
|
d66a4fb5289dbe86104900072284e4a881f55645
|
[
"MIT"
] | null | null | null |
proj/python/Test/dictStock.py
|
jumib/BlackTensor
|
d66a4fb5289dbe86104900072284e4a881f55645
|
[
"MIT"
] | null | null | null |
proj/python/Test/dictStock.py
|
jumib/BlackTensor
|
d66a4fb5289dbe86104900072284e4a881f55645
|
[
"MIT"
] | null | null | null |
import requests
# host = 'localhost:8080'
# path = '/member/changeAppId'
# payload = {'UserId' : userId }
# r = requests.get('localhost:8080/member/changeAppId', params=payload)
# import requests
# import json
#
# # GET
# res = requests.get('http://localhost:8080/member/changeAppId')
# print(str(res.status_code) + " | " + res.text)
#
# # POST (JSON)
# headers = {'Content-Type': 'application/json; chearset=utf-8'}
# payload = {'UserId' : 'userId' }
# res = requests.post('http://localhost:8080/member/changeAppId', payload=json.dumps(payload), headers=headers)
# print(str(res.status_code) + " | " + res.text)
#
# class DictStock:
# @app.route('/history/buy')
# def PythonServerResponse(self, itemName, m_date, openPrice, highPrice, lowPrice, currentPrice, volumn, tradingValue):
# print("It's operate")
# # self.myViewController = vc.ViewController()
# json_object = {
# "name": itemName,
# "일자": m_date,
# "시가": openPrice,
# "고가": highPrice,
# "저가": lowPrice,
# "현재가": currentPrice,
# "거래량": volumn,
# "거래대금": tradingValue
# }
# json_string = json.dumps(json_object)
# print(json_string)
# # return jsonify(json_object)
#
# app.run()
# # # data = {
# # # # 'itemName' : itemName,
# # # 'date' : m_date,
# # # 'openPrice' : openPrice
# # # }
# # # json_data = json.dumps(data)
# # # print(json_data)
# #
# #
# # # import json
# # #
# # # json_object = {
# # # "id": 1,
# # # "username": "Bret",
# # # "email": "Sincere@april.biz",
# # # "address": {
# # # "street": "Kulas Light",
# # # "suite": "Apt. 556",
# # # "city": "Gwenborough",
# # # "zipcode": "92998-3874"
# # # },
# # # "admin": False,
# # # "hobbies": None
# # # }
# # #
# # # json_string = json.dumps(json_object)
# # # print(json_string)
| 26.364865
| 123
| 0.538186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,899
| 0.955712
|
7fdaa5ddf18fe9267f5687f8511f00b862b0cdb0
| 1,549
|
py
|
Python
|
screen_scan.py
|
vjeranc/puzpirobot
|
3d6d0014cbd3092add56295aa463e3b31b750733
|
[
"MIT"
] | null | null | null |
screen_scan.py
|
vjeranc/puzpirobot
|
3d6d0014cbd3092add56295aa463e3b31b750733
|
[
"MIT"
] | null | null | null |
screen_scan.py
|
vjeranc/puzpirobot
|
3d6d0014cbd3092add56295aa463e3b31b750733
|
[
"MIT"
] | null | null | null |
from PIL import ImageGrab, Image
import cv2 as cv
import numpy as np
import match.template_matching as tm
import match.bilging as b
from mss import mss
def grab_screen():
img_src = ImageGrab.grab()
return cv.cvtColor(np.array(img_src.convert('RGB')), cv.COLOR_RGB2BGR)
class ScreenGrabber(object):
def grab(self):
return grab_screen()
class ScreenshotGrabber(object):
def grab(self):
return cv.imread('screenshot.png')
class FastScreenGrabber(object):
def grab(self):
with mss() as sct:
monitor = sct.monitors[1]
sct_img = sct.grab(monitor)
# Convert to PIL/Pillow Image
return cv.cvtColor(np.array(Image.frombytes('RGB', sct_img.size, sct_img.bgra, 'raw', 'BGRX').convert('RGB')), cv.COLOR_RGB2BGR)
paths = [("A", './images/whiteblue_square.png', True, 0.9),
("B", './images/greenblue_diamond.png', True, 0.9),
("C", './images/lightblue_circle.png', True, 0.9),
("D", './images/lightyellow_circle.png', True, 0.9),
("E", './images/darkblue_square.png', True, 0.9),
("F", './images/lightblue_square.png', True, 0.9),
("G", './images/lightblue_diamond.png', True, 0.9),
("X", './images/puffer.png', False, 0.5),
("Y", './images/crab.png', False, 0.5),
("Z", './images/jellyfish.png', False, 0.5)]
patterns = [tm.build_pattern(p, n, shape=(45, 45), circle_mask=c, threshold=t)
for n, p, c, t in paths]
b.track_board_state(ScreenshotGrabber(), patterns)
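The three grabber classes expose the same grab() method returning a BGR numpy array, so they are interchangeable in the final call. A small sketch using only the names defined above:

# Sketch: swap the static screenshot for live capture.
frame = FastScreenGrabber().grab()
print(frame.shape, frame.dtype)    # e.g. (1080, 1920, 3) uint8, depending on the monitor
# b.track_board_state(FastScreenGrabber(), patterns)   # instead of ScreenshotGrabber()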
| 32.270833
| 140
| 0.615881
| 512
| 0.330536
| 0
| 0
| 0
| 0
| 0
| 0
| 385
| 0.248547
|
7fdaeb9d10001a9b68a81dc49605856be1d46461
| 1,917
|
py
|
Python
|
simulacoes/queue-sim/src/eventoSaidaFilaZero.py
|
paulosell/ADS29009
|
a85bc0fe19993e3e6624c2605a362605b67c2311
|
[
"MIT"
] | null | null | null |
simulacoes/queue-sim/src/eventoSaidaFilaZero.py
|
paulosell/ADS29009
|
a85bc0fe19993e3e6624c2605a362605b67c2311
|
[
"MIT"
] | null | null | null |
simulacoes/queue-sim/src/eventoSaidaFilaZero.py
|
paulosell/ADS29009
|
a85bc0fe19993e3e6624c2605a362605b67c2311
|
[
"MIT"
] | null | null | null |
from src.event import Event
from src.rng import prng
from src.eventoChegadaFilaUm import EventoChegadaFilaUm
from src.eventoChegadaFilaDois import EventoChegadaFilaDois
class EventoSaidaFilaZero(Event):
def __init__(self,t,i):
super().__init__(t,i)
def processEvent(self, simulador):
num = simulador.u.ulcm()
if num <= 0.5:
rdn = simulador.chegada_um.exp()
chegadaFilaUm = EventoChegadaFilaUm(simulador.simtime,self.id)
simulador.scheduleEvent(chegadaFilaUm)
simulador.eventos1 = simulador.eventos1 + 1
elif num > 0.5 and num <= 0.8:
rdn = simulador.chegada_dois.exp()
chegadaFilaDois = EventoChegadaFilaDois(simulador.simtime,self.id)
simulador.scheduleEvent(chegadaFilaDois)
simulador.eventos2 = simulador.eventos2 + 1
else:
pass
if(len(simulador.queue_zero) > 0):
e = simulador.queue_zero.pop(0)
time = simulador.simtime+simulador.servico_zero.exp()
saida = EventoSaidaFilaZero(time,e.id)
simulador.scheduleEvent(saida)
simulador.fila_soma.append(time-e.time)
else:
simulador.server_zero = False
simulador.eventos = simulador.eventos+1
from src.eventoChegadaFilaZero import EventoChegadaFilaZero
rdn = simulador.chegada_zero.exp()
chegadaFilaZero = EventoChegadaFilaZero(simulador.simtime+rdn,simulador.eventos)
simulador.scheduleEvent(chegadaFilaZero)
"""
for ids in simulador.fila_tempos_zero:
if ids[0] == self.id:
dif = self.time - ids[1]
print(dif)
simulador.fila_soma.append(dif) """
| 38.34
| 111
| 0.586333
| 1,516
| 0.790819
| 0
| 0
| 0
| 0
| 0
| 0
| 206
| 0.10746
|
7fdbabe114f7c62834574d41bba0f9f62e53ca0f
| 242
|
py
|
Python
|
examples/camera/simple.py
|
Hikki12/remio
|
17942bffe3c0619d3435b1a12399b116d4c800e3
|
[
"Apache-2.0"
] | null | null | null |
examples/camera/simple.py
|
Hikki12/remio
|
17942bffe3c0619d3435b1a12399b116d4c800e3
|
[
"Apache-2.0"
] | null | null | null |
examples/camera/simple.py
|
Hikki12/remio
|
17942bffe3c0619d3435b1a12399b116d4c800e3
|
[
"Apache-2.0"
] | null | null | null |
"""Single simple camera example."""
import time
from remio import Camera
# Initialize Single Camera device
camera = Camera(name="webcam", src=0, size=[400, 400])
camera.start()
while True:
print("Doing some tasks...")
time.sleep(2)
| 20.166667
| 54
| 0.698347
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 97
| 0.400826
|
7fdecf5212432030558339550547b97267095dde
| 1,481
|
py
|
Python
|
write_cluster_wrappers.py
|
jrbourbeau/cr-composition
|
e9efb4b713492aaf544b5dd8bb67280d4f108056
|
[
"MIT"
] | null | null | null |
write_cluster_wrappers.py
|
jrbourbeau/cr-composition
|
e9efb4b713492aaf544b5dd8bb67280d4f108056
|
[
"MIT"
] | 7
|
2017-08-29T16:20:04.000Z
|
2018-06-12T16:58:36.000Z
|
write_cluster_wrappers.py
|
jrbourbeau/cr-composition
|
e9efb4b713492aaf544b5dd8bb67280d4f108056
|
[
"MIT"
] | 1
|
2018-04-03T20:56:40.000Z
|
2018-04-03T20:56:40.000Z
|
#!/usr/bin/env python
from __future__ import print_function
import os
import stat
import comptools as comp
here = os.path.abspath(os.path.dirname(__file__))
wrapper_path = os.path.join(here, 'wrapper.sh')
wrapper_virtualenv_path = os.path.join(here, 'wrapper_virtualenv.sh')
wrapper = """#!/bin/bash -e
eval `/cvmfs/icecube.opensciencegrid.org/py2-v3/setup.sh`
{icecube_env_script} \\
{wrapper_virtualenv_path} \\
python $*
"""
virtualenv_wrapper = """#!/bin/sh
source {virtualenv_activate}
$@
"""
icecube_env_script = os.path.join(comp.paths.metaproject,
'build',
'env-shell.sh')
virtualenv_activate = os.path.join(comp.paths.virtualenv_dir,
'bin',
'activate')
print('Writing wrapper script {}...'.format(wrapper_path))
with open(wrapper_path, 'w') as f:
lines = wrapper.format(icecube_env_script=icecube_env_script,
wrapper_virtualenv_path=wrapper_virtualenv_path)
f.write(lines)
print('Writing wrapper script {}...'.format(wrapper_virtualenv_path))
with open(wrapper_virtualenv_path, 'w') as f:
lines = virtualenv_wrapper.format(virtualenv_activate=virtualenv_activate)
f.write(lines)
def make_executable(path):
st = os.stat(path)
os.chmod(path, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
make_executable(wrapper_path)
make_executable(wrapper_virtualenv_path)
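For reference, the rendered wrapper.sh is just the template with the two paths substituted. A sketch with made-up paths (the real values come from comp.paths):

# Sketch: what wrapper.sh would contain for hypothetical install locations.
example = wrapper.format(
    icecube_env_script='/data/user/example/metaproject/build/env-shell.sh',
    wrapper_virtualenv_path='/data/user/example/cr-composition/wrapper_virtualenv.sh',
)
print(example)
# #!/bin/bash -e
# eval `/cvmfs/icecube.opensciencegrid.org/py2-v3/setup.sh`
# /data/user/example/metaproject/build/env-shell.sh \
# /data/user/example/cr-composition/wrapper_virtualenv.sh \
# python $*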
| 27.425926
| 78
| 0.673194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 352
| 0.237677
|
7fdf5c3be33fa3dac4e441a667a56bd88641def7
| 3,899
|
py
|
Python
|
labml_nn/gan/dcgan/__init__.py
|
BioGeek/annotated_deep_learning_paper_implementations
|
e2516cc3063cdfdf11cda05f22a10082297aa33e
|
[
"MIT"
] | 1
|
2021-09-17T18:16:17.000Z
|
2021-09-17T18:16:17.000Z
|
labml_nn/gan/dcgan/__init__.py
|
BioGeek/annotated_deep_learning_paper_implementations
|
e2516cc3063cdfdf11cda05f22a10082297aa33e
|
[
"MIT"
] | null | null | null |
labml_nn/gan/dcgan/__init__.py
|
BioGeek/annotated_deep_learning_paper_implementations
|
e2516cc3063cdfdf11cda05f22a10082297aa33e
|
[
"MIT"
] | null | null | null |
"""
---
title: Deep Convolutional Generative Adversarial Networks (DCGAN)
summary: A simple PyTorch implementation/tutorial of Deep Convolutional Generative Adversarial Networks (DCGAN).
---
# Deep Convolutional Generative Adversarial Networks (DCGAN)
This is a [PyTorch](https://pytorch.org) implementation of paper
[Unsupervised Representation Learning with Deep Convolutional Generative Adversarial Networks](https://papers.labml.ai/paper/1511.06434).
This implementation is based on the [PyTorch DCGAN Tutorial](https://pytorch.org/tutorials/beginner/dcgan_faces_tutorial.html).
"""
import torch.nn as nn
from labml import experiment
from labml.configs import calculate
from labml_helpers.module import Module
from labml_nn.gan.original.experiment import Configs
class Generator(Module):
"""
### Convolutional Generator Network
This is similar to the de-convolutional network used for CelebA faces,
but modified for MNIST images.
<img src="https://pytorch.org/tutorials/_images/dcgan_generator.png" style="max-width:90%" />
"""
def __init__(self):
super().__init__()
# The input is $1 \times 1$ with 100 channels
self.layers = nn.Sequential(
# This gives $3 \times 3$ output
nn.ConvTranspose2d(100, 1024, 3, 1, 0, bias=False),
nn.BatchNorm2d(1024),
nn.ReLU(True),
# This gives $7 \times 7$
nn.ConvTranspose2d(1024, 512, 3, 2, 0, bias=False),
nn.BatchNorm2d(512),
nn.ReLU(True),
# This gives $14 \times 14$
nn.ConvTranspose2d(512, 256, 4, 2, 1, bias=False),
nn.BatchNorm2d(256),
nn.ReLU(True),
# This gives $28 \times 28$
nn.ConvTranspose2d(256, 1, 4, 2, 1, bias=False),
nn.Tanh()
)
self.apply(_weights_init)
def forward(self, x):
# Change from shape `[batch_size, 100]` to `[batch_size, 100, 1, 1]`
x = x.unsqueeze(-1).unsqueeze(-1)
x = self.layers(x)
return x
class Discriminator(Module):
"""
### Convolutional Discriminator Network
"""
def __init__(self):
super().__init__()
# The input is $28 \times 28$ with one channel
self.layers = nn.Sequential(
# This gives $14 \times 14$
nn.Conv2d(1, 256, 4, 2, 1, bias=False),
nn.LeakyReLU(0.2, inplace=True),
# This gives $7 \times 7$
nn.Conv2d(256, 512, 4, 2, 1, bias=False),
nn.BatchNorm2d(512),
nn.LeakyReLU(0.2, inplace=True),
# This gives $3 \times 3$
nn.Conv2d(512, 1024, 3, 2, 0, bias=False),
nn.BatchNorm2d(1024),
nn.LeakyReLU(0.2, inplace=True),
# This gives $1 \times 1$
nn.Conv2d(1024, 1, 3, 1, 0, bias=False),
)
self.apply(_weights_init)
def forward(self, x):
x = self.layers(x)
return x.view(x.shape[0], -1)
def _weights_init(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
nn.init.normal_(m.weight.data, 0.0, 0.02)
elif classname.find('BatchNorm') != -1:
nn.init.normal_(m.weight.data, 1.0, 0.02)
nn.init.constant_(m.bias.data, 0)
# We import the [simple gan experiment](../original/experiment.html) and change the
# generator and discriminator networks
calculate(Configs.generator, 'cnn', lambda c: Generator().to(c.device))
calculate(Configs.discriminator, 'cnn', lambda c: Discriminator().to(c.device))
def main():
conf = Configs()
experiment.create(name='mnist_dcgan')
experiment.configs(conf,
{'discriminator': 'cnn',
'generator': 'cnn',
'label_smoothing': 0.01})
with experiment.start():
conf.run()
if __name__ == '__main__':
main()
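A quick shape check of the two networks, matching the sizes annotated in the comments above (a sketch; it assumes torch and the labml helper imports resolve):

# Sketch: verify the documented MNIST shapes with random tensors.
import torch
z = torch.randn(4, 100)            # batch of 4 latent vectors
fake = Generator()(z)              # -> torch.Size([4, 1, 28, 28])
score = Discriminator()(fake)      # -> torch.Size([4, 1])
print(fake.shape, score.shape)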
| 32.22314
| 137
| 0.606566
| 2,224
| 0.570403
| 0
| 0
| 0
| 0
| 0
| 0
| 1,501
| 0.384971
|
7fdfdb7709a1fc9542844590029ad949b49fd04c
| 2,846
|
py
|
Python
|
src/flexible_models/flexible_GPT2.py
|
AlexDuvalinho/AITextGenerator
|
02b96e40612209b5f7599674f9cd0e867f74fc59
|
[
"MIT"
] | 36
|
2021-01-13T03:33:49.000Z
|
2022-03-31T00:37:16.000Z
|
src/flexible_models/flexible_GPT2.py
|
bytjn1416124/AITextGenerator
|
01ff72ebf373018d0d708cdd2018229fd386f73b
|
[
"MIT"
] | 5
|
2021-03-08T15:51:30.000Z
|
2021-08-16T11:56:56.000Z
|
src/flexible_models/flexible_GPT2.py
|
bytjn1416124/AITextGenerator
|
01ff72ebf373018d0d708cdd2018229fd386f73b
|
[
"MIT"
] | 16
|
2021-02-20T05:04:47.000Z
|
2022-03-22T01:56:27.000Z
|
from .flexible_model import FlexibleModel
from src.utils import GPT2_BLOCK_SIZE
import torch
from src.flexible_models.GPT2_lm_segment_model import GPT2LMSegmentModel
class FlexibleGPT2(FlexibleModel):
"""
A FlexibleGPT2 model is simply the combination of a huggingface gpt2 transformers model and
a decoding strategy
"""
def __init__(self, model, tokenizer, decoding_strategy):
"""
Initializes a GPT2 model.
:param model: huggingface gpt2 transformers
:param tokenizer: huggingface gpt2 tokenizers
:param decoding_strategy: dict of parameters for huggingface transformers.generate methods
"""
super().__init__()
self.model = model
self.model.eval()
if torch.cuda.is_available():
model.cuda()
self.tokenizer = tokenizer
tokenizer.pad_token = tokenizer.eos_token
self.decoding_strategy = decoding_strategy
self.max_length = GPT2_BLOCK_SIZE
self.min_length = 0
self.set_decoding_strategy(decoding_strategy)
if isinstance(self.model, GPT2LMSegmentModel):
self.model.set_special_tokens(self.tokenizer)
def set_decoding_strategy(self, decoding_strategy):
self.decoding_strategy = decoding_strategy
self.max_length = decoding_strategy[
'max_length'] if 'max_length' in decoding_strategy.keys() else GPT2_BLOCK_SIZE
self.min_length = decoding_strategy['min_length'] if 'min_length' in decoding_strategy.keys() else 0
def predict(self, input_ids, nb_samples=1):
"""
Performs GPT-2 generation on strings of any length.
:param input_ids: torch.tensor of shape (batch_size, max_length)
:param nb_samples: nb_sample to generate for each input example
:return: list of strings of len batch_size * nb_samples
"""
# If inputs_ids consist of a single example, we create from it a batch of 1 example
if len(input_ids.shape) == 1:
input_ids = input_ids.view(1, -1)
# We use a mask so that GPT2 does not take into account the PAD token during generation time
mask = (input_ids != self.tokenizer.pad_token_id).long()
self.decoding_strategy['max_length'] = self.max_length
self.decoding_strategy['min_length'] = 10
if torch.cuda.is_available():
input_ids = input_ids.cuda()
mask = mask.cuda()
outputs_id = self.model.generate(input_ids=input_ids,
pad_token_id=self.tokenizer.pad_token_id,
eos_token_id=self.tokenizer.eos_token_id,
attention_mask=mask,
num_return_sequences=nb_samples,
**self.decoding_strategy)
outputs_id = outputs_id.detach().cpu()
# only keep the token corresponding to the generation part
# this is because transformers.generate methods also return the input part
truncated_outputs_id = outputs_id[:, input_ids.shape[1]:]
return [self.tokenizer.decode(truncated_outputs_id[i], skip_special_tokens=False).replace('<|endoftext|>', '')
for i in range(outputs_id.shape[0])]
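A minimal usage sketch with a stock Hugging Face GPT-2; the decoding_strategy keys are standard generate() arguments, and whether the src.utils / GPT2_lm_segment_model imports resolve depends on running inside this repository:

# Sketch: wrap a pretrained GPT-2 and generate one continuation.
from transformers import GPT2LMHeadModel, GPT2Tokenizer
model = GPT2LMHeadModel.from_pretrained("gpt2")
tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
decoding_strategy = {"do_sample": True, "top_k": 50, "max_length": 120}
flexible = FlexibleGPT2(model, tokenizer, decoding_strategy)
input_ids = tokenizer.encode("The old house at the end of the street", return_tensors="pt")
print(flexible.predict(input_ids, nb_samples=1)[0])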
| 35.575
| 112
| 0.757554
| 2,676
| 0.940267
| 0
| 0
| 0
| 0
| 0
| 0
| 996
| 0.349965
|
7fe0f727107b9ce99344df8215be2fd9b8d15fef
| 2,091
|
py
|
Python
|
measurements-plot/udp-plots.py
|
HaoruiPeng/latencymeasurement
|
82a9c5300a7cedd72885780f542982bf76ae49b2
|
[
"MIT"
] | null | null | null |
measurements-plot/udp-plots.py
|
HaoruiPeng/latencymeasurement
|
82a9c5300a7cedd72885780f542982bf76ae49b2
|
[
"MIT"
] | null | null | null |
measurements-plot/udp-plots.py
|
HaoruiPeng/latencymeasurement
|
82a9c5300a7cedd72885780f542982bf76ae49b2
|
[
"MIT"
] | null | null | null |
import os
import sys
import pandas as pd
import numpy as np
import scipy.stats as st
import matplotlib.pyplot as plt
def read_udp(file_path):
with open(file_path, "r") as f:
data_dict = {'send':{}, 'rec':{}}
data = pd.read_csv(file_path, sep=",", engine='python', error_bad_lines=False, skiprows=1)
data.columns=['mode', 'seq', 'stamp']
for index, row in data.iterrows():
data_dict[row['mode']][row['seq']] = row['stamp']
loss = 0
rtt_array = []
for k in data_dict['send'].keys():
try:
rtt = (data_dict['rec'][k] - data_dict['send'][k])*1000
rtt_array.append(rtt)
except KeyError:
loss += 1
total_send = data['seq'].iloc[-1]
loss_prob = loss/total_send
return loss_prob, rtt_array
DATADIR = "../data"
cluster_name = ["bbcluster", "erdc"]
figure, axes = plt.subplots(1, 2)
figure.suptitle("UDP")
PING = dict.fromkeys(cluster_name, {})
for i in range(2):
cluster = cluster_name[i]
cluster_dir = os.path.join(DATADIR, cluster)
axes[i].set_title(cluster)
data = []
labels = []
for root, dirs, files in os.walk(cluster_dir, topdown=False):
for file_name in files:
mode, stack = file_name.split("_")
if mode == "udp":
dst = stack.split(".")[0]
file_path = os.path.join(cluster_dir, file_name)
loss_prb, rtt_array = read_udp(file_path)
# print(rtt_array)
length = len(rtt_array)
# rtt_outliar_removal = np.sort(rtt_array)[0: round(length*0.999)]
# rtt_mean = np.mean(rtt_outliar_removal)
# rtt_std = np.sqrt(np.var(rtt_outliar_removal))
# rtt_conf = st.norm.interval(0.95, loc=rtt_mean, scale=rtt_std)
# PING[cluster][dst] = (rtt_mean, rtt_conf)
data.append(rtt_array)
labels.append(dst)
axes[i].boxplot(data, labels=labels, showfliers=True)
plt.savefig("udp-latency-nofilter.png")
| 34.85
| 98
| 0.574845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 431
| 0.206121
|
7fe161bbdcf3de8bdcf00c6ffb06a6b7c300dcdc
| 10,236
|
py
|
Python
|
nitorch/core/pyutils.py
|
wyli/nitorch
|
3ecd18944cf45fb9193c4c6ffc32953c4d1c71ac
|
[
"MIT"
] | 1
|
2021-04-09T21:24:47.000Z
|
2021-04-09T21:24:47.000Z
|
nitorch/core/pyutils.py
|
wyli/nitorch
|
3ecd18944cf45fb9193c4c6ffc32953c4d1c71ac
|
[
"MIT"
] | null | null | null |
nitorch/core/pyutils.py
|
wyli/nitorch
|
3ecd18944cf45fb9193c4c6ffc32953c4d1c71ac
|
[
"MIT"
] | null | null | null |
"""Python utilities."""
import os
import functools
from types import GeneratorType as generator
import warnings
from collections import Counter
def file_mod(s, nam='', prefix='', suffix='', odir='', ext=''):
"""Modify a file path.
Parameters
----------
s : str
File path.
nam : str, default=''
Filename, if empty string, unchanged.
prefix : str, default=''
Filename prefix.
suffix : str, default=''
Filename suffix.
odir : str, default=''
Output directory, if empty string, unchanged.
ext : str, default=''
Extension, if empty string, unchanged.
Returns
----------
s : str
Modified file path.
"""
odir0, nam0 = os.path.split(s)
parts = nam0.split('.')
nam0 = parts[0]
ext0 = '.' + '.'.join(parts[1:])
if not odir:
odir = odir0
odir = os.path.abspath(odir) # Get absolute path
if not nam:
nam = nam0
if not ext:
ext = ext0
return os.path.join(odir, prefix + nam + suffix + ext)
def make_sequence(input, n=None, crop=True, *args, **kwargs):
"""Ensure that the input is a sequence and pad/crop if necessary.
Parameters
----------
input : scalar or sequence or generator
Input argument(s).
n : int, optional
Target length.
crop : bool, default=True
Crop input sequence if longer than `n`.
default : optional
Default value to pad with.
If not provided, replicate the last value.
Returns
-------
output : list or tuple or generator
Output arguments.
"""
default = None
has_default = False
if len(args) > 0:
default = args[0]
has_default = True
elif 'default' in kwargs.keys():
default = kwargs['default']
has_default = True
if isinstance(input, generator):
# special case for generators
def make_gen():
last = None
i = None
for i, elem in input:
if crop and (i == n):
return
last = elem
yield elem
if i is None:
if n is None:
return
if not has_default:
raise ValueError('Empty sequence')
last = default
for j in range(i+1, n):
yield last
return make_gen()
else:
# generic case -> induces a copy
if not isinstance(input, (list, tuple, range)):
input = [input]
return_type = type(input) if isinstance(input, (list, tuple)) else list
input = list(input)
if len(input) == 0 and n is not None and not has_default:
raise ValueError('Empty sequence')
if n is not None:
if crop:
input = input[:min(n, len(input))]
if not has_default:
default = input[-1]
input += [default] * max(0, n - len(input))
return return_type(input)
def make_list(*args, **kwargs) -> list:
"""Ensure that the input is a list and pad/crop if necessary.
Parameters
----------
input : scalar or sequence generator
Input argument(s).
n : int, optional
Target length.
default : optional
Default value to pad with.
If not provided, replicate the last value.
Returns
-------
output : list
Output arguments.
"""
return list(make_sequence(*args, **kwargs))
def make_tuple(*args, **kwargs) -> tuple:
"""Ensure that the input is a tuple and pad/crop if necessary.
Parameters
----------
input : scalar or sequence generator
Input argument(s).
n : int, optional
Target length.
default : optional
Default value to pad with.
If not provided, replicate the last value.
Returns
-------
output : tuple
Output arguments.
"""
return tuple(make_sequence(*args, **kwargs))
def make_set(input) -> set:
"""Ensure that the input is a set.
Parameters
----------
input : scalar or sequence
Input argument(s).
Returns
-------
output : set
Output arguments.
"""
if not isinstance(input, (list, tuple, set, range, generator)):
input = [input]
return set(input)
def rep_sequence(input, n, interleaved=False):
"""Replicate a sequence.
Parameters
----------
input : scalar or iterable generator
Input argument(s).
n : int
Number of replicates.
interleaved : bool, default=False
Interleaved replication.
Returns
-------
output : list or tuple or generator
Replicated list.
If the input argument is not a list or tuple, the output
type is `tuple`.
"""
if isinstance(input, generator):
# special case for generators
if interleaved:
def make_gen():
for elem in input:
for _ in range(n):
yield elem
return make_gen()
else:
warnings.warn('It is not efficient to replicate a generator '
'this way. We are holding *all* the data in '
'memory.', RuntimeWarning)
input = list(input)
# generic case for sequence -> induces a copy
if not isinstance(input, (list, tuple, range)):
input = [input]
return_type = type(input) if isinstance(input, (list, tuple)) else list
input = list(input)
if interleaved:
input = [elem for sub in zip(*([input]*n)) for elem in sub]
else:
input = input * n
return return_type(input)
def rep_list(input, n, interleaved=False) -> list:
"""Replicate a list.
Parameters
----------
input : scalar or sequence or generator
Input argument(s).
n : int
Number of replicates.
interleaved : bool, default=False
Interleaved replication.
Returns
-------
output : list
Replicated list.
If the input argument is not a list or tuple, the output
type is `tuple`.
"""
return list(rep_sequence(input, n, interleaved))
# backward compatibility
padlist = functools.wraps(make_sequence)
replist = functools.wraps(rep_sequence)
def getargs(kpd, args=None, kwargs=None, consume=False):
"""Read and remove argument from args/kwargs input.
Parameters
----------
kpd : list of tuple
List of (key, position, default) tuples with:
* key (str): argument name
* position (int): argument position
* default (optional): default value
args : sequence, optional
List of positional arguments
kwargs : dict, optional
List of keyword arguments
consume : bool, default=False
Consume arguments from args/kwargs
Returns:
values (list): List of values
"""
args = [] if args is None else args
kwargs = {} if kwargs is None else kwargs
def raise_error(key):
import inspect
caller = inspect.stack()[1].function
        raise TypeError("{}() got multiple values for "
                        "argument '{}'".format(caller, key))
# Sort argument by reverse position
kpd = [(i,) + e for i, e in enumerate(kpd)]
kpd = sorted(kpd, key=lambda x: x[2], reverse=True)
values = []
for elem in kpd:
i = elem[0]
key = elem[1]
position = elem[2]
default = elem[3] if len(elem) > 3 else None
value = default
if len(args) >= position:
value = args[-1]
if consume:
del args[-1]
if key in kwargs.keys():
raise_error(key)
elif key in kwargs.keys():
value = kwargs[key]
if consume:
del kwargs[key]
values.append((i, value))
values = [v for _, v in sorted(values)]
return values
def prod(sequence, inplace=False):
"""Perform the product of a sequence of elements.
Parameters
----------
sequence : any object that implements `__iter__`
Sequence of elements for which the `__mul__` operator is defined.
inplace : bool, default=False
Perform the product inplace (using `__imul__` instead of `__mul__`).
Returns
-------
product :
Product of the elements in the sequence.
"""
accumulate = None
for elem in sequence:
if accumulate is None:
accumulate = elem
elif inplace:
accumulate *= elem
else:
accumulate = accumulate * elem
return accumulate
def cumprod(sequence):
"""Perform the cumulative product of a sequence of elements.
Parameters
----------
sequence : any object that implements `__iter__`
Sequence of elements for which the `__mul__` operator is defined.
Returns
-------
product :
Product of the elements in the sequence.
"""
accumulate = None
seq = []
for elem in sequence:
if accumulate is None:
accumulate = elem
else:
accumulate = accumulate * elem
seq.append(accumulate)
return seq
def pop(obj, key=0, *args, **kwargs):
"""Pop an element from a mutable collection.
Parameters
----------
obj : dict or list
Collection
key : str or int
Key or index
default : optional
Default value. Raise error if not provided.
Returns
-------
elem
Popped element
"""
if isinstance(obj, dict):
return obj.pop(key, *args, **kwargs)
else:
try:
val = obj[key]
del obj[key]
return val
except:
if len(args) > 0:
return args[0]
else:
return kwargs.get('default')
def majority(x):
"""Return majority element in a sequence.
Parameters
----------
x : sequence
Input sequence of hashable elements
Returns
-------
elem
Majority element
"""
count = Counter(x)
return count.most_common(1)[0][0]
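A few concrete calls, matching the padding, cropping and reduction semantics documented above (sketch; the expected outputs follow directly from the code):

# Sketch: expected behaviour of the sequence helpers.
print(make_list([1, 2], 4))          # [1, 2, 2, 2]  (padded by replicating the last value)
print(make_list([1, 2, 3, 4], 2))    # [1, 2]        (cropped to n)
print(make_tuple(5, 3, default=0))   # (5, 0, 0)     (scalar promoted, padded with the default)
print(prod([2, 3, 4]))               # 24
print(cumprod([2, 3, 4]))            # [2, 6, 24]
print(majority([1, 2, 2, 3]))        # 2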
| 24.965854
| 79
| 0.554807
| 0
| 0
| 3,356
| 0.327862
| 0
| 0
| 0
| 0
| 5,086
| 0.496874
|
7fe32cec9f3243b49d74a552788df1a4f5765a18
| 2,895
|
py
|
Python
|
danceschool/prerequisites/handlers.py
|
benjwrdill/django-danceschool
|
9ecb2754502e62d0f49aa23d08ca6de6cae3c99a
|
[
"BSD-3-Clause"
] | 1
|
2019-02-04T02:11:32.000Z
|
2019-02-04T02:11:32.000Z
|
danceschool/prerequisites/handlers.py
|
benjwrdill/django-danceschool
|
9ecb2754502e62d0f49aa23d08ca6de6cae3c99a
|
[
"BSD-3-Clause"
] | 2
|
2019-03-26T22:37:49.000Z
|
2019-12-02T15:39:35.000Z
|
danceschool/prerequisites/handlers.py
|
benjwrdill/django-danceschool
|
9ecb2754502e62d0f49aa23d08ca6de6cae3c99a
|
[
"BSD-3-Clause"
] | 1
|
2019-03-19T22:49:01.000Z
|
2019-03-19T22:49:01.000Z
|
from django.dispatch import receiver
from django.contrib import messages
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from danceschool.core.signals import check_student_info
from danceschool.core.models import Customer
from danceschool.core.constants import getConstant
from .models import Requirement
import logging
# Define logger for this file
logger = logging.getLogger(__name__)
@receiver(check_student_info)
def checkRequirements(sender,**kwargs):
'''
Check that the customer meets all prerequisites for the items in the registration.
'''
if not getConstant('requirements__enableRequirements'):
return
logger.debug('Signal to check RegistrationContactForm handled by prerequisites app.')
formData = kwargs.get('formData',{})
first = formData.get('firstName')
last = formData.get('lastName')
email = formData.get('email')
request = kwargs.get('request',{})
registration = kwargs.get('registration',None)
customer = Customer.objects.filter(
first_name=first,
last_name=last,
email=email).first()
requirement_warnings = []
requirement_errors = []
for ter in registration.temporaryeventregistration_set.all():
if hasattr(ter.event,'getRequirements'):
for req in ter.event.getRequirements():
if not req.customerMeetsRequirement(
customer=customer,
danceRole=ter.role
):
if req.enforcementMethod == Requirement.EnforcementChoice.error:
requirement_errors.append((ter.event.name, req.name))
if req.enforcementMethod == Requirement.EnforcementChoice.warning:
requirement_warnings.append((ter.event.name,req.name))
if requirement_errors:
raise ValidationError(format_html(
'<p>{}</p> <ul>{}</ul> <p>{}</p>',
ugettext('Unfortunately, you do not meet the following requirements/prerequisites for the items you have chosen:\n'),
mark_safe(''.join(['<li><em>%s:</em> %s</li>\n' % x for x in requirement_errors])),
getConstant('requirements__errorMessage') or '',
))
if requirement_warnings:
messages.warning(request,format_html(
'<p>{}</p> <ul>{}</ul> <p>{}</p>',
mark_safe(ugettext('<strong>Please Note:</strong> It appears that you do not meet the following requirements/prerequisites for the items you have chosen:\n')),
mark_safe(''.join(['<li><em>%s:</em> %s</li>\n' % x for x in requirement_warnings])),
getConstant('requirements__warningMessage') or '',
))
| 38.6
| 172
| 0.646287
| 0
| 0
| 0
| 0
| 2,342
| 0.808981
| 0
| 0
| 744
| 0.256995
|
7fe3c41b5d4495299339fa6ac09fd4c855492415
| 1,577
|
py
|
Python
|
b9_tools.py
|
eoinnoble/b9-indifference
|
a8b7f3c2268af162d5269a8ce7180be717bfb3fb
|
[
"Unlicense"
] | 9
|
2017-11-13T20:31:04.000Z
|
2021-11-08T12:30:48.000Z
|
b9_tools.py
|
eoinnoble/b9-indifference
|
a8b7f3c2268af162d5269a8ce7180be717bfb3fb
|
[
"Unlicense"
] | 1
|
2021-11-30T20:24:26.000Z
|
2021-11-30T20:24:26.000Z
|
b9_tools.py
|
eoinnoble/b9-indifference
|
a8b7f3c2268af162d5269a8ce7180be717bfb3fb
|
[
"Unlicense"
] | 1
|
2017-12-17T09:04:25.000Z
|
2017-12-17T09:04:25.000Z
|
import re
from collections.abc import MutableMapping
from typing import Dict, List
import markovify
import nltk
class RangeDict(MutableMapping):
"""Enables a dictionary whose keys are ranges."""
def __init__(self, iterable: Dict):
if not isinstance(iterable, dict):
raise TypeError("You must pass a dictionary to RangeDict")
self.store = dict()
for (k, v) in iterable.items():
if not isinstance(k, range):
raise TypeError("Your dictionary keys must be ranges")
direction = {num: v for num in k}
self.store.update(direction)
def __getitem__(self, key):
return self.store[key]
def __setitem__(self, key, value):
self.store[key] = value
def __delitem__(self, key):
del self.store[key]
def __iter__(self):
return iter(self.store)
def __len__(self):
return len(self.store)
class POSifiedText(markovify.Text):
"""
A markovify.Text model that obeys sentence structure better than the naive model.
Uses NLTK's part-of-speech tagger (nltk.pos_tag), which is VERY slow but seems to do a better
job of parsing my text corpora than spaCy, which would be faster.
"""
def word_split(self, sentence: str) -> List:
words = re.split(self.word_split_pattern, sentence)
words = ["::".join(tag) for tag in nltk.pos_tag(words)]
return words
def word_join(self, words: List) -> str:
sentence = " ".join(word.split("::")[0] for word in words)
return sentence
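A quick look at RangeDict in use (sketch; POSifiedText additionally needs the NLTK tagger data to be downloaded):

# Sketch: range keys are expanded to individual integer keys on construction.
grades = RangeDict({range(0, 50): "fail", range(50, 101): "pass"})
print(grades[42])     # 'fail'
print(grades[87])     # 'pass'
print(len(grades))    # 101 individual integer keys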
| 27.666667
| 97
| 0.639822
| 1,458
| 0.92454
| 0
| 0
| 0
| 0
| 0
| 0
| 404
| 0.256183
|
7fe457572d531fc0f3ed15941394935cc6786462
| 3,907
|
py
|
Python
|
live_visualisation.py
|
duyanh-y4n/DJITelloPy
|
3bfda900a7dc523be4effe21e0e3b83126576750
|
[
"MIT"
] | null | null | null |
live_visualisation.py
|
duyanh-y4n/DJITelloPy
|
3bfda900a7dc523be4effe21e0e3b83126576750
|
[
"MIT"
] | null | null | null |
live_visualisation.py
|
duyanh-y4n/DJITelloPy
|
3bfda900a7dc523be4effe21e0e3b83126576750
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File : live_visualisation.py
# Author : Duy Anh Pham <duyanh.y4n.pham@gmail.com>
# Date : 10.04.2020
# Last Modified By: Duy Anh Pham <duyanh.y4n.pham@gmail.com>
from djitellopy.realtime_plot.RealtimePlotter import *
import redis
import numpy as np
import traceback
import matplotlib
# define data to get from db
# sensorMeshList = ['baro', 'h', 'tof', 'runtime']
# row = len(sensorMeshList)
data_len = 300
plot_update_interval = 0.005
datasource = redis.StrictRedis(host='localhost', port=6379, db=0)
plt.figure()
baro_axes = plt.subplot(3, 1, 1)
plt.title('tello_edu sensors')
baro_data_list = ['baro', 'runtime']
baro_ylim = [-47, -57]
baro_option = DataplotOption.TIMESTAMP_CUSTOM
baro_dataplot = DataPlot(2, data_len, option=baro_option)
baro_plot = RealtimePlotter(baro_dataplot)
baro_plot.config_plots(baro_axes, y_labels=baro_data_list, ylim=baro_ylim)
baro_plot.axes.set_xlabel('time in ms')
baro_plot.axes.set_ylabel('barometer in cmHg')
tof_axes = plt.subplot(3, 1, 2)
tof_data_list = ['tof', 'runtime']
tof_ylim = [-10, 500]
tof_option = DataplotOption.TIMESTAMP_CUSTOM
tof_dataplot = DataPlot(2, data_len, option=tof_option)
tof_plot = RealtimePlotter(tof_dataplot)
tof_plot.config_plots(tof_axes, y_labels=tof_data_list, ylim=tof_ylim)
tof_plot.axes.set_xlabel('time in ms')
tof_plot.axes.set_ylabel('vertical distance in cm')
h_axes = plt.subplot(3, 1, 3)
h_ylim = [-50, 300]
h_data_list = ['h', 'runtime']
h_option = DataplotOption.TIMESTAMP_CUSTOM
h_dataplot = DataPlot(2, data_len, option=h_option)
h_plot = RealtimePlotter(h_dataplot)
h_plot.config_plots(h_axes, y_labels=h_data_list, ylim=h_ylim)
h_plot.axes.set_xlabel('time in ms')
tof_plot.axes.set_ylabel('height in cm')
if __name__ == "__main__":
while True:
# get new data from database and plot
# baro
baro_plot.dataplot.clear_data_regs()
new_data = []
for sensor in baro_data_list:
new_sensor_data = datasource.lrange(sensor, 0, data_len)
# reverse, bc first element is the newest (not the oldest like deque)
new_sensor_data.reverse()
new_data.append(new_sensor_data)
try:
            baro_y = np.array(new_data[:-1], dtype=float)
baro_x = np.array(new_data[-1], dtype=np.int64)
baro_plot.dataplot.append(
y=baro_y, x=baro_x, single=False)
baro_plot.plot_data()
except Exception as e:
print(e)
# tof
tof_plot.dataplot.clear_data_regs()
new_data = []
for sensor in tof_data_list:
new_sensor_data = datasource.lrange(sensor, 0, data_len)
# reverse, bc first element is the newest (not the oldest like deque)
new_sensor_data.reverse()
new_data.append(new_sensor_data)
try:
            tof_y = np.array(new_data[:-1], dtype=float)
tof_x = np.array(new_data[-1], dtype=np.int64)
tof_plot.dataplot.append(
y=tof_y, x=tof_x, single=False)
tof_plot.plot_data()
except Exception as e:
print(e)
# height
h_plot.dataplot.clear_data_regs()
new_data = []
for sensor in h_data_list:
new_sensor_data = datasource.lrange(sensor, 0, data_len)
# reverse, bc first element is the newest (not the oldest like deque)
new_sensor_data.reverse()
new_data.append(new_sensor_data)
try:
            h_y = np.array(new_data[:-1], dtype=float)
h_x = np.array(new_data[-1], dtype=np.int64)
h_plot.dataplot.append(
y=h_y, x=h_x, single=False)
h_plot.plot_data()
except Exception as e:
print(e)
plt.pause(plot_update_interval)
input("Exit(press any key)?")
| 35.198198
| 81
| 0.652931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 801
| 0.205017
|
7fe5f97f042b7d291cc4c77318e4cda78c4dbfcc
| 1,187
|
py
|
Python
|
app/cli.py
|
dev-johnlopez/Assignably
|
056960556dd75dfce064970887f37a44a8c66aec
|
[
"MIT"
] | 1
|
2021-06-09T02:19:18.000Z
|
2021-06-09T02:19:18.000Z
|
app/cli.py
|
dev-johnlopez/Assignably
|
056960556dd75dfce064970887f37a44a8c66aec
|
[
"MIT"
] | 1
|
2021-06-01T23:45:06.000Z
|
2021-06-01T23:45:06.000Z
|
app/cli.py
|
dev-johnlopez/assignably
|
056960556dd75dfce064970887f37a44a8c66aec
|
[
"MIT"
] | null | null | null |
import os
import click
from app import app
from flask.cli import with_appcontext
from app.auth.models import Role
def register(app):
@app.cli.group()
def translate():
"""Translation and localization commands."""
pass
@translate.command()
@click.argument('lang')
def init(lang):
"""Initialize a new language."""
pass
@translate.command()
def update():
"""Update all languages."""
pass
@translate.command()
def compile():
"""Compile all languages."""
pass
@click.command("add_roles")
@with_appcontext
def add_roles():
from app import db, security
from app.auth.models import Role
db.init_app(app)
role = Role(name="Company Admin", description="Administrator of a company. \
Users with this role can modify \
company data.")
db.session.add(role)
db.session.commit()
role = Role(name="Underwriter", description="Users with the ability to \
evaluate deals.")
db.session.add(role)
db.session.commit()
| 25.255319
| 83
| 0.566133
| 0
| 0
| 0
| 0
| 1,026
| 0.864364
| 0
| 0
| 448
| 0.377422
|
7fe64ab21ba37642fb9fd48c4a4ae360552314de
| 2,918
|
py
|
Python
|
autobasedoc/tableofcontents.py
|
NuCOS/autobasedoc
|
54135199b966847d822e772f435ddeb0a942fb42
|
[
"BSD-3-Clause"
] | 3
|
2017-06-20T06:33:05.000Z
|
2021-02-26T19:54:01.000Z
|
autobasedoc/tableofcontents.py
|
skidzo/autobasedoc
|
54135199b966847d822e772f435ddeb0a942fb42
|
[
"BSD-3-Clause"
] | null | null | null |
autobasedoc/tableofcontents.py
|
skidzo/autobasedoc
|
54135199b966847d822e772f435ddeb0a942fb42
|
[
"BSD-3-Clause"
] | 4
|
2017-09-27T09:18:54.000Z
|
2019-07-02T23:58:06.000Z
|
"""
tableofcontents
===============
.. module:: tableofcontents
:platform: Unix, Windows
:synopsis: a tableofcontents that breaks not to the next frame but to the next page
.. moduleauthor:: Johannes Eckstein
"""
from reportlab import rl_config
from reportlab.platypus import Table, Paragraph, PageBreak, Spacer
from reportlab.platypus.tableofcontents import TableOfContents, drawPageNumbers
class AutoTableOfContents(TableOfContents):
def __init__(self):
super(AutoTableOfContents, self).__init__()
def wrap(self, availWidth, availHeight):
"All table properties should be known by now."
# makes an internal table which does all the work.
# we draw the LAST RUN's entries! If there are
# none, we make some dummy data to keep the table
# from complaining
if len(self._lastEntries) == 0:
_tempEntries = [(0,'Placeholder for table of contents',0,None)]
else:
_tempEntries = self._lastEntries
def drawTOCEntryEnd(canvas, kind, label):
'''Callback to draw dots and page numbers after each entry.'''
label = label.split(',')
page, level, key = int(label[0]), int(label[1]), eval(label[2],{})
style = self.getLevelStyle(level)
if self.dotsMinLevel >= 0 and level >= self.dotsMinLevel:
dot = ' . '
else:
dot = ''
if self.formatter: page = self.formatter(page)
drawPageNumbers(canvas, style, [(page, key)], availWidth, availHeight, dot)
self.canv.drawTOCEntryEnd = drawTOCEntryEnd
tableData = []
for (level, text, pageNum, key) in _tempEntries:
style = self.getLevelStyle(level)
if key:
text = '<a href="#%s">%s</a>' % (key, text)
keyVal = repr(key).replace(',','\\x2c').replace('"','\\x2c')
else:
keyVal = None
para = Paragraph('%s<onDraw name="drawTOCEntryEnd" label="%d,%d,%s"/>' % (text, pageNum, level, keyVal), style)
if style.spaceBefore:
tableData.append([Spacer(1, style.spaceBefore),])
tableData.append([para,])
self._table = TocTable(tableData, colWidths=(availWidth,), style=self.tableStyle)
self.width, self.height = self._table.wrapOn(self.canv, availWidth, availHeight)
return (self.width, self.height)
class TocTable(Table):
def __init__(self, data, **kwargs):
super(TocTable, self).__init__(data, **kwargs)
def split(self, availWidth, availHeight):
self._calc(availWidth, availHeight)
if self.splitByRow:
if not rl_config.allowTableBoundsErrors and self._width>availWidth: return []
return [self._splitRows(availHeight)[0], PageBreak(), self._splitRows(availHeight)[-1]]
else:
raise NotImplementedError
| 37.896104
| 123
| 0.614119
| 2,520
| 0.863605
| 0
| 0
| 0
| 0
| 0
| 0
| 633
| 0.216929
|
7fe7a7ef4cedcf3d41ec5da04172536952412a93
| 570
|
py
|
Python
|
conans/test/model/username_test.py
|
jbaruch/conan
|
263722b5284828c49774ffe18d314b24ee11e178
|
[
"MIT"
] | null | null | null |
conans/test/model/username_test.py
|
jbaruch/conan
|
263722b5284828c49774ffe18d314b24ee11e178
|
[
"MIT"
] | null | null | null |
conans/test/model/username_test.py
|
jbaruch/conan
|
263722b5284828c49774ffe18d314b24ee11e178
|
[
"MIT"
] | 1
|
2021-03-03T17:15:46.000Z
|
2021-03-03T17:15:46.000Z
|
import unittest
from conans.errors import ConanException
from conans.model.username import Username
class UsernameTest(unittest.TestCase):
def username_test(self):
Username("userwith-hypens")
self.assertRaises(ConanException, Username, "")
self.assertRaises(ConanException, Username, "A"*31)
Username("A"*30)
self.assertRaises(ConanException, Username, "1A")
self.assertRaises(ConanException, Username, "_A")
Username("A1")
Username("a_")
self.assertRaises(ConanException, Username, "$$")
| 28.5
| 59
| 0.684211
| 467
| 0.819298
| 0
| 0
| 0
| 0
| 0
| 0
| 45
| 0.078947
|
7fe7fde051fa8a3d76d968e9a6574579dd014181
| 152
|
py
|
Python
|
exercises/01_Primeiros Passos/exe_08.py
|
MariaTrindade/CursoPython
|
2c60dd670747db08011d9dd33e3bbfd5795b06e8
|
[
"Apache-2.0"
] | 1
|
2021-05-11T18:30:17.000Z
|
2021-05-11T18:30:17.000Z
|
exercises/01_Primeiros Passos/exe_08.py
|
MariaTrindade/CursoPython
|
2c60dd670747db08011d9dd33e3bbfd5795b06e8
|
[
"Apache-2.0"
] | null | null | null |
exercises/01_Primeiros Passos/exe_08.py
|
MariaTrindade/CursoPython
|
2c60dd670747db08011d9dd33e3bbfd5795b06e8
|
[
"Apache-2.0"
] | null | null | null |
"""
Faça um Programa que peça a temperatura em graus Fahrenheit, transforme e mostre
a temperatura em graus Celsius.
C = (5 * (F-32) / 9)
"""
| 9.5
| 80
| 0.651316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 145
| 0.941558
|
7febb7dfbccc110592c6373855dc121877f1f2c7
| 1,641
|
py
|
Python
|
throwaway/viz_nav_policy.py
|
sfpd/rlreloaded
|
650c64ec22ad45996c8c577d85b1a4f20aa1c692
|
[
"MIT"
] | null | null | null |
throwaway/viz_nav_policy.py
|
sfpd/rlreloaded
|
650c64ec22ad45996c8c577d85b1a4f20aa1c692
|
[
"MIT"
] | null | null | null |
throwaway/viz_nav_policy.py
|
sfpd/rlreloaded
|
650c64ec22ad45996c8c577d85b1a4f20aa1c692
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from control4.algs.save_load_utils import load_agent_and_mdp
from control4.core.rollout import rollout
from tabulate import tabulate
import numpy as np
import pygame
from control3.pygameviewer import PygameViewer, pygame
from collections import namedtuple
from copy import deepcopy
path = []
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("hdf")
parser.add_argument("--load_idx",type=int,default=-1)
parser.add_argument("--max_steps",type=int)
parser.add_argument("--one_traj",action="store_true")
args = parser.parse_args()
agent, mdp, _hdf = load_agent_and_mdp(args.hdf,args.load_idx)
from matplotlib.patches import Ellipse
import matplotlib.pyplot as plt
fig1,(ax0,ax1)=plt.subplots(2,1)
fig2,(ax3)=plt.subplots(1,1)
h = mdp.halfsize
while True:
path = []
init_arrs, traj_arrs = rollout(mdp,agent,999999,save_arrs=["m","o","a"])
m = np.concatenate([init_arrs["m"]]+traj_arrs["m"],axis=0)
o = np.concatenate([init_arrs["o"]]+traj_arrs["o"],axis=0)
a_na = np.concatenate(traj_arrs["a"])
print "o:"
print o
print "m:"
print m
ax0.cla()
ax0.plot(m)
ax1.cla()
ax1.plot(o)
ax3.cla()
x,y=np.array(init_arrs['x'].path).T
ax3.plot(x,y,'bx-')
ax3.axis([-h,h,-h,h])
for (x,a) in zip(init_arrs['x'].path,a_na):
ax3.add_artist(Ellipse(xy=x+a[0:2], width=2*a[2], height=2*a[3],alpha=0.2))
plt.draw()
plt.pause(0.01)
plt.ginput()
| 28.293103
| 87
| 0.624619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 128
| 0.078001
|
7fec3044100d2f06c27146cd462ed08cea1a54d2
| 201
|
py
|
Python
|
utils/compilers/ConnectionCompiler/token.py
|
pranaOS-bot/pranaOS-1
|
ddb8086d103d004f84744641624e74fc7ec0984e
|
[
"BSD-2-Clause"
] | 5
|
2021-10-06T13:47:26.000Z
|
2022-03-24T10:42:06.000Z
|
utils/compilers/ConnectionCompiler/token.py
|
evilbat831/brutalOS
|
85920a6a95d564320a245a2e48ffc7cdf64ede84
|
[
"BSD-2-Clause"
] | null | null | null |
utils/compilers/ConnectionCompiler/token.py
|
evilbat831/brutalOS
|
85920a6a95d564320a245a2e48ffc7cdf64ede84
|
[
"BSD-2-Clause"
] | 1
|
2021-10-18T12:48:16.000Z
|
2021-10-18T12:48:16.000Z
|
class Token:
def __init__(self, type=None, value=None):
self.type = type
self.value = value
def __str__(self):
return "Token({0}, {1})".format(self.type, self.value)
| 20.1
| 62
| 0.587065
| 199
| 0.99005
| 0
| 0
| 0
| 0
| 0
| 0
| 17
| 0.084577
|
7fec6177c282fa0ff0c92470b63745ee8ad16c40
| 1,772
|
py
|
Python
|
echopype/echodata/convention/attrs.py
|
b-reyes/echopype
|
bc8afa190fa2ca4fab5944bac83cd4b20f7abcf6
|
[
"Apache-2.0"
] | null | null | null |
echopype/echodata/convention/attrs.py
|
b-reyes/echopype
|
bc8afa190fa2ca4fab5944bac83cd4b20f7abcf6
|
[
"Apache-2.0"
] | 2
|
2019-02-20T16:47:51.000Z
|
2021-04-20T20:20:32.000Z
|
echopype/echodata/convention/attrs.py
|
b-reyes/echopype
|
bc8afa190fa2ca4fab5944bac83cd4b20f7abcf6
|
[
"Apache-2.0"
] | 2
|
2019-02-20T16:41:56.000Z
|
2021-08-05T04:33:07.000Z
|
"""
Define convention-based global, coordinate and variable attributes
in one place for consistent reuse
"""
DEFAULT_BEAM_COORD_ATTRS = {
"frequency": {
"long_name": "Transducer frequency",
"standard_name": "sound_frequency",
"units": "Hz",
"valid_min": 0.0,
},
"ping_time": {
"long_name": "Timestamp of each ping",
"standard_name": "time",
"axis": "T",
},
"range_bin": {"long_name": "Along-range bin (sample) number, base 0"},
}
DEFAULT_PLATFORM_COORD_ATTRS = {
"location_time": {
"axis": "T",
"long_name": "Timestamps for NMEA datagrams",
"standard_name": "time",
}
}
DEFAULT_PLATFORM_VAR_ATTRS = {
"latitude": {
"long_name": "Platform latitude",
"standard_name": "latitude",
"units": "degrees_north",
"valid_range": (-90.0, 90.0),
},
"longitude": {
"long_name": "Platform longitude",
"standard_name": "longitude",
"units": "degrees_east",
"valid_range": (-180.0, 180.0),
},
"pitch": {
"long_name": "Platform pitch",
"standard_name": "platform_pitch_angle",
"units": "arc_degree",
"valid_range": (-90.0, 90.0),
},
"roll": {
"long_name": "Platform roll",
"standard_name": "platform_roll_angle",
"units": "arc_degree",
"valid_range": (-90.0, 90.0),
},
"heave": {
"long_name": "Platform heave",
"standard_name": "platform_heave_angle",
"units": "arc_degree",
"valid_range": (-90.0, 90.0),
},
"water_level": {
"long_name": "z-axis distance from the platform coordinate system "
"origin to the sonar transducer",
"units": "m",
},
}
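These dictionaries are plain attribute maps; in an xarray-based pipeline they would typically be copied onto the matching variables. A sketch, assuming a hypothetical xarray.Dataset ds that already holds those variables:

# Sketch: attach the convention attributes to existing xarray variables.
# ds["latitude"].attrs.update(DEFAULT_PLATFORM_VAR_ATTRS["latitude"])
# ds["ping_time"].attrs.update(DEFAULT_BEAM_COORD_ATTRS["ping_time"])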
| 26.848485
| 75
| 0.550226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,060
| 0.598194
|
7fecb02664281603ef197605d74e5b00e842bde4
| 2,072
|
py
|
Python
|
tf_tests.py
|
MadsJensen/agency_connectivity
|
b45adbc133573de1ebdcff0edb17e43f1691c577
|
[
"BSD-3-Clause"
] | null | null | null |
tf_tests.py
|
MadsJensen/agency_connectivity
|
b45adbc133573de1ebdcff0edb17e43f1691c577
|
[
"BSD-3-Clause"
] | null | null | null |
tf_tests.py
|
MadsJensen/agency_connectivity
|
b45adbc133573de1ebdcff0edb17e43f1691c577
|
[
"BSD-3-Clause"
] | null | null | null |
import mne
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
import seaborn as sns
from tf_analysis import single_trial_tf
plt.ion()
data_folder = "/home/mje/Projects/agency_connectivity/data/"
epochs = mne.read_epochs(data_folder + "P2_ds_bp_ica-epo.fif")
# single trial morlet tests
frequencies = np.arange(6., 30., 2.)
n_cycles = 5.
times = epochs.times
tfr_vol = single_trial_tf(epochs["voluntary"])
tfr_invol = single_trial_tf(epochs["involuntary"])
pow_vol_Cz = np.asarray([np.mean(np.abs(tfr[37, 4:-2, :])**2, axis=0)
for tfr in tfr_vol])
pow_invol_Cz = np.asarray([np.mean(np.abs(tfr[37, 4:-2, :])**2, axis=0)
for tfr in tfr_invol])
pow_invol_Cz_bs = np.asarray([(10*np.log10(trial / np.mean(trial[:103]))) for
trial in pow_invol_Cz])
pow_vol_Cz_bs = np.asarray([(10*np.log10(trial / np.mean(trial[:103]))) for
trial in pow_vol_Cz])
pow_invol_Cz_mean = pow_invol_Cz_bs[:, 921:1024].mean(axis=1)
pow_vol_Cz_mean = pow_vol_Cz_bs[:, 921:1024].mean(axis=1)
stats.ttest_ind(pow_vol_Cz_mean, pow_invol_Cz_mean)
corr, pval = stats.spearmanr(pow_vol_Cz_mean[-60:], pow_invol_Cz_mean)
print("correlation: %s, pval: %s" % (corr, pval))
sns.regplot(pow_vol_Cz_mean[-60:], pow_invol_Cz_mean)
from sklearn.cluster.spectral import spectral_embedding # noqa
from sklearn.metrics.pairwise import rbf_kernel # noqa
def order_func(times, data):
this_data = data[:, (times < -0.5) & (times < 0)]
this_data /= np.sqrt(np.sum(this_data ** 2, axis=1))[:, np.newaxis]
return np.argsort(spectral_embedding(rbf_kernel(this_data, gamma=1.),
n_components=1, random_state=0).ravel())
good_pick = 37 # channel with a clear evoked response
bad_pick = 47 # channel with no evoked response
plt.close('all')
mne.viz.plot_epochs_image(epochs["involuntary"], [good_pick, bad_pick],
sigma=0.5, cmap="viridis",
colorbar=True, order=order_func, show=True)
| 32.888889
| 77
| 0.670849
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 256
| 0.123552
|
7fee0ec8e03805400ba3a871766b2ab0228dc4a4
| 17,645
|
py
|
Python
|
data/smth.py
|
roeiherz/AG2Video
|
a4eb439d7147c91237ddd50ec305add8e1537360
|
[
"MIT"
] | 22
|
2020-07-01T07:11:15.000Z
|
2022-02-17T13:26:16.000Z
|
data/smth.py
|
roeiherz/AG2Video
|
a4eb439d7147c91237ddd50ec305add8e1537360
|
[
"MIT"
] | 5
|
2021-06-16T02:35:14.000Z
|
2022-03-12T01:00:27.000Z
|
data/smth.py
|
roeiherz/AG2Video
|
a4eb439d7147c91237ddd50ec305add8e1537360
|
[
"MIT"
] | 2
|
2021-08-04T05:22:58.000Z
|
2021-12-11T02:15:57.000Z
|
import json
import os
import pickle as pkl
import random
import math
import numpy as np
import pandas as pd
import torch
from torch.utils.data import Dataset
import torchvision.transforms as T
import torch.nn.functional as F
from PIL import Image
from data.SomethingElse.config import action_to_number_of_instances, action_to_num_objects, valid_actions
from data.args import ALIGN_CORNERS
from models import group_transforms
from models.video_transforms import GroupMultiScaleCrop
class SmthElseDataset(Dataset):
def __init__(self, data_root, is_test=False, is_val=False, debug=False, nframes=301,
image_size=(64, 64), fps=12, frames_per_action=16, initial_frames_per_sample=16,
max_samples=None, include_relationships=True, resize_or_crop='resize',
fine_size=64, load_size=64, aspect_ratio=1, no_flip=True, labels=None):
"""
A PyTorch Dataset for loading Coco and Coco-Stuff annotations and converting
them to scene graphs on the fly.
Inputs:
- image_dir: Path to a directory where images are held
- instances_json: Path to a JSON file giving COCO annotations
- stuff_json: (optional) Path to a JSON file giving COCO-Stuff annotations
- stuff_only: (optional, default True) If True then only iterate over
images which appear in stuff_json; if False then iterate over all images
in instances_json.
- image_size: Size (H, W) at which to load images. Default (64, 64).
- mask_size: Size M for object segmentation masks; default 16.
- normalize_image: If True then normalize images by subtracting ImageNet
mean pixel and dividing by ImageNet std pixel.
- max_samples: If None use all images. Other wise only use images in the
range [0, max_samples). Default None.
- include_relationships: If True then include spatial relationships; if
False then only include the trivial __in_image__ relationship.
- min_object_size: Ignore objects whose bounding box takes up less than
this fraction of the image.
- min_objects_per_image: Ignore images which have fewer than this many
object annotations.
- max_objects_per_image: Ignore images which have more than this many
object annotations.
- include_other: If True, include COCO-Stuff annotations which have category
"other". Default is False, because I found that these were really noisy
and pretty much impossible for the system to model.
- instance_whitelist: None means use all instance categories. Otherwise a
list giving a whitelist of instance category names to use.
- stuff_whitelist: None means use all stuff categories. Otherwise a list
giving a whitelist of stuff category names to use.
"""
super(SmthElseDataset, self).__init__()
self.data_root = data_root
self.videos_path = os.path.join(self.data_root, "videos")
self.scenes_path = os.path.join(self.data_root, "scenes")
self.lists_path = os.path.join(self.data_root, "lists")
self.fps = fps
self.nframes = nframes
self.initial_frames_per_sample = initial_frames_per_sample # before subsampling
self.frames_per_action = frames_per_action
self.resize_or_crop = resize_or_crop
self.fine_size = fine_size
self.load_size = load_size
self.aspect_ratio = aspect_ratio
self.no_flip = no_flip
self.is_val = is_val
self.max_samples = max_samples
self.include_relationships = include_relationships
self.is_test = is_test
# Get videos
self.labels = pd.read_csv(labels)
if "err" in self.labels.columns:
self.labels = self.labels[pd.isnull(self.labels['err'])]
# Get vocab mapping
self.vocab = {}
self.vocab["action_idx_to_name"] = action_to_number_of_instances
# actions
self.vocab["action_name_to_idx"] = {v: i for i, v in enumerate(self.vocab["action_idx_to_name"])}
self.vocab['pred_name_to_idx'] = {'__in_image__': 0, 'right': 1, "above": 2, "below": 3, "left": 4,
'surrounding': 5, 'inside': 6, 'cover': 7, '__padding__': 8}
self.vocab['pred_idx_to_name'] = {v: k for k, v in self.vocab['pred_name_to_idx'].items()}
# attributes
self.vocab["attributes"] = {}
self.vocab["reverse_attributes"] = {}
# with open(os.path.join(self.data_root, 'offical_release_boxes/objects.pkl'), 'rb') as f:
# self.vocab["reverse_attributes"]['object'] = pkl.load(f)
with open(os.path.join(self.data_root, 'offical_release_boxes/objs_mapping.json'), 'rb') as f:
self.objs_mapping = json.load(f)
self.vocab["reverse_attributes"]['object'] = ['__image__'] + sorted(list(set(self.objs_mapping.values())))
self.vocab["attributes"]['object'] = {v: k for k, v in enumerate(self.vocab["reverse_attributes"]['object'])}
self.vocab['object_idx_to_name'] = self.vocab["reverse_attributes"]['object']
self.vocab['object_name_to_idx'] = self.vocab["attributes"]['object']
# Sort actions
self.labels = self.labels[self.labels['template'].isin(self.vocab["action_idx_to_name"])]
self.labels = self.labels[[is_action_valid(row) for i, row in self.labels.iterrows()]]
# Sort objects
self.labels = self.labels.apply(lambda row: object_mapping_func(row, self.objs_mapping), axis=1)
self.labels = self.labels[[is_object_valid(row) for i, row in self.labels.iterrows()]]
self.vid_names = list(self.labels['id'])
self.img_mean = [0.485, 0.456, 0.406]
self.img_std = [0.229, 0.224, 0.225]
# Transformations
self.set_transforms(image_size)
def set_transforms(self, image_size=(224, 224)):
self.image_size = image_size
self.normalize = group_transforms.GroupNormalize(self.img_mean, self.img_std)
self.transforms = [group_transforms.GroupResize(image_size)]
self.transforms += [
group_transforms.ToTensor(),
group_transforms.GroupNormalize(self.img_mean, self.img_std),
]
self.transforms = T.Compose(self.transforms)
def __len__(self):
return len(self.vid_names)
def extract_triplets(self, boxes):
F = boxes.size(0)
O = boxes.size(1) - 1
real_boxes = [i for i in range(O)]
total_triplets = []
for f in range(F):
triplets = []
for cur in real_boxes:
choices = [obj for obj in real_boxes if obj != cur]
if len(choices) == 0 or not self.include_relationships:
break
other = random.choice(choices)
if random.random() > 0.5:
s, o = cur, other
else:
s, o = other, cur
# Check for inside / surrounding
sx0, sy0, sx1, sy1 = boxes[f][s]
ox0, oy0, ox1, oy1 = boxes[f][o]
sw = sx1 - sx0
ow = ox1 - ox0
sh = sy1 - sy0
oh = oy1 - oy0
mean_x = (sx0 + 0.5 * sw) - (ox0 + 0.5 * ow)
mean_y = (sy0 + 0.5 * sh) - (oy0 + 0.5 * oh)
theta = math.atan2(mean_y, mean_x)
# d = obj_centers[s] - obj_centers[o]
# theta = math.atan2(d[1], d[0])
if sx0 < ox0 and sx1 > ox1 and sy0 < oy0 and sy1 > oy1:
p = 'surrounding'
elif sx0 > ox0 and sx1 < ox1 and sy0 > oy0 and sy1 < oy1:
p = 'inside'
elif theta >= 3 * math.pi / 4 or theta <= -3 * math.pi / 4:
p = 'left'
elif -3 * math.pi / 4 <= theta < -math.pi / 4:
p = 'above'
elif -math.pi / 4 <= theta < math.pi / 4:
p = 'right'
elif math.pi / 4 <= theta < 3 * math.pi / 4:
p = 'below'
p = self.vocab['pred_name_to_idx'][p]
triplets.append([s, p, o])
# Add dummy __in_image__ relationships for all objects
in_image = self.vocab['pred_name_to_idx']['__in_image__']
for i in range(O):
triplets.append([i, in_image, O])
total_triplets.append(triplets)
total_triplets = torch.LongTensor(total_triplets)
return total_triplets
def extract_actions_split(self, boxes, num_objects, is_test):
nr_instances = np.array([box['nr_instances'] for box in boxes])
indices = np.where(nr_instances == num_objects)
s_frame, e_frame = np.min(indices), np.max(indices) + 1
f1 = s_frame.copy()
if is_test:
f1 = s_frame
f2 = f1 + self.initial_frames_per_sample
else:
if e_frame - self.initial_frames_per_sample > s_frame:
f1 = np.random.randint(s_frame, e_frame - self.initial_frames_per_sample)
f2 = min(f1 + self.initial_frames_per_sample, e_frame)
s = (f1 - s_frame + 1) / (e_frame - s_frame)
e = (f2 - s_frame + 1) / (e_frame - s_frame)
return f1, f2, s, e
def extract_actions(self, objs, action_id, action_start, action_end):
num_objs = len(objs["object"].cpu().numpy().astype('int'))
hand_idx = num_objs - 1
indices = objs["object"].cpu().numpy().astype('int')
if self.vocab["object_idx_to_name"][indices[hand_idx]] != "hand":
return False, "Last index is not hand"
triplets = []
prev = hand_idx
for i in range(len(indices[:-1])):
if self.vocab["object_idx_to_name"][indices[i]] == "hand":
return False, "Multiple indices are hand"
triplets.append([prev, action_id, i, action_start, action_end])
prev = i
if not len(triplets):
return False, "No returned triplets"
return True, torch.FloatTensor(triplets)
def extract_bounding_boxes(self, boxes, img_shape, num_objects):
"""
Get for each scene the bounding box
:param scene: json data
:param frames_id: list of frames ids
:return: [F, O, 4]
"""
object_indices = {}
for timestep in boxes:
for obj in timestep['labels']:
obj_cat = (obj['standard_category'], obj['gt_annotation'], self.objs_mapping[obj['category']])
if obj_cat not in object_indices:
object_indices[obj_cat] = len(object_indices)
output_boxes = np.zeros((len(boxes), num_objects, 4))
for i in range(len(boxes)):
output_boxes[i] = output_boxes[i - 1]
timestep = boxes[i]
for obj in timestep['labels']:
x1, x2, y1, y2 = obj['box2d']['x1'], obj['box2d']['x2'], obj['box2d']['y1'], obj['box2d']['y2']
# Adding protection against bad boxes annotations
if x1 == x2 and y1 == y2:
x1 = x2 = y1 = y2 = 0.0
print("Error: H=W=0 in {}".format(boxes[0]['name']))
output_boxes[i, object_indices[(obj['standard_category'], obj['gt_annotation'],
self.objs_mapping[obj['category']])]] = x1, y1, x2 - x1, y2 - y1
reverse_object_indices = {v: k for k, v in object_indices.items()}
objects = {"object": []}
for i in range(len(reverse_object_indices)):
objects["object"].append(self.vocab["object_name_to_idx"][reverse_object_indices[i][-1]])
objects["object"] = torch.LongTensor(objects["object"])
output_boxes = output_boxes / (img_shape * 2)
if len(objects["object"]) != num_objects:
return False, "len(objects) != num_objects", None
return True, torch.FloatTensor(output_boxes), objects # [x0, y0, w, h]
def load_frames(self, frames_fns):
return [Image.open(fn) for fn in frames_fns]
def __getitem__(self, index):
"""
Get the pixels of an image, and a random synthetic scene graph for that
image constructed on-the-fly from its COCO object annotations. We assume
that the image will have height H, width W, C channels; there will be O
object annotations, each of which will have both a bounding box and a
segmentation mask of shape (M, M). There will be T triplets in the scene
graph.
Returns a tuple of:
- image: FloatTensor of shape (C, H, W)
- objs: LongTensor of shape (O,)
- boxes: FloatTensor of shape (O, 4) giving boxes for objects in
(x0, y0, x1, y1) format, in a [0, 1] coordinate system
- masks: LongTensor of shape (O, M, M) giving segmentation masks for
objects, where 0 is background and 1 is object.
- triplets: LongTensor of shape (T, 3) where triplets[t] = [i, p, j]
means that (objs[i], p, objs[j]) is a triple.
"""
try:
# Choose video index
video_id = self.vid_names[index]
# Choose scene graph
with open(os.path.join(self.data_root, f'offical_release_boxes/boxes_by_video_id/{video_id}/boxes.pkl'),
'rb') as f:
boxes_metadata = pkl.load(f)
boxes_metadata = clean_boxes_metadata(boxes_metadata)
action_name = self.labels[self.labels['id'] == video_id].iloc[0]['template']
action_idx = self.vocab["action_name_to_idx"][action_name]
# Open video file
imgs = self.extract_frames(boxes_metadata)
if imgs is None:
return False, "imgs is None"
output = self.extract_actions_split(boxes_metadata, action_to_num_objects[action_name], self.is_test)
if output is None:
return False, "Mixed number of objects (occlusion?)"
s_frame, e_frame, action_progress_s, action_progress_e = output
chosen_video_id = f'{video_id}_{s_frame}-{e_frame}'
thr_frames = self.initial_frames_per_sample if self.initial_frames_per_sample < 8 else 8
if not self.is_val and (e_frame - s_frame) < thr_frames:
return False, f"e_frame - s_frame < {thr_frames}"
# Choose frames
frames_lst = list(range(s_frame, e_frame))
boxes_metadata = boxes_metadata[s_frame:e_frame]
if self.is_test:
frames_per_action = len(frames_lst)
initial_frames_per_sample = len(frames_lst)
else:
frames_per_action = self.frames_per_action
initial_frames_per_sample = self.initial_frames_per_sample
frames_lst = frames_lst[0:initial_frames_per_sample: initial_frames_per_sample // frames_per_action]
boxes_metadata = boxes_metadata[0:initial_frames_per_sample: initial_frames_per_sample // frames_per_action]
initial_number_frames = len(frames_lst)
padding = 0
if len(frames_lst) < frames_per_action:
padding = frames_per_action - initial_number_frames
frames_lst = frames_lst + frames_lst[-1:] * padding
boxes_metadata = boxes_metadata + boxes_metadata[-1:] * (frames_per_action - initial_number_frames)
img_shape = self.load_frames(imgs[0:1])[0].size
status, boxes, objs = self.extract_bounding_boxes(boxes_metadata, img_shape, action_to_num_objects[action_name])
if not status:
return False, status
# Get actions - [A, 5]
status, actions = self.extract_actions(objs, action_idx, action_progress_s, action_progress_e)
if not status:
return False, actions
final_object_position = torch.zeros(actions.size(0), 2)
actions = torch.cat([actions, final_object_position], dim=1)
# Get triplets
triplets = self.extract_triplets(boxes)
try:
frames = self.load_frames(imgs[frames_lst])
vids = self.transforms(frames)
except Exception as e:
return False, "Error: Failed to load frames in video id: {}".format(video_id)
return vids, objs, boxes, triplets, actions, chosen_video_id
        except Exception as e:
            # chosen_video_id may not be set yet if the failure happened early, so report the index instead
            return False, "Error: in sample index {} with {}".format(index, e)
def extract_frames(self, boxes):
paths = sorted([os.path.join(self.data_root, 'frames', box['name']) for box in boxes])
return np.array(paths)
def is_action_valid(row):
return action_to_num_objects[row['template']] == row['nr_instances'] and row['template'] in valid_actions
def object_mapping_func(row, objs_mapping):
row['placeholders'] = [objs_mapping.get(obj, None) for obj in eval(row['placeholders'])]
return row
def is_object_valid(row):
return None not in row['placeholders']
def clean_boxes_metadata(boxes_metadata):
"""
Get unique boxes metadata
:param boxes_metadata:
:return:
"""
boxes_names = {b['name']: 0 for b in boxes_metadata}
new_boxes_metadata = []
for bb in boxes_metadata:
if bb['name'] in boxes_names and boxes_names[bb['name']] == 0:
boxes_names[bb['name']] += 1
new_boxes_metadata.append(bb)
return new_boxes_metadata
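# Hedged usage sketch (not part of the original file): how this dataset might be
# instantiated and indexed. The paths below are hypothetical placeholders;
# __getitem__ returns (vids, objs, boxes, triplets, actions, video_id) on success
# or (False, reason) when a sample has to be skipped.
if __name__ == '__main__':
    dataset = SmthElseDataset(data_root='/path/to/something-else',
                              labels='/path/to/labels.csv',
                              image_size=(64, 64), frames_per_action=16)
    sample = dataset[0]
    if sample[0] is not False:
        vids, objs, boxes, triplets, actions, video_id = sample
        print(video_id, boxes.shape, actions.shape)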
| 45.127877
| 124
| 0.607708
| 16,369
| 0.927685
| 0
| 0
| 0
| 0
| 0
| 0
| 5,245
| 0.297251
|
7feeefa1c9cfdfdf846929d05b2027d327b3a9e6
| 60
|
py
|
Python
|
user_login.py
|
pieddro/football
|
d5a021da26a2252dcece752c51818f03d1d3db46
|
[
"Apache-2.0"
] | null | null | null |
user_login.py
|
pieddro/football
|
d5a021da26a2252dcece752c51818f03d1d3db46
|
[
"Apache-2.0"
] | null | null | null |
user_login.py
|
pieddro/football
|
d5a021da26a2252dcece752c51818f03d1d3db46
|
[
"Apache-2.0"
] | null | null | null |
My new code..
New line of code ....
Test stash for the 2nd time
| 20
| 20
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
7fef83c18d0039ec789a2a57075be970bd25f765
| 1,195
|
py
|
Python
|
refstack/api/controllers/__init__.py
|
jovial/refstack
|
b6f9f8611bc3752acbf0c4275453285e80be85dc
|
[
"Apache-2.0"
] | null | null | null |
refstack/api/controllers/__init__.py
|
jovial/refstack
|
b6f9f8611bc3752acbf0c4275453285e80be85dc
|
[
"Apache-2.0"
] | null | null | null |
refstack/api/controllers/__init__.py
|
jovial/refstack
|
b6f9f8611bc3752acbf0c4275453285e80be85dc
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2015 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""API controllers package."""
from oslo_config import cfg
from refstack.api import constants as const
CTRLS_OPTS = [
cfg.IntOpt('results_per_page',
default=20,
help='Number of results for one page'),
cfg.StrOpt('input_date_format',
default='%Y-%m-%d %H:%M:%S',
help='The format for %(start)s and %(end)s parameters' % {
'start': const.START_DATE,
'end': const.END_DATE
})
]
CONF = cfg.CONF
CONF.register_opts(CTRLS_OPTS, group='api')
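# Hedged usage note (not part of the original module): once register_opts() above
# has run, other refstack code reads these values from the 'api' option group
# after the configuration has been loaded, e.g.:
#   per_page = CONF.api.results_per_page    # 20 unless overridden in the config file
#   date_fmt = CONF.api.input_date_format   # '%Y-%m-%d %H:%M:%S' by default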
| 32.297297
| 78
| 0.646862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 803
| 0.671967
|
7ff0e77d9b3db005d3ce70f0c9f81c5bbde228f8
| 4,808
|
py
|
Python
|
main.py
|
superwaiwjia/lowRankForSeer
|
86041e0a39e1ef2718e8133eb65a63c05d9a441c
|
[
"MIT"
] | 2
|
2021-11-18T07:01:40.000Z
|
2021-11-18T07:01:49.000Z
|
main.py
|
superwaiwjia/lowRankForSeer
|
86041e0a39e1ef2718e8133eb65a63c05d9a441c
|
[
"MIT"
] | null | null | null |
main.py
|
superwaiwjia/lowRankForSeer
|
86041e0a39e1ef2718e8133eb65a63c05d9a441c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#coding=utf-8
import pickle
import sys, os, re, subprocess, math
reload(sys)
sys.setdefaultencoding("utf-8")
from os.path import abspath, dirname, join
whereami = abspath(dirname(__file__))
sys.path.append(whereami)
from sklearn.metrics import roc_auc_score
import pandas as pd
import numpy as np
from scipy import *
import itertools
from utility import getWfixingA, getSparseWeight, appendDFToCSV_void, stop_critier
from solver import optimize
from data import load_dataset, loadSeer
def main(datasetName):
if datasetName == 'seer':
numberOfClass = 2
X, y = loadSeer(numberOfClass)
else:
X, y = load_dataset(name=datasetName)
from sklearn.model_selection import train_test_split
#P_train, P_test, y_train, y_test = train_test_split(X, y, random_state=42)
# split: 60-20-20
P_train, P_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
P_train, P_val, y_train, y_val = train_test_split(P_train, y_train, test_size=0.25, random_state=1) # 0.25 x 0.8 = 0.2
input_size, input_dimension = P_train.shape
numberOfClass = np.unique(y).shape[0]
P_train = np.transpose(P_train); P_val = np.transpose(P_val); P_test = np.transpose(P_test);
if numberOfClass > 2:
from sklearn.preprocessing import LabelBinarizer, StandardScaler
lb = LabelBinarizer()
y_train = lb.fit_transform(y_train); y_train = np.transpose(y_train);
y_val = lb.fit_transform(y_val); y_val = np.transpose(y_val);
y_test = lb.transform(y_test); y_test = np.transpose(y_test);
elif numberOfClass == 2:
y_train = y_train.reshape((y_train.shape[0], 1)); y_train = np.transpose(y_train);
y_val = y_val.reshape((y_val.shape[0], 1)); y_val = np.transpose(y_val);
y_test = y_test.reshape((y_test.shape[0], 1)); y_test = np.transpose(y_test);
print datasetName, input_size, input_dimension, numberOfClass
rank = P_train.shape[1]
#grid search or pre-defined hyper-parameters
lambda_X_list = [0.1]#[math.pow(10, x) for x in range(-4, 3)];
lambda_W_list = [10.0]#[math.pow(10, x) for x in range(-4, 3)];
lambda_D_list = [0.01]#[math.pow(10, x) for x in range(-4, 3)];
valid_list = []
for (lambda_X, lambda_W, lambda_D) in itertools.product(lambda_X_list, lambda_W_list, lambda_D_list):
try:
###############################################
# 0-th iteration
##############################################
X_ini = np.random.rand(P_train.shape[0], rank)
W_ini = np.random.rand(rank, P_train.shape[1])
D_ini = np.random.rand(y_train.shape[0], rank)
X = X_ini; D = D_ini; W = W_ini;
w_val = getSparseWeight(X, P_val, choice = 0, alpha = 0.3)
w = getSparseWeight(X, P_test, choice = 0, alpha = 0.3)
print 'iter', 'training', 'validation', 'testing'
print 0, roc_auc_score(y_train.T, np.dot(D, W).T), \
roc_auc_score(y_val.T, np.dot(D, w_val.T).T), \
roc_auc_score(y_test.T, np.dot(D, w.T).T)
appendDFToCSV_void(pd.DataFrame([{"train":roc_auc_score(y_train.T, np.dot(D, W).T), "validation":roc_auc_score(y_val.T, np.dot(D, w_val.T).T), "test":roc_auc_score(y_test.T, np.dot(D, w.T).T)}]), join(whereami+'/res', datasetName+'.log'))
from time import time
start = time()
###############################################
# loop iteration
##############################################
for iter in range(1, 50):
#update X
X_new = optimize.min_rank_dict(P_train, W, lambda_X, X)
#update W
Z = np.concatenate((P_train,y_train),axis=0)
A = np.concatenate((X_new,D),axis=0)
W_new = getWfixingA(Z, W, A, lambda_W)
#update D
D_new = optimize.min_rank_dict(y_train, W_new, lambda_D, D)
#D_new = np.dot(y_train, np.linalg.pinv(W_new))
#from sklearn.decomposition.dict_learning import _update_dict
#D_new = _update_dict(D, y_train, W_new)
#E = np.dot(y_train, W_new.T)
#F = np.dot(W_new, W_new.T)
#D_new = optimize.ODL_updateD(D, E, F, iterations = 1)
w = getSparseWeight(X_new, P_test, choice = 0, alpha = 0.3)
                w_val = getSparseWeight(X_new, P_val, choice = 0, alpha = 0.3)  # use the updated dictionary, consistent with the test weights above
print iter, roc_auc_score(y_train.T, np.dot(D_new, W_new).T), \
roc_auc_score(y_val.T, np.dot(D_new, w_val.T).T), \
roc_auc_score(y_test.T, np.dot(D_new, w.T).T)
appendDFToCSV_void(pd.DataFrame([{"train":roc_auc_score(y_train.T, np.dot(D_new, W_new).T),"validation":roc_auc_score(y_val.T, np.dot(D_new, w_val.T).T), "test":roc_auc_score(y_test.T, np.dot(D_new, w.T).T) }]), join(whereami+'/res', datasetName+'.log'))
D = D_new; W = W_new; X = X_new;
valid_list.append( roc_auc_score(y_val.T, np.dot(D_new, w_val.T).T) )
if stop_critier(valid_list):
break
except Exception as err:
print( err )
if __name__ == '__main__':
datasetName = sys.argv[1]
main(datasetName)
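# Hedged usage note (not part of the original file): the script takes the dataset
# name as its single command-line argument and, being written for Python 2
# (print statements, reload(sys)), would be launched as e.g.:
#   python2 main.py seer
# Results are appended to res/<datasetName>.log next to this script.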
| 38.464
| 258
| 0.66015
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 952
| 0.198003
|
7ff1b8e6fdd883cf61f529bf469c18df4b7174fc
| 166
|
py
|
Python
|
django_gotolong/bhav/apps.py
|
ParikhKadam/gotolong
|
839beb8aa37055a2078eaa289b8ae05b62e8905e
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 15
|
2019-12-06T16:19:45.000Z
|
2021-08-20T13:22:22.000Z
|
django_gotolong/bhav/apps.py
|
ParikhKadam/gotolong
|
839beb8aa37055a2078eaa289b8ae05b62e8905e
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 14
|
2020-12-08T10:45:05.000Z
|
2021-09-21T17:23:45.000Z
|
django_gotolong/bhav/apps.py
|
ParikhKadam/gotolong
|
839beb8aa37055a2078eaa289b8ae05b62e8905e
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 9
|
2020-01-01T03:04:29.000Z
|
2021-04-18T08:42:30.000Z
|
from django.apps import AppConfig
class BhavConfig(AppConfig):
    name = 'bhav'
    def ready(self):
        # Import inside ready() so Django's app registry is fully loaded before
        # the view module (and any models it imports) is pulled in.
        from django_gotolong.bhav.views import start
        start()
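# Hedged usage note (not part of the original file): ready() only runs when the
# app is listed in the project's settings, e.g. (the dotted path shown here is
# an assumption about the project layout):
#   INSTALLED_APPS = [
#       ...,
#       'django_gotolong.bhav.apps.BhavConfig',
#   ]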
| 16.6
| 44
| 0.704819
| 84
| 0.506024
| 0
| 0
| 0
| 0
| 0
| 0
| 6
| 0.036145
|
7ff2225b3cf1350521968e39323aa03d96333bb2
| 2,130
|
py
|
Python
|
tethysext/atcore/services/paginate.py
|
Aquaveo/tethysext-atcore
|
7a83ccea24fdbbe806f12154f938554dd6c8015f
|
[
"BSD-3-Clause"
] | 3
|
2020-11-05T23:50:47.000Z
|
2021-02-26T21:43:29.000Z
|
tethysext/atcore/services/paginate.py
|
Aquaveo/tethysext-atcore
|
7a83ccea24fdbbe806f12154f938554dd6c8015f
|
[
"BSD-3-Clause"
] | 7
|
2020-10-29T16:53:49.000Z
|
2021-05-07T19:46:47.000Z
|
tethysext/atcore/services/paginate.py
|
Aquaveo/tethysext-atcore
|
7a83ccea24fdbbe806f12154f938554dd6c8015f
|
[
"BSD-3-Clause"
] | null | null | null |
"""
********************************************************************************
* Name: paginate.py
* Author: nswain
* Created On: April 17, 2018
* Copyright: (c) Aquaveo 2018
********************************************************************************
"""
def paginate(objects, results_per_page, page, result_name, sort_by_raw=None, sort_reversed=False):
"""
Paginate given list of objects.
Args:
objects(list): list of objects to paginate.
results_per_page(int): maximum number of results to show on a page.
page(int): page to view.
result_name(str): name to use when referencing the objects.
sort_by_raw(str): sort field if applicable.
        sort_reversed(bool): indicates whether the sort is reversed or not.
    Returns:
        list, dict: list of objects for the current page, and metadata for the pagination page.
"""
results_per_page_options = [5, 10, 20, 40, 80, 120]
num_objects = len(objects)
if num_objects <= results_per_page:
page = 1
min_index = (page - 1) * results_per_page
max_index = min(page * results_per_page, num_objects)
paginated_objects = objects[min_index:max_index]
enable_next_button = max_index < num_objects
enable_previous_button = min_index > 0
pagination_info = {
'num_results': num_objects,
'result_name': result_name,
'page': page,
'min_showing': min_index + 1 if max_index > 0 else 0,
'max_showing': max_index,
'next_page': page + 1,
'previous_page': page - 1,
'sort_by': sort_by_raw,
'sort_reversed': sort_reversed,
'enable_next_button': enable_next_button,
'enable_previous_button': enable_previous_button,
'hide_buttons': page == 1 and max_index == num_objects,
'hide_header_buttons': len(paginated_objects) < 20,
'show': results_per_page,
'results_per_page_options': [x for x in results_per_page_options if x <= num_objects],
'hide_results_per_page_options': num_objects <= results_per_page_options[0],
}
return paginated_objects, pagination_info
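# Hedged usage sketch (not part of the original module): paginating a plain
# Python list; the variable names are illustrative only.
if __name__ == '__main__':
    projects = ['project-%d' % i for i in range(45)]
    page_items, info = paginate(projects, results_per_page=10, page=2,
                                result_name='projects')
    print(len(page_items), info['min_showing'], info['max_showing'])  # 10 11 20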
| 39.444444
| 98
| 0.620188
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,029
| 0.483099
|
7ff4886052822174f0f2c10e163f3567d0699ee7
| 133
|
py
|
Python
|
geotweet/tests/integration/twitter/__init__.py
|
meyersj/geotweet
|
1a6b55f98adf34d1b91f172d9187d599616412d9
|
[
"MIT"
] | 6
|
2016-03-26T19:29:25.000Z
|
2020-07-12T02:18:22.000Z
|
geotweet/tests/integration/twitter/__init__.py
|
meyersj/geotweet
|
1a6b55f98adf34d1b91f172d9187d599616412d9
|
[
"MIT"
] | null | null | null |
geotweet/tests/integration/twitter/__init__.py
|
meyersj/geotweet
|
1a6b55f98adf34d1b91f172d9187d599616412d9
|
[
"MIT"
] | 1
|
2020-01-06T01:25:05.000Z
|
2020-01-06T01:25:05.000Z
|
import os
from os.path import dirname
import sys
ROOT = dirname(dirname(dirname(os.path.abspath(__file__))))
sys.path.append(ROOT)
| 16.625
| 59
| 0.774436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
7ff4ceaf754a9a8a176cc343441eb5563e96bf86
| 1,996
|
py
|
Python
|
main.py
|
nerdmanPc/arvore-b
|
f993028f0c8971cff4e4434967c8f9b44a5cc265
|
[
"MIT"
] | null | null | null |
main.py
|
nerdmanPc/arvore-b
|
f993028f0c8971cff4e4434967c8f9b44a5cc265
|
[
"MIT"
] | null | null | null |
main.py
|
nerdmanPc/arvore-b
|
f993028f0c8971cff4e4434967c8f9b44a5cc265
|
[
"MIT"
] | null | null | null |
# First practical assignment of EDAII (UFBA)
# Developed as a pair:
# Laila Pereira Mota Santos and Pedro Antonhyonih Silva Costa
# Python version 3.8.10
#
# IMPORTANT NOTE:
# THE GRAUMINIMO (MINIMUM DEGREE) CONSTANT IS IN THE node.py FILE
import os
from struct import Struct
from typing import Optional, Tuple
from enum import Enum
from node import Node
from data_base import DataBase, OpStatus
import sys
FILE_PATH = "tree.bin"
#GRAUMINIMO = 2 # Moved to node.py
def insert_entry(key:int, name:str, age:int):
data_base = DataBase(FILE_PATH)
insert_result = data_base.add_entry(key, name, age)
#print(insert_result)
if insert_result == OpStatus.OK:
print('insercao com sucesso: {}'.format(key))
elif insert_result == OpStatus.ERR_KEY_EXISTS:
print('chave ja existente: {}'.format(key))
else:
print('DEBUG: erro logico na insercao da chave {}'.format(key))
def query_entry(key:int):
data_base = DataBase(FILE_PATH)
_entry = data_base.entry_by_key(key)
if _entry is not None:
print(_entry)
else:
print('chave nao encontrada: {}'.format(key))
def print_tree():
data_base = DataBase(FILE_PATH)
data_base.print_tree()
def print_sequence():
data_base = DataBase(FILE_PATH)
data_base.print_keys_ordered()
def print_occupancy():
data_base = DataBase(FILE_PATH)
if data_base.empty():
print('arvore vazia')
return
occupancy = data_base.occupancy()
print('taxa de ocupacao: {:.1f}'.format(occupancy))
def exit_shell():
sys.exit()
#os.remove(FILE_PATH)
#Main loop that processes the commands
entry = input()
while entry != 'e':
if(entry == 'i'):
num_reg = input()
name_reg = input()
age_reg = input()
insert_entry(int(num_reg), name_reg, int(age_reg))
elif(entry == 'c'):
num_reg = input()
query_entry(int(num_reg))
elif(entry == 'p'):
print_tree()
elif(entry == 'o'):
print_sequence()
elif(entry == 't'):
print_occupancy()
entry = input()
exit_shell()
#End of the main loop
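# Hedged usage note (not part of the original file): the shell above reads
# single-letter commands from stdin: 'i' insert (then key, name and age on
# separate lines), 'c' query a key, 'p' print the tree, 'o' print keys in order,
# 't' print the occupancy rate, 'e' exit. A non-interactive run could look like
# (the key/name/age values are illustrative):
#   printf 'i\n10\nAlice\n30\nc\n10\np\ne\n' | python3 main.py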
| 25.265823
| 65
| 0.698397
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 551
| 0.275914
|
7ff5855819bc7ea53013b0091b066cc505d14375
| 6,134
|
py
|
Python
|
hcat/backends/spatial_embedding.py
|
buswinka/hcat
|
dcfd855904ba51f6e1fa6c9ddc775b3364695e3e
|
[
"MIT"
] | 4
|
2021-10-14T19:22:57.000Z
|
2022-03-29T09:37:43.000Z
|
hcat/backends/spatial_embedding.py
|
buswinka/hcat
|
dcfd855904ba51f6e1fa6c9ddc775b3364695e3e
|
[
"MIT"
] | null | null | null |
hcat/backends/spatial_embedding.py
|
buswinka/hcat
|
dcfd855904ba51f6e1fa6c9ddc775b3364695e3e
|
[
"MIT"
] | null | null | null |
import torch
import hcat.lib.functional
from hcat.lib.functional import IntensityCellReject
from hcat.backends.backend import Backend
from hcat.models.r_unet import embed_model as RUnet
from hcat.train.transforms import median_filter, erosion
import hcat.lib.utils
from hcat.lib.utils import graceful_exit
import os.path
import wget
from typing import Dict, Optional
class SpatialEmbedding(Backend):
def __init__(self,
sigma: Optional[torch.Tensor] = torch.tensor([0.02, 0.02, 0.02]),
device: Optional[str] = 'cuda',
model_loc: Optional[str] = None,
postprocessing: Optional[bool] = True,
scale: Optional[int] = 25,
figure: Optional[str] = None,
archetecture: Optional[RUnet] = RUnet):
"""
Initialize Spatial embedding Algorithm.
:param sigma: torch.Tensor[sigma_x, sigma_y, sigma_z] values for gaussian probability estimation.
:param device: String value for torch device by which to run segmentation backbone on.
:param model_loc: Path to trained model files.
:param postprocessing: Disable segmentation postprocessing, namely
:param scale: scale factor based on max diameter of object
:param figure: filename and path of diagnostic figure which may be rendered
"""
super(SpatialEmbedding, self).__init__()
self.url = 'https://github.com/buswinka/hcat/blob/master/modelfiles/spatial_embedding.trch?raw=true'
# self.url = None
self.scale = torch.tensor(scale)
self.device = device
self.sigma = sigma.to(device)
self.postprocessing = postprocessing
self.figure = figure
if self.url:
self.model = self._model_loader_url(self.url, archetecture, device)
else:
self.model = self._model_loader_path(model_loc, archetecture, device)
self.vector_to_embedding = torch.jit.script(
hcat.lib.functional.VectorToEmbedding(scale=self.scale).requires_grad_(False).eval())
self.embedding_to_probability = torch.jit.script(
hcat.lib.functional.EmbeddingToProbability(scale=self.scale).requires_grad_(False).eval())
self.estimate_centroids = hcat.lib.functional.EstimateCentroids(scale=self.scale).requires_grad_(False)
self.filter = median_filter(kernel_targets=3, rate=1, device=device)
self.binary_erosion = erosion(device=device)
self.intensity_rejection = IntensityCellReject()
self.nms = hcat.lib.functional.nms().requires_grad_(False)
self.centroids = None
self.vec = None
self.embed = None
self.prob = None
@graceful_exit('\x1b[1;31;40m' + 'ERROR: Spatial Embedding Failed. Aborting...' + '\x1b[0m')
def forward(self, image: torch.Tensor) -> torch.Tensor:
"""
Inputs an image and outputs a probability mask of everything seen in the image.
.. note::
Call the module as a function to execute this method (similar to torch.nn.module).
        .. warning::
Will not raise an error upon failure, instead returns None and prints to standard out
Example:
>>> from hcat.backends.spatial_embedding import SpatialEmbedding
>>> import torch
>>> backend = SpatialEmbedding()
>>> image = torch.load('path/to/my/image.trch')
>>> assert image.ndim == 5 # Shape should be [B, C, X, Y, Z]
>>> masks = backend(image)
:param image: [B, C=4, X, Y, Z] input image
        :return: [B, N, X, Y, Z] stack of per-cell probability masks, one channel per detected cell (dim 1 is empty when nothing is detected)
"""
assert image.ndim == 5
assert image.shape[1] == 1
assert image.min() >= -1
assert image.max() <= 1
# image = self.filter(image.to(self.device))
image = image.to(self.device)
b, c, x, y, z = image.shape
if self.image_reject and self._is_image_bad(image):
return torch.zeros((b, 0, x, y, z), device=self.device)
# Evaluate Neural Network Model
out: torch.Tensor = self.model(image)
# Assign Outputs
probability_map = out[:, [-1], ...]
out = out[:, 0:3:1, ...]
self.prob = probability_map.cpu()
self.vec = out.cpu()
out: torch.Tensor = self.vector_to_embedding(out)
self.embed = out.cpu()
centroids: Dict[str, torch.Tensor] = self.estimate_centroids(out, probability_map)
self.centroids = centroids
out: torch.Tensor = self.embedding_to_probability(out, centroids, self.sigma)
# Reject cell masks that overlap or meet min Myo7a criteria
if self.postprocessing:
out: torch.Tensor = self.intensity_rejection(out, image)
# print(centroids.shape, out.shape)
if out.numel() == 0:
return torch.zeros((b, 0, x, y, z), device=self.device)
ind = self.nms(out, 0.5)
out = out[:, ind, ...]
# Take probabilities and generate masks!
probability_map = probability_map.lt(0.8).squeeze(1)
for i in range(out.shape[1]):
out[:, i, ...][probability_map] = 0
self.zero_grad()
return out
def load(self, model_loc: str) -> None:
"""
Initializes model weights from a url or filepath.
Example:
>>> from hcat.backends.spatial_embedding import SpatialEmbedding
>>> backend = SpatialEmbedding()
>>>
>>> url = 'https://www.model_location.com/model.trch'
>>> backend.load(url) # Works with url
>>>
>>> model_path = 'path/to/my/model.trch'
>>> backend.load(model_path) # Also works with path
:param model_loc: url or filepath
:return: None
"""
if self._is_url(model_loc):
return self._model_loader_url(model_loc, RUnet(in_channels=1).requires_grad_(False), self.device)
else:
return self._model_loader_path(model_loc, RUnet(in_channels=1).requires_grad_(False), self.device)
| 34.077778
| 111
| 0.626997
| 5,760
| 0.939028
| 0
| 0
| 2,561
| 0.417509
| 0
| 0
| 2,284
| 0.372351
|
7ff58669b09c24b09a4ab1de5e76c0c33e23118d
| 6,656
|
py
|
Python
|
mortar_rdb/tests/test_utility.py
|
Mortar/mortar_rdb
|
576628a299f94ef60324244777766a620556592b
|
[
"MIT"
] | 1
|
2017-03-24T15:20:40.000Z
|
2017-03-24T15:20:40.000Z
|
mortar_rdb/tests/test_utility.py
|
Mortar/mortar_rdb
|
576628a299f94ef60324244777766a620556592b
|
[
"MIT"
] | 3
|
2015-12-01T20:06:30.000Z
|
2018-02-02T07:05:21.000Z
|
mortar_rdb/tests/test_utility.py
|
Mortar/mortar_rdb
|
576628a299f94ef60324244777766a620556592b
|
[
"MIT"
] | 1
|
2019-03-01T08:37:48.000Z
|
2019-03-01T08:37:48.000Z
|
from mortar_rdb import register_session, get_session
from mortar_rdb.interfaces import ISession
from testfixtures.components import TestComponents
from sqlalchemy.exc import OperationalError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm.session import Session
from sqlalchemy.schema import Column
from sqlalchemy.types import Integer, String
from threading import Thread
from testfixtures import (
ShouldRaise,compare,generator,Comparison as C, LogCapture
)
from unittest import TestCase
from zope.component import getSiteManager
from zope.component.interfaces import ComponentLookupError
import transaction
class TestUtility(TestCase):
def setUp(self):
self.components = TestComponents()
self.Base = declarative_base()
class Model(self.Base):
__tablename__ = 'model'
id = Column('id', Integer, primary_key=True)
name = Column('name', String(50))
self.Model = Model
def tearDown(self):
self.components.uninstall()
def test_how_to_create(self):
register_session('sqlite://', transactional=False)
# at this stage we have no tables
session = get_session()
session.add(self.Model(name='foo'))
# so we get an error
with ShouldRaise(OperationalError):
session.commit()
# ...which we then need to abort:
session.rollback()
# if we know we have no tables, we can do:
self.Base.metadata.create_all(session.bind)
# now we can commit:
session.add(self.Model(name='foo'))
session.commit()
# ...and get stuff back:
self.assertEqual(1,session.query(self.Model).count())
def test_get_in_view(self):
register_session('sqlite://')
register_session('sqlite://','foo')
# create the tables
session1 = get_session()
session2 = get_session('foo')
with transaction.manager:
self.Base.metadata.create_all(session1.bind)
self.Base.metadata.create_all(session2.bind)
# this is what you'd do in views:
session = get_session()
session.add(self.Model(id=1,name='foo'))
model1 = session.query(self.Model).one()
self.assertEqual(model1.id,1)
self.assertEqual(model1.name,'foo')
# or with a name...
session = get_session('foo')
session.add(self.Model(id=1,name='foo'))
model2 = session.query(self.Model).one()
self.assertEqual(model2.id,1)
self.assertEqual(model2.name,'foo')
# paranoia
self.assertFalse(model1 is model2)
def test_register(self):
register_session('sqlite://')
# create the tables
session = get_session()
self.Base.metadata.create_all(session.bind)
# check registrations
compare(generator(
C('zope.component.registry.UtilityRegistration',
component=C('sqlalchemy.orm.scoping.ScopedSession'),
factory=None,
info=u'',
name=u'',
provided=ISession,
registry=self.components.registry
)),self.components.registry.registeredUtilities())
# this is what get_session goes:
session = getSiteManager().getUtility(ISession)
session.add(self.Model(id=1,name='foo'))
model = session.query(self.Model).one()
self.assertEqual(model.id,1)
self.assertEqual(model.name,'foo')
def test_register_with_name(self):
register_session('sqlite://','foo')
# check registrations
compare(generator(
C('zope.component.registry.UtilityRegistration',
component=C('sqlalchemy.orm.scoping.ScopedSession'),
factory=None,
info=u'',
name=u'foo',
provided=ISession,
registry=self.components.registry
)),self.components.registry.registeredUtilities())
registry = getSiteManager()
# check we don't register with no name:
with ShouldRaise(ComponentLookupError(ISession, u'')):
registry.getUtility(ISession)
# check we do with the right name
self.assertTrue(isinstance(
registry.getUtility(ISession,'foo')(),
Session
))
def test_transaction(self):
register_session('sqlite://')
# functional
with transaction.manager:
session = get_session()
self.Base.metadata.create_all(session.bind)
session.add(self.Model(id=1,name='foo'))
compare(session.query(self.Model).count(), expected=1)
def test_transaction_no_session_usage(self):
register_session('sqlite://')
# functional
with transaction.manager:
session = get_session()
self.Base.metadata.create_all(session.bind)
session.execute(
self.Model.__table__.insert().values(name='test')
)
compare(session.query(self.Model).count(), expected=1)
def test_no_transaction(self):
register_session('sqlite://',transactional=False)
# functional
session = get_session()
self.Base.metadata.create_all(session.bind)
session.add(self.Model(id=1,name='foo'))
session.commit()
compare(session.query(self.Model).count(), expected=1)
def test_different_sessions_per_thread(self):
register_session('sqlite://')
class TestThread(Thread):
def run(self):
self.resulting_session = get_session()
t1 = TestThread()
t1.start()
t2 = TestThread()
t2.start()
t1.join()
t2.join()
self.assertNotEqual(
id(t1.resulting_session),
id(t2.resulting_session),
)
def test_different_sessions_when_async(self):
register_session('sqlite://',
scoped=False, transactional=False)
s1 = get_session()
s2 = get_session()
self.assertNotEqual(id(s1),id(s2))
def test_logging_functional(self):
with LogCapture() as l:
register_session('sqlite://')
l.check((
'mortar_rdb',
'INFO',
"Registering session for sqlite:// with name ''"
))
| 32.154589
| 70
| 0.587891
| 6,007
| 0.902494
| 0
| 0
| 0
| 0
| 0
| 0
| 923
| 0.138672
|
7ff6757eb76e6c391780f0171055dc2c8c0944f0
| 2,637
|
py
|
Python
|
magic_driver_control/scripts/driver_controller.py
|
flamma7/adv_robotics
|
da9150de28a5464ee6af1d0859312f4858a6b3d2
|
[
"Apache-2.0"
] | null | null | null |
magic_driver_control/scripts/driver_controller.py
|
flamma7/adv_robotics
|
da9150de28a5464ee6af1d0859312f4858a6b3d2
|
[
"Apache-2.0"
] | null | null | null |
magic_driver_control/scripts/driver_controller.py
|
flamma7/adv_robotics
|
da9150de28a5464ee6af1d0859312f4858a6b3d2
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""
This node talks directly to the pololu driver
It takes in PID control effort and maps it to commands for the pololu driver
"""
from __future__ import division
import rospy
from std_msgs.msg import Float64, Int8MultiArray
DRIVE_PUB_INDEX = 0
YAW_PUB_INDEX = 1
SIDE_IR_THRESH = 10
FRONT_IR_THRESH = 21
DOORWAY_THRESH = 5
DOORWAY_IGNORES = 250
class DriverControl:
def __init__(self):
rospy.Subscriber("yaw/control_effort", Float64, self.yaw_ce_callback)
rospy.Subscriber("drive/control_effort", Float64, self.drive_ce_callback)
rospy.Subscriber("distance", Float64, self.side_dist_callback)
rospy.Subscriber("front_distance", Float64, self.front_dist_callback)
self.pololu_pub = rospy.Publisher('move_setpoints', Int8MultiArray, queue_size=5)
self.drive = 0
self.yaw = 0
self.side_distance = 5
self.front_distance = 100
rospy.loginfo("Driver Controller Initialized")
self.doorway = False
self.doorway_count = 0
def front_dist_callback(self, msg):
self.front_distance = msg.data
def side_dist_callback(self,msg):
self.side_distance = msg.data
def drive_ce_callback(self, msg):
self.drive = msg.data
# conversions... if sm we probably don't need to convert
self.publish_cmd()
def yaw_ce_callback(self, msg):
"""
This comes in the form of around 3???
Map it to something between -100 and 100?
"""
self.yaw = -msg.data
# conversions...
self.publish_cmd()
def publish_cmd(self):
"""
Sends a command to the pololu
"""
yaw = self.yaw
if self.doorway:
self.doorway_count -= 1
if self.doorway_count < 0:
rospy.logwarn("Leaving Doorway")
self.doorway = False
else:
rospy.logwarn("Doorway")
yaw = 0
elif self.side_distance > SIDE_IR_THRESH and self.front_distance > FRONT_IR_THRESH:
self.doorway_count += 1
if self.doorway_count > DOORWAY_THRESH:
self.doorway = True
self.doorway_count = DOORWAY_IGNORES
else:
self.doorway_count = 0
data = []
data.append(int(self.drive))
data.append(int(yaw))
msg = Int8MultiArray(); msg.data = data
self.pololu_pub.publish(msg)
def main():
rospy.init_node("driver_controller")
dc = DriverControl()
rospy.spin()
if __name__ == "__main__":
main()
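# Hedged usage note (not part of the original file): with the node running, the
# PID control efforts it consumes can be exercised from a shell, e.g.:
#   rostopic pub -1 /yaw/control_effort std_msgs/Float64 "data: 1.5"
#   rostopic pub -1 /drive/control_effort std_msgs/Float64 "data: 20.0"
# The resulting [drive, yaw] setpoints are then published on /move_setpoints
# as a std_msgs/Int8MultiArray.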
| 28.663043
| 91
| 0.615851
| 2,127
| 0.806598
| 0
| 0
| 0
| 0
| 0
| 0
| 557
| 0.211225
|
7ff703d79b5264be25b5282ef47dd791ebb22441
| 4,025
|
py
|
Python
|
GageRnR/application.py
|
tobyndax/GageRnR
|
2dadafe6cd76a963068b7cbbd732f5f8e02d36fb
|
[
"MIT"
] | null | null | null |
GageRnR/application.py
|
tobyndax/GageRnR
|
2dadafe6cd76a963068b7cbbd732f5f8e02d36fb
|
[
"MIT"
] | null | null | null |
GageRnR/application.py
|
tobyndax/GageRnR
|
2dadafe6cd76a963068b7cbbd732f5f8e02d36fb
|
[
"MIT"
] | null | null | null |
"""GageRnR.
The input data should be structured
in a 3d array n[i,j,k] where
i = operator, j = part, k = measurement
Stored to file this data would look:
m1 m2 m3
3.29; 3.41; 3.64 # p1 | o1
2.44; 2.32; 2.42 # p2
3.08; 3.25; 3.07 # p1 | o2
2.53; 1.78; 2.32 # p2
3.04; 2.89; 2.85 # p1 | o3
1.62; 1.87; 2.04 # p2
More info: https://github.com/owodunni/GageRnR
Usage:
GageRnR -f FILE -s STRUCTURE [-a <AXES>] [-d <DELIMITER>] [-o <FOLDER>] [-g <PARTS>]
GageRnR -h | --help
GageRnR -v | --version
Examples:
GageRnR -f data.csv -s5,7,11 -o report
GageRnR -f data/data_mXop.csv -s 3,5,11 -o outDir
GageRnR -f data/data_opXm.csv -s 5,7,11 -a 2,1,0 -o outDir
GageRnR -f data/data_demoGRnR.csv -s 3,10,3 -a 0,2,1 -g 40,42,30,43,29,45,27.5,42,26,35 -o outDir
Options:
-f --file=FILE Load input data.
-s --structure=STRUCTURE Data structure.
Order should be operators, parts, measurements.
-a --axes=<AXES> Order of data axes [default: 0,1,2].
-d --delimiter=<DELIMITER> Order of data axes [default: ;].
-o --output=<FOLDER> Report output directory
-g --groundTruth=<PARTS> Ground Truth data for parts
-h --help Show this screen.
-v --version Show version.
"""
from docopt import docopt
import os.path
import GageRnR
from .reportGenerator import ReportGenerator
def toInt(values):
return [int(v) for v in values.split(',')]
def toFloat(values):
return [float(v) for v in values.split(',')]
def positiveIntegers(values, minValue):
for value in values:
if value < minValue:
return False
return True
def checkIntegerList(name, values, minValue=0):
if(len(values) != 3):
raise AttributeError(name, " can only have three values.")
if(not positiveIntegers(values, minValue)):
raise AttributeError(name, " can only be positive integers.")
class Application():
def __init__(self, argv=None):
arguments = docopt(__doc__, argv, version=GageRnR.__version__)
self.file = str(arguments["--file"])
self.structure = toInt(arguments["--structure"])
self.axes = toInt(arguments["--axes"])
self.delimiter = str(arguments["--delimiter"])
if(arguments["--groundTruth"] is not None):
self.gt = toFloat(arguments["--groundTruth"])
if(arguments["--output"] is not None):
self.outputFolder = arguments["--output"]
def check(self):
if not os.path.isfile(self.file):
raise FileNotFoundError(self.file)
checkIntegerList("Structure", self.structure, 1)
checkIntegerList("Axes", self.axes)
def run(self):
loader = GageRnR.DataLoader()
data = loader.load(
file=self.file,
structure=self.structure,
axes=self.axes,
delimiter=self.delimiter)
g = GageRnR.GageRnR(data)
g.calculate()
s = GageRnR.Statistics(data)
s.calculate()
n = GageRnR.Normality(data)
n.calculate()
if hasattr(self, 'gt'):
lin = GageRnR.Linearity(data=data, partGt=self.gt)
lin.calculate()
if not hasattr(self, 'outputFolder'):
return
rg = ReportGenerator(self.outputFolder)
rg.addTitle(g.title)
rg.addDoc(g)
rg.addTable(g.summary(tableFormat="html"))
rg.addTitle(s.title)
rg.addDoc(s)
rg.addTable(s.summary(tableFormat="html"))
rg.addPlot(s.createPartsBoxPlot(), 'Parts Box Plot')
rg.addPlot(s.createOperatorsBoxPlot(), 'Operators Box Plot')
rg.addTitle(n.title)
rg.addDoc(n)
rg.addTable(n.summary(tableFormat="html"))
if hasattr(self, 'gt'):
rg.addTitle(lin.title)
rg.addDoc(lin)
rg.addTable(lin.summary(tableFormat="html"))
rg.addPlot(lin.createLinearityPlot(), 'Residual Linearity Plot')
rg.generateReport()
print("Report written to: " + self.outputFolder)
| 29.166667
| 101
| 0.610683
| 2,141
| 0.531925
| 0
| 0
| 0
| 0
| 0
| 0
| 1,546
| 0.384099
|
7ff97680a496e4eac114964f67955913e58ace45
| 4,536
|
py
|
Python
|
final/options.py
|
annahung31/Advance_MM_homeworks
|
f6b2d600220442a73d25d478d08898ee796457b6
|
[
"MIT"
] | null | null | null |
final/options.py
|
annahung31/Advance_MM_homeworks
|
f6b2d600220442a73d25d478d08898ee796457b6
|
[
"MIT"
] | null | null | null |
final/options.py
|
annahung31/Advance_MM_homeworks
|
f6b2d600220442a73d25d478d08898ee796457b6
|
[
"MIT"
] | null | null | null |
import numpy as np
import os
import glob
import torch
import argparse
def parse_args(script):
parser = argparse.ArgumentParser(description= 'few-shot script %s' %(script))
parser.add_argument('--task', default='fsl', help='fsl/cdfsl-single/cdfsl-multi')
parser.add_argument('--dataset', default='miniImagenet', help='miniImagenet/cub/cars/places/plantae, specify multi for training with multiple domains')
parser.add_argument('--testset', default='cub', help='cub/cars/places/plantae, valid only when dataset=multi')
parser.add_argument('--model', default='ResNet10', help='model: Conv{4|6} / ResNet{10|18|34}') # we use ResNet10 in the paper
parser.add_argument('--method', default='myMethod', help='baseline/baseline++/protonet/matchingnet/relationnet{_softmax}/gnnnet')
parser.add_argument('--kl', default=1, type=int, help='use kl divergence in baseline model')
parser.add_argument('--train_n_way' , default=5, type=int, help='class num to classify for training')
parser.add_argument('--test_n_way' , default=5, type=int, help='class num to classify for testing (validation) ')
parser.add_argument('--n_shot' , default=5, type=int, help='number of labeled data in each class, same as n_support')
parser.add_argument('--train_aug' , action='store_true', help='perform data augmentation or not during training ')
parser.add_argument('--name' , default='tmp', type=str, help='')
parser.add_argument('--save_dir' , default='./output', type=str, help='')
parser.add_argument('--data_dir' , default='/home/annahung/189/courses/AMMAI/final/NTU_aMMAI21_cdfsl/filelists', type=str, help='')
if script == 'train':
parser.add_argument('--num_classes' , default=200, type=int, help='total number of classes in softmax, only used in baseline')
parser.add_argument('--save_freq' , default=25, type=int, help='Save frequency')
parser.add_argument('--start_epoch' , default=0, type=int,help ='Starting epoch')
parser.add_argument('--stop_epoch' , default=400, type=int, help ='Stopping epoch')
parser.add_argument('--resume' , default='', type=str, help='continue from previous trained model with largest epoch')
parser.add_argument('--resume_epoch', default=-1, type=int, help='')
parser.add_argument('--warmup' , default='baseline', type=str, help='continue from baseline, neglected if resume is true')
elif script == 'test':
parser.add_argument('--finetune' , action='store_true', help='finetune the few-shot model or not')
parser.add_argument('--split' , default='novel', help='base/val/novel')
parser.add_argument('--save_epoch', default=-1, type=int,help ='load the model trained in x epoch, use the best model if x is -1')
else:
raise ValueError('Unknown script')
return parser.parse_args()
def get_assigned_file(checkpoint_dir,num):
assign_file = os.path.join(checkpoint_dir, '{:d}.tar'.format(num))
return assign_file
def get_resume_file(checkpoint_dir, resume_epoch=-1):
filelist = glob.glob(os.path.join(checkpoint_dir, '*.tar'))
if len(filelist) == 0:
return None
filelist = [ x for x in filelist if os.path.basename(x) != 'best_model.tar' ]
epochs = np.array([int(os.path.splitext(os.path.basename(x))[0]) for x in filelist])
max_epoch = np.max(epochs)
epoch = max_epoch if resume_epoch == -1 else resume_epoch
resume_file = os.path.join(checkpoint_dir, '{:d}.tar'.format(epoch))
return resume_file
def get_best_file(checkpoint_dir):
best_file = os.path.join(checkpoint_dir, 'best_model.tar')
if os.path.isfile(best_file):
return best_file
else:
return get_resume_file(checkpoint_dir)
def load_warmup_state(filename, method):
print(' load pre-trained model file: {}'.format(filename))
warmup_resume_file = get_resume_file(filename)
tmp = torch.load(warmup_resume_file)
if tmp is not None:
state = tmp['state']
state_keys = list(state.keys())
for i, key in enumerate(state_keys):
if 'relationnet' in method and "feature." in key:
newkey = key.replace("feature.","")
state[newkey] = state.pop(key)
elif method == 'gnnnet' and 'feature.' in key:
newkey = key.replace("feature.","")
state[newkey] = state.pop(key)
elif method == 'matchingnet' and 'feature.' in key and '.7.' not in key:
newkey = key.replace("feature.","")
state[newkey] = state.pop(key)
else:
state.pop(key)
else:
raise ValueError(' No pre-trained encoder file found!')
return state
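# Hedged usage sketch (not part of the original file): parse_args() reads
# sys.argv, so a training run would be started from the shell, e.g.:
#   python train.py --dataset miniImagenet --method gnnnet --n_shot 5 --train_aug --name exp1
# The training script itself is not shown here; it is assumed to do roughly:
#   params = parse_args('train')
#   resume_file = get_resume_file(checkpoint_dir, params.resume_epoch)  # checkpoint_dir chosen by the caller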
| 52.744186
| 153
| 0.698633
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,614
| 0.35582
|
7ffbe9ba3d0ccc12492d20e36da26c44617c81e1
| 6,322
|
py
|
Python
|
tests/test_pt.py
|
atti84it/ebook-reader-dict
|
6a23b633bb06af7f9ea9d54c837cd78d627a7eb7
|
[
"MIT"
] | 83
|
2020-05-21T12:25:07.000Z
|
2022-03-25T23:26:42.000Z
|
tests/test_pt.py
|
atti84it/ebook-reader-dict
|
6a23b633bb06af7f9ea9d54c837cd78d627a7eb7
|
[
"MIT"
] | 1,015
|
2020-04-18T12:21:25.000Z
|
2022-03-31T16:38:53.000Z
|
tests/test_pt.py
|
atti84it/ebook-reader-dict
|
6a23b633bb06af7f9ea9d54c837cd78d627a7eb7
|
[
"MIT"
] | 16
|
2020-11-05T22:49:31.000Z
|
2022-03-31T08:14:05.000Z
|
import pytest
from wikidict.render import parse_word
from wikidict.utils import process_templates
@pytest.mark.parametrize(
"word, pronunciations, gender, etymology, definitions",
[
("ababalhar", [], "", ["De baba."], ["<i>(popular)</i> babar; conspurcar"]),
(
"alguém",
["aw.ˈgẽj"],
"",
["Do latim <i>alĭquem</i> <sup>(la)</sup>."],
["pessoa não identificada"],
),
(
"algo",
[],
"",
[],
["um pouco, de certo modo", "objeto (não-identificado) de que se fala"],
),
(
"baiano",
[],
"",
["Derivado de Bahia, mais o sufixo ano, com perda do H."],
[
"do Estado da Bahia, Brasil",
"natural ou habitante do Estado da Bahia, Brasil",
"<i>(São Paulo, Brasil; popular; pejorativo)</i> pessoa que se veste de maneira incomum ou brega; fora da moda", # noqa
],
),
(
"cabrum",
[],
"mf",
['Do latim <i>caprunu</i> <sup>(la)</sup> "cabra".'],
[
"<i>(Pecuária)</i> de cabras:",
"<i>(Regionalismo, Brasil)</i> marido de mulher adúltera",
"indica estrondo",
],
),
(
"COPOM",
[],
"m",
[],
[
"<b>C</b>entro de <b>O</b>perações da <b>Po</b>lícia <b>M</b>ilitar",
"<i>(Brasil)</i> <b>Co</b>mitê de <b>Po</b>lítica <b>M</b>onetária",
],
),
(
"dezassete",
[],
"",
["Contração do latim vulgar <i>decem</i> + <i>ac</i> + <i>septem</i>."],
[
"o número dezassete (17, XVII)",
"nota correspondente a dezassete valores",
"pessoa ou coisa que apresenta o número dezassete numa ordenação",
"vide dezessete",
],
),
(
"etc",
[],
"",
[],
[
'abreviação do latim <i>et cetera</i>, que significa "e outros", "e os restantes" e "e outras coisas mais"', # noqa
],
),
(
"-ista",
[],
"",
[
"Do grego antigo <i>-ιστεσ</i> (<i>-istes</i>) através do latim <i>-ista</i> através do francês antigo <i>-iste</i>." # noqa
],
[
"que segue um princípio",
"que é estudioso ou profissional de um assunto",
"que usa algo",
"que tem uma visão preconceituosa",
],
),
(
"neo-",
[],
"",
["Do grego antigo <i>νέος</i>."],
[
"exprime a ideia de <i>novo</i>",
"<b>Nota:</b> Liga-se por hífen ao morfema seguinte quando este começa por <b>vogal</b>, <b>h</b>, <b>r</b> ou <b>s</b>.", # noqa
],
),
("para", [], "", [], ["exprime fim, destino, lugar, tempo, direção etc"]),
(
"paulista",
[],
"",
[],
[
"diz-se de pessoa de origem do Estado de São Paulo, Brasil",
"diz-se de artigo ou objeto do Estado de São Paulo",
"pessoa de origem do Estado de São Paulo, Brasil",
"artigo ou objeto do Estado de São Paulo",
],
),
("tenui-", [], "", [], ["variante ortográfica de <b>tenu-</b>"]),
(
"to",
[],
"",
[],
[
'<i>(antigo)</i> contração do pronome pessoal "te" com o pronome pessoal ou demonstrativo "o"',
"<i>(coloquial e Brasil)</i> forma aferética (muito comum na linguagem falada) de estou",
],
),
(
"ũa",
[],
"",
[
"Do Latim <i>una-</i>: <i>una-</i> deu <b>ũa</b> por queda do <b>n</b> com a nasalação do <b>ũ</b>."
],
["ortografia antiga de uma"],
),
("UTC", [], "", [], ["<i>(estrangeirismo)</i> ver TUC"]),
],
)
def test_parse_word(word, pronunciations, gender, etymology, definitions, page):
"""Test the sections finder and definitions getter."""
code = page(word, "pt")
details = parse_word(word, code, "pt", force=True)
assert pronunciations == details.pronunciations
assert gender == details.gender
assert etymology == details.etymology
assert definitions == details.definitions
@pytest.mark.parametrize(
"wikicode, expected",
[
("{{AFI|/k/|pt}}", "/k/"),
("{{barra de cor|yellow|#FFFF00}}", "[RGB #FFFF00]"),
("{{escopo2|Informática}}", "<i>(Informática)</i>"),
("{{escopo2|Brasil|governo}}", "<i>(Brasil)</i>"),
("{{escopoCat|Árvore|pt}}", "<i>(Botânica)</i>"),
("{{escopoCat|Náutica|pt}}", "<i>(Náutica)</i>"),
("{{escopoCatLang|Alimentação|pt}}", "<i>(Culinária)</i>"),
("{{escopoCatLang|Verbo auxiliar|pt}}", "<i>(Verbo auxiliar)</i>"),
("{{escopoUso|Portugal|pt}}", "<i>(Portugal)</i>"),
("{{escopoUso|Coloquialismo|pt}}", "<i>(coloquialismo)</i>"),
("{{fem|heliostático}}", "feminino de <b>heliostático</b>"),
("{{fl|la|occŭlo}}", "occŭlo"),
("{{l|pt|usar|usar}}", "usar"),
("{{l.o.|jurídico|jurídica}}", "jurídica"),
("{{l.s.|uso}}", "uso"),
("{{link preto|ciconiforme}}", "ciconiforme"),
("{{ll|publicar}}", "publicar"),
("{{m|ar|شيشة|tr=šīša}}", "<i>masculino</i>"),
("{{mq|palavra}}", "o mesmo que <b>palavra</b>"),
("{{mq|word|en}}", "o mesmo que <i>word</i>"),
("{{PE|cu}}", "cu <sup>(português de Portugal)</sup>"),
("{{r|la|basium|basĭum}}", "basĭum"),
("{{r.l|la|utor|ūtor}}", "ūtor"),
("{{varort|tenu-|pt}}", "variante ortográfica de <b>tenu-</b>"),
],
)
def test_process_templates(wikicode, expected):
"""Test templates handling."""
assert process_templates("foo", wikicode, "pt") == expected
| 35.122222
| 146
| 0.440683
| 0
| 0
| 0
| 0
| 6,302
| 0.983612
| 0
| 0
| 3,524
| 0.550023
|
7ffcca638b4a383642444cb66e73358214905bc8
| 10,792
|
py
|
Python
|
rebalancer.py
|
papercheck/lndg
|
8a0a5c9b2b53dfa2bf790feedac4bc903b4ff5ca
|
[
"MIT"
] | null | null | null |
rebalancer.py
|
papercheck/lndg
|
8a0a5c9b2b53dfa2bf790feedac4bc903b4ff5ca
|
[
"MIT"
] | null | null | null |
rebalancer.py
|
papercheck/lndg
|
8a0a5c9b2b53dfa2bf790feedac4bc903b4ff5ca
|
[
"MIT"
] | null | null | null |
import django, json, datetime
from django.conf import settings
from django.db.models import Sum
from pathlib import Path
from datetime import datetime, timedelta
from gui.lnd_deps import lightning_pb2 as ln
from gui.lnd_deps import lightning_pb2_grpc as lnrpc
from gui.lnd_deps import router_pb2 as lnr
from gui.lnd_deps import router_pb2_grpc as lnrouter
from gui.lnd_deps.lnd_connect import lnd_connect
BASE_DIR = Path(__file__).resolve().parent
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3'
}
}
)
django.setup()
from lndg import settings
from gui.models import Rebalancer, Channels, LocalSettings, Forwards, Autopilot
def run_rebalancer(rebalance):
if Rebalancer.objects.filter(status=1).exists():
unknown_errors = Rebalancer.objects.filter(status=1)
for unknown_error in unknown_errors:
unknown_error.status = 400
unknown_error.stop = datetime.now()
unknown_error.save()
rebalance.start = datetime.now()
rebalance.save()
try:
#Open connection with lnd via grpc
connection = lnd_connect(settings.LND_DIR_PATH, settings.LND_NETWORK, settings.LND_RPC_SERVER)
stub = lnrpc.LightningStub(connection)
routerstub = lnrouter.RouterStub(connection)
chan_ids = json.loads(rebalance.outgoing_chan_ids)
timeout = rebalance.duration * 60
response = stub.AddInvoice(ln.Invoice(value=rebalance.value, expiry=timeout))
for response in routerstub.SendPaymentV2(lnr.SendPaymentRequest(payment_request=str(response.payment_request), fee_limit_sat=rebalance.fee_limit, outgoing_chan_ids=chan_ids, last_hop_pubkey=bytes.fromhex(rebalance.last_hop_pubkey), timeout_seconds=(timeout-5), allow_self_payment=True), timeout=(timeout+60)):
if response.status == 1 and rebalance.status == 0:
#IN-FLIGHT
rebalance.status = 1
rebalance.payment_hash = response.payment_hash
rebalance.save()
elif response.status == 2:
#SUCCESSFUL
rebalance.status = 2
elif response.status == 3:
#FAILURE
if response.failure_reason == 1:
#FAILURE_REASON_TIMEOUT
rebalance.status = 3
elif response.failure_reason == 2:
#FAILURE_REASON_NO_ROUTE
rebalance.status = 4
elif response.failure_reason == 3:
#FAILURE_REASON_ERROR
rebalance.status = 5
elif response.failure_reason == 4:
#FAILURE_REASON_INCORRECT_PAYMENT_DETAILS
rebalance.status = 6
elif response.failure_reason == 5:
#FAILURE_REASON_INSUFFICIENT_BALANCE
rebalance.status = 7
except Exception as e:
if str(e.code()) == 'StatusCode.DEADLINE_EXCEEDED':
rebalance.status = 408
else:
rebalance.status = 400
error = str(e)
print(error)
finally:
rebalance.stop = datetime.now()
rebalance.save()
def auto_schedule():
# No rebalancer jobs have been scheduled; let's look for any channels with an auto_rebalance flag and make the best request if we find one
if LocalSettings.objects.filter(key='AR-Enabled').exists():
enabled = int(LocalSettings.objects.filter(key='AR-Enabled')[0].value)
else:
LocalSettings(key='AR-Enabled', value='0').save()
enabled = 0
if enabled == 1:
auto_rebalance_channels = Channels.objects.filter(is_active=True, is_open=True).annotate(percent_outbound=(Sum('local_balance')*100)/Sum('capacity')).annotate(inbound_can=((Sum('remote_balance')*100)/Sum('capacity'))/Sum('ar_target'))
if len(auto_rebalance_channels) > 0:
if LocalSettings.objects.filter(key='AR-Outbound%').exists():
outbound_percent = int(float(LocalSettings.objects.filter(key='AR-Outbound%')[0].value) * 100)
else:
LocalSettings(key='AR-Outbound%', value='0.75').save()
outbound_percent = 0.75 * 100
outbound_cans = list(auto_rebalance_channels.filter(auto_rebalance=False, percent_outbound__gte=outbound_percent).values_list('chan_id', flat=True))
inbound_cans = auto_rebalance_channels.filter(auto_rebalance=True, inbound_can__gte=1)
if len(inbound_cans) > 0 and len(outbound_cans) > 0:
if LocalSettings.objects.filter(key='AR-Target%').exists():
target_percent = float(LocalSettings.objects.filter(key='AR-Target%')[0].value)
else:
LocalSettings(key='AR-Target%', value='0.05').save()
target_percent = 0.05
if LocalSettings.objects.filter(key='AR-MaxFeeRate').exists():
max_fee_rate = int(LocalSettings.objects.filter(key='AR-MaxFeeRate')[0].value)
else:
LocalSettings(key='AR-MaxFeeRate', value='100').save()
max_fee_rate = 100
if LocalSettings.objects.filter(key='AR-MaxCost%').exists():
max_cost = float(LocalSettings.objects.filter(key='AR-MaxCost%')[0].value)
else:
LocalSettings(key='AR-MaxCost%', value='0.50').save()
max_cost = 0.50
# TLDR: let's target a custom % of the amount that would bring us back to a 50/50 channel balance, using AR-MaxFeeRate to calculate the sat fee intervals
for target in inbound_cans:
target_fee_rate = int(target.local_fee_rate * max_cost)
if target_fee_rate > 0 and target_fee_rate > target.remote_fee_rate:
value_per_fee = int(1 / (target_fee_rate / 1000000)) if target_fee_rate <= max_fee_rate else int(1 / (max_fee_rate / 1000000))
target_value = int((target.capacity * target_percent) / value_per_fee) * value_per_fee
if target_value >= value_per_fee:
if LocalSettings.objects.filter(key='AR-Time').exists():
target_time = int(LocalSettings.objects.filter(key='AR-Time')[0].value)
else:
LocalSettings(key='AR-Time', value='5').save()
target_time = 5
inbound_pubkey = Channels.objects.filter(chan_id=target.chan_id)[0]
# TLDR: willing to pay 1 sat for every value_per_fee sats moved
target_fee = int(target_value * (1 / value_per_fee))
if Rebalancer.objects.filter(last_hop_pubkey=inbound_pubkey.remote_pubkey).exclude(status=0).exists():
last_rebalance = Rebalancer.objects.filter(last_hop_pubkey=inbound_pubkey.remote_pubkey).exclude(status=0).order_by('-id')[0]
if not (last_rebalance.value != target_value or last_rebalance.status in [2, 6] or (last_rebalance.status in [3, 4, 5, 7, 400, 408] and (int((datetime.now() - last_rebalance.stop).total_seconds() / 60) > 30)) or (last_rebalance.status == 1 and (int((datetime.now() - last_rebalance.start).total_seconds() / 60) > 30))):
continue
print('Creating Auto Rebalance Request')
print('Request for:', target.chan_id)
print('Request routing through:', outbound_cans)
print('Target % Of Value:', target_percent)
print('Target Value:', target_value)
print('Target Fee:', target_fee)
print('Target Time:', target_time)
Rebalancer(value=target_value, fee_limit=target_fee, outgoing_chan_ids=outbound_cans, last_hop_pubkey=inbound_pubkey.remote_pubkey, target_alias=inbound_pubkey.alias, duration=target_time).save()
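# A worked example of the sizing math above, with hypothetical settings that are not
# taken from the repo: suppose AR-MaxCost% = 0.50 and the channel's local fee rate is
# 200 ppm, so target_fee_rate = int(200 * 0.50) = 100 ppm (and we assume the remote
# side charges less). With AR-MaxFeeRate = 100 this stays within the cap, so
# value_per_fee = int(1 / (100 / 1_000_000)) = 10_000 sats moved per 1 sat of fee.
# For a 5_000_000 sat channel and AR-Target% = 0.05, target_value becomes
# int((5_000_000 * 0.05) / 10_000) * 10_000 = 250_000 sats, and
# target_fee = int(250_000 * (1 / 10_000)) = 25 sats.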
def auto_enable():
if LocalSettings.objects.filter(key='AR-Autopilot').exists():
enabled = int(LocalSettings.objects.filter(key='AR-Autopilot')[0].value)
else:
LocalSettings(key='AR-Autopilot', value='0').save()
enabled = 0
if enabled == 1:
channels = Channels.objects.filter(is_active=True, is_open=True).annotate(outbound_percent=(Sum('local_balance')*1000)/Sum('capacity')).annotate(inbound_percent=(Sum('remote_balance')*1000)/Sum('capacity'))
filter_7day = datetime.now() - timedelta(days=7)
forwards = Forwards.objects.filter(forward_date__gte=filter_7day)
for channel in channels:
outbound_percent = int(round(channel.outbound_percent/10, 0))
inbound_percent = int(round(channel.inbound_percent/10, 0))
routed_in_7day = forwards.filter(chan_id_in=channel.chan_id).count()
routed_out_7day = forwards.filter(chan_id_out=channel.chan_id).count()
i7D = 0 if routed_in_7day == 0 else int(forwards.filter(chan_id_in=channel.chan_id).aggregate(Sum('amt_in_msat'))['amt_in_msat__sum']/10000000)/100
o7D = 0 if routed_out_7day == 0 else int(forwards.filter(chan_id_out=channel.chan_id).aggregate(Sum('amt_out_msat'))['amt_out_msat__sum']/10000000)/100
if o7D > (i7D*1.10) and outbound_percent > 75:
print('Case 1: Pass')
elif o7D > (i7D*1.10) and inbound_percent > 75 and channel.auto_rebalance == False:
print('Case 2: Enable AR - o7D > i7D AND Inbound Liq > 75%')
channel.auto_rebalance = True
channel.save()
Autopilot(chan_id=channel.chan_id, peer_alias=channel.alias, setting='Enabled', old_value=0, new_value=1).save()
elif o7D < (i7D*1.10) and outbound_percent > 75 and channel.auto_rebalance == True:
print('Case 3: Disable AR - o7D < i7D AND Outbound Liq > 75%')
channel.auto_rebalance = False
channel.save()
Autopilot(chan_id=channel.chan_id, peer_alias=channel.alias, setting='Enabled', old_value=1, new_value=0).save()
elif o7D < (i7D*1.10) and inbound_percent > 75:
print('Case 4: Pass')
else:
print('Case 5: Pass')
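# A worked example of the 7-day traffic check above, with hypothetical numbers that are
# not taken from the repo: if a channel forwarded 3_300_000_000 msat out and
# 1_000_000_000 msat in over the last week, then o7D = int(3.3e9 / 1e7) / 100 = 3.3 and
# i7D = int(1.0e9 / 1e7) / 100 = 1.0, i.e. both values end up expressed in millions of
# sats. Since o7D > i7D * 1.10, the channel is a net sender; if its inbound liquidity is
# also above 75% and auto-rebalance is off, Case 2 switches auto_rebalance on.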
def main():
rebalances = Rebalancer.objects.filter(status=0).order_by('id')
if len(rebalances) == 0:
auto_enable()
auto_schedule()
else:
run_rebalancer(rebalances[0])
if __name__ == '__main__':
main()
| 58.972678
| 351
| 0.615456
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,455
| 0.134822
|
7ffe3a2a54d20351ed2bd85d6e6203ef3341cc49
| 5,112
|
py
|
Python
|
pale/fields/base.py
|
Loudr/pale
|
dc002ee6032c856551143af222ff8f71ed9853fe
|
[
"MIT"
] | 13
|
2015-06-18T02:35:31.000Z
|
2019-03-15T14:39:28.000Z
|
pale/fields/base.py
|
Loudr/pale
|
dc002ee6032c856551143af222ff8f71ed9853fe
|
[
"MIT"
] | 34
|
2015-05-18T17:13:16.000Z
|
2021-03-25T21:40:42.000Z
|
pale/fields/base.py
|
Loudr/pale
|
dc002ee6032c856551143af222ff8f71ed9853fe
|
[
"MIT"
] | 3
|
2016-06-08T01:05:47.000Z
|
2020-02-04T17:50:17.000Z
|
# -*- coding: utf-8 -*-
import logging
import types
from collections.abc import Iterable  # Iterable moved to collections.abc (removed from collections in Python 3.10)
class BaseField(object):
"""The base class for all Fields and Arguments.
Field objects are used by Resources to define the data they return.
They include a name, a type, a short description, and a long
description. Of these instance variables, only `name` is functionally
significant, as it's used as the key for the field's value in the
outgoing JSON. The rest of the instance variables are used to generate
documentation.
Argument objects inherit from Field, in that they share the same base set
of instance variables, but are used on the input side of the API, and
include validation functionality.
"""
value_type = 'base'
def __init__(self,
value_type,
description,
details=None,
property_name=None,
value=None):
self.value_type = value_type
self.description = description
self.details = details
self.property_name = property_name
self.value_lambda = value
if self.value_lambda is not None:
assert isinstance(value, types.LambdaType), \
"A Field's `value` parameter must be a lambda"
assert self.property_name is None, \
("Field does not support setting both `property_name` "
"*AND* `value`. Please pick one or the other")
def _fix_up(self, cls, code_name):
"""Internal helper to name the field after its variable.
This is called by _fix_up_fields, which is called by the MetaHasFields
metaclass when finishing the construction of a Resource subclass.
The `code_name` passed in is the name of the python attribute that
the Field has been assigned to in the resource.
Note that each BaseField instance must only be assigned to at most
one Resource class attribute.
"""
self.name = code_name
def render(self, obj, name, context):
"""The default field renderer.
This basic renderer assumes that the object has an attribute with
the same name as the field, unless a different field is specified
as a `property_name`.
The renderer is also passed the context so that it can be
propagated to the `_render_serializable` method of nested
resources (or, for example, if you decide to implement attribute
hiding at the field level instead of at the object level).
Callable attributes of `obj` will be called to fetch the value.
This is useful for fields computed from lambda functions
or instance methods.
"""
if self.value_lambda is not None:
val = self.value_lambda(obj)
else:
attr_name = name
if self.property_name is not None:
attr_name = self.property_name
if isinstance(obj, dict):
val = obj.get(attr_name, None)
else:
val = getattr(obj, attr_name, None)
if callable(val):
try:
val = val()
except Exception:
logging.exception("Attempted to call `%s` on obj of type %s.",
attr_name, type(obj))
raise
return val
def doc_dict(self):
"""Generate the documentation for this field."""
doc = {
'type': self.value_type,
'description': self.description,
'extended_description': self.details
}
return doc
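# Usage sketch (hypothetical resource; only the classes defined in this module are
# assumed, plus the Resource/MetaHasFields machinery mentioned in the docstrings above):
#
#     class Post(Resource):
#         title = BaseField('string', 'The title of the post')
#
# once the metaclass calls _fix_up, the field's name is 'title', and
# render(post, 'title', context) reads post.title (or post['title'] for a dict),
# preferring the `property_name` attribute or the `value` lambda when those are set.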
class StaticItem(object):
def __init__(self, obj):
self.obj = obj
class ListField(BaseField):
"""A Field that contains a list of Fields."""
value_type = 'list'
def __init__(self, description, item_type=BaseField, **kwargs):
super(ListField, self).__init__(
self.value_type,
description,
**kwargs)
# Item type initialization
self.item_type = item_type
kd = {'description':'nested_list'}
if item_type is BaseField:
kd['value_type'] = 'base_field'
self.item_type_instance = self.item_type(
**kd
)
def doc_dict(self):
doc = super(ListField, self).doc_dict()
doc['item_type'] = self.item_type.value_type
return doc
def render(self, obj, name, context):
if obj is None:
return []
output = []
# again, the base renderer basically just calls getattr.
# We're expecting the attr to be a list, though.
lst = super(ListField, self).render(obj, name, context)
if lst is None:
return []
# attempt to wrap any non-iterable value into a single-element list.
if not isinstance(lst, Iterable):
lst = [lst]
renderer = self.item_type_instance.render
for res in lst:
item = renderer(StaticItem(res), 'obj', context)
output.append(item)
return output
| 32.35443
| 78
| 0.600352
| 5,020
| 0.982003
| 0
| 0
| 0
| 0
| 0
| 0
| 2,364
| 0.462441
|
7ffeda80306a79591e192335e97b6bc94abc7f4b
| 160
|
py
|
Python
|
DublinBusTest/forms.py
|
Eimg851/DublinBusApp_ResearchPracticum
|
41b2c559dc4608705fd1348480ce729c645d6d5a
|
[
"BSD-2-Clause"
] | null | null | null |
DublinBusTest/forms.py
|
Eimg851/DublinBusApp_ResearchPracticum
|
41b2c559dc4608705fd1348480ce729c645d6d5a
|
[
"BSD-2-Clause"
] | null | null | null |
DublinBusTest/forms.py
|
Eimg851/DublinBusApp_ResearchPracticum
|
41b2c559dc4608705fd1348480ce729c645d6d5a
|
[
"BSD-2-Clause"
] | 1
|
2020-06-20T09:53:15.000Z
|
2020-06-20T09:53:15.000Z
|
from django import forms
from .models import *
class routeForm(forms.ModelForm):
class Meta:
model = Routes
fields = ('route_short_name',)
| 20
| 38
| 0.66875
| 111
| 0.69375
| 0
| 0
| 0
| 0
| 0
| 0
| 18
| 0.1125
|
3d006479e663873fb437875d9ddb0f2fa1dee350
| 9,802
|
py
|
Python
|
tests/automated/test_aws_automated.py
|
hrichardlee/meadowrun
|
77a182505209a4d185f111cbd5aa62a46038728a
|
[
"MIT"
] | null | null | null |
tests/automated/test_aws_automated.py
|
hrichardlee/meadowrun
|
77a182505209a4d185f111cbd5aa62a46038728a
|
[
"MIT"
] | null | null | null |
tests/automated/test_aws_automated.py
|
hrichardlee/meadowrun
|
77a182505209a4d185f111cbd5aa62a46038728a
|
[
"MIT"
] | null | null | null |
"""
These tests require an AWS account to be set up, but don't require any manual
intervention beyond some initial setup. Also, these tests create instances (which cost
money!). Either `meadowrun-manage install` needs to be set up, or `meadowrun-manage
clean` needs to be run periodically
"""
import asyncio
import datetime
import io
import pprint
import threading
import uuid
import boto3
import fabric
import pytest
import meadowrun.aws_integration.management_lambdas.adjust_ec2_instances as adjust_ec2_instances # noqa: E501
from basics import BasicsSuite, HostProvider, ErrorsSuite, MapSuite
from instance_registrar_suite import (
InstanceRegistrarProvider,
InstanceRegistrarSuite,
TERMINATE_INSTANCES_IF_IDLE_FOR_TEST,
)
from meadowrun.aws_integration.aws_core import _get_default_region_name
from meadowrun.aws_integration.ec2_instance_allocation import EC2InstanceRegistrar
from meadowrun.aws_integration.ec2_pricing import _get_ec2_instance_types
from meadowrun.aws_integration.ec2_ssh_keys import ensure_meadowrun_key_pair
from meadowrun.aws_integration.grid_tasks_sqs import (
_add_tasks,
_complete_task,
_create_queues_for_job,
_get_task,
get_results,
worker_loop,
)
from meadowrun.instance_allocation import InstanceRegistrar
from meadowrun.instance_selection import choose_instance_types_for_job, Resources
from meadowrun.meadowrun_pb2 import ProcessState
from meadowrun.run_job import AllocCloudInstance
from meadowrun.run_job_core import Host, JobCompletion, CloudProviderType
# TODO don't always run tests in us-east-2
REGION = "us-east-2"
class AwsHostProvider(HostProvider):
def get_host(self) -> Host:
return AllocCloudInstance(1, 2, 80, "EC2", REGION)
def get_test_repo_url(self) -> str:
return "https://github.com/meadowdata/test_repo"
async def get_log_file_text(self, job_completion: JobCompletion) -> str:
with fabric.Connection(
job_completion.public_address,
user="ubuntu",
connect_kwargs={"pkey": ensure_meadowrun_key_pair(REGION)},
) as conn:
with io.BytesIO() as local_copy:
conn.get(job_completion.log_file_name, local_copy)
return local_copy.getvalue().decode("utf-8")
class TestBasicsAws(AwsHostProvider, BasicsSuite):
pass
class TestErrorsAws(AwsHostProvider, ErrorsSuite):
pass
class TestMapAws(MapSuite):
def cloud_provider(self) -> CloudProviderType:
return "EC2"
class EC2InstanceRegistrarProvider(InstanceRegistrarProvider[InstanceRegistrar]):
async def get_instance_registrar(self) -> InstanceRegistrar:
return EC2InstanceRegistrar(await _get_default_region_name(), "create")
async def deregister_instance(
self,
instance_registrar: InstanceRegistrar,
public_address: str,
require_no_running_jobs: bool,
) -> bool:
return adjust_ec2_instances._deregister_ec2_instance(
public_address,
require_no_running_jobs,
instance_registrar.get_region_name(),
)
async def num_currently_running_instances(
self, instance_registrar: InstanceRegistrar
) -> int:
ec2 = boto3.resource("ec2", region_name=instance_registrar.get_region_name())
return sum(1 for _ in adjust_ec2_instances._get_running_instances(ec2))
async def run_adjust(self, instance_registrar: InstanceRegistrar) -> None:
adjust_ec2_instances._deregister_and_terminate_instances(
instance_registrar.get_region_name(),
TERMINATE_INSTANCES_IF_IDLE_FOR_TEST,
datetime.timedelta.min,
)
async def terminate_all_instances(
self, instance_registrar: InstanceRegistrar
) -> None:
adjust_ec2_instances.terminate_all_instances(
instance_registrar.get_region_name()
)
def cloud_provider(self) -> CloudProviderType:
return "EC2"
class TestEC2InstanceRegistrar(EC2InstanceRegistrarProvider, InstanceRegistrarSuite):
pass
@pytest.mark.asyncio
async def test_get_ec2_instance_types():
# This function makes a lot of assumptions about the format of the data we get from
# various AWS endpoints, so it's good to check that everything works. Look for
# unexpected warnings!
instance_types = await _get_ec2_instance_types(REGION)
# the actual number of instance types will fluctuate based on AWS' whims.
assert len(instance_types) > 600
chosen_instance_types = choose_instance_types_for_job(
Resources(5, 3, {}), 52, 10, instance_types
)
total_cpu = sum(
instance_type.instance_type.logical_cpu * instance_type.num_instances
for instance_type in chosen_instance_types
)
assert total_cpu >= 3 * 52
total_memory_gb = sum(
instance_type.instance_type.memory_gb * instance_type.num_instances
for instance_type in chosen_instance_types
)
assert total_memory_gb >= 5 * 52
assert all(
instance_type.instance_type.interruption_probability <= 10
for instance_type in chosen_instance_types
)
pprint.pprint(chosen_instance_types)
chosen_instance_types = choose_instance_types_for_job(
Resources(24000, 1000, {}), 1, 10, instance_types
)
assert len(chosen_instance_types) == 0
class TestGridTaskQueue:
def test_grid_task_queue(self):
"""
Tests the grid_task_queue functions without actually running any tasks. Uses SQS
resources.
"""
region_name = asyncio.run(_get_default_region_name())
task_arguments = ["hello", ("hey", "there"), {"a": 1}]
# dummy variables
job_id = str(uuid.uuid4())
public_address = "foo"
worker_id = 1
request_queue_url, result_queue_url = asyncio.run(
_create_queues_for_job(job_id, region_name)
)
# get results in a different thread as we're adding/completing tasks
results = None
def get_results_thread():
nonlocal results
results = asyncio.run(
get_results(result_queue_url, region_name, len(task_arguments), 1)
)
results_thread = threading.Thread(target=get_results_thread)
results_thread.start()
# add some tasks
asyncio.run(_add_tasks(request_queue_url, region_name, task_arguments))
# get some tasks and complete them
task1 = _get_task(
request_queue_url,
result_queue_url,
region_name,
0,
public_address,
worker_id,
)
assert task1 is not None
task2 = _get_task(
request_queue_url,
result_queue_url,
region_name,
0,
public_address,
worker_id,
)
assert task2 is not None
_complete_task(
result_queue_url,
region_name,
task1,
ProcessState(
state=ProcessState.ProcessStateEnum.SUCCEEDED,
pickled_result=task1.pickled_function_arguments,
),
public_address,
worker_id,
)
task3 = _get_task(
request_queue_url,
result_queue_url,
region_name,
0,
public_address,
worker_id,
)
assert task3 is not None
# there should be no more tasks to get
assert (
_get_task(
request_queue_url,
result_queue_url,
region_name,
0,
public_address,
worker_id,
)
is None
)
_complete_task(
result_queue_url,
region_name,
task2,
ProcessState(
state=ProcessState.ProcessStateEnum.SUCCEEDED,
pickled_result=task2.pickled_function_arguments,
),
public_address,
worker_id,
)
_complete_task(
result_queue_url,
region_name,
task3,
ProcessState(
state=ProcessState.ProcessStateEnum.SUCCEEDED,
pickled_result=task3.pickled_function_arguments,
),
public_address,
worker_id,
)
results_thread.join()
assert results == task_arguments
def test_worker_loop(self):
region_name = asyncio.run(_get_default_region_name())
task_arguments = [1, 2, 3, 4]
# dummy variables
job_id = str(uuid.uuid4())
public_address = "foo"
worker_id = 1
request_queue_url, result_queue_url = asyncio.run(
_create_queues_for_job(job_id, region_name)
)
# get results on another thread
results = None
def get_results_thread():
nonlocal results
results = asyncio.run(
get_results(result_queue_url, region_name, len(task_arguments), 1)
)
results_thread = threading.Thread(target=get_results_thread)
results_thread.start()
# add tasks
asyncio.run(_add_tasks(request_queue_url, region_name, task_arguments))
# start a worker_loop which will get tasks and complete them
worker_thread = threading.Thread(
target=lambda: worker_loop(
lambda x: x**x,
request_queue_url,
result_queue_url,
region_name,
public_address,
worker_id,
)
)
worker_thread.start()
results_thread.join()
worker_thread.join()
assert results == [1, 4, 27, 256]
| 31.517685
| 110
| 0.652724
| 6,906
| 0.70455
| 0
| 0
| 1,275
| 0.130075
| 2,967
| 0.302693
| 1,142
| 0.116507
|
3d0113714f49189583df2b472f9f7bb1b7d3193b
| 117
|
py
|
Python
|
aficionado/defaults.py
|
SamuelHornsey/aficionado
|
27654028ede3d719b091dd61f5c8d252f631a316
|
[
"MIT"
] | 1
|
2019-11-27T21:58:10.000Z
|
2019-11-27T21:58:10.000Z
|
aficionado/defaults.py
|
SamuelHornsey/aficionado
|
27654028ede3d719b091dd61f5c8d252f631a316
|
[
"MIT"
] | null | null | null |
aficionado/defaults.py
|
SamuelHornsey/aficionado
|
27654028ede3d719b091dd61f5c8d252f631a316
|
[
"MIT"
] | null | null | null |
def not_found_handler():
return '404. Path not found'
def internal_error_handler():
return '500. Internal error'
| 23.4
| 30
| 0.752137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 42
| 0.358974
|
3d01213807fe34d6cbaa37ec89c61cfcc0e43948
| 11,536
|
py
|
Python
|
apps/hosts/views.py
|
kaustubh-s1/EvalAI
|
1884811e7759e0d095f7afb68188a7f010fa65dc
|
[
"BSD-3-Clause"
] | 1,470
|
2016-10-21T01:21:45.000Z
|
2022-03-30T14:08:29.000Z
|
apps/hosts/views.py
|
kaustubh-s1/EvalAI
|
1884811e7759e0d095f7afb68188a7f010fa65dc
|
[
"BSD-3-Clause"
] | 2,594
|
2016-11-02T03:36:01.000Z
|
2022-03-31T15:30:04.000Z
|
apps/hosts/views.py
|
kaustubh-s1/EvalAI
|
1884811e7759e0d095f7afb68188a7f010fa65dc
|
[
"BSD-3-Clause"
] | 865
|
2016-11-09T17:46:32.000Z
|
2022-03-30T13:06:52.000Z
|
from django.contrib.auth.models import User
from rest_framework import permissions, status
from rest_framework.decorators import (
api_view,
authentication_classes,
permission_classes,
throttle_classes,
)
from rest_framework.response import Response
from rest_framework_expiring_authtoken.authentication import (
ExpiringTokenAuthentication,
)
from rest_framework.throttling import UserRateThrottle
from rest_framework_simplejwt.authentication import JWTAuthentication
from accounts.permissions import HasVerifiedEmail
from base.utils import get_model_object, team_paginated_queryset
from .filters import HostTeamsFilter
from .models import ChallengeHost, ChallengeHostTeam
from .serializers import (
ChallengeHostSerializer,
ChallengeHostTeamSerializer,
InviteHostToTeamSerializer,
HostTeamDetailSerializer,
)
from .utils import is_user_part_of_host_team
get_challenge_host_model = get_model_object(ChallengeHost)
@api_view(["GET", "POST"])
@throttle_classes([UserRateThrottle])
@permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))
@authentication_classes(
(
JWTAuthentication,
ExpiringTokenAuthentication,
)
)
def challenge_host_team_list(request):
if request.method == "GET":
challenge_host_team_ids = ChallengeHost.objects.filter(
user=request.user
).values_list("team_name", flat=True)
challenge_host_teams = ChallengeHostTeam.objects.filter(
id__in=challenge_host_team_ids
).order_by("-id")
filtered_teams = HostTeamsFilter(
request.GET, queryset=challenge_host_teams
)
paginator, result_page = team_paginated_queryset(
filtered_teams.qs, request
)
serializer = HostTeamDetailSerializer(result_page, many=True)
response_data = serializer.data
return paginator.get_paginated_response(response_data)
elif request.method == "POST":
serializer = ChallengeHostTeamSerializer(
data=request.data, context={"request": request}
)
if serializer.is_valid():
serializer.save()
response_data = serializer.data
return Response(response_data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(["GET", "PUT", "PATCH"])
@throttle_classes([UserRateThrottle])
@permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))
@authentication_classes((JWTAuthentication, ExpiringTokenAuthentication))
def challenge_host_team_detail(request, pk):
try:
challenge_host_team = ChallengeHostTeam.objects.get(pk=pk)
except ChallengeHostTeam.DoesNotExist:
response_data = {"error": "ChallengeHostTeam does not exist"}
return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)
if request.method == "GET":
serializer = HostTeamDetailSerializer(challenge_host_team)
response_data = serializer.data
return Response(response_data, status=status.HTTP_200_OK)
elif request.method in ["PUT", "PATCH"]:
if request.method == "PATCH":
serializer = ChallengeHostTeamSerializer(
challenge_host_team,
data=request.data,
context={"request": request},
partial=True,
)
else:
serializer = ChallengeHostTeamSerializer(
challenge_host_team,
data=request.data,
context={"request": request},
)
if serializer.is_valid():
serializer.save()
response_data = serializer.data
return Response(response_data, status=status.HTTP_200_OK)
else:
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
@api_view(["GET", "POST"])
@throttle_classes([UserRateThrottle])
@permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))
@authentication_classes((JWTAuthentication, ExpiringTokenAuthentication))
def challenge_host_list(request, challenge_host_team_pk):
try:
challenge_host_team = ChallengeHostTeam.objects.get(
pk=challenge_host_team_pk
)
except ChallengeHostTeam.DoesNotExist:
response_data = {"error": "ChallengeHostTeam does not exist"}
return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)
if request.method == "GET":
challenge_host_status = request.query_params.get("status", None)
filter_condition = {
"team_name": challenge_host_team,
"user": request.user,
}
if challenge_host_status:
challenge_host_status = challenge_host_status.split(",")
filter_condition.update({"status__in": challenge_host_status})
challenge_host = ChallengeHost.objects.filter(
**filter_condition
).order_by("-id")
paginator, result_page = team_paginated_queryset(
challenge_host, request
)
serializer = ChallengeHostSerializer(result_page, many=True)
response_data = serializer.data
return paginator.get_paginated_response(response_data)
elif request.method == "POST":
serializer = ChallengeHostSerializer(
data=request.data,
context={
"challenge_host_team": challenge_host_team,
"request": request,
},
)
if serializer.is_valid():
serializer.save()
response_data = serializer.data
return Response(response_data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(["GET", "PUT", "PATCH", "DELETE"])
@throttle_classes([UserRateThrottle])
@permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))
@authentication_classes((JWTAuthentication, ExpiringTokenAuthentication))
def challenge_host_detail(request, challenge_host_team_pk, pk):
try:
challenge_host_team = ChallengeHostTeam.objects.get(
pk=challenge_host_team_pk
)
except ChallengeHostTeam.DoesNotExist:
response_data = {"error": "ChallengeHostTeam does not exist"}
return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)
challenge_host = get_challenge_host_model(pk)
if request.method == "GET":
serializer = ChallengeHostSerializer(challenge_host)
response_data = serializer.data
return Response(response_data, status=status.HTTP_200_OK)
elif request.method in ["PUT", "PATCH"]:
if request.method == "PATCH":
serializer = ChallengeHostSerializer(
challenge_host,
data=request.data,
context={
"challenge_host_team": challenge_host_team,
"request": request,
},
partial=True,
)
else:
serializer = ChallengeHostSerializer(
challenge_host,
data=request.data,
context={
"challenge_host_team": challenge_host_team,
"request": request,
},
)
if serializer.is_valid():
serializer.save()
response_data = serializer.data
return Response(response_data, status=status.HTTP_200_OK)
else:
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
elif request.method == "DELETE":
challenge_host.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@api_view(["POST"])
@throttle_classes([UserRateThrottle])
@permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))
@authentication_classes((JWTAuthentication, ExpiringTokenAuthentication))
def create_challenge_host_team(request):
serializer = ChallengeHostTeamSerializer(
data=request.data, context={"request": request}
)
if serializer.is_valid():
serializer.save()
response_data = serializer.data
challenge_host_team = serializer.instance
challenge_host = ChallengeHost(
user=request.user,
status=ChallengeHost.SELF,
permissions=ChallengeHost.ADMIN,
team_name=challenge_host_team,
)
challenge_host.save()
return Response(response_data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(["DELETE"])
@throttle_classes([UserRateThrottle])
@permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))
@authentication_classes((JWTAuthentication, ExpiringTokenAuthentication))
def remove_self_from_challenge_host_team(request, challenge_host_team_pk):
"""
A user can remove himself from the challenge host team.
"""
try:
ChallengeHostTeam.objects.get(pk=challenge_host_team_pk)
except ChallengeHostTeam.DoesNotExist:
response_data = {"error": "ChallengeHostTeam does not exist"}
return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)
try:
challenge_host = ChallengeHost.objects.filter(
user=request.user.id, team_name__pk=challenge_host_team_pk
)
challenge_host.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
except: # noqa E722
response_data = {"error": "Sorry, you do not belong to this team."}
return Response(response_data, status=status.HTTP_401_UNAUTHORIZED)
@api_view(["POST"])
@throttle_classes([UserRateThrottle])
@permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))
@authentication_classes((JWTAuthentication, ExpiringTokenAuthentication))
def invite_host_to_team(request, pk):
try:
challenge_host_team = ChallengeHostTeam.objects.get(pk=pk)
except ChallengeHostTeam.DoesNotExist:
response_data = {"error": "Host Team does not exist"}
return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)
email = request.data.get("email")
try:
user = User.objects.get(email=email)
except User.DoesNotExist:
response_data = {
"error": "User does not exist with this email address!"
}
return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)
# Check if the user requesting this API is part of the host team
if not is_user_part_of_host_team(request.user, challenge_host_team):
response_data = {"error": "You are not a member of this team!"}
return Response(response_data, status=status.HTTP_400_BAD_REQUEST)
host = ChallengeHost.objects.filter(
team_name=challenge_host_team, user=user
)
if host.exists():
response_data = {"error": "User is already part of the team!"}
return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)
serializer = InviteHostToTeamSerializer(
data=request.data,
context={
"challenge_host_team": challenge_host_team,
"request": request,
},
)
if serializer.is_valid():
serializer.save()
response_data = {
"message": "User has been added successfully to the host team"
}
return Response(response_data, status=status.HTTP_202_ACCEPTED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
| 37.093248
| 78
| 0.684466
| 0
| 0
| 0
| 0
| 10,561
| 0.915482
| 0
| 0
| 970
| 0.084085
|
3d019cb8d3804b67e4c6cc481ba0582e56b8a8a0
| 2,207
|
py
|
Python
|
trace_for_guess/rescale.py
|
wtraylor/trace21ka_for_lpjguess
|
184f8e213504fdad975eab40cf335bc47810669f
|
[
"MIT"
] | null | null | null |
trace_for_guess/rescale.py
|
wtraylor/trace21ka_for_lpjguess
|
184f8e213504fdad975eab40cf335bc47810669f
|
[
"MIT"
] | null | null | null |
trace_for_guess/rescale.py
|
wtraylor/trace21ka_for_lpjguess
|
184f8e213504fdad975eab40cf335bc47810669f
|
[
"MIT"
] | null | null | null |
# SPDX-FileCopyrightText: 2021 Wolfgang Traylor <wolfgang.traylor@senckenberg.de>
#
# SPDX-License-Identifier: MIT
import os
import shutil
import subprocess
from termcolor import cprint
from trace_for_guess.skip import skip
def rescale_file(in_file, out_file, template_file, alg):
"""Regrid a NetCDF file using NCO (i.e. the ncremap command).
Args:
in_file: Path of input file.
out_file: Output file path. It will not be overwritten.
template_file: Path to a NetCDF file that has the desired grid
resolution.
alg: ESMF regrid algorithm. See here:
http://www.earthsystemmodeling.org/esmf_releases/public/ESMF_6_3_0rp1/ESMF_refdoc/node3.html#SECTION03020000000000000000
Returns:
The output file (`out_file`).
Raises:
FileNotFoundError: If `in_file` or `template_file` doesn’t exist.
RuntimeError: The `ncremap` command is not in the PATH.
RuntimeError: The `ncremap` command failed or produced no output
file.
"""
if not os.path.isfile(in_file):
raise FileNotFoundError("Input file doesn’t exist: '%s'" % in_file)
if not os.path.isfile(template_file):
raise FileNotFoundError("Template file doesn’t exist: '%s'" %
template_file)
if skip([in_file, template_file], out_file):
return out_file
if shutil.which("ncremap") is None:
raise RuntimeError("Executable `ncremap` not found.")
cprint("Regridding '%s'..." % in_file, 'yellow')
try:
subprocess.run(["ncremap",
"--algorithm=%s" % alg,
"--template_file=%s" % template_file,
"--input_file=%s" % in_file,
"--output_file=%s" % out_file], check=True)
except Exception:
if os.path.isfile(out_file):
cprint(f"Removing file '{out_file}'.", 'red')
os.remove(out_file)
raise
if not os.path.isfile(out_file):
raise RuntimeError("Regridding with `ncremap` failed: No output file "
"created.")
cprint(f"Successfully created '{out_file}'.", 'green')
return out_file
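# Minimal usage sketch (hypothetical file names; assumes a template NetCDF with the
# desired grid already exists and that NCO's `ncremap` is on the PATH):
#
#     regridded = rescale_file(in_file='trace_orig.nc',
#                              out_file='trace_regridded.nc',
#                              template_file='template_grid.nc',
#                              alg='bilinear')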
| 36.783333
| 132
| 0.622565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,220
| 0.551288
|
3d03e7e9418a784fa6ae34ca818d4e877cfbf8bb
| 6,545
|
py
|
Python
|
loldib/getratings/models/NA/na_khazix/na_khazix_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_khazix/na_khazix_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_khazix/na_khazix_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
class NA_Khazix_Top_Aatrox(Ratings):
pass
class NA_Khazix_Top_Ahri(Ratings):
pass
class NA_Khazix_Top_Akali(Ratings):
pass
class NA_Khazix_Top_Alistar(Ratings):
pass
class NA_Khazix_Top_Amumu(Ratings):
pass
class NA_Khazix_Top_Anivia(Ratings):
pass
class NA_Khazix_Top_Annie(Ratings):
pass
class NA_Khazix_Top_Ashe(Ratings):
pass
class NA_Khazix_Top_AurelionSol(Ratings):
pass
class NA_Khazix_Top_Azir(Ratings):
pass
class NA_Khazix_Top_Bard(Ratings):
pass
class NA_Khazix_Top_Blitzcrank(Ratings):
pass
class NA_Khazix_Top_Brand(Ratings):
pass
class NA_Khazix_Top_Braum(Ratings):
pass
class NA_Khazix_Top_Caitlyn(Ratings):
pass
class NA_Khazix_Top_Camille(Ratings):
pass
class NA_Khazix_Top_Cassiopeia(Ratings):
pass
class NA_Khazix_Top_Chogath(Ratings):
pass
class NA_Khazix_Top_Corki(Ratings):
pass
class NA_Khazix_Top_Darius(Ratings):
pass
class NA_Khazix_Top_Diana(Ratings):
pass
class NA_Khazix_Top_Draven(Ratings):
pass
class NA_Khazix_Top_DrMundo(Ratings):
pass
class NA_Khazix_Top_Ekko(Ratings):
pass
class NA_Khazix_Top_Elise(Ratings):
pass
class NA_Khazix_Top_Evelynn(Ratings):
pass
class NA_Khazix_Top_Ezreal(Ratings):
pass
class NA_Khazix_Top_Fiddlesticks(Ratings):
pass
class NA_Khazix_Top_Fiora(Ratings):
pass
class NA_Khazix_Top_Fizz(Ratings):
pass
class NA_Khazix_Top_Galio(Ratings):
pass
class NA_Khazix_Top_Gangplank(Ratings):
pass
class NA_Khazix_Top_Garen(Ratings):
pass
class NA_Khazix_Top_Gnar(Ratings):
pass
class NA_Khazix_Top_Gragas(Ratings):
pass
class NA_Khazix_Top_Graves(Ratings):
pass
class NA_Khazix_Top_Hecarim(Ratings):
pass
class NA_Khazix_Top_Heimerdinger(Ratings):
pass
class NA_Khazix_Top_Illaoi(Ratings):
pass
class NA_Khazix_Top_Irelia(Ratings):
pass
class NA_Khazix_Top_Ivern(Ratings):
pass
class NA_Khazix_Top_Janna(Ratings):
pass
class NA_Khazix_Top_JarvanIV(Ratings):
pass
class NA_Khazix_Top_Jax(Ratings):
pass
class NA_Khazix_Top_Jayce(Ratings):
pass
class NA_Khazix_Top_Jhin(Ratings):
pass
class NA_Khazix_Top_Jinx(Ratings):
pass
class NA_Khazix_Top_Kalista(Ratings):
pass
class NA_Khazix_Top_Karma(Ratings):
pass
class NA_Khazix_Top_Karthus(Ratings):
pass
class NA_Khazix_Top_Kassadin(Ratings):
pass
class NA_Khazix_Top_Katarina(Ratings):
pass
class NA_Khazix_Top_Kayle(Ratings):
pass
class NA_Khazix_Top_Kayn(Ratings):
pass
class NA_Khazix_Top_Kennen(Ratings):
pass
class NA_Khazix_Top_Khazix(Ratings):
pass
class NA_Khazix_Top_Kindred(Ratings):
pass
class NA_Khazix_Top_Kled(Ratings):
pass
class NA_Khazix_Top_KogMaw(Ratings):
pass
class NA_Khazix_Top_Leblanc(Ratings):
pass
class NA_Khazix_Top_LeeSin(Ratings):
pass
class NA_Khazix_Top_Leona(Ratings):
pass
class NA_Khazix_Top_Lissandra(Ratings):
pass
class NA_Khazix_Top_Lucian(Ratings):
pass
class NA_Khazix_Top_Lulu(Ratings):
pass
class NA_Khazix_Top_Lux(Ratings):
pass
class NA_Khazix_Top_Malphite(Ratings):
pass
class NA_Khazix_Top_Malzahar(Ratings):
pass
class NA_Khazix_Top_Maokai(Ratings):
pass
class NA_Khazix_Top_MasterYi(Ratings):
pass
class NA_Khazix_Top_MissFortune(Ratings):
pass
class NA_Khazix_Top_MonkeyKing(Ratings):
pass
class NA_Khazix_Top_Mordekaiser(Ratings):
pass
class NA_Khazix_Top_Morgana(Ratings):
pass
class NA_Khazix_Top_Nami(Ratings):
pass
class NA_Khazix_Top_Nasus(Ratings):
pass
class NA_Khazix_Top_Nautilus(Ratings):
pass
class NA_Khazix_Top_Nidalee(Ratings):
pass
class NA_Khazix_Top_Nocturne(Ratings):
pass
class NA_Khazix_Top_Nunu(Ratings):
pass
class NA_Khazix_Top_Olaf(Ratings):
pass
class NA_Khazix_Top_Orianna(Ratings):
pass
class NA_Khazix_Top_Ornn(Ratings):
pass
class NA_Khazix_Top_Pantheon(Ratings):
pass
class NA_Khazix_Top_Poppy(Ratings):
pass
class NA_Khazix_Top_Quinn(Ratings):
pass
class NA_Khazix_Top_Rakan(Ratings):
pass
class NA_Khazix_Top_Rammus(Ratings):
pass
class NA_Khazix_Top_RekSai(Ratings):
pass
class NA_Khazix_Top_Renekton(Ratings):
pass
class NA_Khazix_Top_Rengar(Ratings):
pass
class NA_Khazix_Top_Riven(Ratings):
pass
class NA_Khazix_Top_Rumble(Ratings):
pass
class NA_Khazix_Top_Ryze(Ratings):
pass
class NA_Khazix_Top_Sejuani(Ratings):
pass
class NA_Khazix_Top_Shaco(Ratings):
pass
class NA_Khazix_Top_Shen(Ratings):
pass
class NA_Khazix_Top_Shyvana(Ratings):
pass
class NA_Khazix_Top_Singed(Ratings):
pass
class NA_Khazix_Top_Sion(Ratings):
pass
class NA_Khazix_Top_Sivir(Ratings):
pass
class NA_Khazix_Top_Skarner(Ratings):
pass
class NA_Khazix_Top_Sona(Ratings):
pass
class NA_Khazix_Top_Soraka(Ratings):
pass
class NA_Khazix_Top_Swain(Ratings):
pass
class NA_Khazix_Top_Syndra(Ratings):
pass
class NA_Khazix_Top_TahmKench(Ratings):
pass
class NA_Khazix_Top_Taliyah(Ratings):
pass
class NA_Khazix_Top_Talon(Ratings):
pass
class NA_Khazix_Top_Taric(Ratings):
pass
class NA_Khazix_Top_Teemo(Ratings):
pass
class NA_Khazix_Top_Thresh(Ratings):
pass
class NA_Khazix_Top_Tristana(Ratings):
pass
class NA_Khazix_Top_Trundle(Ratings):
pass
class NA_Khazix_Top_Tryndamere(Ratings):
pass
class NA_Khazix_Top_TwistedFate(Ratings):
pass
class NA_Khazix_Top_Twitch(Ratings):
pass
class NA_Khazix_Top_Udyr(Ratings):
pass
class NA_Khazix_Top_Urgot(Ratings):
pass
class NA_Khazix_Top_Varus(Ratings):
pass
class NA_Khazix_Top_Vayne(Ratings):
pass
class NA_Khazix_Top_Veigar(Ratings):
pass
class NA_Khazix_Top_Velkoz(Ratings):
pass
class NA_Khazix_Top_Vi(Ratings):
pass
class NA_Khazix_Top_Viktor(Ratings):
pass
class NA_Khazix_Top_Vladimir(Ratings):
pass
class NA_Khazix_Top_Volibear(Ratings):
pass
class NA_Khazix_Top_Warwick(Ratings):
pass
class NA_Khazix_Top_Xayah(Ratings):
pass
class NA_Khazix_Top_Xerath(Ratings):
pass
class NA_Khazix_Top_XinZhao(Ratings):
pass
class NA_Khazix_Top_Yasuo(Ratings):
pass
class NA_Khazix_Top_Yorick(Ratings):
pass
class NA_Khazix_Top_Zac(Ratings):
pass
class NA_Khazix_Top_Zed(Ratings):
pass
class NA_Khazix_Top_Ziggs(Ratings):
pass
class NA_Khazix_Top_Zilean(Ratings):
pass
class NA_Khazix_Top_Zyra(Ratings):
pass
| 15.695444
| 46
| 0.766692
| 5,944
| 0.908174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
3d05562ae792843c99e988fb6a4b5372987caff9
| 616
|
py
|
Python
|
setup.py
|
KloudTrader/libkloudtrader
|
015e2779f80ba2de93be9fa6fd751412a9d5f492
|
[
"Apache-2.0"
] | 11
|
2019-01-16T16:10:09.000Z
|
2021-03-02T00:59:17.000Z
|
setup.py
|
KloudTrader/kloudtrader
|
015e2779f80ba2de93be9fa6fd751412a9d5f492
|
[
"Apache-2.0"
] | 425
|
2019-07-10T06:59:49.000Z
|
2021-01-12T05:32:14.000Z
|
setup.py
|
KloudTrader/kloudtrader
|
015e2779f80ba2de93be9fa6fd751412a9d5f492
|
[
"Apache-2.0"
] | 6
|
2019-03-15T16:25:06.000Z
|
2021-05-03T10:02:13.000Z
|
from setuptools import setup
setup(
name='libkloudtrader',
version='1.0.0',
author='KloudTrader',
author_email='admin@kloudtrader.com',
packages=['libkloudtrader'],
url='https://github.com/KloudTrader/kloudtrader',
license='LICENSE',
description="KloudTrader's in-house library that makes it much easier for you to code algorithms that can trade for you.",
long_description_content_type="text/markdown",
long_description=open('pypi.md').read(),
install_requires=[
"boto3",
"pandas",
"numpy",
"empyrical",
"asyncio",
"ccxt"
],
)
| 26.782609
| 126
| 0.644481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 309
| 0.501623
|
3d05d97057af36632d639c5d678cfee0618bd44e
| 197
|
py
|
Python
|
sdk/python/opencannabis/media/__init__.py
|
CookiesCo/OpenCannabis
|
a7bb1f71200c6b8f56c509df47039198f0c3bd4c
|
[
"MIT"
] | 2
|
2020-08-27T00:45:49.000Z
|
2021-06-19T08:01:13.000Z
|
sdk/python/opencannabis/media/__init__.py
|
CookiesCo/OpenCannabis
|
a7bb1f71200c6b8f56c509df47039198f0c3bd4c
|
[
"MIT"
] | 67
|
2020-08-27T03:16:33.000Z
|
2022-03-26T14:33:38.000Z
|
sdk/python/opencannabis/media/__init__.py
|
CookiesCo/OpenCannabis
|
a7bb1f71200c6b8f56c509df47039198f0c3bd4c
|
[
"MIT"
] | 1
|
2020-11-12T04:26:43.000Z
|
2020-11-12T04:26:43.000Z
|
# ~*~ coding: utf-8 ~*~
__doc__ = """
`opencannabis.media`
---------------------------
Records and definitions that structure digital media and related assets.
"""
# `opencannabis.media`
| 16.416667
| 74
| 0.573604
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 182
| 0.923858
|
3d05dcf32ee27f2f4d2629e65ab5d7e2a5641f27
| 14,581
|
py
|
Python
|
mlutils/losses.py
|
DSciLab/mlutils
|
352af36f2b34218b6551254f641427b7bbdd0f31
|
[
"MIT"
] | null | null | null |
mlutils/losses.py
|
DSciLab/mlutils
|
352af36f2b34218b6551254f641427b7bbdd0f31
|
[
"MIT"
] | null | null | null |
mlutils/losses.py
|
DSciLab/mlutils
|
352af36f2b34218b6551254f641427b7bbdd0f31
|
[
"MIT"
] | null | null | null |
from typing import Callable, Optional, Union, Tuple, List
import torch
from torch import nn
from cfg import Opts
from torch import Tensor
from torch.nn import functional as F
from mlutils import LogitToPreds
EPS = 1.0e-8
__all__ = ['IOULoss', 'GDiceLoss', 'SoftDiceLoss',
'CrossEntropyLoss', 'BCELossWithLogits',
'GDiceCELoss', 'GDiceBCELoss', 'SoftDiceCELoss',
'SoftDiceBCELoss', 'DeepSupervisedLoss',
'LossPicker']
def softmax_helper(inp: Tensor) -> Tensor:
return F.softmax(inp, 1)
def onehot(inp: Tensor, num_classes: int,
with_channel: Optional[bool]=False) -> Tensor:
if not with_channel:
inp = inp.unsqueeze(1)
output_shape = list(inp.shape)
output_shape[1] = num_classes
output = torch.zeros(output_shape, dtype=torch.float).to(inp.device)
output.scatter_(1, inp.type(torch.int64), 1)
return output
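# Shape sketch for onehot (hypothetical sizes): a label map of shape (B, H, W) with
# num_classes=C becomes a float tensor of shape (B, C, H, W) with a single 1 along the
# class dimension, e.g. onehot(torch.tensor([[[0, 2]]]), 3).shape -> (1, 3, 1, 2).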
def flatten(inp: Tensor, with_class: Optional[bool]=False) -> Tensor:
"""
:param inp: input tensor with shape (B, C, Spatial_shape)
:param with_class: flatten the tensor with C dim
:return: if with_class is True, return a tensor woth shape of
(B, C, prod(spatial_shape)), if with_class is False,
return a tensor with shape of (B, C * prod(spatial_shape))
"""
if with_class:
B = inp.size(0)
C = inp.size(1)
inp = inp.view(B, C, -1)
else:
B = inp.size(0)
inp = inp.view(B, -1)
return inp
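# Shape sketch (hypothetical sizes): for an input of shape (B, C, H, W) = (2, 3, 4, 4),
# flatten(x) returns shape (2, 48) while flatten(x, with_class=True) returns (2, 3, 16).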
def flatten_with_class(inp: Tensor) -> Tensor:
"""
:param inp: input tensor, the expected shape is (B, C, spatial_shape)
:return: a tentor with shape (C, B * prod(spatial_shape))
"""
inp = inp.permute(1, 0, *tuple(range(2, inp.ndim))).contiguous()
C = inp.size(0)
return inp.view(C, -1)
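# Shape sketch (hypothetical sizes): for the same (2, 3, 4, 4) input,
# flatten_with_class(x) moves the class axis to the front and returns shape (3, 32),
# i.e. one row per class spanning the whole batch and all spatial positions.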
def iou_loss(pred: Tensor, gt: Tensor,
smooth: Optional[float]=0.01,
ignore_label: Optional[int]=None) -> Tensor:
"""
:param pred: after latest activation, the shape is (B, C, spatial_shape)
:param gt: onehoted gt, the shape is (B, C, spatial_shape)
:return: IoU loss (1 - IoU), the shape is (B,)
"""
assert pred.shape == gt.shape
if ignore_label is not None:
pred = torch.stack([v for i, v in enumerate(torch.unbind(pred, dim=1))
if i != ignore_label])
gt = torch.stack([v for i, v in enumerate(torch.unbind(gt, dim=1))
if i != ignore_label])
pred = flatten(pred)
gt = flatten(gt)
tp = (pred * gt).sum(-1)
fp = (pred * (1 - gt)).sum(-1)
fn = ((1 - pred) * gt).sum(-1)
iou = (tp + smooth) / (tp + fp + fn + EPS + smooth)
return 1.0 - iou
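# Worked example (hypothetical values, ignoring the batch/class dims for brevity):
# for pred = [1, 1, 0, 0] and gt = [1, 0, 0, 1] we get tp = 1, fp = 1, fn = 1, so with
# smooth ~ 0 the IoU is 1/3 and the returned loss is 2/3.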
def generalized_dice_loss(pred: Tensor, gt: Tensor,
smooth: Optional[float]=0.01,
with_weight: Optional[bool]=True,
ignore_label: Optional[int]=None) -> Tensor:
"""
:param pred: after latest activation, the shape is (B, C, spatial_shape)
:param gt: onehoted gt, the shape is (B, C, spatial_shape)
:return: generalized Dice loss (1 - GDice), the shape is (B,)
"""
assert pred.shape == gt.shape
if ignore_label is not None:
pred = torch.stack([v for i, v in enumerate(torch.unbind(pred, dim=1))
if i != ignore_label])
gt = torch.stack([v for i, v in enumerate(torch.unbind(gt, dim=1))
if i != ignore_label])
pred = flatten(pred, with_class=True)
gt = flatten(gt, with_class=True)
if with_weight:
gt_class_flatten = flatten_with_class(gt).sum(-1)
class_weight = 1.0 / (gt_class_flatten * gt_class_flatten + EPS)
intersect = (pred * gt).sum(-1) * class_weight.unsqueeze(0)
intersect = intersect.sum(-1)
else:
intersect = (pred * gt).sum([-2, -1])
# the shape of intersect is (B,)
# the shape of pred and gt is (B, C, prod(spatial_shape))
denominator = pred.sum([-2, -1]) + gt.sum([-2, -1])
assert intersect.shape == denominator.shape, \
f'{intersect.shape} != {denominator.shape}'
return 1.0 - (intersect + smooth) / (denominator + EPS + smooth)
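# Note on the weighting above (sketch): with with_weight=True each per-class
# intersection is scaled by w_c = 1 / (sum of gt voxels in class c)^2, so classes with
# few positive voxels are up-weighted relative to dominant ones; the denominator is
# left unweighted here.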
def soft_dice_loss(pred: Tensor, gt: Tensor,
ignore_label: Optional[int]=None) -> Tensor:
"""
soft Dice = 2 * IoU / (1 + IoU), so this returns the Dice loss, i.e. 1 - Dice
:param pred: after latest activation, the shape is (B, C, spatial_shape)
:param gt: onehoted gt, the shape is (B, C, spatial_shape)
:return: dice loss, the shape is (B,)
"""
iou = iou_loss(pred, gt, ignore_label=ignore_label)  # note: iou_loss returns 1 - IoU, not the IoU itself
return iou / (2.0 - iou)
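# Derivation sketch for the return value above: with I = IoU and L = 1 - I (what
# iou_loss returns), Dice = 2*I / (1 + I), hence
# Dice loss = 1 - Dice = (1 - I) / (1 + I) = L / (2 - L).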
class IOULoss(nn.Module):
def __init__(self, opt: Opts,
activation: Optional[Callable]=None,
ignore_label: Optional[int]=None) -> None:
super().__init__()
self.ignore_label = ignore_label
if activation is None:
self.activation = LogitToPreds(opt)
else:
self.activation = activation
def forward(self, logit: Tensor, gt: Tensor, *,
reduction: Optional[str]='mean') -> Tensor:
pred = self.activation(logit)
loss = iou_loss(pred, gt, ignore_label=self.ignore_label)
if reduction == 'mean':
loss = loss.mean()
elif reduction == 'none':
pass
else:
raise ValueError(f'Unrecognized reduction method ({reduction}).')
return loss
class GDiceLoss(nn.Module):
def __init__(self, opt: Opts,
activation: Optional[Callable]=None,
with_weight: Optional[bool]=False,
ignore_label: Optional[int]=None) -> None:
super().__init__()
self.with_weight = with_weight
self.ignore_label = ignore_label
if activation is None:
self.activation = LogitToPreds(opt)
else:
self.activation = activation
def forward(self, logit: Tensor, gt: Tensor, *,
onehoted: Optional[bool]=False,
reduction: Optional[str]='mean') -> Tensor:
if not onehoted:
num_classes = logit.size(1)
with_channel = True if gt.ndim == logit.ndim else False
onehoted_gt = onehot(gt, num_classes, with_channel=with_channel)
else:
onehoted_gt = gt
pred = self.activation(logit)
loss = generalized_dice_loss(pred, onehoted_gt,
with_weight=self.with_weight,
ignore_label=self.ignore_label)
if reduction == 'mean':
loss = loss.mean()
elif reduction == 'none':
pass
else:
raise ValueError(f'Unrecognized reduction method ({reduction}).')
return loss
class SoftDiceLoss(nn.Module):
def __init__(self, opt: Opts,
activation: Optional[Callable]=None,
ignore_label: int=None,
*args, **kwargs) -> None:
super().__init__()
self.ignore_label = ignore_label
if activation is None:
self.activation = LogitToPreds(opt)
else:
self.activation = activation
def forward(self, logit: Tensor, gt: Tensor, *,
onehoted: Optional[bool]=False,
reduction: Optional[str]='mean') -> Tensor:
if not onehoted:
num_classes = logit.size(1)
with_channel = True if gt.ndim == logit.ndim else False
onehoted_gt = onehot(gt, num_classes, with_channel=with_channel)
else:
onehoted_gt = gt
pred = self.activation(logit)
loss = soft_dice_loss(pred, onehoted_gt,
ignore_label=self.ignore_label)
if reduction == 'mean':
loss = loss.mean()
elif reduction == 'none':
pass
else:
raise ValueError(f'Unrecognized reduction method ({reduction}).')
return loss
class CrossEntropyLoss(nn.Module):
def forward(self, logit: Tensor, gt: Tensor, *,
reduction: Optional[str]='mean',
ignore_label: Optional[int]=None) -> Tensor:
assert logit.ndim == gt.ndim + 1
if ignore_label is None:
ignore_label = -100
loss = F.cross_entropy(logit, gt, reduction='none',
ignore_index=ignore_label)
if reduction == 'mean':
loss = loss.mean()
elif reduction == 'none':
loss = loss.mean(list(range(1, loss.ndim)))
else:
raise ValueError(
f'Unrecognized reduction method ({reduction}).')
return loss
class BCELossWithLogits(nn.Module):
def forward(self, logit: Tensor, gt: Tensor, *,
reduction: Optional[str]='mean',
ignore_label: Optional[int]=None) -> Tensor:
assert logit.shape == gt.shape
if ignore_label is not None:
logit = torch.stack(
[v for i, v in enumerate(torch.unbind(logit, dim=1))
if i != ignore_label])
gt = torch.stack(
[v for i, v in enumerate(torch.unbind(gt, dim=1))
if i != ignore_label])
loss = F.binary_cross_entropy_with_logits(logit, gt, reduction='none')
if reduction == 'mean':
loss = loss.mean()
elif reduction == 'none':
loss = loss.mean(list(range(1, loss.ndim)))
else:
raise ValueError(
f'Unrecognized reduction method ({reduction}).')
return loss
class GDiceCELoss(nn.Module):
def __init__(self, opt: Opts, dice_weight: Optional[float]=1.0,
ce_weight: Optional[float]=1.0,
ignore_label: Optional[int]=None,
*args, **kwargs) -> None:
super().__init__()
self.dice_weight = dice_weight
self.ce_weight = ce_weight
self.ignore_label = ignore_label
self.dice_loss = GDiceLoss(opt, activation=softmax_helper,
ignore_label=self.ignore_label,
*args, **kwargs)
self.ce_loss = CrossEntropyLoss()
def forward(self, logit: Tensor, gt: Tensor, *,
reduction: Optional[str]='mean') -> Tensor:
ce_gt = gt.squeeze(1) if logit.ndim == gt.ndim else gt
dice_loss_ = self.dice_loss(logit, gt.float(), reduction=reduction)
ce_loss_ = self.ce_loss(logit, ce_gt.long(), reduction=reduction,
ignore_label=self.ignore_label)
loss = dice_loss_ * self.dice_weight + ce_loss_ * self.ce_weight
return loss
class GDiceBCELoss(nn.Module):
def __init__(self, opt: Opts, dice_weight: Optional[float]=1.0,
ce_weight: Optional[float]=1.0,
ignore_label: Optional[int]=None,
*args, **kwargs) -> None:
super().__init__()
self.ignore_label = ignore_label
self.dice_weight = dice_weight
self.ce_weight = ce_weight
self.dice_loss = GDiceLoss(opt, activation=torch.sigmoid,
ignore_label=self.ignore_label,
*args, **kwargs)
self.ce_loss = BCELossWithLogits()
def forward(self, logit: Tensor, gt: Tensor, *,
reduction: Optional[str]='mean') -> Tensor:
num_classes = logit.size(1)
with_channel = True if gt.ndim == logit.ndim else False
onehoted_gt = onehot(gt, num_classes, with_channel=with_channel)
dice_loss_ = self.dice_loss(logit, onehoted_gt, onehoted=True,
reduction=reduction)
ce_loss_ = self.ce_loss(logit, onehoted_gt, reduction=reduction,
ignore_label=self.ignore_label)
loss = dice_loss_ * self.dice_weight + ce_loss_ * self.ce_weight
return loss
class SoftDiceCELoss(GDiceCELoss):
def __init__(self, opt: Opts, dice_weight: Optional[float]=1.0,
ce_weight: Optional[float]=1.0,
ignore_label: Optional[int]=None,
*args, **kwargs) -> None:
super().__init__(opt, dice_weight=dice_weight,
ce_weight=ce_weight,
ignore_label=ignore_label,
*args, **kwargs)
self.dice_loss = SoftDiceLoss(opt, activation=softmax_helper,
ignore_label=ignore_label,
*args, **kwargs)
class SoftDiceBCELoss(GDiceBCELoss):
def __init__(self, opt: Opts, dice_weight: Optional[float]=1.0,
ce_weight: Optional[float]=1.0,
ignore_label: Optional[int]=None,
*args, **kwargs) -> None:
super().__init__(opt, dice_weight=dice_weight,
ce_weight=ce_weight,
ignore_label=ignore_label,
*args, **kwargs)
self.dice_loss = SoftDiceLoss(opt, activation=torch.sigmoid,
ignore_label=ignore_label,
*args, **kwargs)
class DeepSupervisedLoss(nn.Module):
def __init__(self, loss_fn: Callable,
weights: Union[List, Tuple]) -> None:
super().__init__()
self.loss_fn = loss_fn
self.weights = weights
def forward(self, logits: Union[Tuple, List],
gts: Union[Tuple, List],
**kwargs) -> Tensor:
assert len(logits) == len(gts)
assert len(logits) == len(self.weights)
final_loss = 0
for logit, gt, weight in zip(logits, gts, self.weights):
final_loss += self.loss_fn(logit, gt, **kwargs) * weight
return final_loss
class LossPicker(object):
def __init__(self, opt: Opts, *args, **kwargs) -> None:
super().__init__()
assert opt.loss in _loss_dict_.keys(), \
f'{opt.loss} not in {_loss_dict_.keys()}'
self.loss_fn = _loss_dict_[opt.loss](opt, *args, **kwargs)
def __call__(self, *args, **kwargs) -> Tensor:
return self.loss_fn(*args, **kwargs)
_loss_dict_ = {
'IOULoss': IOULoss,
'GDiceLoss': GDiceLoss,
'SoftDiceLoss': SoftDiceLoss,
'CrossEntropyLoss': CrossEntropyLoss,
'BCELossWithLogits': BCELossWithLogits,
'GDiceCELoss': GDiceCELoss,
'GDiceBCELoss': GDiceBCELoss,
'SoftDiceCELoss': SoftDiceCELoss,
'SoftDiceBCELoss': SoftDiceBCELoss
}
| 36.002469
| 78
| 0.56992
| 9,599
| 0.658322
| 0
| 0
| 0
| 0
| 0
| 0
| 1,899
| 0.130238
|
3d06f699f338062bc96644c815234c6952e6bcf8
| 1,136
|
py
|
Python
|
libary/yml_wrapper.py
|
NekoFanatic/kaiji
|
7ae8e12d4e821e7d28d78034e1ec044ed75f9536
|
[
"MIT"
] | null | null | null |
libary/yml_wrapper.py
|
NekoFanatic/kaiji
|
7ae8e12d4e821e7d28d78034e1ec044ed75f9536
|
[
"MIT"
] | null | null | null |
libary/yml_wrapper.py
|
NekoFanatic/kaiji
|
7ae8e12d4e821e7d28d78034e1ec044ed75f9536
|
[
"MIT"
] | null | null | null |
from typing import Union
import yaml
class ConfigReader:
def __init__(self):
with open("config.yml", "r") as f:
data = yaml.safe_load(f)
self.data = data
def __getattr__(self, __name: str):
s = __name.split("_")
data = self.data
try:
for i in s:
data = data[i]
return data
except KeyError:
raise Exception("Can't find object")
class TextReader:
def __init__(self):
with open("text.yml", "r") as f:
data = yaml.safe_load(f)
self.data = data
def __getattr__(self, __name: str):
s = __name.split("_")
data = self.data
try:
for i in s:
data = data[i]
return data
except KeyError:
raise Exception("Can't find object")
def find(self, string: str) -> Union[str, list]:
s = string.split("_")
data = self.data
try:
for i in s:
data = data[i]
return data
except KeyError:
raise Exception("Can't find object")
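# Usage sketch (hypothetical config keys, not from the original project): given a
# config.yml such as
#
#     discord:
#       token: "abc123"
#       prefix: "!"
#
# an attribute access is split on "_" and walked through the nested dict:
#
#     config = ConfigReader()
#     config.discord_token   # -> "abc123"
#     config.discord_prefix  # -> "!"
#
# (note that keys which themselves contain an underscore cannot be reached this way)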
| 23.183673
| 52
| 0.49912
| 1,093
| 0.962148
| 0
| 0
| 0
| 0
| 0
| 0
| 94
| 0.082746
|
3d0821d054373cb00fdbdf718c2ebff667597c8c
| 14,688
|
py
|
Python
|
smoothfdr/easy.py
|
tansey/smoothfdr
|
c5b693d0a66e83c9387433b33c0eab481bd4a763
|
[
"MIT"
] | 6
|
2016-02-26T23:08:57.000Z
|
2018-09-13T16:14:47.000Z
|
smoothfdr/easy.py
|
tansey/smoothfdr
|
c5b693d0a66e83c9387433b33c0eab481bd4a763
|
[
"MIT"
] | 2
|
2015-09-23T16:59:37.000Z
|
2017-09-29T13:19:44.000Z
|
smoothfdr/easy.py
|
tansey/smoothfdr
|
c5b693d0a66e83c9387433b33c0eab481bd4a763
|
[
"MIT"
] | 3
|
2017-07-04T12:25:32.000Z
|
2021-04-16T00:10:33.000Z
|
# import itertools
# from functools import partial
# from scipy.stats import norm
# from scipy.sparse import csc_matrix, linalg as sla
# from scipy import sparse
# from scipy.optimize import minimize, minimize_scalar
# from collections import deque, namedtuple
import numpy as np
from networkx import Graph
from pygfl.solver import TrailSolver
from pygfl.trails import decompose_graph, save_chains
from pygfl.utils import chains_to_trails, calc_plateaus, hypercube_edges
from smoothfdr.smoothed_fdr import GaussianKnown
from smoothfdr.normix import *
from smoothfdr.utils import calc_fdr
def smooth_fdr(data, fdr_level, edges=None, initial_values=None, verbose=0, null_dist=None, signal_dist=None, num_sweeps=10, missing_val=None):
flat_data = data.flatten()
nonmissing_flat_data = flat_data
if edges is None:
if verbose:
print('Using default edge set of a grid of same shape as the data: {0}'.format(data.shape))
edges = hypercube_edges(data.shape)
if missing_val is not None:
if verbose:
print('Removing all data points whose data value is {0}'.format(missing_val))
edges = [(e1,e2) for (e1,e2) in edges if flat_data[e1] != missing_val and flat_data[e2] != missing_val]
nonmissing_flat_data = flat_data[flat_data != missing_val]
# Decompose the graph into trails
g = Graph()
g.add_edges_from(edges)
chains = decompose_graph(g, heuristic='greedy')
ntrails, trails, breakpoints, edges = chains_to_trails(chains)
if null_dist is None:
# empirical null estimation
mu0, sigma0 = empirical_null(nonmissing_flat_data, verbose=max(0,verbose-1))
elif isinstance(null_dist,GaussianKnown):
mu0, sigma0 = null_dist.mean, null_dist.stdev
else:
mu0, sigma0 = null_dist
null_dist = GaussianKnown(mu0, sigma0)
if verbose:
print('Empirical null: {0}'.format(null_dist))
# signal distribution estimation
if verbose:
print('Running predictive recursion for {0} sweeps'.format(num_sweeps))
if signal_dist is None:
grid_x = np.linspace(max(-20, nonmissing_flat_data.min() - 1), min(nonmissing_flat_data.max() + 1, 20), 220)
pr_results = predictive_recursion(nonmissing_flat_data, num_sweeps, grid_x, mu0=mu0, sig0=sigma0)
signal_dist = GridDistribution(pr_results['grid_x'], pr_results['y_signal'])
if verbose:
print('Smoothing priors via solution path algorithm')
solver = TrailSolver()
solver.set_data(flat_data, edges, ntrails, trails, breakpoints)
results = solution_path_smooth_fdr(flat_data, solver, null_dist, signal_dist, verbose=max(0, verbose-1))
results['discoveries'] = calc_fdr(results['posteriors'], fdr_level)
results['null_dist'] = null_dist
results['signal_dist'] = signal_dist
# Reshape everything back to the original data shape
results['betas'] = results['betas'].reshape(data.shape)
results['priors'] = results['priors'].reshape(data.shape)
results['posteriors'] = results['posteriors'].reshape(data.shape)
results['discoveries'] = results['discoveries'].reshape(data.shape)
results['beta_iters'] = np.array([x.reshape(data.shape) for x in results['beta_iters']])
results['prior_iters'] = np.array([x.reshape(data.shape) for x in results['prior_iters']])
results['posterior_iters'] = np.array([x.reshape(data.shape) for x in results['posterior_iters']])
return results
def smooth_fdr_known_dists(data, fdr_level, null_dist, signal_dist, edges=None, initial_values=None, verbose=0, missing_val=None):
'''FDR smoothing where the null and alternative distributions are known
(and not necessarily Gaussian). Both must define the function pdf.'''
flat_data = data.flatten()
nonmissing_flat_data = flat_data
if edges is None:
if verbose:
print('Using default edge set of a grid of same shape as the data: {0}'.format(data.shape))
edges = hypercube_edges(data.shape)
if missing_val is not None:
if verbose:
print('Removing all data points whose data value is {0}'.format(missing_val))
edges = [(e1,e2) for (e1,e2) in edges if flat_data[e1] != missing_val and flat_data[e2] != missing_val]
nonmissing_flat_data = flat_data[flat_data != missing_val]
# Decompose the graph into trails
g = Graph()
g.add_edges_from(edges)
chains = decompose_graph(g, heuristic='greedy')
ntrails, trails, breakpoints, edges = chains_to_trails(chains)
if verbose:
print('Smoothing priors via solution path algorithm')
solver = TrailSolver()
solver.set_data(flat_data, edges, ntrails, trails, breakpoints)
results = solution_path_smooth_fdr(flat_data, solver, null_dist, signal_dist, verbose=max(0, verbose-1))
results['discoveries'] = calc_fdr(results['posteriors'], fdr_level)
results['null_dist'] = null_dist
results['signal_dist'] = signal_dist
# Reshape everything back to the original data shape
results['betas'] = results['betas'].reshape(data.shape)
results['priors'] = results['priors'].reshape(data.shape)
results['posteriors'] = results['posteriors'].reshape(data.shape)
results['discoveries'] = results['discoveries'].reshape(data.shape)
results['beta_iters'] = np.array([x.reshape(data.shape) for x in results['beta_iters']])
results['prior_iters'] = np.array([x.reshape(data.shape) for x in results['prior_iters']])
results['posterior_iters'] = np.array([x.reshape(data.shape) for x in results['posterior_iters']])
return results
def solution_path_smooth_fdr(data, solver, null_dist, signal_dist, min_lambda=0.20, max_lambda=1.5, lambda_bins=30, verbose=0, initial_values=None):
'''Follows the solution path of the generalized lasso to find the best lambda value.'''
lambda_grid = np.exp(np.linspace(np.log(max_lambda), np.log(min_lambda), lambda_bins))
aic_trace = np.zeros(lambda_grid.shape) # The AIC score for each lambda value
aicc_trace = np.zeros(lambda_grid.shape) # The AICc score for each lambda value (correcting for finite sample size)
bic_trace = np.zeros(lambda_grid.shape) # The BIC score for each lambda value
dof_trace = np.zeros(lambda_grid.shape) # The degrees of freedom of each final solution
log_likelihood_trace = np.zeros(lambda_grid.shape)
beta_trace = []
u_trace = []
w_trace = []
c_trace = []
results_trace = []
best_idx = None
best_plateaus = None
for i, _lambda in enumerate(lambda_grid):
if verbose:
print('#{0} Lambda = {1}'.format(i, _lambda))
# Fit to the final values
results = fixed_penalty_smooth_fdr(data, solver, _lambda, null_dist, signal_dist,
verbose=max(0,verbose - 1),
initial_values=initial_values)
if verbose:
print('Calculating degrees of freedom')
plateaus = calc_plateaus(results['beta'], solver.edges)
dof_trace[i] = len(plateaus)
if verbose:
print('Calculating AIC')
# Get the negative log-likelihood
log_likelihood_trace[i] = -_data_negative_log_likelihood(data, results['c'], null_dist, signal_dist)
# Calculate AIC = 2k - 2ln(L)
aic_trace[i] = 2. * dof_trace[i] - 2. * log_likelihood_trace[i]
# Calculate AICc = AIC + 2k * (k+1) / (n - k - 1)
aicc_trace[i] = aic_trace[i] + 2 * dof_trace[i] * (dof_trace[i]+1) / (data.shape[0] - dof_trace[i] - 1.)
# Calculate BIC = -2ln(L) + k * (ln(n) - ln(2pi))
bic_trace[i] = -2 * log_likelihood_trace[i] + dof_trace[i] * (np.log(len(data)) - np.log(2 * np.pi))
# Track the best model thus far
if best_idx is None or bic_trace[i] < bic_trace[best_idx]:
best_idx = i
best_plateaus = plateaus
# Save the final run parameters to use for warm-starting the next iteration
initial_values = results
# Save the trace of all the resulting parameters
beta_trace.append(results['beta'])
w_trace.append(results['w'])
c_trace.append(results['c'])
if verbose:
print('DoF: {0} AIC: {1} AICc: {2} BIC: {3}'.format(dof_trace[i], aic_trace[i], aicc_trace[i], bic_trace[i]))
if verbose:
print('Best setting (by BIC): lambda={0} [DoF: {1}, AIC: {2}, AICc: {3} BIC: {4}]'.format(lambda_grid[best_idx], dof_trace[best_idx], aic_trace[best_idx], aicc_trace[best_idx], bic_trace[best_idx]))
return {'aic': aic_trace,
'aicc': aicc_trace,
'bic': bic_trace,
'dof': dof_trace,
'loglikelihood': log_likelihood_trace,
'beta_iters': np.array(beta_trace),
'posterior_iters': np.array(w_trace),
'prior_iters': np.array(c_trace),
'lambda_iters': lambda_grid,
'best': best_idx,
'betas': beta_trace[best_idx],
'priors': c_trace[best_idx],
'posteriors': w_trace[best_idx],
'lambda': lambda_grid[best_idx],
'plateaus': best_plateaus}
def fixed_penalty_smooth_fdr(data, solver, _lambda, null_dist, signal_dist, initial_values=None, verbose=0):
converge = 1e-6
max_steps = 30
m_steps = 1
m_converge = 1e-6
w_iters = []
beta_iters = []
c_iters = []
delta_iters = []
delta = converge + 1
if initial_values is None:
beta = np.zeros(data.shape)
prior_prob = np.exp(beta) / (1 + np.exp(beta))
else:
beta = initial_values['beta']
prior_prob = initial_values['c']
prev_nll = 0
cur_step = 0
while delta > converge and cur_step < max_steps:
if verbose:
print('Step #{0}'.format(cur_step))
if verbose:
print('\tE-step...')
# Get the likelihood weights vector (E-step)
post_prob = _e_step(data, prior_prob, null_dist, signal_dist)
if verbose:
print('\tM-step...')
# Find beta using an alternating Taylor approximation and convex optimization (M-step)
beta, initial_values = _m_step(beta, prior_prob, post_prob, _lambda,
solver, m_converge, m_steps,
max(0,verbose-1), initial_values)
# Get the signal probabilities
prior_prob = ilogit(beta)
cur_nll = _data_negative_log_likelihood(data, prior_prob, null_dist, signal_dist)
# Track the change in log-likelihood to see if we've converged
delta = np.abs(cur_nll - prev_nll) / (prev_nll + converge)
if verbose:
print('\tDelta: {0}'.format(delta))
# Track the step
w_iters.append(post_prob)
beta_iters.append(beta)
c_iters.append(prior_prob)
delta_iters.append(delta)
# Increment the step counter
cur_step += 1
# Update the negative log-likelihood tracker
prev_nll = cur_nll
# DEBUGGING
if verbose:
print('\tbeta: [{0:.4f}, {1:.4f}]'.format(beta.min(), beta.max()))
print('\tprior_prob: [{0:.4f}, {1:.4f}]'.format(prior_prob.min(), prior_prob.max()))
print('\tpost_prob: [{0:.4f}, {1:.4f}]'.format(post_prob.min(), post_prob.max()))
w_iters = np.array(w_iters)
beta_iters = np.array(beta_iters)
c_iters = np.array(c_iters)
delta_iters = np.array(delta_iters)
# Return the results of the run
return {'beta': beta, 'w': post_prob, 'c': prior_prob,
'z': initial_values['z'], 'u': initial_values['u'],
'w_iters': w_iters, 'beta_iters': beta_iters,
'c_iters': c_iters, 'delta_iters': delta_iters}
def _data_negative_log_likelihood(data, prior_prob, null_dist, signal_dist):
'''Calculate the negative log-likelihood of the data given the weights.'''
signal_weight = prior_prob * signal_dist.pdf(data)
null_weight = (1-prior_prob) * null_dist.pdf(data)
return -np.log(signal_weight + null_weight).sum()
def _e_step(data, prior_prob, null_dist, signal_dist):
'''Calculate the complete-data sufficient statistics (weights vector).'''
signal_weight = prior_prob * signal_dist.pdf(data)
null_weight = (1-prior_prob) * null_dist.pdf(data)
post_prob = signal_weight / (signal_weight + null_weight)
return post_prob
def _m_step(beta, prior_prob, post_prob, _lambda,
solver, converge, max_steps,
verbose, initial_values):
'''
Alternating Second-order Taylor-series expansion about the current iterate
'''
prev_nll = _m_log_likelihood(post_prob, beta)
delta = converge + 1
cur_step = 0
while delta > converge and cur_step < max_steps:
if verbose:
print('\t\tM-Step iteration #{0}'.format(cur_step))
print('\t\tTaylor approximation...')
# Cache the exponentiated beta
exp_beta = np.exp(beta)
# Form the parameters for our weighted least squares
weights = (prior_prob * (1 - prior_prob))
y = beta - (prior_prob - post_prob) / weights
solver.set_values_only(y, weights=weights)
if initial_values is None:
initial_values = {'beta': solver.beta, 'z': solver.z, 'u': solver.u}
else:
solver.beta = initial_values['beta']
solver.z = initial_values['z']
solver.u = initial_values['u']
solver.solve(_lambda)
# if np.abs(beta).max() > 20:
# beta = np.clip(beta, -20, 20)
# u = None
beta = initial_values['beta']
# Get the current log-likelihood
cur_nll = _m_log_likelihood(post_prob, beta)
# Track the convergence
delta = np.abs(prev_nll - cur_nll) / (prev_nll + converge)
if verbose:
print('\t\tM-step delta: {0}'.format(delta))
# Increment the step counter
cur_step += 1
# Update the negative log-likelihood tracker
prev_nll = cur_nll
return beta, initial_values
def _m_log_likelihood(post_prob, beta):
'''Calculate the log-likelihood of the betas given the weights and data.'''
return (np.log(1 + np.exp(beta)) - post_prob * beta).sum()
def ilogit(x):
return 1. / (1. + np.exp(-x))
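# A hypothetical end-to-end sketch on synthetic z-scores (not part of the
# original module): a 20x20 grid with one elevated block, smoothed at a 10%
# FDR level. Runtime and the exact discovery count depend on the pygfl solver.
if __name__ == '__main__':
    np.random.seed(0)
    z = np.random.normal(size=(20, 20))
    z[5:11, 5:11] += 3.0  # planted signal region
    fit = smooth_fdr(z, 0.10, verbose=1)
    print('discoveries:', int(fit['discoveries'].sum()))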
| 41.027933
| 210
| 0.633715
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3,731
| 0.254017
|
3d09d053089d2dfd866a874b9112340c6aa15645
| 438
|
py
|
Python
|
code/2cams.py
|
ctm1098/umucv
|
ea6cce5d9cfece1e372e05eb9223ef6ddc17b438
|
[
"BSD-3-Clause"
] | 12
|
2018-02-15T17:54:57.000Z
|
2022-02-25T12:00:49.000Z
|
code/2cams.py
|
ctm1098/umucv
|
ea6cce5d9cfece1e372e05eb9223ef6ddc17b438
|
[
"BSD-3-Clause"
] | 8
|
2019-03-06T18:53:43.000Z
|
2022-03-18T10:04:40.000Z
|
code/2cams.py
|
ctm1098/umucv
|
ea6cce5d9cfece1e372e05eb9223ef6ddc17b438
|
[
"BSD-3-Clause"
] | 22
|
2018-02-06T14:40:03.000Z
|
2022-03-17T11:38:48.000Z
|
#!/usr/bin/env python
import numpy as np
import cv2 as cv
cap1 = cv.VideoCapture(0)
cap2 = cv.VideoCapture(1)
while(cv.waitKey(1) & 0xFF != 27):
ret, frame1 = cap1.read()
ret, frame2 = cap2.read()
cv.imshow('c1',frame1)
cv.imshow('c2',frame2)
gray1 = cv.cvtColor(frame1, cv.COLOR_RGB2GRAY)
gray2 = cv.cvtColor(frame2, cv.COLOR_RGB2GRAY)
cv.imshow('frame', gray1//2 + gray2//2)
cv.destroyAllWindows()
| 19.043478
| 50
| 0.652968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 36
| 0.082192
|
3d0a653d9351f079b350d765b5ef2da6e1ece3a5
| 1,109
|
py
|
Python
|
Summary pie chart/Sum_Indonesia.py
|
pdeesawat/PSIT58_test_01
|
631946eacd82503e0697680f06290a4fe10f17f2
|
[
"Apache-2.0"
] | null | null | null |
Summary pie chart/Sum_Indonesia.py
|
pdeesawat/PSIT58_test_01
|
631946eacd82503e0697680f06290a4fe10f17f2
|
[
"Apache-2.0"
] | null | null | null |
Summary pie chart/Sum_Indonesia.py
|
pdeesawat/PSIT58_test_01
|
631946eacd82503e0697680f06290a4fe10f17f2
|
[
"Apache-2.0"
] | null | null | null |
import plotly.plotly as py
"""Get data from csv and split it"""
data = open('Real_Final_database_02.csv')
alldata = data.readlines()
listdata = []
for ix in alldata:
listdata.append(ix.strip().split(','))
"""Seperate data in each type of disaster."""
all_disaster = {'Drought':0, 'Flood':0, 'Storm':0, 'Epidemic':0, 'Earthquake':0}
for iy in listdata:
if iy[0] == 'Indonesia' and iy[2] in all_disaster:
all_disaster[iy[2]] += 1
"""Calculate each type for make an average."""
total = sum(all_disaster.values())
average = []
for iz in all_disaster:
all_disaster[iz] = float("%.2f" % ((all_disaster[iz]/total)*100))
label = [i for i in all_disaster]
value = [all_disaster[j] for j in label]
"""Apprerance"""
make_circle = {"data": [{"values":value,"labels":label,
"name": "Average", "hoverinfo":"label+percent+name", "hole": 0.39, "type": "pie"}],
"layout": {"title":"Indonesia's Average Disaster from 2000 to 2014", "annotations": [{"font": {"size": 20},
"showarrow": False, "text": ""}]}}
url = py.plot(make_circle, filename='Indonesia\'s Average Disaster from 2000 to 2014')
| 35.774194
| 107
| 0.658251
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 477
| 0.430117
|
3d0b11a3dec857ffd6e51932557d206c66901849
| 2,515
|
py
|
Python
|
python/draw_dog.py
|
event-driven-robotics/study-air-hockey
|
e933bcf85d77762dae7d468f314c7db6e71fba81
|
[
"BSD-3-Clause"
] | null | null | null |
python/draw_dog.py
|
event-driven-robotics/study-air-hockey
|
e933bcf85d77762dae7d468f314c7db6e71fba81
|
[
"BSD-3-Clause"
] | null | null | null |
python/draw_dog.py
|
event-driven-robotics/study-air-hockey
|
e933bcf85d77762dae7d468f314c7db6e71fba81
|
[
"BSD-3-Clause"
] | 1
|
2021-07-29T15:09:37.000Z
|
2021-07-29T15:09:37.000Z
|
import numpy as np
from matplotlib.patches import Ellipse
import matplotlib.pyplot as plt
import matplotlib
from matplotlib import cm
from scipy import signal
import matplotlib.image as mpimg
# matplotlib.use('Agg')
# define normalized 2D gaussian
def gaus2d(x, y, mx, my, sx, sy):
return 1. / (2. * np.pi * sx * sy) * np.exp(-((x - mx)**2. / (2. * sx**2.) + (y - my)**2. / (2. * sy**2.)))
def rgb2gray(rgb):
return np.dot(rgb[...,:3], [0.2989, 0.5870, 0.1140])
ellipse = Ellipse(xy=(0,0), width=3.6, height=1.8, edgecolor='r', lw=2, facecolor='none')
x = np.linspace(0, 10, 101)
y = np.linspace(0, 10, 101)
x1, y1 = np.meshgrid(x, y) # get 2D variables instead of 1D
z1 = gaus2d(x1, y1, 5, 5, 2.7, 1.35)
z1_copy = z1.copy()
z1 = z1/z1.max()
x2, y2 = np.meshgrid(x, y) # get 2D variables instead of 1D
z2 = gaus2d(x2, y2, 5, 5, 0.9, 0.45)
z2_copy = z2.copy()
z2 = z2/z2.max()
dog_not_norm = z1 - z2
dog = (z1 - z2)/np.max(z1-z2)
dog[dog<0] = 0
# path
# path1 = 'image_puck.png'
# img1 = mpimg.imread(path1)
# gray1 = rgb2gray(img1)
# img1 = (np.array(gray1))[0:84, 0:84]
# path2 = 'circle.png'
# img2 = mpimg.imread(path2)
# gray2 = rgb2gray(img2)
# img2 = (np.array(gray1))[0:84, 0:84]
# img_conv = signal.convolve2d(img1, z1)
# # img_product = img1 * img2
#
# # Displaying the image
# fig1 = plt.figure()
#
# plt.imshow(img_conv)
# plt.show()
# fig2 = plt.figure()
# plt.imshow(img)
# plt.show()
fig = plt.figure()
ax1 = fig.add_subplot(3,2,5)
ax1.add_artist(ellipse)
im = ax1.imshow(dog, cmap="viridis", extent=(-5, 5, -5, 5))
ax1.set_xlabel('x')
ax1.set_ylabel('y')
ax1.title.set_text('dog 2D')
cbar = fig.colorbar(im, ax=ax1)
ax2 = fig.add_subplot(3,2,6,projection='3d')
ax2.contour3D(x, y, dog, 100, cmap=cm.viridis)
ax2.set_xlabel('x')
ax2.set_ylabel('y')
ax2.set_zlabel('z')
ax2.title.set_text('dog 3D')
ax3 = fig.add_subplot(3,2,1)
im1 = ax3.imshow(z1, cmap="viridis", extent=(-5, 5, -5, 5))
ax3.set_xlabel('x')
ax3.set_ylabel('y')
ax3.title.set_text('g1 2D')
ax4 = fig.add_subplot(3,2,2,projection='3d')
ax4.contour3D(x, y, z1, 50, cmap=cm.viridis)
ax4.set_xlabel('x')
ax4.set_ylabel('y')
ax4.set_zlabel('z')
ax4.title.set_text('g1 3D')
ax5 = fig.add_subplot(3,2,3)
im2 = ax5.imshow(z2, cmap="viridis", extent=(-5, 5, -5, 5))
ax5.set_xlabel('x')
ax5.set_ylabel('y')
ax5.title.set_text('g2 2D')
ax6 = fig.add_subplot(3,2,4,projection='3d')
ax6.contour3D(x, y, z2, 50, cmap=cm.viridis)
ax6.set_xlabel('x')
ax6.set_ylabel('y')
ax6.set_zlabel('z')
ax6.title.set_text('g2 3D')
plt.show()
| 25.927835
| 111
| 0.652485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 689
| 0.273956
|
3d0b63ff899d9630d5763b8599ddc075bb3c108f
| 620
|
py
|
Python
|
PycharmProjects/PythonValidacao/consome_api.py
|
FeFSRibeiro/learning-python
|
4f642aa7e1c6523f5209f83ece7e974bfb3ef24e
|
[
"Apache-2.0"
] | null | null | null |
PycharmProjects/PythonValidacao/consome_api.py
|
FeFSRibeiro/learning-python
|
4f642aa7e1c6523f5209f83ece7e974bfb3ef24e
|
[
"Apache-2.0"
] | null | null | null |
PycharmProjects/PythonValidacao/consome_api.py
|
FeFSRibeiro/learning-python
|
4f642aa7e1c6523f5209f83ece7e974bfb3ef24e
|
[
"Apache-2.0"
] | null | null | null |
import requests
class BuscaEndereco:
def __init__(self,cep):
if self.valida_cep(str(cep)):
self.cep = str(cep)
else:
raise ValueError("CEP Inválido !!!")
def __str__(self):
return self.formata_cep()
    def valida_cep(self, cep):
        # validate the argument itself; self.cep is not yet set when this is called from __init__
        return len(cep) == 8
    def formata_cep(self):
        return "{}-{}".format(self.cep[:5], self.cep[5:])
def busca_dados (self):
url = "https://viacep.com.br/ws/{}/json/".format(self.cep)
r = requests.get(url)
return r
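# A hypothetical usage sketch; "01310100" is only an illustrative CEP string
# and the request goes to the public ViaCEP endpoint used above.
if __name__ == "__main__":
    endereco = BuscaEndereco("01310100")
    print(endereco)                      # formatted as 01310-100
    resposta = endereco.busca_dados()
    print(resposta.status_code)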
| 22.142857
| 66
| 0.540323
| 603
| 0.971014
| 0
| 0
| 0
| 0
| 0
| 0
| 61
| 0.098229
|
3d0b84039e886dcbf5a0882295390d0af7dd865b
| 3,928
|
py
|
Python
|
tools/convert_lightning2venot.py
|
ucl-exoplanets/TauREx_public
|
28d47f829a2873cf15e3bfb0419b8bc4e5bc03dd
|
[
"CC-BY-4.0"
] | 18
|
2019-07-22T01:35:24.000Z
|
2022-02-10T11:25:42.000Z
|
tools/convert_lightning2venot.py
|
ucl-exoplanets/TauREx_public
|
28d47f829a2873cf15e3bfb0419b8bc4e5bc03dd
|
[
"CC-BY-4.0"
] | null | null | null |
tools/convert_lightning2venot.py
|
ucl-exoplanets/TauREx_public
|
28d47f829a2873cf15e3bfb0419b8bc4e5bc03dd
|
[
"CC-BY-4.0"
] | 1
|
2017-10-19T15:14:06.000Z
|
2017-10-19T15:14:06.000Z
|
#! /usr/bin/python
#small script that spits out the venot file format equivalent for the lightening project
import numpy as np
import pylab as pl
import pyfits as pf
import glob, os, sys
AMU = 1.660538921e-27
KBOLTZ = 1.380648813e-23
G = 6.67384e-11
RSOL = 6.955e8
RJUP = 6.9911e7
#RJUP = 7.1492e7 # Jo's radius
MJUP = 1.898e27
AU = 1.49e11
DIR = '/Users/ingowaldmann/Dropbox/UCLlocal/REPOS/taurex/Input/lightening'
# FILENAME = 'Earth-Today-Lightning-Full.dat'
FILENAME = 'Modern-Earth-noLightning-Full.dat'
#EARTH
planet_mass = 5.97237e24 #kg
planet_radius = 6.371e6 #m
planet_mu = 28.97 * AMU#kg
data = np.loadtxt(os.path.join(DIR,FILENAME),skiprows=1)
[nlayers,ncols] = np.shape(data)
fheader = open(os.path.join(DIR,FILENAME),'r')
header = fheader.readlines()
c=0
for line in header:
head = line
break
fheader.close()
#rebuilding header line
newhead = 'alt(km) '+head[:2]+'m'+head[2:]
newhead_small = head[62:]
print head.split()
molnames = ['C_1D','H','N','O','O_1D','O_1S', 'CO', 'H2','HO','N2', 'NO', 'O2', 'O2_D', 'O3', 'CH4',
'CO2', 'H2O', 'HO2', 'N2O', 'NO2', 'H2O2', 'HNO3', 'CH2O2', 'HCOOH', 'CH3ONO', 'e-',
'H+', 'O+', 'NO+','O2+', 'C','HN','CNC','H2N','H3N','C+','C-','N+','O-','CO+','HO+','N2+','CHO+',
'CH3','CHO','HCN','HNO','NO3','C2H2','C2H6','CH2O','HNO2','N2O3','CH3O2','CH3OH','CH4O2','H3O+']
molweights = [14,1,14,16,18,18,28,2,17,28,30,32,34,48,16,44,18,33,44,46,34,63,46,46,61,0,1,16,30,32,12,15,38,16,17,12,12,14,16,28,17,28,29,
15,29,27,31,62,26,28,30,48,76,47,32,52,19]
badwords = ['p(bar)' ,'T(K)' , 'NH(cm-3)' , 'Kzz(cm2s-1)' , 'Hz(cm)', 'zeta(s-1)']
mollist = []
for mol in head.split():
if mol in molnames:
mollist.append(molweights[molnames.index(mol)])
elif mol not in badwords:
mollist.append(2.3)
print 'FILLED: ',mol
else:
print 'OUT: ',mol
# mollist.append(2.3)
moleweigthstr = ' '.join(str(e) for e in mollist)
#create ranking of most important molecules according to abundance
molabundance =[]
mnamelist =[]
c=0
for mol in head.split():
mnamelist.append(mol)
molabundance.append(np.max(data[:,c]))
c+=1
mnamelist = np.asarray(mnamelist[6:])
molabundance = np.asarray(molabundance[6:])
midx = np.argsort(molabundance)
print midx[::-1]
print mnamelist[midx][::-1]
print molabundance[midx][::-1]
pressure_profile_levels = data[:,0] * 1000.0 #converting bar to mbar
temperature_profile = data[:,1]
H = np.zeros(nlayers)
g = np.zeros(nlayers)
z = np.zeros((nlayers,1))
g[0] = (G * planet_mass) / (planet_radius**2) # surface gravity (0th layer)
H[0] = (KBOLTZ*temperature_profile[0])/(planet_mu*g[0]) # scaleheight at the surface (0th layer)
for i in xrange(1, nlayers):
deltaz = (-1.)*H[i-1]*np.log(pressure_profile_levels[i]/pressure_profile_levels[i-1])
z[i] = z[i-1] + deltaz # altitude at the i-th layer
with np.errstate(over='ignore'):
g[i] = (G * planet_mass) / ((planet_radius + z[i])**2) # gravity at the i-th layer
with np.errstate(divide='ignore'):
H[i] = (KBOLTZ*temperature_profile[i])/(planet_mu*g[i])
z /=1e3 #converting m to km
OUT = np.hstack((z,data))
OUT2 = OUT[:,:3]
[s1,s2] = np.shape(data[:,6:])
OUT3 = np.zeros((s1,s2+2))
OUT3[:,0] = z[:,0]
OUT3[:,1] = data[:,0]
OUT3[:,2:] = data[:,6:]
with open(FILENAME[:-4]+'_conv.dat','wb') as outfile:
outfile.write(newhead)
outfile.write(moleweigthstr+'\n')
np.savetxt(outfile, OUT)
with open(FILENAME[:-4]+'_mixing.dat','wb') as outfile:
outfile.write(newhead_small)
outfile.write(moleweigthstr+'\n')
np.savetxt(outfile, OUT3)
np.savetxt(FILENAME[:-4]+'_tp.dat',OUT2)
pl.figure(1)
pl.plot(np.log(molabundance[midx][::-1]),linewidth=3.0)
pl.gca().xaxis.set_ticks(np.arange(0, len(molabundance), 1.0))
pl.gca().set_xticklabels(mnamelist[midx][::-1])
pl.ylabel('log(mixing ratio)')
pl.show()
| 26.90411
| 139
| 0.630855
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,050
| 0.267312
|
3d0d5a4bdcb6949d58811e00ce041b7deeb69354
| 288
|
py
|
Python
|
setup.py
|
NWeis97/ML_Ops_Project
|
cc4c65fec679b08675e76a24ad7e44de1b5df29a
|
[
"MIT"
] | null | null | null |
setup.py
|
NWeis97/ML_Ops_Project
|
cc4c65fec679b08675e76a24ad7e44de1b5df29a
|
[
"MIT"
] | null | null | null |
setup.py
|
NWeis97/ML_Ops_Project
|
cc4c65fec679b08675e76a24ad7e44de1b5df29a
|
[
"MIT"
] | null | null | null |
from setuptools import find_packages, setup
setup(
name="src",
packages=find_packages(),
version="0.1.0",
description="This project contains the final exercise of S1, "
+ "in which we will continue to build upon",
author="Nicolai Weisbjerg",
license="MIT",
)
| 24
| 66
| 0.677083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 127
| 0.440972
|
3d0f1eb7e22b4173d6bb7ae45401f9b8d7518586
| 2,534
|
py
|
Python
|
webapi.py
|
rhalkyard/gmdrec
|
d81f0c714d302f655660f15d1e62d2c3fbe40e33
|
[
"BSD-3-Clause"
] | null | null | null |
webapi.py
|
rhalkyard/gmdrec
|
d81f0c714d302f655660f15d1e62d2c3fbe40e33
|
[
"BSD-3-Clause"
] | null | null | null |
webapi.py
|
rhalkyard/gmdrec
|
d81f0c714d302f655660f15d1e62d2c3fbe40e33
|
[
"BSD-3-Clause"
] | null | null | null |
# Talking to the music player and sanitizing data.
import datetime
import requests
from requests.exceptions import Timeout
from unihandecode import Unihandecoder
from settings import server_url
def asciify(script, args):
if args.lang_code is None:
return Unihandecoder().decode(script)
return Unihandecoder(lang=args.lang_code.casefold()).decode(script)
def check_connection():
try:
requests.get(server_url, timeout=0.2)
except Timeout:
print("Connection timed out. Make sure Foobar is running and the beefsam plugin is installed.")
        raise
def request_playlist_content(args):
t_list = []
total_time = 0
response_playlist = requests.get(server_url + '/api/playlists')
playlist_list = response_playlist.json()['playlists']
for dictionary in playlist_list:
if dictionary['isCurrent']:
global playlist_id # cop-out
playlist_id = dictionary['id']
item_count = dictionary['itemCount']
payload = {'playlists': 'false', 'playlistItems': 'true',
'plref': playlist_id, 'plrange': '0:' + str(item_count),
'plcolumns': args.label+', %length_seconds%'}
response = requests.get(server_url+'/api/query', params=payload)
for i in range(item_count):
ascii_track_name = asciify(response.json()['playlistItems']['items'][i]['columns'][0], args)
print(ascii_track_name)
t_list.append(ascii_track_name)
total_time += int(response.json()['playlistItems']['items'][i]['columns'][1])
print(f'Total playlist duration: {datetime.timedelta(seconds=total_time)}')
if total_time >= 4800:
print('Warning: duration exceeds 80 minutes!')
if item_count > 254:
print('Warning: cannot record more than 254 tracks!')
# return a list of tracks to label and total time
return t_list
def request_track_time():
response = requests.get(server_url + '/api/player')
duration = response.json()['player']['activeItem']['duration']
position = response.json()['player']['activeItem']['position']
# return remaining time in track (seconds)
return duration - position
def set_player(command):
if command == 'mode_play':
# unmute, no shuffle
requests.post(server_url + '/api/player', params={'isMuted': 'false', 'playbackMode': '0'})
requests.post(server_url + f'/api/player/play/{playlist_id}/0') # start from the top
requests.post(server_url + '/api/player/' + command) # play, pause, stop
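# A hypothetical smoke test (not part of the original module), assuming
# foobar2000 is running with the beefweb plugin at the configured server_url.
# The SimpleNamespace stands in for the script's real argparse namespace; the
# 'lang_code' and 'label' fields are the only ones the functions above use.
if __name__ == '__main__':
    from types import SimpleNamespace
    check_connection()
    args = SimpleNamespace(lang_code=None, label='%title%')
    tracks = request_playlist_content(args)
    print('Tracks queued for recording:', len(tracks))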
| 36.724638
| 103
| 0.670876
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 846
| 0.33386
|
3d10fbe580e5ebf53db4ece3b62cd88392386b54
| 7,239
|
py
|
Python
|
puresnmp/aio/api/pythonic.py
|
badboybeyer/puresnmp
|
2f2757e0d064f1017f86e0e07661ac8e3c9f2eca
|
[
"MIT"
] | null | null | null |
puresnmp/aio/api/pythonic.py
|
badboybeyer/puresnmp
|
2f2757e0d064f1017f86e0e07661ac8e3c9f2eca
|
[
"MIT"
] | null | null | null |
puresnmp/aio/api/pythonic.py
|
badboybeyer/puresnmp
|
2f2757e0d064f1017f86e0e07661ac8e3c9f2eca
|
[
"MIT"
] | null | null | null |
"""
This module contains the high-level functions to access the library with
asyncio. Care is taken to make this as pythonic as possible and hide as many
of the gory implementations as possible.
This module provides "syntactic sugar" around the lower-level, but almost
identical, module :py:mod:`puresnmp.aio.api.raw`. The "raw" module
returns the variable types unmodified which are all subclasses of
:py:class:`puresnmp.x690.types.Type`.
"""
# TODO (advanced): This module should not make use of its own functions. The
# module exists as an abstraction layer only. If one function uses a
# "sibling" function, valuable information is lost. In general, this module
# is beginning to be too "thick", containing too much business logic for a
# mere abstraction layer.
from __future__ import unicode_literals
import logging
from collections import OrderedDict
from datetime import datetime, timedelta
from typing import TYPE_CHECKING
from . import raw
from ...pdu import VarBind
from ...util import BulkResult
from ...x690.types import Type
from ...x690.util import tablify
if TYPE_CHECKING: # pragma: no cover
# pylint: disable=unused-import, invalid-name
from typing import Any, Callable, Dict, Generator, List, Tuple, Union
Pythonized = Union[str, bytes, int, datetime, timedelta]
try:
unicode # type: Callable[[Any], str]
except NameError:
# pylint: disable=invalid-name
unicode = str # type: Callable[[Any], str]
_set = set
LOG = logging.getLogger(__name__)
async def get(ip, community, oid, port=161, timeout=6):
# type: (str, str, str, int, int) -> Pythonized
"""
Delegates to :py:func:`~puresnmp.aio.api.raw.get` but returns simple Python
types.
See the "raw" equivalent for detailed documentation & examples.
"""
raw_value = await raw.get(ip, community, oid, port, timeout=timeout)
return raw_value.pythonize()
async def multiget(ip, community, oids, port=161, timeout=6):
# type: (str, str, List[str], int, int) -> List[Pythonized]
"""
Delegates to :py:func:`~puresnmp.aio.api.raw.multiget` but returns simple
Python types.
See the "raw" equivalent for detailed documentation & examples.
"""
raw_output = await raw.multiget(ip, community, oids, port, timeout)
pythonized = [value.pythonize() for value in raw_output]
return pythonized
async def getnext(ip, community, oid, port=161, timeout=6):
# type: (str, str, str, int, int) -> VarBind
"""
Delegates to :py:func:`~puresnmp.aio.api.raw.getnext` but returns simple
Python types.
See the "raw" equivalent for detailed documentation & examples.
"""
return (await multigetnext(ip, community, [oid], port, timeout=timeout))[0]
async def multigetnext(ip, community, oids, port=161, timeout=6):
# type: (str, str, List[str], int, int) -> List[VarBind]
"""
Delegates to :py:func:`~puresnmp.aio.api.raw.multigetnext` but returns
simple Python types.
See the "raw" equivalent for detailed documentation & examples.
"""
raw_output = await raw.multigetnext(ip, community, oids, port, timeout)
pythonized = [VarBind(oid, value.pythonize()) for oid, value in raw_output]
return pythonized
async def walk(ip, community, oid, port=161, timeout=6):
# type: (str, str, str, int, int) -> Generator[VarBind, None, None]
"""
Delegates to :py:func:`~puresnmp.aio.api.raw.walk` but returns simple
Python types.
See the "raw" equivalent for detailed documentation & examples.
"""
raw_result = raw.walk(ip, community, oid, port, timeout)
async for raw_oid, raw_value in raw_result:
yield VarBind(raw_oid, raw_value.pythonize())
async def multiwalk(ip, community, oids, port=161, timeout=6,
fetcher=multigetnext):
# type: (str, str, List[str], int, int, Callable[[str, str, List[str], int, int], List[VarBind]]) -> Generator[VarBind, None, None]
"""
Delegates to :py:func:`~puresnmp.aio.api.raw.multiwalk` but returns simple
Python types.
See the "raw" equivalent for detailed documentation & examples.
"""
raw_output = raw.multiwalk(ip, community, oids, port, timeout, fetcher)
async for oid, value in raw_output:
if isinstance(value, Type):
value = value.pythonize()
yield VarBind(oid, value)
async def set(ip, community, oid, value, port=161, timeout=6): # pylint: disable=redefined-builtin
# type: (str, str, str, Type, int, int) -> Type
"""
Delegates to :py:func:`~puresnmp.aio.api.raw.set` but returns simple Python
types.
See the "raw" equivalent for detailed documentation & examples.
"""
result = await multiset(ip, community, [(oid, value)],
port, timeout=timeout)
return result[oid]
async def multiset(ip, community, mappings, port=161, timeout=6):
# type: (str, str, List[Tuple[str, Type]], int, int) -> Dict[str, Type]
"""
Delegates to :py:func:`~puresnmp.aio.api.raw.multiset` but returns simple
Python types.
See the "raw" equivalent for detailed documentation & examples.
"""
raw_output = await raw.multiset(ip, community, mappings, port, timeout)
pythonized = {unicode(oid): value.pythonize()
for oid, value in raw_output.items()}
return pythonized
async def bulkget(ip, community, scalar_oids, repeating_oids, max_list_size=1,
port=161, timeout=6):
# type: (str, str, List[str], List[str], int, int, int) -> BulkResult
"""
Delegates to :py:func:`~puresnmp.aio.api.raw.bulkget` but returns simple
Python types.
See the "raw" equivalent for detailed documentation & examples.
"""
raw_output = await raw.bulkget(ip, community, scalar_oids, repeating_oids,
max_list_size=max_list_size,
port=port,
timeout=timeout)
pythonized_scalars = {oid: value.pythonize()
for oid, value in raw_output.scalars.items()}
pythonized_list = OrderedDict(
[(oid, value.pythonize())
for oid, value in raw_output.listing.items()])
return BulkResult(pythonized_scalars, pythonized_list)
async def bulkwalk(ip, community, oids, bulk_size=10, port=161):
# type: (str, str, List[str], int, int) -> Generator[VarBind, None, None]
"""
Delegates to :py:func:`~puresnmp.aio.api.raw.bulkwalk` but returns simple
Python types.
See the "raw" equivalent for detailed documentation & examples.
"""
result = multiwalk(
ip, community, oids, port=port,
fetcher=raw._bulkwalk_fetcher(bulk_size)) # pylint: disable=protected-access
async for oid, value in result:
yield VarBind(oid, value)
async def table(ip, community, oid, port=161, num_base_nodes=0):
# type (str, str, str, int, int) ->
"""
Converts a "walk" result into a pseudo-table. See
:py:func:`puresnmp.aio.api.raw.table` for more information.
"""
tmp = []
async for varbind in walk(ip, community, oid, port=port):
tmp.append(varbind)
as_table = tablify(tmp, num_base_nodes=num_base_nodes)
return as_table
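# A hypothetical smoke test (not part of the original module), assuming an SNMP
# agent reachable at 127.0.0.1 with community string 'public'; 1.3.6.1.2.1.1.1.0
# is the standard sysDescr.0 OID.
if __name__ == '__main__':
    import asyncio

    async def _demo():
        descr = await get('127.0.0.1', 'public', '1.3.6.1.2.1.1.1.0')
        print('sysDescr:', descr)

    asyncio.run(_demo())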
| 35.485294
| 135
| 0.668739
| 0
| 0
| 1,659
| 0.229175
| 0
| 0
| 5,693
| 0.786435
| 3,573
| 0.493576
|
3d11c0e83e935667e3b5fa635f505dab77f68c4f
| 471
|
py
|
Python
|
dataset_models/normalization/aroundZeroNormalizer.py
|
Zvezdin/blockchain-predictor
|
df6f939037471dd50b7b9c96673d89b04b646ef2
|
[
"MIT"
] | 35
|
2017-10-25T17:10:35.000Z
|
2022-03-20T18:12:06.000Z
|
dataset_models/normalization/aroundZeroNormalizer.py
|
Zvezdin/blockchain-predictor
|
df6f939037471dd50b7b9c96673d89b04b646ef2
|
[
"MIT"
] | 2
|
2017-09-20T17:39:15.000Z
|
2018-04-01T17:20:29.000Z
|
dataset_models/normalization/aroundZeroNormalizer.py
|
Zvezdin/blockchain-predictor
|
df6f939037471dd50b7b9c96673d89b04b646ef2
|
[
"MIT"
] | 10
|
2017-12-01T13:47:04.000Z
|
2021-12-16T06:53:17.000Z
|
import numpy as np
from .normalizer import Normalizer
class AroundZeroNormalizer(Normalizer):
def __init__(self, data):
self.minVal = np.min(data)
self.maxVal = np.max(data)
self.maxVal = max(abs(self.maxVal), abs(self.minVal))
def transform(self, data):
if self.maxVal == 0: #can't normalize if the whole array is zeroes
return data
return ((data / self.maxVal) + 1) / 2
def inverse_transform(self, data):
return ((data * 2) - 1) * self.maxVal
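# A minimal round-trip sketch (not part of the original module): map a small
# array into [0, 1] centred on 0.5 and recover the original values.
if __name__ == '__main__':
    x = np.array([-2.0, -0.5, 0.0, 1.0, 2.0])
    norm = AroundZeroNormalizer(x)
    y = norm.transform(x)
    assert np.allclose(norm.inverse_transform(y), x)
    print(y)  # [0.    0.375 0.5   0.75  1.   ]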
| 23.55
| 68
| 0.694268
| 414
| 0.878981
| 0
| 0
| 0
| 0
| 0
| 0
| 45
| 0.095541
|
3d12eb10a495e1684e9f8cb66cfdd0b53f9884df
| 1,160
|
py
|
Python
|
viz/scripts/closest_over_time_with_shading.py
|
zhicongchen/histwords
|
12fb83492fdccca795d266966a8b58c13f81c54c
|
[
"Apache-2.0"
] | 2
|
2022-01-05T10:32:56.000Z
|
2022-02-14T16:45:59.000Z
|
viz/scripts/closest_over_time_with_shading.py
|
zhicongchen/histwords
|
12fb83492fdccca795d266966a8b58c13f81c54c
|
[
"Apache-2.0"
] | null | null | null |
viz/scripts/closest_over_time_with_shading.py
|
zhicongchen/histwords
|
12fb83492fdccca795d266966a8b58c13f81c54c
|
[
"Apache-2.0"
] | null | null | null |
import helpers
import sys
from representations.sequentialembedding import SequentialEmbedding
"""
Let's examine the closest neighbors for a word over time
"""
import collections
from sklearn.manifold import TSNE
import numpy as np
import matplotlib.pyplot as plt
WORDS = helpers.get_words()
if __name__ == "__main__":
embeddings = helpers.load_embeddings()
for word1 in WORDS:
time_sims, lookups, nearests, sims = helpers.get_time_sims(embeddings, word1)
helpers.clear_figure()
# we remove word1 from our words because we just want to plot the different
# related words
words = filter(lambda word: word.split("|")[0] != word1, lookups.keys())
words = list(words)
values = [lookups[word] for word in words]
fitted = helpers.fit_tsne(values)
if not len(fitted):
print("Couldn't model word", word1)
continue
cmap = helpers.get_cmap(len(time_sims))
annotations = helpers.plot_words(word1, words, fitted, cmap, sims)
helpers.savefig("%s_shaded" % word1)
for year, sim in time_sims.items():
print(year, sim)
| 28.292683
| 85
| 0.667241
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 199
| 0.171552
|
3d1335d9fc99401818ca88efe979fffcb933a101
| 10,108
|
py
|
Python
|
Products/LDAPUserFolder/interfaces.py
|
phgv/Products.LDAPUserFolder
|
eb9db778916f47a80b3df069a31d0a2100b26423
|
[
"ZPL-2.1"
] | null | null | null |
Products/LDAPUserFolder/interfaces.py
|
phgv/Products.LDAPUserFolder
|
eb9db778916f47a80b3df069a31d0a2100b26423
|
[
"ZPL-2.1"
] | null | null | null |
Products/LDAPUserFolder/interfaces.py
|
phgv/Products.LDAPUserFolder
|
eb9db778916f47a80b3df069a31d0a2100b26423
|
[
"ZPL-2.1"
] | null | null | null |
##############################################################################
#
# Copyright (c) 2000-2009 Jens Vagelpohl and Contributors. All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
""" Interfaces for LDAPUserFolder package classes
"""
from AccessControl.interfaces import IStandardUserFolder
from AccessControl.interfaces import IUser
class ILDAPUser(IUser):
""" IUser interface with extended API for the LDAPUserFolder
This interface is supported by user objects which
are returned by user validation through the LDAPUserFolder
product and used for access control.
"""
def getProperty(name, default=''):
""" Retrieve the value of a property of name "name". If this
property does not exist, the default value is returned.
Properties can be any public attributes that are part of the
user record in LDAP. Refer to them by their LDAP attribute
name or the name they have been mapped to in the LDAP User
Folder
Permission - Access contents information
"""
def getUserDN():
""" Retrieve the user object's Distinguished Name attribute.
Permission - Access contents information
"""
def getCreationTime():
""" Return a DataTime object representing the user object creation time
Permission - Access contents information
"""
class ILDAPUserFolder(IStandardUserFolder):
""" This interface lists methods available for scripting
LDAPUserFolder objects.
Some others are accessible given the correct permissions but since
they are used only in the internal workings of the LDAPUserFolder
they are not listed here.
"""
def getUsers():
""" Return all user objects. Since the number of user records in
an LDAP database is potentially very large this method will
only return those user objects that are in the internal cache
of the LDAPUserFolder and not expired.
Permission - *Manage users*
"""
def getUserNames():
""" Return a list of user IDs for all users that can be found
given the selected user search base and search scope.
This method will return a simple error message if the
number of users exceeds the limit of search hits that is
built into the python-ldap module.
Permission - *Manage users*
"""
def getUser(name):
""" Return the user object for the user "name". if the user
cannot be found, None will be returned.
Permission - *Manage users*
"""
def getUserById(id):
""" Return the user object with the UserID "id". The User ID
may be different from the "Name", the Login. To get a user
by its Login, call getUser.
Permission - *Manage users*
"""
def getGroups(dn='*', attr=None, pwd=''):
""" Return a list of available group records under the group record
base as defined in the LDAPUserFolder, or a specific group if the
``dn`` parameter is provided. The attr argument determines
what gets returned and it can have the following values:
o None: A list of tuples is returned where the group CN is the first
and the group full DN is the second element.
o cn: A list of CN strings is returned.
o dn: A list of full DN strings is returned.
Permission: *Manage users*
"""
def manage_addGroup(newgroup_name, newgroup_type='groupOfUniqueNames',
REQUEST=None):
""" Add a new group under the group record base of type
``newgroup_type``. If REQUEST is not None a MessageDialog screen will
be returned. The group_name argument forms the new group CN while the
full DN will be formed by combining this new CN with the group base DN.
Since a group record cannot be empty, meaning there must be at least
a single uniqueMember element in it, the DN given as the binduid in
the LDAPUserFolder configuration is inserted.
Permission: *Manage users*
"""
def manage_deleteGroups(dns=[], REQUEST=None):
""" Delete groups specified by a list of group DN strings which are
handed in as the *dns* argument.
Permission: *Manage users*
"""
def findUser(search_param, search_term, attrs=(), exact_match=False):
""" Find user records given the *search_param* string (which is the
name of an LDAP attribute) and the *search_term* value. The
``attrs`` argument can be used the desired attributes to return, and
``exact_match`` determines whether the search is a wildcard search
or not.
This method will return a list of dictionaries where each matching
record is represented by a dictionary. The dictionary will contain
a key/value pair for each LDAP attribute, including *dn*, that is
present for the given user record.
Permission: *Manage users*
"""
def searchUsers(attrs=(), exact_match=False, **kw):
""" Search for user records by one or more attributes.
This method takes any passed-in search parameters and values as
keyword arguments and will sort out invalid keys automatically. It
accepts all three forms an attribute can be known as, its real
ldap name, the name an attribute is mapped to explicitly, and the
friendly name it is known by.
Permission: *Manage users*
"""
def getUserDetails(encoded_dn, format=None, attrs=()):
""" Retrieves all details for a user record represented by the DN that
is handed in as the URL-encoded *encoded_dn* argument. The format
argument determines the format of the returned data and can have
two values:
o None: All user attributes are handed back as a list of tuples
where the first element of each tuple contains the LDAP attribute
name and the second element contains the value.
o dictionary: The user record is handed back as a simple dictionary
of attributes as key/value pairs.
The desired attributes can be limited by passing in a sequence of
attribute names as the attrs argument.
Permission: *Manage users*
"""
def isUnique(attr, value):
""" Determine whether a given LDAP attribute (attr) and its value
(value) are unique in the LDAP tree branch set as the user record
base in the LDAPUserFolder. This method should be called before
inserting a new user record with attr being the attribute chosen as
the login name in your LDAPUserFolder because that attribute value
must be unique.
This method will return a truth value (1) if the given attribute value
is indeed unique, 0 if it is not and in the case of an exception it
will return the string describing the exception.
Permission: *Manage users*
"""
def manage_addUser(REQUEST, kwargs):
""" Create a new user record. If REQUEST is not None, it will be
used to retrieve the values for the user record.
To use this method from Python you must pass None as the REQUEST
argument and a dictionary called *kwargs* containing key/value pairs
for the user record attributes.
The dictionary of values passed in, be it REQUEST or kwargs, must at
the very least contain the following keys and values:
o *cn* or *uid* (depending on what you set the RDN attribute to)
o *user_pw* (the new user record's password)
o *confirm_pw* (This must match password)
o all attributes your user record LDAP schema must contain (consult
your LDAP server schema)
Only those attributes and values are used that are specified on the
LDAP Schema tab of your LDAPUserFolder.
Permission: *Manage users*
"""
def manage_editUser(user_dn, REQUEST, kwargs):
""" Edit an existing user record. If REQUEST is not None, it will
be used to retrieve the values for the user record.
To use this method from Python you must pass None as the REQUEST
argument and a dictionary called *kwargs* containing key/value pairs
for the user record attributes.
Only those attributes and values are used that are specified on the
LDAP Schema tab of your LDAPUserFolder.
This method will handle modified RDN (Relative Distinguished name)
attributes correctly and execute a *modrdn* as well if needed,
including changing the DN in all group records it is part of.
Permission: *Manage users*
"""
def manage_editUserPassword(dn, new_pw, REQUEST):
""" Change a users password. The *dn* argument contains the full DN
for the user record in question and new_pw contains the new password.
Permission: *Manage users*
"""
def manage_editUserRoles(user_dn, role_dns, REQUEST):
""" Change a user's group memberships. The user is specified by a
full DN string, handed in as the *user_dn* attribute. All group
records the user is supposed to be part of are handed in as
*role_dns*, a list of DN strings for group records.
Permission: *Manage users*
"""
def manage_deleteUsers(dns, REQUEST):
""" Delete the user records given by a list of DN strings. The user
records will be deleted and their mentioning in any group record
as well.
Permission: *Manage users*
"""
| 38.580153
| 79
| 0.662347
| 9,309
| 0.920954
| 0
| 0
| 0
| 0
| 0
| 0
| 8,893
| 0.879798
|
3d162a8de2cf611aacdd649aadbeb0516127e28a
| 461
|
py
|
Python
|
arequests/exceptions.py
|
fhag/telegram2
|
65a685637b444e40ef47a17c2a3b83c2ddb81459
|
[
"BSD-2-Clause"
] | null | null | null |
arequests/exceptions.py
|
fhag/telegram2
|
65a685637b444e40ef47a17c2a3b83c2ddb81459
|
[
"BSD-2-Clause"
] | null | null | null |
arequests/exceptions.py
|
fhag/telegram2
|
65a685637b444e40ef47a17c2a3b83c2ddb81459
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Exceptions for Arequests
Created on Tue Nov 13 08:34:14 2018
@author: gfi
"""
class ArequestsError(Exception):
"""Basic exception for errors raised by Arequests"""
pass
class AuthorizationError(ArequestsError):
    '''401 error: new authentication required'''
pass
class SomeClientError(ArequestsError):
'''4xx client error'''
pass
class SomeServerError(ArequestsError):
'''5xx server error'''
pass
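# A hypothetical helper (not part of the original module) showing how these
# exception classes might be raised from an HTTP status code; the mapping is
# an assumption for illustration only.
def raise_for_status(status: int) -> None:
    if status == 401:
        raise AuthorizationError("401: new authentication required")
    if 400 <= status < 500:
        raise SomeClientError("client error {}".format(status))
    if 500 <= status < 600:
        raise SomeServerError("server error {}".format(status))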
| 19.208333
| 56
| 0.694143
| 346
| 0.750542
| 0
| 0
| 0
| 0
| 0
| 0
| 246
| 0.533623
|
3d178f904fde1c17f64bf8f943648ae02b442d5e
| 5,014
|
py
|
Python
|
NASA/Python_codes/drivers/02_remove_outliers_n_jumps/01_intersect_remove_jumps_JFD/01_remove_jumps_JFD_intersect.py
|
HNoorazar/Kirti
|
fb7108dac1190774bd90a527aaa8a3cb405f127d
|
[
"MIT"
] | null | null | null |
NASA/Python_codes/drivers/02_remove_outliers_n_jumps/01_intersect_remove_jumps_JFD/01_remove_jumps_JFD_intersect.py
|
HNoorazar/Kirti
|
fb7108dac1190774bd90a527aaa8a3cb405f127d
|
[
"MIT"
] | null | null | null |
NASA/Python_codes/drivers/02_remove_outliers_n_jumps/01_intersect_remove_jumps_JFD/01_remove_jumps_JFD_intersect.py
|
HNoorazar/Kirti
|
fb7108dac1190774bd90a527aaa8a3cb405f127d
|
[
"MIT"
] | null | null | null |
####
#### Nov 16, 2021
####
"""
Regularize the EVI and NDVI of fields in individual years for training set creation.
"""
import csv
import numpy as np
import pandas as pd
from math import factorial
import scipy
import scipy.signal
import os, os.path
from datetime import date
import datetime
import time
import sys
start_time = time.time()
# search path for modules
# look @ https://stackoverflow.com/questions/67631/how-to-import-a-module-given-the-full-path
####################################################################################
###
### Aeolus Core path
###
####################################################################################
sys.path.append('/home/hnoorazar/NASA/')
import NASA_core as nc
import NASA_plot_core as ncp
####################################################################################
###
### Parameters
###
####################################################################################
indeks = sys.argv[1]
batch_number = int(sys.argv[2])
print ("Terminal Arguments are: ")
print (indeks)
print (batch_number)
print ("__________________________________________")
if indeks == "NDVI":
NoVI = "EVI"
else:
NoVI = "NDVI"
IDcolName = "ID"
####################################################################################
###
### Aeolus Directories
###
####################################################################################
data_base = "/data/hydro/users/Hossein/NASA/"
data_dir = data_base + "/02_outliers_removed/"
SF_data_dir = "/data/hydro/users/Hossein/NASA/000_shapefile_data_part/"
output_dir = data_base + "/03_jumps_removed/"
os.makedirs(output_dir, exist_ok=True)
print ("data_dir is: " + data_dir)
print ("output_dir is: " + output_dir)
########################################################################################
###
### process data
###
########################################################################################
SF_data_IDs = pd.read_csv(SF_data_dir + "10_intersect_East_Irr_2008_2018_2cols_data_part.csv")
SF_data_IDs.sort_values(by=['ID'], inplace=True)
SF_data_IDs.reset_index(drop=True, inplace=True)
# there are
batch_size = int(np.ceil(69271/40))
batch_IDs = SF_data_IDs.loc[(batch_number-1)*batch_size : (batch_number*batch_size-1)]
out_name = output_dir + "NoJump_intersect_" + indeks + "_JFD.csv"
common_part = "T1C2L2_inters_2008_2018_EastIrr_2008-01-01_2022-01-01"
f_names = ["noOutlier_" + "L5_" + common_part + "_" + indeks + ".csv",
"noOutlier_" + "L7_" + common_part + "_" + indeks + ".csv",
"noOutlier_" + "L8_" + common_part + "_" + indeks + ".csv"]
L5 = pd.read_csv(data_dir + f_names[0], low_memory=False)
L7 = pd.read_csv(data_dir + f_names[1], low_memory=False)
L8 = pd.read_csv(data_dir + f_names[2], low_memory=False)
L5.drop([NoVI], axis=1, inplace=True)
L5 = L5[L5[indeks].notna()]
L7.drop([NoVI], axis=1, inplace=True)
L7 = L7[L7[indeks].notna()]
L8.drop([NoVI], axis=1, inplace=True)
L8 = L8[L8[indeks].notna()]
L578 = pd.concat([L5, L7, L8])
del(L5, L7, L8)
L578['human_system_start_time'] = pd.to_datetime(L578['human_system_start_time'])
L578["ID"] = L578["ID"].astype(str)
L578 = L578[L578.ID.isin(list(batch_IDs.ID))].copy()
########################################################################################
###
### List of unique polygons
###
IDs = L578[IDcolName].unique()
print(len(IDs))
########################################################################################
###
### initialize output data.
###
output_df = pd.DataFrame(data = None,
index = np.arange(L578.shape[0]),
columns = L578.columns)
counter = 0
row_pointer = 0
for a_poly in IDs:
if (counter % 1000 == 0):
print (counter)
curr_field = L578[L578[IDcolName]==a_poly].copy()
################################################################
# Sort by DoY (sanitary check)
curr_field.sort_values(by=['human_system_start_time'], inplace=True)
curr_field.reset_index(drop=True, inplace=True)
################################################################
no_Outlier_TS = nc.correct_big_jumps_1DaySeries_JFD(dataTMS_jumpie = curr_field,
give_col = indeks,
maxjump_perDay = 0.018)
output_df[row_pointer: row_pointer + curr_field.shape[0]] = no_Outlier_TS.values
counter += 1
row_pointer += curr_field.shape[0]
####################################################################################
###
### Write the outputs
###
####################################################################################
output_df.drop_duplicates(inplace=True)
output_df.to_csv(out_name, index = False)
end_time = time.time()
print ("it took {:.0f} minutes to run this code.".format((end_time - start_time)/60))
| 31.936306
| 94
| 0.500798
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,343
| 0.467292
|
3d17f39c53c3cfba5e53d1120441f6ea46dbc0cf
| 7,227
|
py
|
Python
|
pyquante2/ints/one.py
|
Konjkov/pyquante2
|
4ca0c8c078cafb769d20a4624b9bd907a748b1a2
|
[
"BSD-3-Clause"
] | null | null | null |
pyquante2/ints/one.py
|
Konjkov/pyquante2
|
4ca0c8c078cafb769d20a4624b9bd907a748b1a2
|
[
"BSD-3-Clause"
] | null | null | null |
pyquante2/ints/one.py
|
Konjkov/pyquante2
|
4ca0c8c078cafb769d20a4624b9bd907a748b1a2
|
[
"BSD-3-Clause"
] | null | null | null |
"""
One electron integrals.
"""
from numpy import pi,exp,floor,array,isclose
from math import factorial
from pyquante2.utils import binomial, fact2, Fgamma, norm2
# Notes:
# The versions S,T,V include the normalization constants
# The version overlap,kinetic,nuclear_attraction do not.
# This is so, for example, the kinetic routines can call the potential routines
# without the normalization constants getting in the way.
def S(a,b):
"""
Simple interface to the overlap function.
>>> from pyquante2 import pgbf,cgbf
>>> s = pgbf(1)
>>> isclose(S(s,s),1.0)
True
>>> sc = cgbf(exps=[1],coefs=[1])
>>> isclose(S(sc,sc),1.0)
True
>>> sc = cgbf(exps=[1],coefs=[1])
>>> isclose(S(sc,s),1.0)
True
>>> isclose(S(s,sc),1.0)
True
"""
if b.contracted:
return sum(cb*S(pb,a) for (cb,pb) in b)
elif a.contracted:
return sum(ca*S(b,pa) for (ca,pa) in a)
return a.norm*b.norm*overlap(a.exponent,a.powers,
a.origin,b.exponent,b.powers,b.origin)
def T(a,b):
"""
Simple interface to the kinetic function.
>>> from pyquante2 import pgbf,cgbf
>>> from pyquante2.basis.pgbf import pgbf
>>> s = pgbf(1)
>>> isclose(T(s,s),1.5)
True
>>> sc = cgbf(exps=[1],coefs=[1])
>>> isclose(T(sc,sc),1.5)
True
>>> sc = cgbf(exps=[1],coefs=[1])
>>> isclose(T(sc,s),1.5)
True
>>> isclose(T(s,sc),1.5)
True
"""
if b.contracted:
return sum(cb*T(pb,a) for (cb,pb) in b)
elif a.contracted:
return sum(ca*T(b,pa) for (ca,pa) in a)
return a.norm*b.norm*kinetic(a.exponent,a.powers,a.origin,
b.exponent,b.powers,b.origin)
def V(a,b,C):
"""
Simple interface to the nuclear attraction function.
>>> from pyquante2 import pgbf,cgbf
>>> s = pgbf(1)
>>> isclose(V(s,s,(0,0,0)),-1.595769)
True
>>> sc = cgbf(exps=[1],coefs=[1])
>>> isclose(V(sc,sc,(0,0,0)),-1.595769)
True
>>> sc = cgbf(exps=[1],coefs=[1])
>>> isclose(V(sc,s,(0,0,0)),-1.595769)
True
>>> isclose(V(s,sc,(0,0,0)),-1.595769)
True
"""
if b.contracted:
return sum(cb*V(pb,a,C) for (cb,pb) in b)
elif a.contracted:
return sum(ca*V(b,pa,C) for (ca,pa) in a)
return a.norm*b.norm*nuclear_attraction(a.exponent,a.powers,a.origin,
b.exponent,b.powers,b.origin,C)
def overlap(alpha1,lmn1,A,alpha2,lmn2,B):
"""
Full form of the overlap integral. Taken from THO eq. 2.12
>>> isclose(overlap(1,(0,0,0),array((0,0,0),'d'),1,(0,0,0),array((0,0,0),'d')),1.968701)
True
"""
l1,m1,n1 = lmn1
l2,m2,n2 = lmn2
rab2 = norm2(A-B)
gamma = alpha1+alpha2
P = gaussian_product_center(alpha1,A,alpha2,B)
pre = pow(pi/gamma,1.5)*exp(-alpha1*alpha2*rab2/gamma)
wx = overlap1d(l1,l2,P[0]-A[0],P[0]-B[0],gamma)
wy = overlap1d(m1,m2,P[1]-A[1],P[1]-B[1],gamma)
wz = overlap1d(n1,n2,P[2]-A[2],P[2]-B[2],gamma)
return pre*wx*wy*wz
def overlap1d(l1,l2,PAx,PBx,gamma):
"""
The one-dimensional component of the overlap integral. Taken from THO eq. 2.12
>>> isclose(overlap1d(0,0,0,0,1),1.0)
True
"""
total = 0
for i in range(1+int(floor(0.5*(l1+l2)))):
total += binomial_prefactor(2*i,l1,l2,PAx,PBx)* \
fact2(2*i-1)/pow(2*gamma,i)
return total
def gaussian_product_center(alpha1,A,alpha2,B):
"""
The center of the Gaussian resulting from the product of two Gaussians:
>>> gaussian_product_center(1,array((0,0,0),'d'),1,array((0,0,0),'d'))
array([ 0., 0., 0.])
"""
return (alpha1*A+alpha2*B)/(alpha1+alpha2)
def binomial_prefactor(s,ia,ib,xpa,xpb):
"""
The integral prefactor containing the binomial coefficients from Augspurger and Dykstra.
>>> binomial_prefactor(0,0,0,0,0)
1
"""
total= 0
for t in range(s+1):
if s-ia <= t <= ib:
total += binomial(ia,s-t)*binomial(ib,t)* \
pow(xpa,ia-s+t)*pow(xpb,ib-t)
return total
def kinetic(alpha1,lmn1,A,alpha2,lmn2,B):
"""
The full form of the kinetic energy integral
>>> isclose(kinetic(1,(0,0,0),array((0,0,0),'d'),1,(0,0,0),array((0,0,0),'d')),2.953052)
True
"""
l1,m1,n1 = lmn1
l2,m2,n2 = lmn2
term0 = alpha2*(2*(l2+m2+n2)+3)*\
overlap(alpha1,(l1,m1,n1),A,\
alpha2,(l2,m2,n2),B)
term1 = -2*pow(alpha2,2)*\
(overlap(alpha1,(l1,m1,n1),A,
alpha2,(l2+2,m2,n2),B)
+ overlap(alpha1,(l1,m1,n1),A,
alpha2,(l2,m2+2,n2),B)
+ overlap(alpha1,(l1,m1,n1),A,
alpha2,(l2,m2,n2+2),B))
term2 = -0.5*(l2*(l2-1)*overlap(alpha1,(l1,m1,n1),A,
alpha2,(l2-2,m2,n2),B) +
m2*(m2-1)*overlap(alpha1,(l1,m1,n1),A,
alpha2,(l2,m2-2,n2),B) +
n2*(n2-1)*overlap(alpha1,(l1,m1,n1),A,
alpha2,(l2,m2,n2-2),B))
return term0+term1+term2
def nuclear_attraction(alpha1,lmn1,A,alpha2,lmn2,B,C):
"""
Full form of the nuclear attraction integral
>>> isclose(nuclear_attraction(1,(0,0,0),array((0,0,0),'d'),1,(0,0,0),array((0,0,0),'d'),array((0,0,0),'d')),-3.141593)
True
"""
l1,m1,n1 = lmn1
l2,m2,n2 = lmn2
gamma = alpha1+alpha2
P = gaussian_product_center(alpha1,A,alpha2,B)
rab2 = norm2(A-B)
rcp2 = norm2(C-P)
dPA = P-A
dPB = P-B
dPC = P-C
Ax = A_array(l1,l2,dPA[0],dPB[0],dPC[0],gamma)
Ay = A_array(m1,m2,dPA[1],dPB[1],dPC[1],gamma)
Az = A_array(n1,n2,dPA[2],dPB[2],dPC[2],gamma)
total = 0.
for I in range(l1+l2+1):
for J in range(m1+m2+1):
for K in range(n1+n2+1):
total += Ax[I]*Ay[J]*Az[K]*Fgamma(I+J+K,rcp2*gamma)
val= -2*pi/gamma*exp(-alpha1*alpha2*rab2/gamma)*total
return val
def A_term(i,r,u,l1,l2,PAx,PBx,CPx,gamma):
"""
THO eq. 2.18
>>> A_term(0,0,0,0,0,0,0,0,1)
1.0
>>> A_term(0,0,0,0,1,1,1,1,1)
1.0
>>> A_term(1,0,0,0,1,1,1,1,1)
-1.0
>>> A_term(0,0,0,1,1,1,1,1,1)
1.0
>>> A_term(1,0,0,1,1,1,1,1,1)
-2.0
>>> A_term(2,0,0,1,1,1,1,1,1)
1.0
>>> A_term(2,0,1,1,1,1,1,1,1)
-0.5
>>> A_term(2,1,0,1,1,1,1,1,1)
0.5
"""
return pow(-1,i)*binomial_prefactor(i,l1,l2,PAx,PBx)*\
pow(-1,u)*factorial(i)*pow(CPx,i-2*r-2*u)*\
pow(0.25/gamma,r+u)/factorial(r)/factorial(u)/factorial(i-2*r-2*u)
def A_array(l1,l2,PA,PB,CP,g):
"""
THO eq. 2.18 and 3.1
>>> A_array(0,0,0,0,0,1)
[1.0]
>>> A_array(0,1,1,1,1,1)
[1.0, -1.0]
>>> A_array(1,1,1,1,1,1)
[1.5, -2.5, 1.0]
"""
Imax = l1+l2+1
A = [0]*Imax
for i in range(Imax):
for r in range(int(floor(i/2)+1)):
for u in range(int(floor((i-2*r)/2)+1)):
I = i-2*r-u
A[I] = A[I] + A_term(i,r,u,l1,l2,PA,PB,CP,g)
return A
if __name__ == '__main__':
import doctest; doctest.testmod()
| 28.908
| 123
| 0.530372
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3,007
| 0.416079
|
3d189022514ffa92e24cccd1441a05b0577b4e2e
| 2,169
|
py
|
Python
|
tests/test_pyhive_runBCFTools_VC.py
|
elowy01/igsr_analysis
|
ffea4885227c2299f886a4f41e70b6e1f6bb43da
|
[
"Apache-2.0"
] | 3
|
2018-04-20T15:04:34.000Z
|
2022-03-30T06:36:02.000Z
|
tests/test_pyhive_runBCFTools_VC.py
|
elowy01/igsr_analysis
|
ffea4885227c2299f886a4f41e70b6e1f6bb43da
|
[
"Apache-2.0"
] | 7
|
2019-06-06T09:22:20.000Z
|
2021-11-23T17:41:52.000Z
|
tests/test_pyhive_runBCFTools_VC.py
|
elowy01/igsr_analysis
|
ffea4885227c2299f886a4f41e70b6e1f6bb43da
|
[
"Apache-2.0"
] | 5
|
2017-11-02T11:17:35.000Z
|
2021-12-11T19:34:09.000Z
|
import os
import subprocess
import glob
import pytest
# test_pyhive_runBCFTools_VC.py
def test_runBCFTools_VC(bcftools_folder, hive_dir, datadir, clean_tmp):
"""
Test function to run BCFTools mpileup|call on a BAM file
"""
bam_file = "{0}/exampleBAM.bam".format(datadir)
reference = "{0}/exampleFASTA.fasta".format(datadir)
work_dir = "{0}/outdir/".format(datadir)
annots = "\"['DP','SP','AD']\""
command = "perl {0}/scripts/standaloneJob.pl PyHive.VariantCalling.BCFTools_caller -language python3 \
-outprefix {1} -work_dir {2} -chunk {3} -bam {4} -reference {5} \
-bcftools_folder {6} -annots {7} -verbose True".format(hive_dir, 'out', work_dir,
"\"['chr1','10000','30000']\"", bam_file,
reference, bcftools_folder, annots)
try:
subprocess.check_output(command, shell=True)
assert True
    except subprocess.CalledProcessError as exc:
        # fail the test and surface the captured command output
        raise Exception(exc.output) from exc
def test_runBCFTools_VC_woptions(bcftools_folder, hive_dir, datadir, clean_tmp):
"""
Test function to run BCFTools mpileup|call on a BAM file
using some options and arguments
"""
bam_file = "{0}/exampleBAM.bam".format(datadir)
reference = "{0}/exampleFASTA.fasta".format(datadir)
work_dir = "{0}/outdir/".format(datadir)
annots = "\"['DP','SP','AD']\""
command = "perl {0}/scripts/standaloneJob.pl PyHive.VariantCalling.BCFTools_caller -language python3 \
-outprefix {1} -work_dir {2} -chunk {3} -bam {4} -reference {5} \
-bcftools_folder {6} -annots {7} -E 1 -p 1 -m_pileup 3 -m_call 1 -v 1 " \
"-F 0.05 -C 25 -verbose True".format(hive_dir, 'out', work_dir,
"\"['chr1','10000','30000']\"", bam_file,
reference, bcftools_folder, annots)
try:
subprocess.check_output(command, shell=True)
assert True
    except subprocess.CalledProcessError as exc:
        # fail the test and surface the captured command output
        raise Exception(exc.output) from exc
| 38.052632
| 106
| 0.599355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 921
| 0.42462
|
3d192b649d8b6388f0dcd7b9e46896429e77993c
| 2,497
|
py
|
Python
|
src/sentry/api/endpoints/organization_projects.py
|
seukjung/sentry-custom
|
c5f6bb2019aef3caff7f3e2b619f7a70f2b9b963
|
[
"BSD-3-Clause"
] | 1
|
2021-01-13T15:40:03.000Z
|
2021-01-13T15:40:03.000Z
|
src/sentry/api/endpoints/organization_projects.py
|
fotinakis/sentry
|
c5cfa5c5e47475bf5ef41e702548c2dfc7bb8a7c
|
[
"BSD-3-Clause"
] | 8
|
2019-12-28T23:49:55.000Z
|
2022-03-02T04:34:18.000Z
|
src/sentry/api/endpoints/organization_projects.py
|
fotinakis/sentry
|
c5cfa5c5e47475bf5ef41e702548c2dfc7bb8a7c
|
[
"BSD-3-Clause"
] | 1
|
2017-04-08T04:09:18.000Z
|
2017-04-08T04:09:18.000Z
|
from __future__ import absolute_import
import six
from rest_framework.response import Response
from sentry.api.base import DocSection
from sentry.api.bases.organization import OrganizationEndpoint
from sentry.api.serializers import serialize
from sentry.models import Project, Team
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListOrganizationProjects')
def list_organization_projects_scenario(runner):
runner.request(
method='GET',
path='/organizations/%s/projects/' % runner.org.slug
)
class OrganizationProjectsEndpoint(OrganizationEndpoint):
doc_section = DocSection.ORGANIZATIONS
@attach_scenarios([list_organization_projects_scenario])
def get(self, request, organization):
"""
List an Organization's Projects
```````````````````````````````
Return a list of projects bound to a organization.
:pparam string organization_slug: the slug of the organization for
which the projects should be listed.
:auth: required
"""
if request.auth and not request.user.is_authenticated():
# TODO: remove this, no longer supported probably
if hasattr(request.auth, 'project'):
team_list = [request.auth.project.team]
project_list = [request.auth.project]
elif request.auth.organization is not None:
org = request.auth.organization
team_list = list(Team.objects.filter(
organization=org,
))
project_list = list(Project.objects.filter(
team__in=team_list,
).order_by('name'))
else:
return Response({'detail': 'Current access does not point to '
'organization.'}, status=400)
else:
team_list = list(request.access.teams)
project_list = list(Project.objects.filter(
team__in=team_list,
).order_by('name'))
team_map = {
d['id']: d
for d in serialize(team_list, request.user)
}
context = []
for project, pdata in zip(project_list, serialize(project_list, request.user)):
assert six.text_type(project.id) == pdata['id']
pdata['team'] = team_map[six.text_type(project.team_id)]
context.append(pdata)
return Response(context)
| 35.169014
| 87
| 0.60793
| 1,951
| 0.781338
| 0
| 0
| 2,040
| 0.81698
| 0
| 0
| 536
| 0.214658
|
3d19367388f755b58d5ae7968cf859f7a856e8cf
| 3,708
|
py
|
Python
|
account/models.py
|
Vicynet/kwiktalk
|
198efdd5965cc0cd3ee8dcf5e469d9022330ec25
|
[
"bzip2-1.0.6"
] | null | null | null |
account/models.py
|
Vicynet/kwiktalk
|
198efdd5965cc0cd3ee8dcf5e469d9022330ec25
|
[
"bzip2-1.0.6"
] | null | null | null |
account/models.py
|
Vicynet/kwiktalk
|
198efdd5965cc0cd3ee8dcf5e469d9022330ec25
|
[
"bzip2-1.0.6"
] | null | null | null |
from django.db import models
from django.conf import settings
from django.contrib.auth.models import User
from django.shortcuts import render
from cloudinary.models import CloudinaryField
from .utils import get_random_code
from django.template.defaultfilters import slugify
from django.contrib.auth import get_user_model
from kwikposts.models import KwikPost, Comment, Like
from django.db.models import Q
# Create your models here.
class ProfileManager(models.Manager):
def get_all_profiles_to_invite(self, sender):
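        # Return the profiles that are not already in an 'accepted' relationship with the sender.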
profiles = Profile.objects.all().exclude(user=sender)
profile = Profile.objects.get(user=sender)
friend_relation = Relationship.objects.filter(Q(sender=profile) | Q(receiver=profile))
print(friend_relation)
accepted = set([])
for rel in friend_relation:
if rel.status == 'accepted':
accepted.add(rel.receiver)
accepted.add(rel.sender)
print(accepted)
available = [profile for profile in profiles if profile not in accepted]
print(available)
return available
class Profile(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
phone_number = models.CharField(max_length=11, blank=True, null=True)
date_of_birth = models.DateField(blank=True, null=True)
display_picture = CloudinaryField('users/%Y/%m/%d/', default='default_avatar.png')
bio = models.CharField(max_length=140, blank=True, null=True)
gender = models.CharField(max_length=20, blank=True, null=True)
friends = models.ManyToManyField(User, blank=True, related_name='friends')
slug = models.SlugField(unique=True, blank=True)
updated_at = models.DateTimeField(auto_now=True)
created_at = models.DateTimeField(auto_now_add=True)
def get_friends(self):
return self.friends.all()
def get_friends_number(self):
return self.friends.all().count()
    def get_likes_given(self):
        # Count the likes this user has given; assumes the Like model exposes a `user`
        # foreign key and a `value` field holding 'Like'/'Unlike'.
        likes = Like.objects.filter(user=self.user)
        total_liked = 0
        for item in likes:
            if item.value == 'Like':
                total_liked += 1
        return total_liked
def __str__(self):
return f"Profile for user {self.user.username}-{self.created_at.strftime('%d-%m-%Y')}"
def save(self, *args, **kwargs):
ex = False
if self.user.first_name and self.user.last_name:
to_slug = slugify(str(self.user.first_name) + " " + str(self.user.last_name))
ex = Profile.objects.filter(slug=to_slug).exists()
while ex:
to_slug = slugify(to_slug + " " + str(get_random_code()))
ex = Profile.objects.filter(slug=to_slug).exists()
else:
to_slug = str(self.user)
self.slug = to_slug
super().save(*args, **kwargs)
STATUS_CHOICES = (
('send', 'send'),
('accepted', 'accepted')
)
class RelationshipManager(models.Manager):
def invitations_received(self, receiver):
new_invitation = Relationship.objects.filter(receiver=receiver, status='send')
return new_invitation
class Relationship(models.Model):
sender = models.ForeignKey(Profile, on_delete=models.CASCADE, related_name='sender')
receiver = models.ForeignKey(Profile, on_delete=models.CASCADE, related_name='receiver')
status = models.CharField(max_length=8, choices=STATUS_CHOICES)
updated_at = models.DateTimeField(auto_now=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = RelationshipManager()
class Meta:
ordering = ('-created_at',)
def __str__(self):
return f"{self.sender} follows {self.receiver}-{self.status}"
| 35.653846
| 94
| 0.682848
| 3,188
| 0.859763
| 0
| 0
| 0
| 0
| 0
| 0
| 296
| 0.079827
|
3d1a374772b07f26b88bbef32d5d37abe99122f6
| 6,708
|
py
|
Python
|
senta/data/field_reader/generate_label_field_reader.py
|
zgzwelldone/Senta
|
e01986dd17217bed82023c81d06588d63e0e19c7
|
[
"Apache-2.0"
] | null | null | null |
senta/data/field_reader/generate_label_field_reader.py
|
zgzwelldone/Senta
|
e01986dd17217bed82023c81d06588d63e0e19c7
|
[
"Apache-2.0"
] | null | null | null |
senta/data/field_reader/generate_label_field_reader.py
|
zgzwelldone/Senta
|
e01986dd17217bed82023c81d06588d63e0e19c7
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
:py:class:`GenerateLabelFieldReader`
"""
import numpy as np
from senta.common.register import RegisterSet
from senta.common.rule import DataShape, FieldLength, InstanceName
from senta.data.field_reader.base_field_reader import BaseFieldReader
from senta.data.util_helper import generate_pad_batch_data
from senta.modules.token_embedding.custom_fluid_embedding import CustomFluidTokenEmbedding
@RegisterSet.field_reader.register
class GenerateLabelFieldReader(BaseFieldReader):
"""seq2seq label的专用field_reader
"""
def __init__(self, field_config):
"""
:param field_config:
"""
BaseFieldReader.__init__(self, field_config=field_config)
self.paddle_version_code = 1.6
if self.field_config.tokenizer_info:
tokenizer_class = RegisterSet.tokenizer.__getitem__(self.field_config.tokenizer_info["type"])
params = None
if self.field_config.tokenizer_info.__contains__("params"):
params = self.field_config.tokenizer_info["params"]
self.tokenizer = tokenizer_class(vocab_file=self.field_config.vocab_path,
split_char=self.field_config.tokenizer_info["split_char"],
unk_token=self.field_config.tokenizer_info["unk_token"],
params=params)
if self.field_config.embedding_info and self.field_config.embedding_info["use_reader_emb"]:
self.token_embedding = CustomFluidTokenEmbedding(emb_dim=self.field_config.embedding_info["emb_dim"],
vocab_size=self.tokenizer.vocabulary.get_vocab_size())
def init_reader(self):
""" 初始化reader格式
:return: reader的shape[]、type[]、level[]
"""
shape = []
types = []
levels = []
"""train_tar_ids"""
if self.field_config.data_type == DataShape.STRING:
"""src_ids"""
shape.append([-1, self.field_config.max_seq_len])
levels.append(0)
types.append('int64')
else:
raise TypeError("GenerateLabelFieldReader's data_type must be string")
"""mask_ids"""
shape.append([-1, self.field_config.max_seq_len])
levels.append(0)
types.append('float32')
"""seq_lens"""
shape.append([-1])
levels.append(0)
types.append('int64')
"""infer_tar_ids"""
shape.append([-1, self.field_config.max_seq_len, 1])
levels.append(0)
types.append('int64')
"""mask_ids"""
shape.append([-1, self.field_config.max_seq_len])
levels.append(0)
types.append('float32')
"""seq_lens"""
shape.append([-1])
levels.append(0)
types.append('int64')
return shape, types, levels
def convert_texts_to_ids(self, batch_text):
"""将一个batch的明文text转成id
:param batch_text:
:return:
"""
train_src_ids = []
infer_src_ids = []
for text in batch_text:
if self.field_config.need_convert:
tokens = self.tokenizer.tokenize(text)
src_id = self.tokenizer.convert_tokens_to_ids(tokens)
else:
src_id = text.split(" ")
            # apply the truncation policy
if len(src_id) > self.field_config.max_seq_len - 1:
src_id = src_id[0:self.field_config.max_seq_len - 1]
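            # the training label sequence is prefixed with the start id;
            # the inference target is suffixed with the end id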
train_src_id = [self.field_config.label_start_id] + src_id
infer_src_id = src_id + [self.field_config.label_end_id]
train_src_ids.append(train_src_id)
infer_src_ids.append(infer_src_id)
return_list = []
train_label_ids, train_label_mask, label_lens = generate_pad_batch_data(train_src_ids,
pad_idx=self.field_config.padding_id,
return_input_mask=True,
return_seq_lens=True,
paddle_version_code=self.paddle_version_code)
infer_label_ids, infer_label_mask, label_lens = generate_pad_batch_data(infer_src_ids,
pad_idx=self.field_config.padding_id,
return_input_mask=True,
return_seq_lens=True,
paddle_version_code=self.paddle_version_code)
infer_label_ids = np.reshape(infer_label_ids, (infer_label_ids.shape[0], infer_label_ids.shape[1], 1))
return_list.append(train_label_ids)
return_list.append(train_label_mask)
return_list.append(label_lens)
return_list.append(infer_label_ids)
return_list.append(infer_label_mask)
return_list.append(label_lens)
return return_list
def structure_fields_dict(self, slots_id, start_index, need_emb=True):
"""静态图调用的方法,生成一个dict, dict有两个key:id , emb. id对应的是pyreader读出来的各个field产出的id,emb对应的是各个
field对应的embedding
:param slots_id: pyreader输出的完整的id序列
:param start_index:当前需要处理的field在slot_id_list中的起始位置
:param need_emb:是否需要embedding(预测过程中是不需要embedding的)
:return:
"""
record_id_dict = {}
record_id_dict[InstanceName.TRAIN_LABEL_SRC_IDS] = slots_id[start_index]
record_id_dict[InstanceName.TRAIN_LABEL_MASK_IDS] = slots_id[start_index + 1]
record_id_dict[InstanceName.TRAIN_LABEL_SEQ_LENS] = slots_id[start_index + 2]
record_id_dict[InstanceName.INFER_LABEL_SRC_IDS] = slots_id[start_index + 3]
record_id_dict[InstanceName.INFER_LABEL_MASK_IDS] = slots_id[start_index + 4]
record_id_dict[InstanceName.INFER_LABEL_SEQ_LENS] = slots_id[start_index + 5]
record_emb_dict = None
if need_emb and self.token_embedding:
record_emb_dict = self.token_embedding.get_token_embedding(record_id_dict)
record_dict = {}
record_dict[InstanceName.RECORD_ID] = record_id_dict
record_dict[InstanceName.RECORD_EMB] = record_emb_dict
return record_dict
def get_field_length(self):
"""获取当前这个field在进行了序列化之后,在slot_id_list中占多少长度
:return:
"""
return FieldLength.GENERATE_LABEL_FIELD
| 42.455696
| 125
| 0.595707
| 6,500
| 0.933908
| 0
| 0
| 6,535
| 0.938937
| 0
| 0
| 1,213
| 0.174282
|
3d1af19d66ed56a399f8f9e67b61d733395f81e4
| 1,270
|
py
|
Python
|
algorithm/python/alphabet_board_path.py
|
cocoa-maemae/leetcode
|
b7724b4d10387797167b18ec36d77e7418a6d85a
|
[
"MIT"
] | 1
|
2021-09-29T11:22:02.000Z
|
2021-09-29T11:22:02.000Z
|
algorithm/python/alphabet_board_path.py
|
cocoa-maemae/leetcode
|
b7724b4d10387797167b18ec36d77e7418a6d85a
|
[
"MIT"
] | null | null | null |
algorithm/python/alphabet_board_path.py
|
cocoa-maemae/leetcode
|
b7724b4d10387797167b18ec36d77e7418a6d85a
|
[
"MIT"
] | null | null | null |
class Solution:
def alphabetBoardPath(self, target):
"""
Time Complexity: O(N)
Space Complexity: O(N)
"""
m = {c: [i // 5, i % 5] for i, c in enumerate("abcdefghijklmnopqrstuvwxyz")}
x0, y0 = 0, 0
res = []
for c in target:
x, y = m[c]
if y < y0:
res.append("L" * (y0 - y))
if x < x0:
res.append("U" * (x0 - x))
if x > x0:
res.append("D" * (x - x0))
if y > y0:
res.append("R" * (y - y0))
res.append("!")
x0, y0 = x, y
return "".join(res)
def stringToString(input):
import json
return json.loads(input)
def main():
import sys
import io
def readlines():
for line in io.TextIOWrapper(sys.stdin.buffer, encoding="utf-8"):
yield line.strip("\n")
lines = readlines()
while True:
try:
line = next(lines)
#target = stringToString(line)
target = line
ret = Solution().alphabetBoardPath(target)
out = ret
print(out)
break
except StopIteration:
break
if __name__ == "__main__":
main()
| 23.090909
| 84
| 0.451969
| 660
| 0.519685
| 491
| 0.386614
| 0
| 0
| 0
| 0
| 172
| 0.135433
|
3d1b05e27dcbcf9ee33da727db0c1fba95fb1a61
| 20,881
|
py
|
Python
|
src/virtual-wan/azext_vwan/custom.py
|
michimune/azure-cli-extensions
|
697e2c674e5c0825d44c72d714542fe01331e107
|
[
"MIT"
] | 1
|
2022-03-22T15:02:32.000Z
|
2022-03-22T15:02:32.000Z
|
src/virtual-wan/azext_vwan/custom.py
|
michimune/azure-cli-extensions
|
697e2c674e5c0825d44c72d714542fe01331e107
|
[
"MIT"
] | 1
|
2021-02-10T22:04:59.000Z
|
2021-02-10T22:04:59.000Z
|
src/virtual-wan/azext_vwan/custom.py
|
michimune/azure-cli-extensions
|
697e2c674e5c0825d44c72d714542fe01331e107
|
[
"MIT"
] | 1
|
2021-06-03T19:31:10.000Z
|
2021-06-03T19:31:10.000Z
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from knack.util import CLIError
from knack.log import get_logger
from azure.cli.core.util import sdk_no_wait
from ._client_factory import network_client_factory, network_client_route_table_factory
from ._util import _get_property
logger = get_logger(__name__)
class UpdateContext(object):
def __init__(self, instance):
self.instance = instance
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
pass
def update_param(self, prop, value, allow_clear):
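        # An empty string clears the property when allow_clear is set; None leaves it untouched.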
if value == '' and allow_clear:
setattr(self.instance, prop, None)
elif value is not None:
setattr(self.instance, prop, value)
def _generic_list(cli_ctx, operation_name, resource_group_name):
ncf = network_client_factory(cli_ctx)
operation_group = getattr(ncf, operation_name)
if resource_group_name:
return operation_group.list_by_resource_group(resource_group_name)
return operation_group.list()
def _get_property(items, name):
result = next((x for x in items if x.name.lower() == name.lower()), None)
if not result:
raise CLIError("Property '{}' does not exist".format(name))
return result
def _upsert(parent, collection_name, obj_to_add, key_name, warn=True):
if not getattr(parent, collection_name, None):
setattr(parent, collection_name, [])
collection = getattr(parent, collection_name, None)
value = getattr(obj_to_add, key_name)
if value is None:
raise CLIError(
"Unable to resolve a value for key '{}' with which to match.".format(key_name))
match = next((x for x in collection if getattr(x, key_name, None) == value), None)
if match:
if warn:
logger.warning("Item '%s' already exists. Replacing with new values.", value)
collection.remove(match)
collection.append(obj_to_add)
def _find_item_at_path(instance, path):
# path accepts the pattern property/name/property/name
curr_item = instance
path_comps = path.split('.')
for i, comp in enumerate(path_comps):
if i % 2:
# name
curr_item = next((x for x in curr_item if x.name == comp), None)
else:
# property
curr_item = getattr(curr_item, comp, None)
if not curr_item:
raise CLIError("not found: '{}' not found at path '{}'".format(comp, '.'.join(path_comps[:i])))
return curr_item
# region VirtualWAN
def create_virtual_wan(cmd, resource_group_name, virtual_wan_name, tags=None, location=None,
security_provider_name=None, branch_to_branch_traffic=None,
vnet_to_vnet_traffic=None, office365_category=None, disable_vpn_encryption=None,
vwan_type=None):
client = network_client_factory(cmd.cli_ctx).virtual_wans
VirtualWAN = cmd.get_models('VirtualWAN')
wan = VirtualWAN(
tags=tags,
location=location,
disable_vpn_encryption=disable_vpn_encryption,
security_provider_name=security_provider_name,
allow_branch_to_branch_traffic=branch_to_branch_traffic,
allow_vnet_to_vnet_traffic=vnet_to_vnet_traffic,
office365_local_breakout_category=office365_category,
type=vwan_type
)
return client.create_or_update(resource_group_name, virtual_wan_name, wan)
def update_virtual_wan(instance, tags=None, security_provider_name=None, branch_to_branch_traffic=None,
vnet_to_vnet_traffic=None, office365_category=None, disable_vpn_encryption=None,
vwan_type=None):
with UpdateContext(instance) as c:
c.update_param('tags', tags, True)
c.update_param('security_provider_name', security_provider_name, False)
c.update_param('allow_branch_to_branch_traffic', branch_to_branch_traffic, False)
c.update_param('allow_vnet_to_vnet_traffic', vnet_to_vnet_traffic, False)
c.update_param('office365_local_breakout_category', office365_category, False)
c.update_param('disable_vpn_encryption', disable_vpn_encryption, False)
c.update_param('type', vwan_type, False)
return instance
def list_virtual_wans(cmd, resource_group_name=None):
return _generic_list(cmd.cli_ctx, 'virtual_wans', resource_group_name)
# endregion
# region VirtualHubs
def create_virtual_hub(cmd, resource_group_name, virtual_hub_name, address_prefix, virtual_wan,
location=None, tags=None, no_wait=False, sku=None):
client = network_client_factory(cmd.cli_ctx).virtual_hubs
VirtualHub, SubResource = cmd.get_models('VirtualHub', 'SubResource')
hub = VirtualHub(
tags=tags,
location=location,
address_prefix=address_prefix,
virtual_wan=SubResource(id=virtual_wan),
sku=sku
)
return sdk_no_wait(no_wait, client.create_or_update,
resource_group_name, virtual_hub_name, hub)
def update_virtual_hub(instance, cmd, address_prefix=None, virtual_wan=None, tags=None, sku=None):
SubResource = cmd.get_models('SubResource')
with UpdateContext(instance) as c:
c.update_param('tags', tags, True)
c.update_param('address_prefix', address_prefix, False)
c.update_param('virtual_wan', SubResource(id=virtual_wan) if virtual_wan else None, False)
c.update_param('sku', sku, False)
return instance
def list_virtual_hubs(cmd, resource_group_name=None):
return _generic_list(cmd.cli_ctx, 'virtual_hubs', resource_group_name)
def create_hub_vnet_connection(cmd, resource_group_name, virtual_hub_name, connection_name, remote_virtual_network,
allow_hub_to_remote_vnet_transit=None, allow_remote_vnet_to_use_hub_vnet_gateways=None,
enable_internet_security=None, no_wait=False):
HubVirtualNetworkConnection, SubResource = cmd.get_models(
'HubVirtualNetworkConnection', 'SubResource')
client = network_client_factory(cmd.cli_ctx).virtual_hubs
hub = client.get(resource_group_name, virtual_hub_name)
connection = HubVirtualNetworkConnection(
name=connection_name,
remote_virtual_network=SubResource(id=remote_virtual_network),
allow_hub_to_remote_vnet_transit=allow_hub_to_remote_vnet_transit,
allow_remote_vnet_to_use_hub_vnet_gateway=allow_remote_vnet_to_use_hub_vnet_gateways,
enable_internet_security=enable_internet_security
)
_upsert(hub, 'virtual_network_connections', connection, 'name', warn=True)
poller = sdk_no_wait(no_wait, client.create_or_update, resource_group_name, virtual_hub_name, hub)
return _get_property(poller.result().virtual_network_connections, connection_name)
# pylint: disable=inconsistent-return-statements
def add_hub_route(cmd, resource_group_name, virtual_hub_name, address_prefixes, next_hop_ip_address, no_wait=False):
VirtualHubRoute = cmd.get_models('VirtualHubRoute')
client = network_client_factory(cmd.cli_ctx).virtual_hubs
hub = client.get(resource_group_name, virtual_hub_name)
route = VirtualHubRoute(address_prefixes=address_prefixes, next_hop_ip_address=next_hop_ip_address)
hub.route_table.routes.append(route)
poller = sdk_no_wait(no_wait, client.create_or_update,
resource_group_name, virtual_hub_name, hub)
try:
return poller.result().route_table.routes
except AttributeError:
return
def list_hub_routes(cmd, resource_group_name, virtual_hub_name):
client = network_client_factory(cmd.cli_ctx).virtual_hubs
hub = client.get(resource_group_name, virtual_hub_name)
return hub.route_table.routes
# pylint: disable=inconsistent-return-statements
def remove_hub_route(cmd, resource_group_name, virtual_hub_name, index, no_wait=False):
client = network_client_factory(cmd.cli_ctx).virtual_hubs
hub = client.get(resource_group_name, virtual_hub_name)
try:
hub.route_table.routes.pop(index - 1)
except IndexError:
raise CLIError('invalid index: {}. Index can range from 1 to {}'.format(index, len(hub.route_table.routes)))
poller = sdk_no_wait(no_wait, client.create_or_update,
resource_group_name, virtual_hub_name, hub)
try:
return poller.result().route_table.routes
except AttributeError:
return
# pylint: disable=inconsistent-return-statements
def create_vhub_route_table(cmd, resource_group_name, virtual_hub_name, route_table_name,
attached_connections, destination_type, destinations,
next_hop_type, next_hops,
tags=None, no_wait=False, location=None):
VirtualHubRouteTableV2, VirtualHubRouteV2 = cmd.get_models('VirtualHubRouteTableV2', 'VirtualHubRouteV2')
client = network_client_route_table_factory(cmd.cli_ctx).virtual_hub_route_table_v2s
route = VirtualHubRouteV2(destination_type=destination_type,
destinations=destinations,
next_hop_type=next_hop_type,
next_hops=next_hops)
route_table = VirtualHubRouteTableV2(location=location,
tags=tags,
attached_connections=attached_connections,
routes=[route])
poller = sdk_no_wait(no_wait, client.create_or_update,
resource_group_name, virtual_hub_name, route_table_name, route_table)
try:
return poller.result()
except AttributeError:
return
def update_vhub_route_table(instance, attached_connections=None, tags=None):
with UpdateContext(instance) as c:
c.update_param('tags', tags, True)
c.update_param('attached_connections', attached_connections, False)
return instance
# pylint: disable=inconsistent-return-statements
def add_hub_routetable_route(cmd, resource_group_name, virtual_hub_name, route_table_name,
destination_type, destinations,
next_hop_type, next_hops, no_wait=False):
VirtualHubRouteV2 = cmd.get_models('VirtualHubRouteV2')
client = network_client_route_table_factory(cmd.cli_ctx).virtual_hub_route_table_v2s
route_table = client.get(resource_group_name, virtual_hub_name, route_table_name)
route = VirtualHubRouteV2(destination_type=destination_type,
destinations=destinations,
next_hop_type=next_hop_type,
next_hops=next_hops)
route_table.routes.append(route)
poller = sdk_no_wait(no_wait, client.create_or_update,
resource_group_name, virtual_hub_name, route_table_name, route_table)
try:
return poller.result().routes
except AttributeError:
return
def list_hub_routetable_route(cmd, resource_group_name, virtual_hub_name, route_table_name):
client = network_client_route_table_factory(cmd.cli_ctx).virtual_hub_route_table_v2s
route_table = client.get(resource_group_name, virtual_hub_name, route_table_name)
return route_table.routes
# pylint: disable=inconsistent-return-statements
def remove_hub_routetable_route(cmd, resource_group_name, virtual_hub_name, route_table_name, index, no_wait=False):
client = network_client_route_table_factory(cmd.cli_ctx).virtual_hub_route_table_v2s
route_table = client.get(resource_group_name, virtual_hub_name, route_table_name)
try:
route_table.routes.pop(index - 1)
except IndexError:
raise CLIError('invalid index: {}. Index can range from 1 to {}'.format(index, len(route_table.routes)))
poller = sdk_no_wait(no_wait, client.create_or_update,
resource_group_name, virtual_hub_name, route_table_name, route_table)
try:
return poller.result().routes
except AttributeError:
return
# endregion
# region VpnGateways
def create_vpn_gateway(cmd, resource_group_name, gateway_name, virtual_hub,
location=None, tags=None, scale_unit=None,
asn=None, bgp_peering_address=None, peer_weight=None, no_wait=False):
client = network_client_factory(cmd.cli_ctx).vpn_gateways
VpnGateway, SubResource = cmd.get_models('VpnGateway', 'SubResource')
gateway = VpnGateway(
location=location,
tags=tags,
virtual_hub=SubResource(id=virtual_hub) if virtual_hub else None,
vpn_gateway_scale_unit=scale_unit,
bgp_settings={
'asn': asn,
'bgpPeeringAddress': bgp_peering_address,
'peerWeight': peer_weight
}
)
return sdk_no_wait(no_wait, client.create_or_update,
resource_group_name, gateway_name, gateway)
def update_vpn_gateway(instance, cmd, virtual_hub=None, tags=None, scale_unit=None,
asn=None, bgp_peering_address=None, peer_weight=None):
SubResource = cmd.get_models('SubResource')
with UpdateContext(instance) as c:
c.update_param('virtual_hub', SubResource(id=virtual_hub) if virtual_hub else None, True)
c.update_param('tags', tags, True)
c.update_param('vpn_gateway_scale_unit', scale_unit, False)
bgp_settings = instance.bgp_settings
with UpdateContext(bgp_settings) as c:
c.update_param('asn', asn, False)
c.update_param('bgp_peering_address', bgp_peering_address, False)
c.update_param('peer_weight', peer_weight, False)
return instance
def create_vpn_gateway_connection(cmd, resource_group_name, gateway_name, connection_name,
remote_vpn_site, routing_weight=None, protocol_type=None,
connection_bandwidth=None, shared_key=None, enable_bgp=None,
enable_rate_limiting=None, enable_internet_security=None, no_wait=False):
client = network_client_factory(cmd.cli_ctx).vpn_gateways
VpnConnection, SubResource = cmd.get_models('VpnConnection', 'SubResource')
gateway = client.get(resource_group_name, gateway_name)
conn = VpnConnection(
name=connection_name,
remote_vpn_site=SubResource(id=remote_vpn_site),
routing_weight=routing_weight,
protocol_type=protocol_type,
connection_bandwidth=connection_bandwidth,
shared_key=shared_key,
enable_bgp=enable_bgp,
enable_rate_limiting=enable_rate_limiting,
enable_internet_security=enable_internet_security
)
_upsert(gateway, 'connections', conn, 'name')
return sdk_no_wait(no_wait, client.create_or_update,
resource_group_name, gateway_name, gateway)
def list_vpn_gateways(cmd, resource_group_name=None):
return _generic_list(cmd.cli_ctx, 'vpn_gateways', resource_group_name)
# pylint: disable=inconsistent-return-statements
def add_vpn_gateway_connection_ipsec_policy(cmd, resource_group_name, gateway_name, connection_name,
sa_life_time_seconds, sa_data_size_kilobytes, ipsec_encryption,
ipsec_integrity, ike_encryption, ike_integrity, dh_group, pfs_group,
no_wait=False):
IpsecPolicy = cmd.get_models('IpsecPolicy')
client = network_client_factory(cmd.cli_ctx).vpn_gateways
gateway = client.get(resource_group_name, gateway_name)
conn = _find_item_at_path(gateway, 'connections.{}'.format(connection_name))
conn.ipsec_policies.append(
IpsecPolicy(
sa_life_time_seconds=sa_life_time_seconds,
sa_data_size_kilobytes=sa_data_size_kilobytes,
ipsec_encryption=ipsec_encryption,
ipsec_integrity=ipsec_integrity,
ike_encryption=ike_encryption,
ike_integrity=ike_integrity,
dh_group=dh_group,
pfs_group=pfs_group
)
)
_upsert(gateway, 'connections', conn, 'name', warn=False)
poller = sdk_no_wait(no_wait, client.create_or_update,
resource_group_name, gateway_name, gateway)
try:
return _get_property(poller.result().connections, connection_name)
except AttributeError:
return
def list_vpn_conn_ipsec_policies(cmd, resource_group_name, gateway_name, connection_name):
client = network_client_factory(cmd.cli_ctx).vpn_gateways
gateway = client.get(resource_group_name, gateway_name)
conn = _find_item_at_path(gateway, 'connections.{}'.format(connection_name))
return conn.ipsec_policies
# pylint: disable=inconsistent-return-statements
def remove_vpn_conn_ipsec_policy(cmd, resource_group_name, gateway_name, connection_name, index, no_wait=False):
client = network_client_factory(cmd.cli_ctx).vpn_gateways
gateway = client.get(resource_group_name, gateway_name)
conn = _find_item_at_path(gateway, 'connections.{}'.format(connection_name))
try:
conn.ipsec_policies.pop(index - 1)
except IndexError:
raise CLIError('invalid index: {}. Index can range from 1 to {}'.format(index, len(conn.ipsec_policies)))
_upsert(gateway, 'connections', conn, 'name', warn=False)
poller = sdk_no_wait(no_wait, client.create_or_update,
resource_group_name, gateway_name, gateway)
try:
return _get_property(poller.result().connections, connection_name)
except AttributeError:
return
# endregion
# region VpnSites
def create_vpn_site(cmd, resource_group_name, vpn_site_name, ip_address,
asn=None, bgp_peering_address=None,
virtual_wan=None, location=None, tags=None,
site_key=None, address_prefixes=None, is_security_site=None,
device_vendor=None, device_model=None, link_speed=None,
peer_weight=None, no_wait=False):
client = network_client_factory(cmd.cli_ctx).vpn_sites
VpnSite, SubResource = cmd.get_models('VpnSite', 'SubResource')
site = VpnSite(
location=location,
tags=tags,
is_security_site=is_security_site,
ip_address=ip_address,
site_key=site_key,
virtual_wan=SubResource(id=virtual_wan) if virtual_wan else None,
address_space={'addressPrefixes': address_prefixes},
device_properties={
'deviceVendor': device_vendor,
'deviceModel': device_model,
'linkSpeedInMbps': link_speed
},
bgp_properties={
'asn': asn,
'bgpPeeringAddress': bgp_peering_address,
'peerWeight': peer_weight
}
)
if not any([asn, bgp_peering_address, peer_weight]):
site.bgp_properties = None
return sdk_no_wait(no_wait, client.create_or_update,
resource_group_name, vpn_site_name, site)
def update_vpn_site(instance, cmd, ip_address=None, virtual_wan=None, tags=None,
site_key=None, address_prefixes=None, is_security_site=None,
device_vendor=None, device_model=None, link_speed=None,
asn=None, bgp_peering_address=None, peer_weight=None):
SubResource = cmd.get_models('SubResource')
with UpdateContext(instance) as c:
c.update_param('tags', tags, True)
c.update_param('ip_address', ip_address, False)
c.update_param('virtual_wan', SubResource(id=virtual_wan) if virtual_wan else None, False)
c.update_param('is_security_site', is_security_site, False)
c.update_param('site_key', site_key, True)
device_properties = instance.device_properties
with UpdateContext(device_properties) as c:
c.update_param('device_vendor', device_vendor, True)
c.update_param('device_model', device_model, True)
c.update_param('link_speed_in_mbps', link_speed, False)
address_space = instance.address_space
with UpdateContext(address_space) as c:
c.update_param('address_prefixes', address_prefixes, False)
bgp_properties = instance.bgp_properties
with UpdateContext(bgp_properties) as c:
c.update_param('asn', asn, False)
c.update_param('bgp_peering_address', bgp_peering_address, False)
c.update_param('peer_weight', peer_weight, False)
return instance
def list_vpn_sites(cmd, resource_group_name=None):
return _generic_list(cmd.cli_ctx, 'vpn_sites', resource_group_name)
# endregion
| 44.239407
| 118
| 0.696135
| 429
| 0.020545
| 0
| 0
| 0
| 0
| 0
| 0
| 2,288
| 0.109573
|
3d1b0b413997d06798ff0dafc9e1b1d24a206754
| 720
|
py
|
Python
|
aiogram/contrib/middlewares/environment.py
|
muhammedfurkan/aiogram
|
692c1340b4dda556da640e5f9ea2200848c06840
|
[
"MIT"
] | null | null | null |
aiogram/contrib/middlewares/environment.py
|
muhammedfurkan/aiogram
|
692c1340b4dda556da640e5f9ea2200848c06840
|
[
"MIT"
] | 4
|
2020-11-04T15:55:55.000Z
|
2020-11-08T21:36:02.000Z
|
aiogram/contrib/middlewares/environment.py
|
muhammedfurkan/aiogram
|
692c1340b4dda556da640e5f9ea2200848c06840
|
[
"MIT"
] | null | null | null |
import asyncio
from aiogram.dispatcher.middlewares import BaseMiddleware
class EnvironmentMiddleware(BaseMiddleware):
def __init__(self, context=None):
super(EnvironmentMiddleware, self).__init__()
if context is None:
context = {}
self.context = context
def update_data(self, data):
dp = self.manager.dispatcher
data.update(bot=dp.bot, dispatcher=dp,
loop=dp.loop or asyncio.get_event_loop())
if self.context:
data.update(self.context)
async def trigger(self, action, args):
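        # Inject bot/dispatcher/loop (plus any user-supplied context) into the handler
        # data before every pre_process_* action; error actions are skipped.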
if "error" not in action and action.startswith("pre_process_"):
self.update_data(args[-1])
return True
| 28.8
| 71
| 0.6375
| 643
| 0.893056
| 0
| 0
| 0
| 0
| 173
| 0.240278
| 21
| 0.029167
|
3d1b7856aab4b6896a8bd50f1e84b7518ab5535b
| 21
|
py
|
Python
|
custom_components/ztm/__init__.py
|
peetereczek/ztm
|
1fd4870720dca16863d085759a360f1ebdd9ab1f
|
[
"MIT"
] | 4
|
2020-02-23T08:08:12.000Z
|
2021-06-26T15:46:27.000Z
|
custom_components/ztm/__init__.py
|
peetereczek/ztm
|
1fd4870720dca16863d085759a360f1ebdd9ab1f
|
[
"MIT"
] | 15
|
2020-01-30T09:54:58.000Z
|
2022-02-02T11:13:32.000Z
|
custom_components/ztm/__init__.py
|
peetereczek/ztm
|
1fd4870720dca16863d085759a360f1ebdd9ab1f
|
[
"MIT"
] | 1
|
2022-01-17T08:51:34.000Z
|
2022-01-17T08:51:34.000Z
|
"""
module init
"""
| 7
| 12
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 21
| 1
|
3d1bc451ecce134829f141f42c2d16c8641046f1
| 899
|
py
|
Python
|
Aula37/Controller/squad_controller.py
|
PabloSchumacher/TrabalhosPython
|
828edd35eb40442629211bc9f1477f75fb025d74
|
[
"bzip2-1.0.6",
"MIT"
] | null | null | null |
Aula37/Controller/squad_controller.py
|
PabloSchumacher/TrabalhosPython
|
828edd35eb40442629211bc9f1477f75fb025d74
|
[
"bzip2-1.0.6",
"MIT"
] | null | null | null |
Aula37/Controller/squad_controller.py
|
PabloSchumacher/TrabalhosPython
|
828edd35eb40442629211bc9f1477f75fb025d74
|
[
"bzip2-1.0.6",
"MIT"
] | null | null | null |
from Dao.squad_dao import SquadDao
from Model.squad import *
from Controller.backend_controller import BackendController
from Controller.frontend_controller import FrontendController
from Controller.sgbd_controller import SgbdController
class SquadController:
dao = SquadDao()
be = BackendController()
fro = FrontendController()
bd = SgbdController()
def listar_todos(self):
return self.dao.listar_todos()
def buscar_por_id(self, id):
return self.dao.buscar_por_id(id)
def salvar(self, squad:Squad):
squad.backend.idbackend = self.be.salvar(squad.backend)
squad.frontend.idfrontend = self.fro.salvar(squad.frontend)
squad.sgbd.idsgbd = self.bd.salvar(squad.sgbd)
return self.dao.salvar(squad)
def alterar(self, squad:Squad):
self.dao.alterar(squad)
def deletar(self, id):
self.dao.deletar(id)
| 29.966667
| 67
| 0.716352
| 660
| 0.734149
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
3d1cffcdcc57d52b339c43b36508e37229c2109b
| 1,065
|
py
|
Python
|
airbyte-integrations/connectors/source-square/source_square/utils.py
|
OTRI-Unipd/OTRI-airbyte
|
50eeeb773f75246e86c6e167b0cd7d2dda6efe0d
|
[
"MIT"
] | 6,215
|
2020-09-21T13:45:56.000Z
|
2022-03-31T21:21:45.000Z
|
airbyte-integrations/connectors/source-square/source_square/utils.py
|
OTRI-Unipd/OTRI-airbyte
|
50eeeb773f75246e86c6e167b0cd7d2dda6efe0d
|
[
"MIT"
] | 8,448
|
2020-09-21T00:43:50.000Z
|
2022-03-31T23:56:06.000Z
|
airbyte-integrations/connectors/source-square/source_square/utils.py
|
OTRI-Unipd/OTRI-airbyte
|
50eeeb773f75246e86c6e167b0cd7d2dda6efe0d
|
[
"MIT"
] | 1,251
|
2020-09-20T05:48:47.000Z
|
2022-03-31T10:41:29.000Z
|
#
# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
#
from typing import Tuple, Union
def separate_by_count(total_length: int, part_count: int) -> Tuple[int, int]:
"""
Calculates parts needed to separate count by part_count value
For example: separate_by_count(total_length=196582, part_count=10000) returns (19, 6582) -> 19*10000 + 6582=196582
:param total_length:
:param part_count:
:return: Returns the total_parts and last part count
"""
total_parts = total_length // part_count
last_part = total_length - (part_count * total_parts)
return total_parts, last_part
def separate_items_by_count(item_list: Union[list, tuple], part_count: int) -> list:
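    # Split item_list into consecutive chunks of size part_count,
    # e.g. separate_items_by_count([1, 2, 3, 4, 5], 2) -> [[1, 2], [3, 4], [5]].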
if not item_list:
return []
total_parts, _ = separate_by_count(len(item_list), part_count)
result_list = []
for i in range(total_parts):
result_list.append(item_list[part_count * i : part_count * (i + 1)])
if len(item_list) % part_count != 0:
result_list.append(item_list[total_parts * part_count :])
return result_list
| 29.583333
| 118
| 0.697653
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 360
| 0.338028
|
3d1f0309843d138ae38f266195c03b7a4fc0d461
| 678
|
py
|
Python
|
packages/awrams/utils/templates.py
|
kaamilah/awra_cms
|
bbbb85ad8864e2c835926439acc1e6dabb137a97
|
[
"NetCDF"
] | 20
|
2016-12-01T03:13:50.000Z
|
2021-12-02T23:43:38.000Z
|
packages/awrams/utils/templates.py
|
kaamilah/awra_cms
|
bbbb85ad8864e2c835926439acc1e6dabb137a97
|
[
"NetCDF"
] | 2
|
2018-02-05T03:42:11.000Z
|
2018-04-27T05:49:44.000Z
|
packages/awrams/utils/templates.py
|
kaamilah/awra_cms
|
bbbb85ad8864e2c835926439acc1e6dabb137a97
|
[
"NetCDF"
] | 22
|
2016-12-13T19:57:43.000Z
|
2021-12-08T02:52:19.000Z
|
def transform_file(infile,outfile,templates):
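    # Copy infile to outfile, expanding every `//ATL_BEGIN <key>` marker line with the
    # lines stored under templates[key], preserving the marker's leading indentation.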
with open(infile,'r') as fh:
indata = fh.read()
lines = indata.split('\n')
outlines = []
for line in lines:
if '//ATL_BEGIN' in line:
start = line.find('//ATL_BEGIN')
spacing = line[:start]
start = line.find('<') + 1
end = line.find('>')
tkey = line[start:end]
ttxt = templates[tkey]
for tl in ttxt:
outlines.append(spacing + tl)
else:
outlines.append(line)
with open(outfile,'w') as fh:
for line in outlines:
fh.write(line+'\n')
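# A minimal usage sketch (the file names and template key below are made up for illustration):
# transform_file('kernel.c.in', 'kernel.c', {'BODY': ['x[i] = a*x[i] + y[i];']})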
| 28.25
| 46
| 0.482301
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 46
| 0.067847
|
3d1ff7cb54534895504bf777de713d7de280d59d
| 19,218
|
py
|
Python
|
hypermapper/plot_pareto.py
|
adelejjeh/hypermapper
|
02bd83b5b1d3feb9907cf1187864ded66ba2c539
|
[
"MIT"
] | null | null | null |
hypermapper/plot_pareto.py
|
adelejjeh/hypermapper
|
02bd83b5b1d3feb9907cf1187864ded66ba2c539
|
[
"MIT"
] | null | null | null |
hypermapper/plot_pareto.py
|
adelejjeh/hypermapper
|
02bd83b5b1d3feb9907cf1187864ded66ba2c539
|
[
"MIT"
] | null | null | null |
"""
Plots design space exploration results.
"""
import json
from collections import OrderedDict, defaultdict
import matplotlib
from jsonschema import Draft4Validator
from pkg_resources import resource_stream
matplotlib.use("agg") # noqa
import matplotlib.pyplot as plt
from matplotlib.legend_handler import HandlerLine2D
import os
import sys
import warnings
# ensure backward compatibility
try:
from hypermapper import space
from hypermapper.utility_functions import (
deal_with_relative_and_absolute_path,
get_next_color,
get_last_dir_and_file_names,
Logger,
extend_with_default,
)
except ImportError:
if os.getenv("HYPERMAPPER_HOME"): # noqa
warnings.warn(
"Found environment variable 'HYPERMAPPER_HOME', used to update the system path. Support might be discontinued in the future. Please make sure your installation is working without this environment variable, e.g., by installing with 'pip install hypermapper'.",
DeprecationWarning,
2,
) # noqa
sys.path.append(os.environ["HYPERMAPPER_HOME"]) # noqa
ppath = os.getenv("PYTHONPATH")
if ppath:
path_items = ppath.split(":")
scripts_path = ["hypermapper/scripts", "hypermapper_dev/scripts"]
if os.getenv("HYPERMAPPER_HOME"):
scripts_path.append(os.path.join(os.getenv("HYPERMAPPER_HOME"), "scripts"))
truncated_items = [
p for p in sys.path if len([q for q in scripts_path if q in p]) == 0
]
if len(truncated_items) < len(sys.path):
warnings.warn(
"Found hypermapper in PYTHONPATH. Usage is deprecated and might break things. "
"Please remove all hypermapper references from PYTHONPATH. Trying to import"
"without hypermapper in PYTHONPATH..."
)
sys.path = truncated_items
sys.path.append(".") # noqa
sys.path = list(OrderedDict.fromkeys(sys.path))
from hypermapper import space
from hypermapper.utility_functions import (
deal_with_relative_and_absolute_path,
get_next_color,
get_last_dir_and_file_names,
Logger,
extend_with_default,
)
debug = False
def plot(parameters_file, list_of_pairs_of_files=None, image_output_file=None):
    """
    Plot the results of the previously run design space exploration.
    """
    if list_of_pairs_of_files is None:
        list_of_pairs_of_files = []
try:
hypermapper_pwd = os.environ["PWD"]
hypermapper_home = os.environ["HYPERMAPPER_HOME"]
os.chdir(hypermapper_home)
except:
hypermapper_home = "."
hypermapper_pwd = "."
show_samples = False
filename, file_extension = os.path.splitext(parameters_file)
if file_extension != ".json":
print(
"Error: invalid file name. \nThe input file has to be a .json file not a %s"
% file_extension
)
exit(1)
with open(parameters_file, "r") as f:
config = json.load(f)
schema = json.load(resource_stream("hypermapper", "schema.json"))
DefaultValidatingDraft4Validator = extend_with_default(Draft4Validator)
DefaultValidatingDraft4Validator(schema).validate(config)
application_name = config["application_name"]
optimization_metrics = config["optimization_objectives"]
feasible_output = config["feasible_output"]
feasible_output_name = feasible_output["name"]
run_directory = config["run_directory"]
if run_directory == ".":
run_directory = hypermapper_pwd
config["run_directory"] = run_directory
xlog = config["output_image"]["image_xlog"]
ylog = config["output_image"]["image_ylog"]
if "optimization_objectives_labels_image_pdf" in config["output_image"]:
optimization_objectives_labels_image_pdf = config["output_image"][
"optimization_objectives_labels_image_pdf"
]
else:
optimization_objectives_labels_image_pdf = optimization_metrics
# Only consider the files in the json file if there are no input files.
if list_of_pairs_of_files == []:
output_pareto_file = config["output_pareto_file"]
if output_pareto_file == "output_pareto.csv":
output_pareto_file = application_name + "_" + output_pareto_file
output_data_file = config["output_data_file"]
if output_data_file == "output_samples.csv":
output_data_file = application_name + "_" + output_data_file
list_of_pairs_of_files.append(
(
deal_with_relative_and_absolute_path(run_directory, output_pareto_file),
deal_with_relative_and_absolute_path(run_directory, output_data_file),
)
)
else:
for idx, (output_pareto_file, output_data_file) in enumerate(
list_of_pairs_of_files
):
list_of_pairs_of_files[idx] = (
deal_with_relative_and_absolute_path(run_directory, output_pareto_file),
deal_with_relative_and_absolute_path(run_directory, output_data_file),
)
if image_output_file != None:
output_image_pdf_file = image_output_file
output_image_pdf_file = deal_with_relative_and_absolute_path(
run_directory, output_image_pdf_file
)
filename = os.path.basename(output_image_pdf_file)
path = os.path.dirname(output_image_pdf_file)
if path == "":
output_image_pdf_file_with_all_samples = "all_" + filename
else:
output_image_pdf_file_with_all_samples = path + "/" + "all_" + filename
else:
tmp_file_name = config["output_image"]["output_image_pdf_file"]
if tmp_file_name == "output_pareto.pdf":
tmp_file_name = application_name + "_" + tmp_file_name
output_image_pdf_file = deal_with_relative_and_absolute_path(
run_directory, tmp_file_name
)
filename = os.path.basename(output_image_pdf_file)
path = os.path.dirname(output_image_pdf_file)
if path == "":
output_image_pdf_file_with_all_samples = "all_" + filename
else:
output_image_pdf_file_with_all_samples = path + "/" + "all_" + filename
str_files = ""
for e in list_of_pairs_of_files:
str_files += str(e[0] + " " + e[1] + " ")
print("######### plot_pareto.py ##########################")
print("### Parameters file is %s" % parameters_file)
print("### The Pareto and DSE data files are: %s" % str_files)
print("### The first output pdf image is %s" % output_image_pdf_file)
print(
"### The second output pdf image is %s" % output_image_pdf_file_with_all_samples
)
print("################################################")
param_space = space.Space(config)
xelem = optimization_metrics[0]
yelem = optimization_metrics[1]
handler_map_for_legend = {}
xlabel = optimization_objectives_labels_image_pdf[0]
ylabel = optimization_objectives_labels_image_pdf[1]
x_max = float("-inf")
x_min = float("inf")
y_max = float("-inf")
y_min = float("inf")
print_legend = True
fig = plt.figure()
ax1 = plt.subplot(1, 1, 1)
if xlog:
ax1.set_xscale("log")
if ylog:
ax1.set_yscale("log")
objective_1_max = objective_2_max = 1
objective_1_is_percentage = objective_2_is_percentage = False
if "objective_1_max" in config["output_image"]:
objective_1_max = config["output_image"]["objective_1_max"]
objective_1_is_percentage = True
if "objective_2_max" in config["output_image"]:
objective_2_max = config["output_image"]["objective_2_max"]
objective_2_is_percentage = True
input_data_array = {}
fast_addressing_of_data_array = {}
non_valid_optimization_obj_1 = defaultdict(list)
non_valid_optimization_obj_2 = defaultdict(list)
for (
file_pair
) in (
list_of_pairs_of_files
): # file_pair is tuple containing: (pareto file, DSE file)
next_color = get_next_color()
#############################################################################
###### Load data from files and do preprocessing on the data before plotting.
#############################################################################
for file in file_pair:
print(("Loading data from %s ..." % file))
(
input_data_array[file],
fast_addressing_of_data_array[file],
) = param_space.load_data_file(file, debug)
            if input_data_array[file] is None:
print("Error: no data found in input data file: %s. \n" % file_pair[1])
exit(1)
if (xelem not in input_data_array[file]) or (
yelem not in input_data_array[file]
):
print(
"Error: the optimization variables have not been found in input data file %s. \n"
% file
)
exit(1)
print(("Parameters are " + str(list(input_data_array[file].keys())) + "\n"))
input_data_array[file][xelem] = [
float(input_data_array[file][xelem][i]) / objective_1_max
for i in range(len(input_data_array[file][xelem]))
]
input_data_array[file][yelem] = [
float(input_data_array[file][yelem][i]) / objective_2_max
for i in range(len(input_data_array[file][yelem]))
]
if objective_1_is_percentage:
input_data_array[file][xelem] = [
input_data_array[file][xelem][i] * 100
for i in range(len(input_data_array[file][xelem]))
]
if objective_2_is_percentage:
input_data_array[file][yelem] = [
input_data_array[file][yelem][i] * 100
for i in range(len(input_data_array[file][yelem]))
]
x_max, x_min, y_max, y_min = compute_min_max_samples(
input_data_array[file], x_max, x_min, xelem, y_max, y_min, yelem
)
input_data_array_size = len(
input_data_array[file][list(input_data_array[file].keys())[0]]
)
print("Size of the data file %s is %d" % (file, input_data_array_size))
file_pareto = file_pair[0] # This is the Pareto file
file_search = file_pair[1] # This is the DSE file
######################################################################################################
###### Compute invalid samples to be plot in a different color (and remove them from the data arrays).
######################################################################################################
if show_samples:
i = 0
for ind in range(len(input_data_array[file][yelem])):
if input_data_array[file][feasible_output_name][i] == False:
non_valid_optimization_obj_2[file_search].append(
input_data_array[file][yelem][i]
)
non_valid_optimization_obj_1[file_search].append(
input_data_array[file][xelem][i]
)
for key in list(input_data_array[file].keys()):
del input_data_array[file][key][i]
else:
i += 1
label_is = get_last_dir_and_file_names(file_pareto)
(all_samples,) = plt.plot(
input_data_array[file_search][xelem],
input_data_array[file_search][yelem],
color=next_color,
linestyle="None",
marker=".",
mew=0.5,
markersize=3,
fillstyle="none",
label=label_is,
)
plt.plot(
input_data_array[file_pareto][xelem],
input_data_array[file_pareto][yelem],
linestyle="None",
marker=".",
mew=0.5,
markersize=3,
fillstyle="none",
)
handler_map_for_legend[all_samples] = HandlerLine2D(numpoints=1)
################################################################################################################
##### Create a straight Pareto plot: we need to add one point for each point of the data in paretoX and paretoY.
##### We also need to reorder the points on the x axis first.
################################################################################################################
straight_pareto_x = list()
straight_pareto_y = list()
if len(input_data_array[file_pareto][xelem]) != 0:
data_array_pareto_x, data_array_pareto_y = (
list(t)
for t in zip(
*sorted(
zip(
input_data_array[file_pareto][xelem],
input_data_array[file_pareto][yelem],
)
)
)
)
for j in range(len(data_array_pareto_x)):
straight_pareto_x.append(data_array_pareto_x[j])
straight_pareto_x.append(data_array_pareto_x[j])
straight_pareto_y.append(data_array_pareto_y[j])
straight_pareto_y.append(data_array_pareto_y[j])
straight_pareto_x.append(x_max) # Just insert the max on the x axis
straight_pareto_y.insert(0, y_max) # Just insert the max on the y axis
label_is = "Pareto - " + get_last_dir_and_file_names(file_pareto)
(pareto_front,) = plt.plot(
straight_pareto_x,
straight_pareto_y,
label=label_is,
linewidth=1,
color=next_color,
)
handler_map_for_legend[pareto_front] = HandlerLine2D(numpoints=1)
label_is = "Invalid Samples - " + get_last_dir_and_file_names(file_search)
if show_samples:
(non_valid,) = plt.plot(
non_valid_optimization_obj_1[file_search],
non_valid_optimization_obj_2[file_search],
linestyle="None",
marker=".",
mew=0.5,
markersize=3,
fillstyle="none",
label=label_is,
)
handler_map_for_legend[non_valid] = HandlerLine2D(numpoints=1)
plt.ylabel(ylabel, fontsize=16)
plt.xlabel(xlabel, fontsize=16)
for tick in ax1.xaxis.get_major_ticks():
tick.label.set_fontsize(
14
) # Set the fontsize of the label on the ticks of the x axis
for tick in ax1.yaxis.get_major_ticks():
tick.label.set_fontsize(
14
) # Set the fontsize of the label on the ticks of the y axis
# Add the legend with some customizations
if print_legend:
lgd = ax1.legend(
handler_map=handler_map_for_legend,
loc="best",
bbox_to_anchor=(1, 1),
fancybox=True,
shadow=True,
ncol=1,
prop={"size": 14},
) # Display legend.
font = {"size": 16}
matplotlib.rc("font", **font)
fig.savefig(output_image_pdf_file_with_all_samples, dpi=120, bbox_inches="tight")
if objective_1_is_percentage:
plt.xlim(0, 100)
if objective_2_is_percentage:
plt.ylim(0, 100)
fig.savefig(output_image_pdf_file, dpi=120, bbox_inches="tight")
def compute_min_max_samples(input_data_array, x_max, x_min, xelem, y_max, y_min, yelem):
"""
Compute the min and max on the x and y axis.
:param input_data_array: computes the max and min on this data.
:param x_max: input and output variable.
:param x_min: input and output variable.
:param xelem: variable to select the column that refers to the objective one in the array input_data_array.
:param y_max: input and output variable.
:param y_min: input and output variable.
:param yelem: variable to select the column that refers to the objective two in the array input_data_array.
:return: min and max on both axes
"""
for elem in zip(input_data_array[xelem], input_data_array[yelem]):
x_max = max(x_max, elem[0])
y_max = max(y_max, elem[1])
x_min = min(x_min, elem[0])
y_min = min(y_min, elem[1])
if x_min == float("inf"):
print("Warning: x_min is infinity. Execution not interrupted.")
if y_min == float("inf"):
print("Warning: y_min is infinity. Execution not interrupted.")
if x_max == float("-inf"):
print("Warning: x_max is - infinity. Execution not interrupted.")
if y_max == float("-inf"):
print("Warning: y_max is - infinity. Execution not interrupted.")
return x_max, x_min, y_max, y_min
def main():
# This handles the logger. The standard setting is that HyperMapper always logs both on screen and on the log file.
# In cases like the interactive mode we only want to log on the file.
sys.stdout = Logger()
list_of_pairs_of_files = []
image_output_file = None
parameters_file = ""
if len(sys.argv) >= 2:
parameters_file = sys.argv[1]
if len(sys.argv) >= 3:
i = 2
try:
image_output_file = sys.argv[i]
filename, file_extension = os.path.splitext(
image_output_file
) # Test on the file to have a pdf extension
if file_extension != ".pdf":
print(
"Error: file extension has to be a pdf. Given: %s"
% file_extension
)
exit(1)
i += 1
except:
print("Error reading the image name file for arguments.")
exit(1)
while i < len(sys.argv):
try:
list_of_pairs_of_files.append((sys.argv[i], sys.argv[i + 1]))
except:
print(
"Error: wrong number of files. Files have to be in pairs of pareto and search."
)
exit(1)
i += 2
else:
print("Error: more arguments needed.")
if parameters_file == "--help" or len(sys.argv) < 2:
print("################################################")
print("### Example 1: ")
print("### hm-plot-dse example_scenarios/spatial/BlackScholes_scenario.json")
print("### Example 2: ")
print(
"### hm-plot-dse example_scenarios/spatial/BlackScholes_scenario.json /path/to/output/image.pdf file1_pareto file1_search file2_pareto file2_search file3_pareto file3_search"
)
print("################################################")
exit(1)
plot(parameters_file, list_of_pairs_of_files, image_output_file)
print("End of the plot_dse script!")
if __name__ == "__main__":
main()
| 39.220408
| 271
| 0.57774
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 5,148
| 0.267874
|
3d200274ebad98aa4c72c04b9e6aca07e97be031
| 1,872
|
py
|
Python
|
foodshering/authapp/forms.py
|
malfin/silvehanger
|
c71a936a0c59c5a6fb909861cf2197b72782642d
|
[
"Apache-2.0"
] | null | null | null |
foodshering/authapp/forms.py
|
malfin/silvehanger
|
c71a936a0c59c5a6fb909861cf2197b72782642d
|
[
"Apache-2.0"
] | null | null | null |
foodshering/authapp/forms.py
|
malfin/silvehanger
|
c71a936a0c59c5a6fb909861cf2197b72782642d
|
[
"Apache-2.0"
] | null | null | null |
from django import forms
from django.contrib.auth.forms import AuthenticationForm, UserCreationForm, PasswordChangeForm, UserChangeForm
from authapp.models import UserProfile, Status
class LoginForm(AuthenticationForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name, item in self.fields.items():
item.widget.attrs['class'] = f'form-control {name}'
class RegisterForm(UserCreationForm, forms.ModelForm):
    status = forms.CharField(label='Who are you?', widget=forms.Select(choices=Status.choices))
class Meta:
model = UserProfile
fields = (
'username',
'first_name',
'last_name',
'status',
'email',
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name, item in self.fields.items():
item.widget.attrs['class'] = f'form-control {name}'
item.help_text = ''
class ChangeForm(UserChangeForm):
class Meta:
model = UserProfile
fields = ('username', 'first_name', 'last_name', 'email', 'address', 'phone_number')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name, item in self.fields.items():
item.widget.attrs['class'] = 'form-control'
item.help_text = ''
if name == 'password':
item.widget = forms.HiddenInput()
class ChangePassword(PasswordChangeForm):
class Meta:
model = UserProfile
        fields = ('password',)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name, item in self.fields.items():
item.widget.attrs['class'] = 'form-control'
def get_form(self, form_class):
return form_class(self.request.user, **self.get_form_kwargs())
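# Hedged usage sketch (illustrative only, not from the original project):
# these forms are typically plugged into Django's class-based auth views,
# roughly as in the hypothetical snippet below; the view names and template
# path are assumptions.
#
#   from django.contrib.auth.views import LoginView, PasswordChangeView
#
#   class SiteLoginView(LoginView):
#       form_class = LoginForm
#       template_name = 'authapp/login.html'
#
#   class SitePasswordChangeView(PasswordChangeView):
#       form_class = ChangePassword
#       success_url = '/'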
| 31.2
| 110
| 0.608974
| 1,681
| 0.895578
| 0
| 0
| 0
| 0
| 0
| 0
| 249
| 0.132658
|
3d220060f34001abd4191e581365ad915971f136
| 340
|
py
|
Python
|
devices/parser/serializers.py
|
City-of-Helsinki/hel-data-pipe
|
e473237cd00a54a791337ac611e99556dc37ea35
|
[
"MIT"
] | 1
|
2021-02-25T14:21:41.000Z
|
2021-02-25T14:21:41.000Z
|
devices/parser/serializers.py
|
City-of-Helsinki/hel-data-pipe
|
e473237cd00a54a791337ac611e99556dc37ea35
|
[
"MIT"
] | 9
|
2020-11-23T11:56:56.000Z
|
2021-02-25T12:20:05.000Z
|
devices/parser/serializers.py
|
City-of-Helsinki/hel-data-pipe
|
e473237cd00a54a791337ac611e99556dc37ea35
|
[
"MIT"
] | 1
|
2021-07-25T12:16:53.000Z
|
2021-07-25T12:16:53.000Z
|
from rest_framework import serializers
from .models import Device, SensorType
class SensorTypeSerializer(serializers.ModelSerializer):
class Meta:
model = SensorType
fields = "__all__"
class DeviceSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Device
fields = "__all__"
| 21.25
| 63
| 0.723529
| 255
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 18
| 0.052941
|
3d224cb8121fbd91cf794debf39fda90674c7943
| 82
|
py
|
Python
|
technews/__init__.py
|
WisChang005/technews_watcher
|
454ef30bab7731c629f0e3b577ce340c48a6cbe7
|
[
"MIT"
] | 1
|
2019-03-31T15:34:10.000Z
|
2019-03-31T15:34:10.000Z
|
technews/__init__.py
|
WisChang005/technews_watcher
|
454ef30bab7731c629f0e3b577ce340c48a6cbe7
|
[
"MIT"
] | null | null | null |
technews/__init__.py
|
WisChang005/technews_watcher
|
454ef30bab7731c629f0e3b577ce340c48a6cbe7
|
[
"MIT"
] | null | null | null |
from .technews_helper import TechNews
from .mail_helper import EmailContentHelper
| 27.333333
| 43
| 0.878049
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
3d2303695d686e9f8b4033b5136f35315cde3220
| 696
|
py
|
Python
|
core/migrations/0002_auto_20191102_1734.py
|
manulangat1/djcommerce
|
2cd92631479ef949e0f05a255f2f50feca728802
|
[
"MIT"
] | 1
|
2020-02-08T16:29:41.000Z
|
2020-02-08T16:29:41.000Z
|
core/migrations/0002_auto_20191102_1734.py
|
manulangat1/djcommerce
|
2cd92631479ef949e0f05a255f2f50feca728802
|
[
"MIT"
] | 15
|
2020-05-04T13:22:32.000Z
|
2022-03-12T00:27:28.000Z
|
core/migrations/0002_auto_20191102_1734.py
|
manulangat1/djcommerce
|
2cd92631479ef949e0f05a255f2f50feca728802
|
[
"MIT"
] | 1
|
2020-10-17T08:54:31.000Z
|
2020-10-17T08:54:31.000Z
|
# Generated by Django 2.2.6 on 2019-11-02 17:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='item',
name='category',
field=models.CharField(blank=True, choices=[('S', 'Shirt'), ('Sw', 'Sport wear'), ('Ow', 'Outwear')], max_length=10, null=True),
),
migrations.AddField(
model_name='item',
name='label',
field=models.CharField(blank=True, choices=[('P', 'primary'), ('S', 'secondary'), ('D', 'danger')], max_length=10, null=True),
),
]
| 29
| 140
| 0.556034
| 603
| 0.866379
| 0
| 0
| 0
| 0
| 0
| 0
| 172
| 0.247126
|
3d25c2e6e29e6e78df3ddd62294d2447deebe52c
| 28
|
py
|
Python
|
aoc_tools/__init__.py
|
dannyboywoop/AOC_Tools
|
b47374ae465c5772d7b4c09f40eb6e69d68cc144
|
[
"MIT"
] | null | null | null |
aoc_tools/__init__.py
|
dannyboywoop/AOC_Tools
|
b47374ae465c5772d7b4c09f40eb6e69d68cc144
|
[
"MIT"
] | null | null | null |
aoc_tools/__init__.py
|
dannyboywoop/AOC_Tools
|
b47374ae465c5772d7b4c09f40eb6e69d68cc144
|
[
"MIT"
] | null | null | null |
from ._advent_timer import *
| 28
| 28
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
3d2603c4df2972be551558b1de82be8e153176f4
| 915
|
py
|
Python
|
stamper/migrations/0004_auto_20161208_1658.py
|
uploadcare/stump
|
8070ff42f01972fa86b4a2eaba580dad65482ef2
|
[
"MIT"
] | null | null | null |
stamper/migrations/0004_auto_20161208_1658.py
|
uploadcare/stump
|
8070ff42f01972fa86b4a2eaba580dad65482ef2
|
[
"MIT"
] | null | null | null |
stamper/migrations/0004_auto_20161208_1658.py
|
uploadcare/stump
|
8070ff42f01972fa86b4a2eaba580dad65482ef2
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-12-08 16:58
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('stamper', '0003_auto_20161122_1253'),
]
operations = [
migrations.AddField(
model_name='fileuploadmessage',
name='original_file_url',
field=models.CharField(default=datetime.datetime(2016, 12, 8, 16, 57, 50, 623808, tzinfo=utc), max_length=255),
preserve_default=False,
),
migrations.AddField(
model_name='imageuploadmessage',
name='original_file_url',
field=models.CharField(default=datetime.datetime(2016, 12, 8, 16, 58, 2, 437475, tzinfo=utc), max_length=255),
preserve_default=False,
),
]
| 30.5
| 123
| 0.64153
| 704
| 0.769399
| 0
| 0
| 0
| 0
| 0
| 0
| 181
| 0.197814
|
3d26af74dac8b1e4a7a1fd6ba44e20f27a15ed52
| 7,493
|
py
|
Python
|
lemon.py
|
lab-sigma/learning-to-rationalize
|
05678fdf67661651c39c7d754541b239cb1577eb
|
[
"MIT"
] | null | null | null |
lemon.py
|
lab-sigma/learning-to-rationalize
|
05678fdf67661651c39c7d754541b239cb1577eb
|
[
"MIT"
] | 1
|
2022-02-02T02:27:59.000Z
|
2022-02-02T02:28:51.000Z
|
lemon.py
|
lab-sigma/learning-to-rationalize
|
05678fdf67661651c39c7d754541b239cb1577eb
|
[
"MIT"
] | null | null | null |
import argparse,time,os,pickle
import matplotlib.pyplot as plt
import numpy as np
from player import *
plt.switch_backend('agg')
np.set_printoptions(precision=2)
class lemon:
def __init__(self, std, num_sellers, num_actions, unit, minx):
self.std = std
        self.unit = unit
        self.minx = minx
self.num_sellers = num_sellers
self.num_players = num_sellers + 1
self.quality = self.transform(np.arange(num_sellers) )
self.num_actions = num_actions
self.welfare_factor = 1.5
self.listing_cost = 3
def __str__(self):
return f"Lemon({self.num_sellers}) with noise std. {self.std},\nquality: {self.quality}\n"
def transform(self, x):
        return x * self.unit + self.minx
def feedback(self, actions):
rewards = np.zeros(self.num_players)
seller_actions = actions[1:]
price = self.transform( actions[0] ) - 1
sold = seller_actions * (self.quality < price) ### quality below price and is selling
supply = np.sum(sold)
if supply > 0:
avg_quality = np.sum(sold * self.quality) / supply
q_noise = np.random.randn(self.num_sellers) * 5
rewards[1:] = seller_actions * [ (self.quality + q_noise < price) * (price - self.quality) - self.listing_cost ]
rewards[0] = ( self.welfare_factor * avg_quality - price )
noise = np.random.randn(self.num_players) * self.std
rewards += noise
else:
avg_quality = 0
rewards = np.zeros(self.num_players)
rewards[1:] = - seller_actions * self.listing_cost
rewards /= self.num_players
return rewards, supply, price, avg_quality
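# Hedged usage sketch (not part of the original experiment code): builds a
# small lemon market and queries a single round of feedback. All parameter
# values below are illustrative assumptions.
def _example_lemon_round():
    env = lemon(std=0.0, num_sellers=5, num_actions=10, unit=1, minx=0)
    actions = np.zeros(env.num_players, dtype=int)
    actions[0] = env.num_actions - 1  # buyer posts the highest possible price
    actions[1:] = np.random.randint(0, 2, env.num_sellers)  # sellers list or not
    rewards, supply, price, avg_quality = env.feedback(actions)
    return rewards, supply, price, avg_quality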
class logger:
def __init__(self, log_dir, env, iterations, samples=None):
self.log_dir = log_dir
self.env = env
self.supply_history = []
self.demand_history = []
self.price_history = []
self.avg_quality_history = []
self.iterations = iterations
self.samples = self.iterations if not samples else samples
self.step_size = self.iterations // self.samples
self.sampled_actions = []
def write(self, text):
with open(self.log_dir+ '.log', 'a') as f:
f.write(text)
def record_round(self, t, supply, price, avg_quality, actions):
if t % self.step_size == 0:
self.supply_history.append(supply)
self.price_history.append(price)
self.avg_quality_history.append(avg_quality)
self.sampled_actions.append(actions[1:].copy())
def plot(self):
time_axis = np.arange(0, self.iterations, step=self.step_size)
fig, (ax1, ax2) = plt.subplots(2, 1)
ax1.plot(time_axis, self.supply_history, label=f"supply")
ax1.set_ylabel('#units')
ax1.legend(loc="upper left")
ax2.plot(time_axis, self.price_history, label=f"price")
ax2.plot(time_axis, self.avg_quality_history, label=f"avg. quality")
ax2.set_ylabel('$')
ax2.set_xlabel('#round')
ax2.legend(loc="upper left")
fig.suptitle( f"Lemon({self.env.num_sellers}) with noise std. {self.env.std}")
plt.savefig(self.log_dir+ '_price' '.png')
plt.clf()
fig, ax3 = plt.subplots(1, 1)
im = ax3.imshow(np.asarray( self.sampled_actions).T, aspect="auto")
cbar = ax3.figure.colorbar(im, ax=ax3)
cbar.ax.set_ylabel("prob. to sell", rotation=-90, va="bottom")
ax3.set_yticks(np.arange(0, self.env.num_sellers, step=5))
ax3.set_ylabel('#player')
ax3.set_xlabel('#round')
fig.suptitle( f"Lemon({self.env.num_sellers}) with noise std. {self.env.std}")
plt.savefig(self.log_dir+ '_trend' '.png')
plt.clf()
with open(self.log_dir+'_history.pickle', 'wb') as f:
pickle.dump(self.sampled_actions, f)
def find_latest(prefix, suffix):
i = 0
while os.path.exists(f'{prefix}{i}{suffix}'):
i += 1
return i
if __name__ == '__main__':
parser = argparse.ArgumentParser()
describe = lambda names : ''.join( [', {}: {}'.format(i, n) for i,n in enumerate(names)] )
parser.add_argument('--std', type=float, default=0, help='noise std. in feedback')
parser.add_argument('--iterations', type=int, default=100, help='number of rounds to play')
parser.add_argument('--strategy', type=int, help='player strategy' + describe(strategy_choice_names))
parser.add_argument('--num_sellers', type=int, help='number of sellers ' )
parser.add_argument('--num_actions', type=int, help='number of buyers ')
parser.add_argument('--unit', type=float, default=1, help='discretized unit')
parser.add_argument('--minx', type=float, default=0, help='min action')
parser.add_argument('--samples', type=int, default=100, help='number of samples to save' )
parser.add_argument('--new', default=False, action='store_true', help='whether to generate a new env instance')
parser.add_argument('--num_repeat', type=int, default=1, help='number of repeated simulation')
parser.add_argument('--force_env', default=False, action='store_true', help='whether to use a specified env instance')
args = parser.parse_args()
std = args.std
iterations = args.iterations
strategy = args.strategy
num_sellers = args.num_sellers
num_buyers = 1
num_actions = args.num_actions
num_players = num_sellers+num_buyers
unit = args.unit
minx = args.minx
samples = args.samples
env_name = "lemon3"
strategy_name = strategy_choice_names[strategy]
j = 0
while j < args.num_repeat:
log_dir = f'results/{env_name}/{strategy_name}'
if not os.path.exists(log_dir):
os.makedirs(log_dir)
print("created directory")
else:
print("existing directory")
prefix = f'results/{env_name}/{num_sellers}_{num_buyers}|{std}|{unit}|{minx}#'
if not args.force_env:
i = find_latest(prefix, '.pickle')
if not args.new and i > 0:
env_dir = prefix + str(i-1) + '.pickle'
f = open(env_dir, 'rb')
env = pickle.load(f)
print("load env at " + env_dir)
f.close()
else:
env = lemon(std, num_sellers, num_actions, unit, minx)
env_dir = prefix + str(i) + '.pickle'
f = open(env_dir, 'wb')
pickle.dump(env, f )
print("save env at "+ env_dir)
f.close()
else:
i = specified_env[j]
env_dir = prefix + str(i) + '.pickle'
if not os.path.exists(log_dir):
print("env path not found ", log_dir)
exit()
f = open(env_dir, 'rb')
env = pickle.load(f)
print("load env at " + env_dir)
f.close()
player_module = __import__('player')
if strategy != 4:
players = [getattr(player_module, strategy_name)(num_actions, iterations) ]
players.extend( [getattr(player_module, strategy_name)(2, iterations) for i in range(num_sellers) ] )
else:
a0 = 50
b0 = 0.5
a1 = 50
b1 = 0.5
players = [getattr(player_module, strategy_name)(num_actions, iterations, a0, b0) ]
players.extend( [getattr(player_module, strategy_name)(2, iterations, a1, b1) for i in range(num_sellers) ] )
print(f'beta = {players[0].beta}, b = {players[0].b}, beta = {players[1].beta}, b = {players[1].b}' )
i = find_latest(f'{log_dir}/', '.log')
log_dir = f'{log_dir}/{i}'
L = logger(log_dir, env, iterations, samples=samples)
start = time.time()
L.write("iterations: "+str(iterations) + "\n")
L.write('Environment:\n\t'+str(env)+'\n')
actions = np.zeros(num_players, dtype=int)
action_probs = np.zeros(num_players, dtype=float)
for t in range(1, iterations+1):
for i, p in enumerate(players):
actions[i] = p.act()
action_probs[i] = p.action_prob[1]
rewards, supply, price, avg_quality = env.feedback( actions )
for a, p, r in zip(actions, players, rewards ):
p.feedback(a, r)
L.record_round(t, supply, price, avg_quality, action_probs)
for i, p in enumerate(players):
L.write(f'Player{i}:\n\t{p}\n')
L.plot()
end = time.time()
print(log_dir, end-start)
j += 1
| 31.091286
| 119
| 0.68237
| 3,238
| 0.432137
| 0
| 0
| 0
| 0
| 0
| 0
| 1,360
| 0.181503
|
3d26e189eb8a7096fbff4e3b70771b2698d8bd96
| 1,910
|
py
|
Python
|
src/osaction.py
|
ivan-georgiev/urlmonitor
|
1280127a1d8c52dcbcd871bba55abaf23a1ca3ce
|
[
"MIT"
] | null | null | null |
src/osaction.py
|
ivan-georgiev/urlmonitor
|
1280127a1d8c52dcbcd871bba55abaf23a1ca3ce
|
[
"MIT"
] | null | null | null |
src/osaction.py
|
ivan-georgiev/urlmonitor
|
1280127a1d8c52dcbcd871bba55abaf23a1ca3ce
|
[
"MIT"
] | null | null | null |
# pylint: disable=too-many-arguments
"""
Observer implementation that executes an OS command
"""
from base.iobserver import IObserver
import subprocess
import logging
import os
import sys
logging.basicConfig(
format='%(asctime)s %(levelname)s:%(name)s: %(message)s',
level=os.environ.get('LOGLEVEL', 'INFO').upper(),
datefmt='%H:%M:%S',
stream=sys.stderr,
)
logger = logging.getLogger('osaction')
class OsAction(IObserver):
"""
Class implementing observer executing os commands
"""
# process execution timeout
_TIMEOUT = 20
def __init__(self, scope: set, name: str, cmd: tuple, useShell=False, waitToComplete=False):
self._name = name
self._cmd = cmd
self._useShell = useShell
self._waitToComplete = waitToComplete
self._scope = scope
def update(self, correlationId: str, msg: object) -> None:
"""
Executes predefined OS command
"""
if msg[0] not in self._scope: # type: ignore
return
try:
logger.info(f'{correlationId} - Execute {self._cmd}')
proc = subprocess.Popen(self._cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, shell=self._useShell, env=os.environ.copy())
# if not set to wait - exit
if not self._waitToComplete:
return
# get command output
try:
outs, errs = proc.communicate(timeout=OsAction._TIMEOUT)
except Exception as e:
proc.kill()
outs, errs = proc.communicate()
logger.error(f'{correlationId} - {str(e)} - {str(errs)}')
logger.debug(f'{correlationId} - command output: {str(outs)}')
except Exception as e:
logger.error(f'{correlationId} - {str(e)}')
@property
def name(self) -> str:
return self._name
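# Hedged usage sketch (illustrative only, not part of the original module):
# shows how an OsAction observer could be constructed and notified. The
# scope value "http_error" and the echo command are assumptions.
def _example_osaction_usage():
    action = OsAction(
        scope={"http_error"},
        name="echo-on-error",
        cmd=("echo", "url check failed"),
        waitToComplete=True,
    )
    # update() only executes the command when the first element of the
    # message tuple is inside the observer's scope.
    action.update(correlationId="req-123", msg=("http_error", "https://example.com"))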
| 28.088235
| 104
| 0.587958
| 1,504
| 0.787435
| 0
| 0
| 62
| 0.032461
| 0
| 0
| 534
| 0.279581
|
3d297440ee04e65ec5e37779068515dbbf97d948
| 1,407
|
py
|
Python
|
CellProfiler/tests/modules/test_opening.py
|
aidotse/Team-rahma.ai
|
66857731e1ca2472e0783e37ba472b55a7ac9cd4
|
[
"MIT"
] | null | null | null |
CellProfiler/tests/modules/test_opening.py
|
aidotse/Team-rahma.ai
|
66857731e1ca2472e0783e37ba472b55a7ac9cd4
|
[
"MIT"
] | null | null | null |
CellProfiler/tests/modules/test_opening.py
|
aidotse/Team-rahma.ai
|
66857731e1ca2472e0783e37ba472b55a7ac9cd4
|
[
"MIT"
] | null | null | null |
import numpy
import numpy.testing
import skimage.morphology
import cellprofiler.modules.opening
instance = cellprofiler.modules.opening.Opening()
def test_run(image, module, image_set, workspace):
module.x_name.value = "example"
module.y_name.value = "opening"
if image.dimensions == 3 or image.multichannel:
# test 3d structuring element
module.structuring_element.shape = "ball"
selem = skimage.morphology.ball(1)
module.run(workspace)
actual = image_set.get_image("opening")
desired = skimage.morphology.opening(image.pixel_data, selem)
numpy.testing.assert_array_equal(actual.pixel_data, desired)
# test planewise
selem = skimage.morphology.disk(1)
module.structuring_element.shape = "disk"
module.run(workspace)
actual = image_set.get_image("opening")
desired = numpy.zeros_like(image.pixel_data)
for index, plane in enumerate(image.pixel_data):
desired[index] = skimage.morphology.opening(plane, selem)
numpy.testing.assert_array_equal(actual.pixel_data, desired)
else:
selem = skimage.morphology.disk(1)
module.run(workspace)
actual = image_set.get_image("opening")
desired = skimage.morphology.opening(image.pixel_data, selem)
numpy.testing.assert_array_equal(actual.pixel_data, desired)
| 25.581818
| 69
| 0.68941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 102
| 0.072495
|
3d29b2ee51f536c799b3a2e3518fab0b83469961
| 26
|
py
|
Python
|
pug-bot/apitoken.py
|
stevenktruong/pug-bot
|
315c21363eebb51d67d5b5c9fa9326cd8bcb2b54
|
[
"MIT"
] | 17
|
2018-06-27T03:49:03.000Z
|
2021-04-13T07:32:43.000Z
|
pug-bot/apitoken.py
|
stevenktruong/pug-bot
|
315c21363eebb51d67d5b5c9fa9326cd8bcb2b54
|
[
"MIT"
] | 3
|
2020-03-26T06:49:10.000Z
|
2020-04-23T07:20:41.000Z
|
pug-bot/apitoken.py
|
stevenktruong/pug-bot
|
315c21363eebb51d67d5b5c9fa9326cd8bcb2b54
|
[
"MIT"
] | 14
|
2018-06-27T03:49:06.000Z
|
2021-10-07T23:28:44.000Z
|
TOKEN = "YOUR_TOKEN_HERE"
| 13
| 25
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 17
| 0.653846
|
3d2a32296fc0285fa514d89f51675b89a2c96e0a
| 52,972
|
py
|
Python
|
proxy/web/app_web.py
|
5GCity/5GCity-infrastructure-abstraction
|
a743666cdd760bbbf511825600f313b2b88477d8
|
[
"Apache-2.0"
] | null | null | null |
proxy/web/app_web.py
|
5GCity/5GCity-infrastructure-abstraction
|
a743666cdd760bbbf511825600f313b2b88477d8
|
[
"Apache-2.0"
] | null | null | null |
proxy/web/app_web.py
|
5GCity/5GCity-infrastructure-abstraction
|
a743666cdd760bbbf511825600f313b2b88477d8
|
[
"Apache-2.0"
] | 1
|
2021-11-27T11:16:04.000Z
|
2021-11-27T11:16:04.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2017-2022 University of Bristol - High Performance Networks Group
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import Column, ForeignKey, Integer, String
from datetime import datetime
from werkzeug.middleware.proxy_fix import ProxyFix
from flask import Flask, Response, jsonify, render_template, request
import logging
import os
import sys
import json
import uuid
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from lib.adapters.ruckus import RuckusWiFi
from lib.adapters.i2cat import I2catController
from conf.config import CONTROLLERS, RUCKUS_ID_MAPPING, RUCKUS_INIT_TOPOLOGY
# Logger configuration
log_filename = "logs/output.log"
os.makedirs(os.path.dirname(log_filename), exist_ok=True)
logging.basicConfig(
format="%(asctime)s [%(levelname)s] %(funcName)s %(message)s",
datefmt='%Y-%m-%d %H:%M:%S', filename=log_filename, level=logging.INFO)
logging.getLogger('requests').setLevel(logging.ERROR)
logger = logging.getLogger()
log_base = "{}:{}:{}" # INTERFACE,endpoint,REQ/RESP,content
# Flask app
app = Flask(__name__)
app.config.from_object(__name__)
# Define database
Base = declarative_base()
engine = create_engine('sqlite:///file.db', echo=False)
def generate_uuid():
return str(uuid.uuid4())
class Chunk(Base):
__tablename__ = 'chunks'
id = Column(String, primary_key=True, default=generate_uuid)
# controllers_chunk is a dictionary where the keys are the ids of the
# controller and the value is a list of the chunk in that controller
# in the form "{controllerid1:[chunkid,...],controllerid2:...}"
controllers_chunk = Column(String)
# controllers_phys is a dictionary where the keys are the ids of the
# controller and the value is a list of the chunk in that controller
# in the form "{controllerid1:[chunkid,...],controllerid2:...}"
controllers_phys = Column(String)
phyList = Column(String)
name = Column(String)
assignedQuota = Column(String)
serviceList = Column(String)
linkList = Column(String)
chunk_json = Column(String)
def __repr__(self):
return "{}, {}, {}, {}, {}, {}, {}, {}, {}".format(
self.id,
            self.controllers_chunk,
self.controllers_phys,
self.phyList,
self.name,
self.assignedQuota,
self.serviceList,
self.linkList,
self.chunk_json
)
class Box(Base):
__tablename__ = 'boxes'
id = Column(String, primary_key=True, default=generate_uuid)
controller_id = Column(Integer)
box_id_controller = Column(String)
name = Column(String)
location = Column(String)
phys = Column(String)
box_json = Column(String)
def __repr__(self):
return "{}, {}, {}, {}, {}, {}, {}".format(
self.id,
self.controller_id,
self.box_id_controller,
self.name,
self.location,
self.phys,
self.box_json
)
class Phy(Base):
__tablename__ = 'phys'
id = Column(String, primary_key=True, default=generate_uuid)
controller_id = Column(Integer)
phy_id_controller = Column(String)
type = Column(String)
name = Column(String)
config = Column(String)
virtualInterfaceList = Column(String)
phy_json = Column(String)
def __repr__(self):
return "{}, {}, {}, {}, {}, {}, {}, {}".format(
self.id,
self.controller_id,
self.phy_id_controller,
self.type, self.name,
self.config,
self.virtualInterfaceList,
self.phy_json
)
class Vif(Base):
__tablename__ = 'vifs'
id = Column(String, primary_key=True, default=generate_uuid)
service_id = Column(String)
controller_id = Column(Integer)
phy_id = Column(String)
name = Column(String)
vif_json = Column(String)
def __repr__(self):
return "{}, {}, {}, {}, {}, {}".format(
self.id,
self.service_id,
self.controller_id,
self.phy_id,
self.name,
self.vif_json
)
class Vlan(Base):
__tablename__ = 'vlans'
id = Column(String, primary_key=True, default=generate_uuid)
service_id = Column(String)
tag = Column(Integer)
controllers_vlans_id = Column(String)
def __repr__(self):
return "{}, {}, {}".format(
self.id,
self.service_id,
self.tag,
self.controller_vlans_id,
)
class Service(Base):
__tablename__ = 'services'
id = Column(String, primary_key=True, default=generate_uuid)
# controllers_services is a dictionary where the keys are the ids of the
# controller and the value is a list of the chunk in that controller
# in the form "{controllerid1:[serviceid,...],controllerid2:...}"
controllers_services = Column(String)
# controllers_phys is a dictionary where the keys are the ids of the
# controller and the value is a list of the chunk in that controller
# in the form "{controllerid1:[chunkid,...],controllerid2:...}"
controllers_phys = Column(String)
lteConfigCellReserved = Column(String)
lteConfigMMEAddress = Column(String)
lteConfigMMEPort = Column(Integer)
lteConfigPLMNId = Column(String)
selectedPhys = Column(String)
selectedVifs = Column(String)
wirelessConfigEncryption = Column(String)
wirelessConfigPassword = Column(String)
wirelessConfigSSID = Column(String)
vlanId = Column(String)
service_json = Column(String)
def __repr__(self):
return "{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}".format(
self.id,
self.controllers_services,
self.controllers_phys,
self.lteConfigCellReserved,
self.lteConfigMMEAddress,
self.lteConfigMMEPort,
self.lteConfigPLMNId,
self.selectedPhys,
self.selectedVifs,
self.wirelessConfigSSID,
self.wirelessConfigEncryption,
self.wirelessConfigPassword,
self.vlanId,
self.service_json
)
# helpers to translate dabatase type class objects into dictionaries
def _dictService(service):
vlan = session.query(Vlan).filter(Vlan.service_id == service.id).one()
if service.wirelessConfigSSID:
wirelessConfig = {
"ssid": service.wirelessConfigSSID,
"encryption": service.wirelessConfigEncryption,
"password": service.wirelessConfigPassword
}
else:
wirelessConfig = None
if service.lteConfigPLMNId:
lteConfig = {
"plmnId": service.lteConfigPLMNId,
"cellReserved": service.lteConfigCellReserved,
"mmeAddress": service.lteConfigMMEAddress,
"mmePort": service.lteConfigMMEPort
}
else:
lteConfig = None
response_data = {
"id": service.id,
"serviceType": "SWAM_SERVICE",
"selectedRoot": 0,
"vlanId": {
"id": vlan.id,
"vlanId": vlan.tag
},
"selectedVifs": [{"id": x} for x in eval(service.selectedVifs)],
"wirelessConfig": wirelessConfig,
"lteConfig": lteConfig
}
return response_data
def _dictChunk(chunk):
services = session.query(Service).filter(
Service.id.in_(eval(chunk.serviceList))).all()
phys = session.query(Phy).filter(Phy.id.in_(eval(chunk.phyList))).all()
response_data = {
"id": chunk.id,
"name": chunk.name,
"assignedQuota": 0,
"serviceList": [_dictService(service) for service in services],
"physicalInterfaceList": [_dictPhy(phy) for phy in phys],
"linkList": []
}
return response_data
def _dictPhy(phy):
vifs = session.query(Vif).filter(
Vif.id.in_(eval(phy.virtualInterfaceList))).all()
if phy.config:
config = eval(phy.config)
else:
config = phy.config
response_data = {
"id": phy.id,
"name": phy.name,
"type": phy.type,
"virtualInterfaceList": [_dictVif(vif) for vif in vifs],
"config": config
}
return response_data
def _dictVif(vif):
response_data = {
"id": vif.id,
"name": vif.name,
"toRootVlan": 0,
"toAccessVlan": 0,
"toAccessPort": 0,
"toRootPort": 0,
"openFlowPortList": []
}
return response_data
# Create database session
Base.metadata.create_all(engine)
DBSession = sessionmaker(bind=engine)
session = DBSession()
# Initialize controller list
controllers = []
# controllers = {}
# formatter for the returned errors
API_RESPONSE = {
"OK": {
"content": '',
"code": 200
},
"CREATED": {
"content": '',
"code": 201
},
"CONTROLLER": {
"content": 'Controller Error',
"code": 503
},
"NOTFOUND": {
"content": 'Not Found',
"code": 404
},
"DB_INTEGRITY": {
"content": 'DB Integrity',
"code": 401
},
"VERIFICATION_ERROR": {
"content": 'Verification Error',
"code": 401
}
}
def errorResponder(error, message):
# TODO: implement timestamp
dt = datetime.today()
return json.dumps({
"timestamp": dt.isoformat(sep='T'),
"status": API_RESPONSE[error]["code"],
"error": API_RESPONSE[error]["content"],
"message": message,
"path": request.path
}), API_RESPONSE[error]["code"]
NORTHBOUND = "NORTHBOUND"
SOUTHBOUND = "SOUTHBOUND"
INTERNAL = "INTERNAL"
REQUEST = "REQUEST"
RESPONSE = "RESPONSE"
REQRESP = "REQ/RESP"
ROLLBACK = "ROLLBACK"
# Load controllers info from config.py and register topologies
# Look for first phy_id free in database
db_id_phy_id_list = session.query(Phy.id, Phy.phy_id_controller).all()
# db_id_list = [r for (r, a) in db_id_phy_id_list]
# db_id_list.sort()
# if len(db_id_list) == 0:
# new_phy_id = 1
# else:
# new_phy_id = db_id_list[len(db_id_list)-1]+1
# # Look for first box_id free in database
db_id_box_id_list = session.query(Box.id, Box.box_id_controller).all()
# db_id_list = [r for (r, a) in db_id_box_id_list]
# db_id_list.sort()
# if len(db_id_list) == 0:
# new_box_id = 1
# else:
# new_box_id = db_id_list[len(db_id_list)-1]+1
new_box_id = str(uuid.uuid4())
# *******************************
# Initialize proxy runtime status
# *******************************
#
# INITIAL TOPOLOGY RECOVERY (Boxes, Phys):
# =========================
# -RUCKUS type controller initial topology recovered from config.py
# -I2CAT type controller initial topology recovered from live
# SOUTHBOUND REQUEST to controller
#
# CURRENT STATE (Chunks, Services, VirtualInterfaces):
# ==============
# -RUCKUS type controller current state recovered from database and
# controllers runtime status
# -I2CAT type controller current state kept on controller
#
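# Hedged illustration (inferred from the keys read in the loop below, not
# copied from conf/config.py): a 'ruckus' entry in CONTROLLERS is expected to
# look roughly like
#   {"id": 0, "type": "ruckus", "ip": "10.0.0.10", "port": 8443, "url": "/api",
#    "topology": {...}, "username": "admin", "password": "***"}
# while an 'i2cat' entry only needs id/type/ip/port/url. All concrete values
# above are placeholders.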
for item in CONTROLLERS:
if item['type'] == 'ruckus':
# Recover the list of chunks from the database
db_chunks = session.query(Chunk).all()
chunks = []
for db_chunk in db_chunks:
if eval(db_chunk.controllers_chunk)[len(controllers)]:
chunk = _dictChunk(db_chunk)
phys_to_pop = []
services_to_pop = []
for service in chunk["serviceList"]:
db_service = session.query(Service).filter(
Service.id == service["id"]).one()
if len(controllers) in \
eval(db_service.controllers_services).keys():
service["id"] = eval(db_service.controllers_services)[
len(controllers)]
else:
services_to_pop.append(service)
[chunk["serviceList"].remove(service)
for service in services_to_pop]
for phy in chunk["physicalInterfaceList"]:
try:
db_phy = session.query(Phy).filter(
Phy.id == phy["id"],
Phy.controller_id == len(controllers)).one()
phy = db_phy.phy_id_controller
except NoResultFound:
phys_to_pop.append(phy)
[chunk["physicalInterfaceList"].remove(
phy) for phy in phys_to_pop]
chunk["id"] = eval(db_chunk.controllers_chunk)[
len(controllers)]
chunks.append(chunk)
phy_id_mapping = RUCKUS_ID_MAPPING
controller = RuckusWiFi(
controller_id=item['id'],
ip=item['ip'],
port=item['port'],
url=item['url'],
topology=item['topology'],
chunks=chunks,
phy_id_mapping=phy_id_mapping,
username=item['username'],
password=item['password']
)
controllers.append(controller)
# controllers[controller.controller_id] = controller
elif item['type'] == 'i2cat':
controller = I2catController(
controller_id=item['id'],
ip=item['ip'],
port=item['port'],
url=item['url']
)
controllers.append(controller)
# controllers[controller.controller_id] = controller
for box in controller.getChunketeTopology()[0]["boxes"]:
if box['id'] not in [r for (a, r) in db_id_box_id_list]:
try:
# initial_topology["boxes"].append(box)
new_box = Box(
name=box["name"],
location=json.dumps(box["location"]),
controller_id=item['id'],
box_id_controller=box['id'],
phys=json.dumps(box["phys"]),
box_json=json.dumps(box))
session.add(new_box)
# count_phys = 0
for phy in box["phys"]:
if phy['id'] not in [r for (a, r) in db_id_phy_id_list]:
new_phy = Phy(
name=phy["name"], type=phy["type"],
controller_id=item['id'],
phy_id_controller=phy['id'],
config=str(phy["config"]),
virtualInterfaceList=json.dumps([]),
phy_json=json.dumps(phy))
session.add(new_phy)
# count_phys += 1
session.commit()
# new_phy_id += count_phys
# new_box_id += 1
except IntegrityError as ex:
session.rollback()
session.close()
def root_dir():
return os.path.abspath(os.path.dirname(__file__))
def get_file(filename):
try:
src = os.path.join(root_dir(), filename)
# Figure out how flask returns static files
# Tried:
# - render_template
# - send_file
# This should not be so non-obvious
return open(src).read()
except IOError as exc:
logger.error("Impossible to read file", exc_info=True)
return str(exc)
@app.route('/')
def root_page():
# return render_template('proxy.html')
return API_RESPONSE["OK"]["content"], API_RESPONSE["OK"]["code"]
@app.after_request
def flaskResponse(response):
body = ""
if response.get_data():
response.headers["Content-Type"] = "application/json;charset=UTF-8"
body = json.loads(response.get_data())
log_content = " '{}' {} :code:{}:body:{}".format(
request.method, request.path, response.status_code, body)
logger.info(log_base.format(NORTHBOUND, RESPONSE, log_content))
return response
@app.before_request
def before():
# todo with request
# e.g. print request.headers
pass
# Topology API implementation
@app.route('/chunkete/topology', methods=['GET'])
def getChunketeTopology():
resp = {
"boxes": [],
"links": []
}
log_content = ""
logger.info(log_base.format(NORTHBOUND, REQUEST, log_content))
for index_controller in range(len(controllers)):
try:
boxes = session.query(Box).filter(
Box.controller_id == index_controller).all()
(controller_resp,
code) = controllers[index_controller].getChunketeTopology()
log_content = "controller:{}:response:{}/{}".format(
index_controller, code, controller_resp)
logger.info(log_base.format(SOUTHBOUND, REQRESP, log_content))
if code == API_RESPONSE["OK"]["code"]:
for box in controller_resp["boxes"]:
for index_phy in range(len(box["phys"])):
phy = session.query(Phy).filter(
Phy.controller_id == index_controller).filter(
Phy.phy_id_controller ==
box["phys"][index_phy]["id"]
).one()
box["phys"][index_phy]["id"] = phy.id
for db_box in boxes:
if db_box.box_id_controller == box["id"]:
box["id"] = db_box.id
break
resp["boxes"].append(box)
else:
return controller_resp, code
except NoResultFound:
return json.dumps({
"timestamp": "2019-09-10T14:18:24.866+0000",
"status": API_RESPONSE["NOTFOUND"]["code"],
"error": API_RESPONSE["NOTFOUND"]["content"],
"message": "No Result Found for the request",
"path": request.path
}), API_RESPONSE["NOTFOUND"]["code"]
except IntegrityError:
return errorResponder(
"DB_INTEGRITY", "Database integrity error")
finally:
session.close()
response = jsonify(resp)
return response, API_RESPONSE["OK"]["code"]
@app.route(
'/chunkete/topology/physicalInterface/<phy_id>/LTEConfig',
methods=['PUT'])
def putInterfaceLTEConfig(phy_id):
# {
# "cellIdentity": 256,
# "earfcndl": 41690,
# "phyCellId": 5,
# "prachrootseqindex": 100,
# "primaryMMEAddress": "192.168.100.25",
# "primaryMMEPort": 333,
# "primaryPlmnId": "00101",
# "refSignalPower": -40,
# "reservedForOperatorUse": "not-reserved",
# "trackingAreaCode": 67
# }
try:
content = request.data
content_dict = json.loads(content)
log_content = "phy_id:{}:content:{}".format(phy_id, content_dict)
logger.info(log_base.format(NORTHBOUND, REQUEST, log_content))
if 0 > content_dict["cellIdentity"] > 256:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
if content_dict["earfcndl"] not in [i for j in (
range(2750, 3449),
range(41690, 43489),
range(37750, 38249)) for i in j]:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
if 0 > content_dict["phyCellId"] > 500:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
if 0 > content_dict["prachrootseqindex"] > 1023:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
if "primaryMMEAddress" not in content_dict.keys():
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
if "primaryMMEPort" not in content_dict.keys():
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
if "primaryPlmnId" not in content_dict.keys():
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
if -40 > content_dict["refSignalPower"] > -10:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
if content_dict["reservedForOperatorUse"] != "not-reserved":
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
if 0 > content_dict["trackingAreaCode"] > 65535:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
if content:
phy = session.query(Phy).filter(Phy.id == phy_id).one()
response, code = controllers[phy.controller_id].\
putInterfaceLTEConfig(
phy.phy_id_controller, content)
log_content = "controller:{}:phy_id_controller:{}:phy_id:{}"
log_content += ":content:{}:response:{}/{}".\
format(
phy.controller_id, phy.phy_id_controller,
phy_id, content, code, response)
logger.info(log_base.format(SOUTHBOUND, REQRESP, log_content))
return jsonify(response), code
else:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
except KeyError:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
except NoResultFound:
return errorResponder(
"NOTFOUND", "Item not found")
except IntegrityError:
return errorResponder(
"DB_INTEGRITY", "Database integrity error")
finally:
session.close()
@app.route(
'/chunkete/topology/physicalInterface/<phy_id>/type/<phy_type>',
methods=['PUT'])
def putInterfaceType(phy_id, phy_type):
try:
log_content = "phy_id:{}:phy_type:{}".format(phy_id, phy_type)
logger.info(log_base.format(NORTHBOUND, REQUEST, log_content))
phy = session.query(Phy).filter(Phy.id == phy_id).one()
response, code = controllers[phy.controller_id].putInterfaceType(
phy.phy_id_controller, phy_type)
log_content = "controller:{}:phy_id_controller:{}:phy_id:{}"
log_content += ":phy_type:{}:response:{}/{}".\
format(
phy.controller_id, phy.phyid_controller,
phy_id, phy_type, code, response)
logger.info(
log_base.format(SOUTHBOUND, REQRESP, log_content))
return response, code
except NoResultFound:
return errorResponder(
"NOTFOUND", "Item not found")
except IntegrityError:
return errorResponder(
"DB_INTEGRITY", "Database integrity error")
finally:
session.close()
@app.route(
'/chunkete/topology/physicalInterface/<phy_id>/wiredConfig',
methods=['PUT'])
def putInterfaceWiredConfig(phy_id):
try:
content = request.data
log_content = "phy_id:{}:content:{}".format(
phy_id, json.loads(content))
logger.info(
log_base.format(NORTHBOUND, REQUEST, log_content))
if content:
phy = session.query(Phy).filter(Phy.id == phy_id).one()
response, code = controllers[phy.controller_id].\
putInterfaceWiredConfig(
phy.phy_id_controller, content)
log_content = "controller:{}:phy_id_controller:{}:phy_id:{}"
log_content += ":response:{}/{}".\
format(
phy.controller_id, phy.phy_id_controller,
phy_id, code, response)
logger.info(
log_base.format(SOUTHBOUND, REQRESP, log_content))
return response, code
except KeyError:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
except NoResultFound:
return errorResponder(
"NOTFOUND", "Item not found")
except IntegrityError:
return errorResponder(
"DB_INTEGRITY", "Database integrity error")
finally:
session.close()
@app.route(
'/chunkete/topology/physicalInterface/<phy_id>/wirelessConfig',
methods=['PUT'])
def putInterfaceWirelessConfig(phy_id):
# Verify content
# {
# "channelBandwidth": 20,
    #     (20, 40 and 80 are accepted)
    #     "channelNumber": 36,
    #     (any channel in the 2.4 GHz and/or the 5 GHz band is accepted;
    #      depending on the node it may or may not support DFS, so it is not
    #      restricted to "normal" channels)
    #     "txPower": 2000
    #     (value in mBm; from 0 up to 3500 is accepted, although
    #      2300 is the usual value)
# }
try:
content = request.data
log_content = "phy_id:{}:content:{}".format(
phy_id, json.loads(content))
logger.info(log_base.format(NORTHBOUND, REQUEST, log_content))
content_dict = json.loads(content)
if content_dict["channelBandwidth"] not in [20, 40, 80]:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
if content_dict["channelNumber"] not in [i for j in (
range(1, 11),
range(36, 68, 4),
range(100, 140, 4)) for i in j]:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
if 0 >= content_dict["txPower"] > 3500:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
if content:
phy = session.query(Phy).filter(Phy.id == phy_id).one()
response, code = controllers[phy.controller_id].\
putInterfaceWirelessConfig(phy.phy_id_controller, content)
log_content = "controller:{}:phy_id_controller:{}:phy_id:{}"
log_content += ":content:{}:response:{}/{}".\
format(
phy.controller_id, phy.phy_id_controller,
phy_id, content, code, response)
logger.info(
log_base.format(SOUTHBOUND, REQRESP, log_content))
return jsonify(response), code
else:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
return (API_RESPONSE["CREATED"]["content"],
API_RESPONSE["CREATED"]["code"])
except KeyError:
logger.error("Malformed request")
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
except NoResultFound:
return errorResponder(
"NOTFOUND", "Item not found")
except IntegrityError:
return errorResponder(
"DB_INTEGRITY", "Database integrity error")
finally:
session.close()
# Chunk API implementation
@app.route('/chunkete/chunk', methods=['GET'])
def getAllChunks():
log_content = ""
logger.info(
log_base.format(NORTHBOUND, REQUEST, log_content))
# chunks = {}
# chunk_id_list =[]
response = []
try:
db_chunks = session.query(Chunk).all()
for db_chunk in db_chunks:
response.append(_dictChunk(db_chunk))
return jsonify(response), API_RESPONSE["OK"]["code"]
except NoResultFound:
return errorResponder(
"NOTFOUND", "Item not found")
except IntegrityError:
return errorResponder(
"DB_INTEGRITY", "Database integrity error")
finally:
session.close()
@app.route('/chunkete/chunk', methods=['POST'])
def registerNewChunk():
try:
content = request.data
log_content = "content:{}".format(json.loads(content))
logger.info(log_base.format(NORTHBOUND, REQUEST, log_content))
if content:
chunk_dict = json.loads(content)
controllers_phys = {}
controllers_content = {}
# Split the phys included in the chunk per controller
for phy in chunk_dict["physicalInterfaceList"]:
phy = session.query(Phy).filter(Phy.id == phy["id"]).one()
phy_dict = json.loads(phy.phy_json)
phy_id_dict = {"id": phy_dict["id"]}
if phy.controller_id in controllers_phys.keys():
controllers_phys[phy.controller_id].append(
phy.phy_id_controller)
controllers_content[
phy.controller_id][
"physicalInterfaceList"].append(phy_id_dict)
else:
controllers_phys[phy.controller_id] = [phy.phy_id_controller]
controllers_content[phy.controller_id] = {
"name": chunk_dict["name"],
"physicalInterfaceList": [phy_id_dict],
}
if "assignedQuota" in chunk_dict.keys():
controllers_content[phy.controller_id]["assignedQuota"] = \
chunk_dict["assignedQuota"]
else:
chunk_dict["assignedQuota"] = 0
controllers_content[phy.controller_id]["assignedQuota"] = 0
if "linkList" in chunk_dict.keys():
controllers_content[phy.controller_id]["linkList"] = \
chunk_dict["linkList"]
else:
chunk_dict["linkList"] = []
controllers_content[phy.controller_id]["linkList"] = []
if "serviceList" in chunk_dict.keys():
controllers_content[phy.controller_id]["serviceList"] = \
chunk_dict["serviceList"]
else:
chunk_dict["serviceList"] = []
controllers_content[phy.controller_id]["serviceList"] = []
# # Create a new chunk and add to database
# # Get the next free ID in db
# db_id_list = session.query(Chunk.id).all()
# db_id_list = [r for (r, ) in db_id_list]
# db_id_list.sort()
# if len(db_id_list) == 0:
# new_chunk_id = 1
# else:
# new_chunk_id = db_id_list[len(db_id_list)-1]+1
# Add the chunk in the database
chunk = Chunk(
name=chunk_dict["name"],
serviceList=json.dumps([]),
assignedQuota=chunk_dict["assignedQuota"],
controllers_phys=str(controllers_phys),
phyList=str(
[phy["id"] for phy in chunk_dict["physicalInterfaceList"]]
),
linkList=json.dumps([]), chunk_json=json.dumps(chunk_dict))
session.add(chunk)
# Register the chunk on each of the controllers
controllers_chunk_dict = {}
for controller_id in controllers_content.keys():
response, code = controllers[controller_id].registerNewChunk(
json.dumps(controllers_content[controller_id]))
log_content = "controller:{}:content:{}"
log_content += ":response:{}/{}".\
format(
controller_id,
json.dumps(
controllers_content[controller_id]),
code, response)
logger.info(log_base.format(SOUTHBOUND, REQRESP, log_content))
if code == API_RESPONSE["CREATED"]["code"]:
controllers_chunk_dict[controller_id] = response["id"]
else:
return errorResponder(
"CONTROLLER", "Managed Controller returned an error")
# Update Service in Database
chunk_dict["id"] = chunk.id
chunk.chunk_json = json.dumps(chunk_dict)
chunk.controllers_chunk = str(controllers_chunk_dict)
session.commit()
return json.dumps(
{'id': chunk.id}), API_RESPONSE["CREATED"]["code"]
except KeyError:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
except NoResultFound:
return errorResponder(
"NOTFOUND", "Item not found")
except IntegrityError:
return errorResponder(
"DB_INTEGRITY", "Database integrity error")
finally:
session.close()
@app.route('/chunkete/chunk/<chunk_id>', methods=['GET'])
def getChunkById(chunk_id):
log_content = "chunk_id:{}".format(chunk_id)
logger.info(
log_base.format(NORTHBOUND, REQUEST, log_content))
try:
chunk = session.query(Chunk).filter(Chunk.id == chunk_id).one()
response_data = _dictChunk(chunk)
return jsonify(
response_data), API_RESPONSE["OK"]["code"]
except NoResultFound:
return errorResponder(
"NOTFOUND", "Object not found")
except IntegrityError:
return errorResponder(
"DB_INTEGRITY", "Database integrity error")
finally:
session.close()
@app.route('/chunkete/chunk/<chunk_id>', methods=['DELETE'])
def removeExistingChunk(chunk_id):
log_content = "chunk_id:{}".format(chunk_id)
logger.info(log_base.format(NORTHBOUND, REQUEST, log_content))
try:
chunk = session.query(Chunk).filter(Chunk.id == chunk_id).one()
session.close()
controllers_phys = eval(chunk.controllers_phys)
serviceList = eval(chunk.serviceList)
# Remove the Services from the chunk
while serviceList:
removeExistingSWAMService(
chunk_id, serviceList[0], interface=INTERNAL)
serviceList.pop(0)
for controller_id in controllers_phys.keys():
response, code = controllers[controller_id].removeExistingChunk(
eval(chunk.controllers_chunk)[controller_id])
log_content = "controller:{}:chunk_id:{}"
log_content += ":response:{}/{}".\
format(controller_id, chunk_id, code, response)
logger.info(log_base.format(SOUTHBOUND, REQRESP, log_content))
# Remove the chunk from the database
session.delete(chunk)
session.commit()
return API_RESPONSE["OK"]["content"], API_RESPONSE["OK"]["code"]
except NoResultFound:
return errorResponder("NOTFOUND", "Item not found")
except IntegrityError:
return errorResponder("DB_INTEGRITY", "Database integrity error")
finally:
session.close()
# Service API implementation
@app.route('/chunkete/chunk/<chunk_id>/service/SWAM', methods=['GET'])
def getAllSWAMServices(chunk_id):
log_content = "chunk_id:{}".format(chunk_id)
logger.info(log_base.format(NORTHBOUND, REQUEST, log_content))
response = []
try:
db_chunk = session.query(Chunk).filter(Chunk.id == chunk_id).one()
for service_id in eval(db_chunk.serviceList):
db_service = session.query(Service).filter(
Service.id == service_id).one()
response.append(_dictService(db_service))
return jsonify(response), API_RESPONSE["OK"]["code"]
except NoResultFound:
return errorResponder("NOTFOUND", "Item not found")
finally:
session.close()
@app.route('/chunkete/chunk/<chunk_id>/service/SWAM', methods=['POST'])
def registerNewSWAMService(chunk_id):
# VERIFY CONTENT
# {
    #     "lteConfig": { (more info in the Accelleran e-mails forwarded earlier)
    #         "cellReserved": "not-reserved",
    #         "mmeAddress": "192.168.50.2",
    #         "mmePort": 333,
    #         "plmnId": "00101"
    #     },
    #     "selectedPhys": [
    #     (only interfaces of type SUB6_ACCESS,
    #      LTE_PRIMARY_PLMN and WIRED_TUNNEL are accepted)
    #         14, 23
    #     ],
    #     "vlanId": 201, (1-4095)
    #     "wirelessConfig": {
    #         "encryption": "WPA", (NONE, WPA, WPA2, WEP accepted)
    #         "password": "secret",
    #         (spaces are not accepted; must contain at least
    #          8 characters, or be empty when encryption == "NONE")
    #         "ssid": "Test" (spaces are not accepted)
    #     }
# }
# }
PHY_TYPES = ["SUB6_ACCESS", "LTE_PRIMARY_PLMN", "WIRED_TUNNEL"]
ENCRYPTION_TYPES = ["NONE", "WPA", "WPA2", "WEP"]
# Action record for rollback in case something fails
# {
# <controller>:{
# "chunk_id": <service_id>
# "service_id": <service_id>
# }
# }
rollback_flag = True
rollback = {}
try:
content = request.data
log_content = "chunk_id:{}:content:{}".format(
chunk_id, json.loads(content))
logger.info(log_base.format(NORTHBOUND, REQUEST, log_content))
if content:
service_dict = json.loads(content)
# if "lteConfig" in service_dict.keys():
if "lteConfig" in service_dict.keys():
if service_dict["lteConfig"]:
pass
# if service_dict["lteConfig"]["encryption"] not in \
# ENCRYPTION_TYPES:
# return errorResponder(
# "VERIFICATION_ERROR", "Malformed request")
# elif len(service_dict["lteConfig"]["password"]) < 8:
# if service_dict[
# "wirelessConfig"]["encryption"] != "NONE":
# return errorResponder(
# "VERIFICATION_ERROR", "Malformed request")
# elif ' ' in service_dict["lteConfig"]["ssid"]:
# return errorResponder(
# "VERIFICATION_ERROR", "Malformed request")
else:
service_dict["lteConfig"] = {
"cellReserved": None,
"mmeAddress": None,
"mmePort": None,
"plmnId": None
}
if "wirelessConfig" in service_dict.keys():
if service_dict["wirelessConfig"]:
if service_dict["wirelessConfig"]["encryption"] not in \
ENCRYPTION_TYPES:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
elif len(service_dict["wirelessConfig"]["password"]) < 8:
if service_dict[
"wirelessConfig"]["encryption"] != "NONE":
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
elif ' ' in service_dict["wirelessConfig"]["ssid"]:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
else:
service_dict["wirelessConfig"] = {
"encryption": None,
"password": None,
"ssid": None
}
if 1 > service_dict["vlanId"] > 4095:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
controllers_phys = {}
controllers_content = {}
controllers_xref = {}
selected_vifs = []
db_vifs = []
chunk = session.query(Chunk).filter(Chunk.id == chunk_id).one()
for phy_id in service_dict["selectedPhys"]:
if phy_id not in eval(chunk.phyList):
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
phy = session.query(Phy).filter(Phy.id == phy_id).one()
if phy.type not in PHY_TYPES:
return errorResponder(
"VERIFICATION_ERROR", "Malformed request")
if phy.controller_id in controllers_phys.keys():
controllers_phys[phy.controller_id].append(phy.id)
controllers_xref[phy.controller_id].append(
phy.phy_id_controller)
controllers_content[phy.controller_id]["selectedPhys"].\
append(phy.phy_id_controller)
else:
controllers_phys[phy.controller_id] = [phy.id]
controllers_xref[phy.controller_id] = [
phy.phy_id_controller]
controllers_content[phy.controller_id] = {
"selectedPhys": [phy.phy_id_controller],
"vlanId": service_dict["vlanId"]
}
if "lteConfig" in service_dict.keys():
controllers_content[phy.controller_id]["lteConfig"] = \
service_dict["lteConfig"]
if "wirelessConfig" in service_dict.keys():
controllers_content[phy.controller_id][
"wirelessConfig"] = service_dict["wirelessConfig"]
# Create a new vif and add to database
# Get the next free ID in db
# db_id_list = session.query(Vif.id).all()
# db_id_list = [r for (r, ) in db_id_list]
# db_id_list.sort()
# if len(db_id_list) == 0:
# new_vif_id = 1
# else:
# new_vif_id = db_id_list[len(db_id_list)-1]+1
# Create a new service and add to database
# Get the next free ID in db
# db_id_list = session.query(Service.id).all()
# db_id_list = [r for (r, ) in db_id_list]
# db_id_list.sort()
# if len(db_id_list) == 0:
# new_service_id = 1
# else:
# new_service_id = db_id_list[len(db_id_list)-1]+1
# TODO: Name the new vif. At the moment, it just takes the
# phy name followed by the new_vif_id
new_vif_dict = {
'id': str(uuid.uuid4()),
'name': "",
"toRootVlan": 0,
"toAccessVlan": 0,
"toAccessPort": 0,
"toRootPort": 0,
"openFlowPortList": []
}
new_vif_dict['name'] = "{}_{}".\
format(phy.name, new_vif_dict['id'])
vif = Vif(
id=new_vif_dict['id'],
service_id="",
phy_id=phy.id,
controller_id=phy.controller_id,
vif_json=json.dumps(new_vif_dict))
session.add(vif)
db_vifs.append(vif)
selected_vifs.append(new_vif_dict['id'])
phy = session.query(Phy).filter(Phy.id == phy.id).one()
virtualInterfaceList = json.loads(phy.virtualInterfaceList)
virtualInterfaceList.append(vif.id)
phy.virtualInterfaceList = json.dumps(virtualInterfaceList)
phy_dict = json.loads(phy.phy_json)
if "virtualInterfaceList" in phy_dict:
phy_dict["virtualInterfaceList"].append(new_vif_dict)
else:
phy_dict["virtualInterfaceList"] = [new_vif_dict]
phy.phy_json = json.dumps(phy_dict)
# Add the service in the database
service = Service(
controllers_services=str({}),
controllers_phys=str(controllers_xref),
lteConfigCellReserved=service_dict[
"lteConfig"]["cellReserved"],
lteConfigMMEAddress=service_dict["lteConfig"]["mmeAddress"],
lteConfigMMEPort=service_dict["lteConfig"]["mmePort"],
lteConfigPLMNId=service_dict["lteConfig"]["plmnId"],
selectedPhys=str(service_dict["selectedPhys"]),
selectedVifs=str(selected_vifs),
wirelessConfigEncryption=service_dict[
"wirelessConfig"]["encryption"],
wirelessConfigPassword=service_dict[
"wirelessConfig"]["password"],
wirelessConfigSSID=service_dict["wirelessConfig"]["ssid"],
vlanId=service_dict["vlanId"],
service_json=json.dumps(service_dict)
)
vlan = Vlan(
tag=service_dict["vlanId"],
service_id="",
controllers_vlans_id="")
session.add(vlan)
session.add(service)
session.flush()
# Update Chunk in database
# update serviceList
serviceList = json.loads(chunk.serviceList)
serviceList.append(service.id)
chunk.serviceList = json.dumps(serviceList)
# update chunk json
service_dict["id"] = service.id
vlan.service_id = service.id
for db_vif in db_vifs:
db_vif.service_id = service.id
updated_chunk = json.loads(chunk.chunk_json)
updated_chunk["serviceList"].append(service_dict)
chunk.chunk_json = json.dumps(updated_chunk)
service.service_json = json.dumps(service_dict)
session.flush()
# Register the service on each controller
controllers_services_dict = {}
for controller_id in controllers_phys.keys():
data, code = controllers[controller_id].\
registerNewSWAMService(
eval(chunk.controllers_chunk)[controller_id],
json.dumps(controllers_content[controller_id]))
log_content = "controller:{}:chunk_id:{}:content:{}"
log_content += ":response:{}/{}".\
format(
controller_id, chunk_id,
json.dumps(controllers_content[controller_id]),
code, data)
logger.info(log_base.format(
SOUTHBOUND, REQRESP, log_content))
if code == API_RESPONSE["CREATED"]["code"]:
rollback[controller_id] = {
'chunk_id': eval(
chunk.controllers_chunk)[controller_id],
'service_id': data["id"]
}
controllers_services_dict[controller_id] = data["id"]
else:
return errorResponder(
"CONTROLLER",
"Managed Controller returned an error")
# Update and add vlan object
# vlan.service_id = service.id
# vlan.controllers_vlans_id = controllers_services_dict['vlanId']
# Update Service in Database
service.controllers_services = str(controllers_services_dict)
session.commit()
rollback_flag = False
return json.dumps(
{'id': service.id}), API_RESPONSE["CREATED"]["code"]
except NoResultFound:
return errorResponder(
"NOTFOUND", "Item not found")
except IntegrityError:
return errorResponder(
"DB_INTEGRITY", "Database integrity error")
finally:
if rollback_flag:
if rollback:
for controller_id in rollback.keys():
data, code = controllers[controller_id].\
removeExistingSWAMService(
rollback[controller_id]["chunk_id"],
rollback[controller_id]["service_id"])
log_content = "controller:{}:chunk_id:{}:service_id:{}"
log_content += ":response:{}/{}".\
format(
controller_id,
rollback[controller_id]["chunk_id"],
rollback[controller_id]["service_id"],
code, data)
logger.info(log_base.format(
SOUTHBOUND, ROLLBACK, log_content))
session.close()
@app.route(
'/chunkete/chunk/<chunk_id>/service/SWAM/<service_id>',
methods=['GET'])
def getSWAMServiceById(chunk_id, service_id):
log_content = "chunk_id:{}:service_id:{}".format(chunk_id, service_id)
logger.info(log_base.format(NORTHBOUND, REQUEST, log_content))
try:
service = session.query(Service).filter(Service.id == service_id).one()
response_data = _dictService(service)
return jsonify(response_data), API_RESPONSE["OK"]["code"]
except NoResultFound:
return errorResponder(
"NOTFOUND", "Item not found")
except IntegrityError:
return errorResponder(
"DB_INTEGRITY", "Database integrity error")
finally:
session.close()
@app.route(
'/chunkete/chunk/<chunk_id>/service/SWAM/<service_id>',
methods=['DELETE'])
def removeExistingSWAMService(chunk_id, service_id, interface=NORTHBOUND):
log_content = "chunk_id:{}:service_id:{}".format(chunk_id, service_id)
logger.info(log_base.format(interface, REQUEST, log_content))
controllers_phys = {}
try:
# Update Chunk in database
chunk = session.query(Chunk).filter(Chunk.id == chunk_id).one()
vifs = session.query(Vif).filter(
Vif.service_id == service_id).all()
for vif in vifs:
phy = session.query(Phy).filter(Phy.id == vif.phy_id).one()
if phy.controller_id in controllers_phys.keys():
controllers_phys[phy.controller_id].append(phy.id)
else:
controllers_phys[phy.controller_id] = [phy.id]
virtualInterfaceList = eval(phy.virtualInterfaceList)
virtualInterfaceList.remove(vif.id)
phy.virtualInterfaceList = json.dumps(virtualInterfaceList)
session.delete(vif)
chunk_dict = json.loads(chunk.chunk_json)
serviceList = json.loads(chunk.serviceList)
for index in range(len(serviceList)):
if serviceList[index] == service_id:
service = session.query(Service).filter(
Service.id == service_id).one()
controllers_services_dict = eval(service.controllers_services)
for controller_id in controllers_phys.keys():
response, code = controllers[controller_id].\
removeExistingSWAMService(
eval(chunk.controllers_chunk)[controller_id],
controllers_services_dict[controller_id])
log_content = "controller:{}:chunk_id:{}:service_id:{}"
log_content += ":service_id_controller:{}:response:{}/{}".\
format(
controller_id, chunk_id,
service_id,
controllers_services_dict[controller_id],
code, response)
logger.info(log_base.format(
SOUTHBOUND, REQRESP, log_content))
chunk_dict["serviceList"].pop(index)
serviceList.pop(serviceList.index(service_id))
chunk.serviceList = json.dumps(serviceList)
chunk.chunk_json = json.dumps(chunk_dict)
vlan = session.query(Vlan).filter(
Vlan.service_id == service_id).one()
session.delete(vlan)
session.delete(service)
session.commit()
return (API_RESPONSE["OK"]["content"],
API_RESPONSE["OK"]["code"])
return errorResponder(
"NOTFOUND", "Item not found")
except NoResultFound:
return errorResponder(
"NOTFOUND", "Item not found")
except IntegrityError:
return errorResponder(
"DB_INTEGRITY", "Database integrity error")
finally:
session.close()
app.wsgi_app = ProxyFix(app.wsgi_app)
if __name__ == '__main__':
"""main function
Default host: 0.0.0.0
Default port: 8008
Default debug: False
"""
try:
app.run(
host='0.0.0.0',
port=8008,
debug=False)
except Exception:
logging.critical(
'server: CRASHED: Got exception on main handler')
raise
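# A minimal client-side sketch (illustrative only) of how the GET and DELETE
# SWAM routes defined above could be exercised with the `requests` library.
# The host/port follow the app.run() call above; the chunk and service ids
# are hypothetical placeholders.
import requests

BASE_URL = "http://localhost:8008/chunkete"   # assumed from app.run() above
chunk_id, service_id = "1", "1"               # hypothetical identifiers

resp = requests.get(
    "{}/chunk/{}/service/SWAM/{}".format(BASE_URL, chunk_id, service_id))
print(resp.status_code, resp.json() if resp.ok else resp.text)

resp = requests.delete(
    "{}/chunk/{}/service/SWAM/{}".format(BASE_URL, chunk_id, service_id))
print(resp.status_code, resp.text)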
avg_line_length: 36.633472 | max_line_length: 79 | alphanum_fraction: 0.564883 | count_classes: 4,902 | score_classes: 0.092529 | count_generators: 0 | score_generators: 0 | count_decorators: 36,360 | score_decorators: 0.686323 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 13,809 | score_documentation: 0.260655

hexsha: 3d2a3406b2c7fae09635aa25e074ee185903e975 | size: 6,179 | ext: py | lang: Python
max_stars_repo_path: openstates/importers/tests/test_base_importer.py | max_stars_repo_name: washabstract/openstates-core | max_stars_repo_head_hexsha: ea69564f1f56fe4a80181b0aa715731bbc47e3f5 | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: openstates/importers/tests/test_base_importer.py | max_issues_repo_name: washabstract/openstates-core | max_issues_repo_head_hexsha: ea69564f1f56fe4a80181b0aa715731bbc47e3f5 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: openstates/importers/tests/test_base_importer.py | max_forks_repo_name: washabstract/openstates-core | max_forks_repo_head_hexsha: ea69564f1f56fe4a80181b0aa715731bbc47e3f5 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
import os
import json
import shutil
import tempfile
import datetime
import pytest
from unittest import mock
from openstates.data.models import (
Bill,
Jurisdiction,
Division,
LegislativeSession,
Organization,
Person,
)
from openstates.scrape import Bill as ScrapeBill
from openstates.importers.base import omnihash, BaseImporter
from openstates.importers import BillImporter
from openstates.exceptions import UnresolvedIdError, DataImportError
def create_jurisdiction():
Division.objects.create(id="ocd-division/country:us", name="USA")
Jurisdiction.objects.create(id="jid", division_id="ocd-division/country:us")
Organization.objects.create(jurisdiction_id="jid", classification="legislature")
LegislativeSession.objects.create(
jurisdiction_id="jid", name="2020", identifier="2020"
)
class FakeImporter(BaseImporter):
_type = "test"
def test_omnihash_python_types():
# string
assert omnihash("test") == omnihash("test")
# list
assert omnihash(["this", "is", "a", "list"]) == omnihash(
["this", "is", "a", "list"]
)
# set
assert omnihash({"and", "a", "set"}) == omnihash({"set", "set", "and", "a"})
# dict w/ set and tuple as well
assert omnihash({"a": {("fancy", "nested"): {"dict"}}}) == omnihash(
{"a": {("fancy", "nested"): {"dict"}}}
)
def test_import_directory():
# write out some temp data to filesystem
datadir = tempfile.mkdtemp()
dicta = {"test": "A"}
dictb = {"test": "B"}
open(os.path.join(datadir, "test_a.json"), "w").write(json.dumps(dicta))
open(os.path.join(datadir, "test_b.json"), "w").write(json.dumps(dictb))
# simply ensure that import directory calls import_data with all dicts
ti = FakeImporter("jurisdiction-id")
with mock.patch.object(ti, attribute="import_data") as mockobj:
ti.import_directory(datadir)
# import_data should be called once
assert mockobj.call_count == 1
# kind of hacky, get the total list of args passed in
arg_objs = list(mockobj.call_args[0][0])
# 2 args only, make sure a and b are in there
assert len(arg_objs) == 2
assert dicta in arg_objs
assert dictb in arg_objs
# clean up datadir
shutil.rmtree(datadir)
def test_apply_transformers():
transformers = {
"capitalize": lambda x: x.upper(),
"cap_and_reverse": [lambda x: x.upper(), lambda y: y[::-1]],
"never_used": lambda x: 1 / 0,
"nested": {"replace": lambda x: "replaced"},
}
data = {
"capitalize": "words",
"cap_and_reverse": "simple",
"nested": {"replace": None},
}
ti = FakeImporter("jid")
ti.cached_transformers = transformers
output = ti.apply_transformers(data)
assert output["capitalize"] == "WORDS"
assert output["cap_and_reverse"] == "ELPMIS"
assert output["nested"]["replace"] == "replaced"
# doing these next few tests just on a Bill because it is the same code that handles it
# but for completeness maybe it is better to do these on each type?
@pytest.mark.django_db
def test_deduplication_identical_object():
create_jurisdiction()
p1 = ScrapeBill("HB 1", "2020", "Title").as_dict()
p2 = ScrapeBill("HB 1", "2020", "Title").as_dict()
BillImporter("jid").import_data([p1, p2])
assert Bill.objects.count() == 1
@pytest.mark.django_db
def test_exception_on_identical_objects_in_import_stream():
create_jurisdiction()
# these two objects aren't identical, but refer to the same thing
# at the moment we consider this an error (but there may be a better way to handle this?)
b1 = ScrapeBill("HB 1", "2020", "Title", chamber="upper").as_dict()
b2 = ScrapeBill("HB 1", "2020", "Title", chamber="lower").as_dict()
with pytest.raises(Exception):
BillImporter("jid").import_data([b1, b2])
@pytest.mark.django_db
def test_resolve_json_id():
create_jurisdiction()
p1 = ScrapeBill("HB 1", "2020", "Title").as_dict()
p2 = ScrapeBill("HB 1", "2020", "Title").as_dict()
bi = BillImporter("jid")
# do import and get database id
p1_id = p1["_id"]
p2_id = p2["_id"]
bi.import_data([p1, p2])
db_id = Bill.objects.get().id
# simplest case
assert bi.resolve_json_id(p1_id) == db_id
# duplicate should resolve to same id
assert bi.resolve_json_id(p2_id) == db_id
# a null id should map to None
assert bi.resolve_json_id(None) is None
# no such id
with pytest.raises(UnresolvedIdError):
bi.resolve_json_id("this-is-invalid")
@pytest.mark.django_db
def test_invalid_fields():
create_jurisdiction()
p1 = ScrapeBill("HB 1", "2020", "Title").as_dict()
p1["newfield"] = "shouldn't happen"
with pytest.raises(DataImportError):
BillImporter("jid").import_data([p1])
@pytest.mark.django_db
def test_invalid_fields_related_item():
create_jurisdiction()
p1 = ScrapeBill("HB 1", "2020", "Title")
p1.add_source("http://example.com")
p1 = p1.as_dict()
p1["sources"][0]["test"] = 3
with pytest.raises(DataImportError):
BillImporter("jid").import_data([p1])
@pytest.mark.django_db
def test_automatic_updated_at():
create_jurisdiction()
difference = Organization.objects.get().updated_at - datetime.datetime.utcnow()
# updated_at should be in UTC, a bit of clock drift notwithstanding
assert abs(difference) < datetime.timedelta(minutes=5)
@pytest.mark.django_db
def test_resolve_person_normal():
create_jurisdiction()
bi = BillImporter("jid")
org = Organization.objects.get(jurisdiction_id="jid", classification="legislature")
p = Person.objects.create(name="John McGuirk")
p.memberships.create(organization=org)
assert bi.resolve_person('~{"name": "John McGuirk"}') == p.id
@pytest.mark.django_db
def test_resolve_person_case_insensitive():
create_jurisdiction()
bi = BillImporter("jid")
org = Organization.objects.get(jurisdiction_id="jid", classification="legislature")
p = Person.objects.create(name="John McGuirk")
p.memberships.create(organization=org)
assert bi.resolve_person('~{"name": "JohN mCgUIrk"}') == p.id
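# A condensed sketch of the import workflow the tests above exercise, assuming
# the same configured Django test database that the @pytest.mark.django_db
# fixtures provide. All names mirror the tests; nothing here is new API.
def import_one_bill_example():
    create_jurisdiction()
    bill = ScrapeBill("HB 1", "2020", "Title").as_dict()
    importer = BillImporter("jid")
    importer.import_data([bill])
    # the scrape-time _id can now be resolved to the database id
    return importer.resolve_json_id(bill["_id"])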
avg_line_length: 31.365482 | max_line_length: 93 | alphanum_fraction: 0.674057 | count_classes: 52 | score_classes: 0.008416 | count_generators: 0 | score_generators: 0 | count_decorators: 3,086 | score_decorators: 0.499434 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 1,735 | score_documentation: 0.28079

hexsha: 3d2ab40e18ce8de7c837398746d70bdad833cca8 | size: 3,777 | ext: py | lang: Python
max_stars_repo_path: cloudml-template/template/trainer/metadata.py | max_stars_repo_name: VanessaDo/cloudml-samples | max_stars_repo_head_hexsha: ae6cd718e583944beef9d8a90db12091ac399432 | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: 3 | max_stars_repo_stars_event_min_datetime: 2019-03-29T08:06:35.000Z | max_stars_repo_stars_event_max_datetime: 2019-04-12T13:19:18.000Z
max_issues_repo_path: cloudml-template/template/trainer/metadata.py | max_issues_repo_name: VanessaDo/cloudml-samples | max_issues_repo_head_hexsha: ae6cd718e583944beef9d8a90db12091ac399432 | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: 23 | max_issues_repo_issues_event_min_datetime: 2020-09-25T22:44:06.000Z | max_issues_repo_issues_event_max_datetime: 2022-02-10T02:58:47.000Z
max_forks_repo_path: cloudml-template/template/trainer/metadata.py | max_forks_repo_name: VanessaDo/cloudml-samples | max_forks_repo_head_hexsha: ae6cd718e583944beef9d8a90db12091ac399432 | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: 2 | max_forks_repo_forks_event_min_datetime: 2019-10-12T19:21:06.000Z | max_forks_repo_forks_event_max_datetime: 2019-10-13T17:38:30.000Z
#!/usr/bin/env python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ************************************************************************************
# YOU NEED TO MODIFY THE FOLLOWING METADATA TO ADAPT THE TRAINER TEMPLATE TO YOUR DATA
# ************************************************************************************
# Task type can be either 'classification', 'regression', or 'custom'
# This is based on the target feature in the dataset, and whether you use a canned or a custom estimator
TASK_TYPE = '' # classification | regression | custom
# A List of all the columns (header) present in the input data file(s) in order to parse it.
# Note that, not all the columns present here will be input features to your model.
HEADER = []
# List of the default values of all the columns present in the input data.
# This helps decoding the data types of the columns.
HEADER_DEFAULTS = []
# List of the feature names of type int or float.
INPUT_NUMERIC_FEATURE_NAMES = []
# Numeric features constructed, if any, in process_features function in input.py module,
# as part of reading data.
CONSTRUCTED_NUMERIC_FEATURE_NAMES = []
# Dictionary of feature names with int values, but to be treated as categorical features.
# In the dictionary, the key is the feature name, and the value is the num_buckets (count of distinct values).
INPUT_CATEGORICAL_FEATURE_NAMES_WITH_IDENTITY = {}
# Categorical features with identity constructed, if any, in process_features function in input.py module,
# as part of reading data. Usually include constructed boolean flags.
CONSTRUCTED_CATEGORICAL_FEATURE_NAMES_WITH_IDENTITY = {}
# Dictionary of categorical features with few nominal values (to be encoded as one-hot indicators).
# In the dictionary, the key is the feature name, and the value is the list of feature vocabulary.
INPUT_CATEGORICAL_FEATURE_NAMES_WITH_VOCABULARY = {}
# Dictionary of categorical features with many values (sparse features).
# In the dictionary, the key is the feature name, and the value is the bucket size.
INPUT_CATEGORICAL_FEATURE_NAMES_WITH_HASH_BUCKET = {}
# List of all the categorical feature names.
# This is programmatically created based on the previous inputs.
INPUT_CATEGORICAL_FEATURE_NAMES = list(INPUT_CATEGORICAL_FEATURE_NAMES_WITH_IDENTITY.keys()) \
+ list(INPUT_CATEGORICAL_FEATURE_NAMES_WITH_VOCABULARY.keys()) \
+ list(INPUT_CATEGORICAL_FEATURE_NAMES_WITH_HASH_BUCKET.keys())
# List of all the input feature names to be used in the model.
# This is programmatically created based on the previous inputs.
INPUT_FEATURE_NAMES = INPUT_NUMERIC_FEATURE_NAMES + INPUT_CATEGORICAL_FEATURE_NAMES
# Column that includes the relative weight of each record.
WEIGHT_COLUMN_NAME = None
# Target feature name (response or class variable).
TARGET_NAME = ''
# List of the class values (labels) in a classification dataset.
TARGET_LABELS = []
# List of the columns expected during serving (which is probably different to the header of the training data).
SERVING_COLUMNS = []
# List of the default values of all the columns of the serving data.
# This helps decoding the data types of the columns.
SERVING_DEFAULTS = []
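# A hypothetical filled-in version of the constants above, shown commented out
# so it does not override the template defaults. The dataset and feature names
# (a census-like income classification task) are invented for illustration.
#
# TASK_TYPE = 'classification'
# HEADER = ['age', 'workclass', 'education', 'hours_per_week', 'income_bracket']
# HEADER_DEFAULTS = [[0], [''], [''], [0], ['']]
# INPUT_NUMERIC_FEATURE_NAMES = ['age', 'hours_per_week']
# INPUT_CATEGORICAL_FEATURE_NAMES_WITH_VOCABULARY = {
#     'workclass': ['Private', 'Self-emp', 'Gov', 'Unemployed'],
#     'education': ['HS-grad', 'Bachelors', 'Masters', 'Doctorate'],
# }
# TARGET_NAME = 'income_bracket'
# TARGET_LABELS = ['<=50K', '>50K']
# SERVING_COLUMNS = ['age', 'workclass', 'education', 'hours_per_week']
# SERVING_DEFAULTS = [[0], [''], [''], [0]]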
avg_line_length: 46.62963 | max_line_length: 111 | alphanum_fraction: 0.734975 | count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 2,901 | score_documentation: 0.76807

hexsha: 3d2b2116bab967ee3e89a4236cdda8c96cc22676 | size: 14,678 | ext: py | lang: Python
max_stars_repo_path: tests/models/test_models_base.py | max_stars_repo_name: harmsm/epistasis | max_stars_repo_head_hexsha: 741b25b3e28015aeeba8d4efc94af1e1d811cd63 | max_stars_repo_licenses: ["Unlicense"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: tests/models/test_models_base.py | max_issues_repo_name: harmsm/epistasis | max_issues_repo_head_hexsha: 741b25b3e28015aeeba8d4efc94af1e1d811cd63 | max_issues_repo_licenses: ["Unlicense"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: tests/models/test_models_base.py | max_forks_repo_name: harmsm/epistasis | max_forks_repo_head_hexsha: 741b25b3e28015aeeba8d4efc94af1e1d811cd63 | max_forks_repo_licenses: ["Unlicense"] | max_forks_count: 2 | max_forks_repo_forks_event_min_datetime: 2020-04-02T00:58:24.000Z | max_forks_repo_forks_event_max_datetime: 2021-11-16T13:30:30.000Z
import pytest
import gpmap
from epistasis import models
import numpy as np
import pandas as pd
import os
def test__genotypes_to_X(test_data):
# Make sure function catches bad genotype passes
d = test_data[0]
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
phenotype=d["phenotype"])
# Duplicated
g = list(gpm.genotype)
g.extend(g)
# not in gpmap
b = list(gpm.genotype)
b.append("stupid")
bad_genotypes = [g,b]
for bad in bad_genotypes:
with pytest.raises(ValueError):
models.base._genotypes_to_X(bad,gpm,order=1,model_type="local")
# Sample through various model combos
allowed = {"local":set([0,1]),
"global":set([-1,1])}
for d in test_data:
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
phenotype=d["phenotype"])
for i in range(1,gpm.length+1,1):
for model_type in ["local","global"]:
X = models.base._genotypes_to_X(gpm.genotype,
gpm,
order=i,
model_type=model_type)
assert X.shape[0] == len(gpm.genotype)
assert set(np.unique(X)).issubset(allowed[model_type])
def test_arghandler_decorator():
class Yo:
def _a(self,data=5,method=None):
return data
def _b(self,data=None,method=None):
return 6
@models.base.arghandler
def test_method(self,a=None,b=None,**kwargs):
return a, b
@models.base.arghandler
def bad_method(self,c=None,d=None,**kwargs):
return c, d
yo = Yo()
assert yo.test_method() == (None,6)
assert yo.test_method(a=5) == (5,6)
assert yo.test_method(a=10) == (10,6)
assert yo.test_method(b=10) == (None,6)
with pytest.raises(AttributeError):
yo.bad_method()
### Tests for AbstractModel:
# AbstractModel cannot be instantiated on its own, as it is designed to be a
# mixin with sklearn classes. Many methods have to be defined in the subclass
# (.fit, .predict, etc.). These will not be tested here, but instead in the
# subclass tests. For methods defined here that are never redefined in a
# subclass (._X, .add_gpm, etc.), we test using the simplest mixin/subclass
# (EpistasisLinearRegression).
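# A rough sketch (illustrative only, not executed by these tests) of the mixin
# arrangement described above: a concrete model combines AbstractModel with an
# sklearn estimator, which is essentially how EpistasisLinearRegression gets
# its .fit/.predict behaviour.
#
#   from sklearn.linear_model import LinearRegression
#   from epistasis.models.base import AbstractModel
#
#   class MyLinearEpistasisModel(AbstractModel, LinearRegression):
#       def fit(self, X=None, y=None, **kwargs):
#           return super().fit(self._X(data=X), self._y(data=y))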
def test_abstractmodel_predict_to_df(test_data):
"""
Test basic functionality. Real test of values will be done on .predict
for subclasses.
"""
m = models.linear.EpistasisLinearRegression()
d = test_data[0]
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
phenotype=d["phenotype"])
m.add_gpm(gpm)
# This should fail -- no fit run
with pytest.raises(Exception):
df = m.predict_to_df()
m.fit()
# This should work
df = m.predict_to_df()
assert type(df) is type(pd.DataFrame())
assert len(df) == len(d["genotype"])
# Create and fit a new model.
m = models.linear.EpistasisLinearRegression()
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
phenotype=d["phenotype"])
# No gpm added -- should fail
with pytest.raises(RuntimeError):
m.predict_to_df()
m.add_gpm(gpm)
m.fit()
df = m.predict_to_df(genotypes=d["genotype"][0])
assert len(df) == 1
bad_stuff = [1,{},[1,2],"STUPID",["STUPID","IS","REAL"]]
for b in bad_stuff:
with pytest.raises(ValueError):
print(f"Trying bad genotypes {b}")
m.predict_to_df(genotypes=b)
df = m.predict_to_df(genotypes=d["genotype"][:3])
assert len(df) == 3
def test_abstractmodel_predict_to_csv(test_data,tmp_path):
m = models.linear.EpistasisLinearRegression()
d = test_data[0]
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
phenotype=d["phenotype"])
m.add_gpm(gpm)
m.fit()
csv_file = os.path.join(tmp_path,"tmp.csv")
m.predict_to_csv(filename=csv_file)
assert os.path.exists(csv_file)
df = pd.read_csv(csv_file)
assert len(df) == len(d["genotype"])
# Make sure genotypes pass works
m.predict_to_csv(filename=csv_file,genotypes=d["genotype"][0])
assert os.path.exists(csv_file)
df = pd.read_csv(csv_file)
assert len(df) == 1
def test_abstractmodel_predict_to_excel(test_data,tmp_path):
m = models.linear.EpistasisLinearRegression()
d = test_data[0]
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
phenotype=d["phenotype"])
m.add_gpm(gpm)
m.fit()
excel_file = os.path.join(tmp_path,"tmp.xlsx")
m.predict_to_excel(filename=excel_file)
assert os.path.exists(excel_file)
df = pd.read_excel(excel_file)
assert len(df) == len(d["genotype"])
# Make sure genotypes pass works
m.predict_to_excel(filename=excel_file,genotypes=d["genotype"][0])
assert os.path.exists(excel_file)
df = pd.read_excel(excel_file)
assert len(df) == 1
def test_abstractmodel_add_gpm(test_data):
d = test_data[0]
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
phenotype=d["phenotype"])
m = models.linear.EpistasisLinearRegression()
bad_gpm = [1,None,"test",[],{}]
for b in bad_gpm:
with pytest.raises(TypeError):
m.add_gpm(b)
m.add_gpm(gpm)
# Test genotype_column arg
d = test_data[0]
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
phenotype=d["phenotype"])
m = models.linear.EpistasisLinearRegression()
bad_genotype_column = [1,None,[],{},(1,)]
for b in bad_genotype_column:
with pytest.raises(TypeError):
print(f"trying {b}")
m.add_gpm(gpm,genotype_column=b)
with pytest.raises(KeyError):
m.add_gpm(gpm,genotype_column="not_a_column")
m.add_gpm(gpm,genotype_column="genotype")
assert m.genotype_column == "genotype"
# Test phenotype_column arg
d = test_data[0]
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"])
m = models.linear.EpistasisLinearRegression()
# Shouldn't work b/c no float column
with pytest.raises(ValueError):
m.add_gpm(gpm)
# Shouldn't work because there is no column with that name
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
phenotype=d["phenotype"])
with pytest.raises(KeyError):
m.add_gpm(gpm,phenotype_column="not_real")
# Shouldn't work because column is not numeric
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
phenotype=d["genotype"])
with pytest.raises(ValueError):
m.add_gpm(gpm,phenotype_column="phenotype")
# Make sure it gets the right column (first float column that is not reserved)
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
coolness=d["phenotype"],
something_else=d["phenotype"])
m.add_gpm(gpm)
assert m.phenotype_column == "coolness"
# Test uncertainty_column arg.
# Do default = None
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
phenotype=d["phenotype"])
m.add_gpm(gpm)
assert m.uncertainty_column == "epi_zero_uncertainty"
unc = np.array(m.gpm.data.loc[:,"epi_zero_uncertainty"])
assert len(np.unique(unc)) == 1
assert np.isclose(unc[0],np.min(gpm.data.loc[:,m.phenotype_column])*1e-6)
# pass missing column
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
phenotype=d["phenotype"],
coolness=d["phenotype"],
not_float=d["genotype"])
# Send in same as phenotype
with pytest.raises(ValueError):
m.add_gpm(gpm,uncertainty_column="phenotype")
# send in not there
with pytest.raises(KeyError):
m.add_gpm(gpm,uncertainty_column="not_there")
# send in not float
with pytest.raises(ValueError):
m.add_gpm(gpm,uncertainty_column="not_float")
# Should work
m.add_gpm(gpm,uncertainty_column="coolness")
assert m.uncertainty_column == "coolness"
# Check final output
assert m.gpm is gpm
assert m.Xcolumns is not None
assert m.epistasis is not None
assert m._previous_X is None
def test_gpm_getter(test_data):
d = test_data[0]
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
phenotype=d["phenotype"])
m = models.linear.EpistasisLinearRegression()
assert m.gpm is None
m.add_gpm(gpm)
assert m.gpm is gpm
def test_results_getter(test_data):
d = test_data[0]
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
phenotype=d["phenotype"])
m = models.linear.EpistasisLinearRegression()
m.add_gpm(gpm)
assert m.results is None
m.fit()
assert isinstance(m.results,pd.DataFrame)
def test_column_getters(test_data):
d = test_data[0]
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
phenotype=d["phenotype"],
uncertainty=d["phenotype"])
m = models.linear.EpistasisLinearRegression()
assert m.genotype_column is None
assert m.phenotype_column is None
assert m.uncertainty_column is None
m.add_gpm(gpm,uncertainty_column="uncertainty")
assert m.genotype_column == "genotype"
assert m.phenotype_column == "phenotype"
assert m.uncertainty_column == "uncertainty"
def test__X_arghandler(test_data):
m = models.linear.EpistasisLinearRegression()
with pytest.raises(ValueError):
m._X()
d = test_data[0]
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
phenotype=d["phenotype"],
uncertainty=d["phenotype"])
m.add_gpm(gpm)
# Make sure calling _X() with no arguments populates _previous_X
assert m._previous_X is None
X = m._X()
assert m._previous_X is X
# If we access after having run, make sure X is the same object
assert X is m._X()
# Should wipe out previous_X and force recalculation.
m.add_gpm(gpm)
assert X is not m._X()
# Get x for single genotype. should work. should not update _previous_X
X = m._X(d["genotype"][0])
assert len(X) == 1
assert X is not m._previous_X
# Get x for two genotypes. should work and not update _previous_X
X = m._X(d["genotype"][0:2])
assert len(X) == 2
assert X is not m._previous_X
# Get x for two genotypes. should work and not update _previous_X
X = m._X(np.array(d["genotype"][0:2]))
assert len(X) == 2
assert X is not m._previous_X
# Just keep the array, do not update previous_X
hack = np.ones((1,1))
X = m._X(data=hack)
assert X is hack
assert X is not m._previous_X
# pass in bad genotypes
with pytest.raises(ValueError):
X = m._X("NOT_A_GENOTYPE")
with pytest.raises(ValueError):
X = m._X([d["genotype"][0],"NOT_A_GENOTYPE"])
# pass in general badness
bad_passes = [np.ones((1,1,1)),[],"stupid",1,1.1,()]
for b in bad_passes:
with pytest.raises(ValueError):
m._X(b)
def test__y_arghandler(test_data):
m = models.linear.EpistasisLinearRegression()
with pytest.raises(ValueError):
m._y()
d = test_data[0]
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
coolness=d["phenotype"],
uncertainty=d["phenotype"])
m.add_gpm(gpm,phenotype_column="coolness")
assert np.array_equal(m._y(),d["phenotype"])
# pass in general badness
bad_passes = [np.ones((1,1,1)),[],"stupid",1,1.1,()]
for b in bad_passes:
with pytest.raises(TypeError):
print(f"trying {b}")
m._y(b)
y = m._y([1.0])
assert np.array_equal(y,[1.0])
def test__yerr_arghandler(test_data):
m = models.linear.EpistasisLinearRegression()
with pytest.raises(ValueError):
m._yerr()
d = test_data[0]
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
coolness=d["phenotype"],
uncertainty=d["phenotype"])
m.add_gpm(gpm,phenotype_column="coolness",uncertainty_column="uncertainty")
assert np.array_equal(m._yerr(),d["phenotype"])
# pass in general badness
bad_passes = [np.ones((1,1,1)),[],"stupid",1,1.1,()]
for b in bad_passes:
with pytest.raises(TypeError):
print(f"trying {b}")
m._yerr(b)
y = m._yerr([1.0])
assert np.array_equal(y,[1.0])
def test__thetas_arghandler(test_data):
m = models.linear.EpistasisLinearRegression()
d = test_data[0]
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
coolness=d["phenotype"],
uncertainty=d["phenotype"])
m.add_gpm(gpm,phenotype_column="coolness",uncertainty_column="uncertainty")
# No thetas calculated yet
with pytest.raises(RuntimeError):
m._thetas()
m.fit()
# Get thetas, calculated
t = m._thetas()
assert len(t) == 4
# pass in general badness
bad_passes = [np.ones((1,1,1)),[],"stupid",1,1.1,()]
for b in bad_passes:
with pytest.raises(TypeError):
print(f"trying {b}")
m._thetas(b)
y = m._thetas([1.0])
assert np.array_equal(y,[1.0])
def test__lnprior(test_data):
m = models.linear.EpistasisLinearRegression()
with pytest.raises(ValueError):
m._lnprior()
d = test_data[0]
gpm = gpmap.GenotypePhenotypeMap(genotype=d["genotype"],
coolness=d["phenotype"],
uncertainty=d["phenotype"])
m.add_gpm(gpm,phenotype_column="coolness",uncertainty_column="uncertainty")
x = m._lnprior()
assert np.array_equal(x,np.zeros(len(d["genotype"])))
# pass in general badness
bad_passes = [np.ones((1,1,1)),[],"stupid",1,1.1,()]
for b in bad_passes:
with pytest.raises(TypeError):
print(f"trying {b}")
m._lnprior(b)
y = m._lnprior([1.0])
assert np.array_equal(y,[1.0])
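# A minimal end-to-end sketch of the workflow the tests above exercise,
# using the same public calls (add_gpm, fit, predict_to_df); the fixture
# data would come from the test_data fixture or any genotype/phenotype lists.
def fit_and_predict_example(genotypes, phenotypes):
    gpm = gpmap.GenotypePhenotypeMap(genotype=genotypes, phenotype=phenotypes)
    model = models.linear.EpistasisLinearRegression()
    model.add_gpm(gpm)
    model.fit()
    # one predicted phenotype per genotype, returned as a DataFrame
    return model.predict_to_df()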
avg_line_length: 30.579167 | max_line_length: 79 | alphanum_fraction: 0.60417 | count_classes: 364 | score_classes: 0.024799 | count_generators: 0 | score_generators: 0 | count_decorators: 203 | score_decorators: 0.01383 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 3,096 | score_documentation: 0.210928

hexsha: 3d2cc12e10450aab89581a6101a64a041375bd58 | size: 871 | ext: py | lang: Python
max_stars_repo_path: examples/write_spyview_meta.py | max_stars_repo_name: sourav-majumder/qtlab | max_stars_repo_head_hexsha: 96b2a127b1df7b45622c90229bd5ef8a4083614e | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: examples/write_spyview_meta.py | max_issues_repo_name: sourav-majumder/qtlab | max_issues_repo_head_hexsha: 96b2a127b1df7b45622c90229bd5ef8a4083614e | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: examples/write_spyview_meta.py | max_forks_repo_name: sourav-majumder/qtlab | max_forks_repo_head_hexsha: 96b2a127b1df7b45622c90229bd5ef8a4083614e | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
# File name: spyview.py
#
# This example should be run with "execfile('spyview.py')"
from numpy import pi, linspace, sinc, sqrt
from lib.file_support.spyview import SpyView
x_vec = linspace(-2 * pi, 2 * pi, 100)
y_vec = linspace(-2 * pi, 2 * pi, 100)
qt.mstart()
data = qt.Data(name='testmeasurement')
# to make the spyview meta.txt file dimension info is required:
data.add_coordinate('X',
size=len(x_vec),
start=x_vec[0],
end=x_vec[-1])
data.add_coordinate('Y',
size=len(y_vec),
start=y_vec[0],
end=y_vec[-1])
data.add_value('Z')
data.create_file()
for y in y_vec:
for x in x_vec:
result = sinc(sqrt(x**2 + y**2))
data.add_data_point(x, y, result)
qt.msleep(0.001)
data.new_block()
data.close_file()
qt.mend()
# create the spyview meta.txt file:
SpyView(data).write_meta_file()
avg_line_length: 20.738095 | max_line_length: 63 | alphanum_fraction: 0.640643 | count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 206 | score_documentation: 0.23651