blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5ddecfc8cb11d9c0029f0d99237baea75bb36a83 | 4f878cc67a5d447c0a7f76539db82719998d885e | /dash/tut/callback/7.py | 5ebbc8b38b8bc5c986155ca9b69e6de2b8bc7e19 | [] | no_license | masknugget/webapp_lowcode | 91110340c5cfd4d4e11dbea77826e42a3998a84c | fffe73ad87cf02e703529d20f034fb13c2add5ff | refs/heads/main | 2023-06-24T14:46:28.841698 | 2021-07-25T15:32:18 | 2021-07-25T15:32:18 | 389,378,583 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,056 | py | # -*- coding: utf-8 -*-
import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output, State
import dash_design_kit as ddk
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
app.layout = html.Div([
dcc.Input(id='input-1-state', type='text', value='Montréal'),
dcc.Input(id='input-2-state', type='text', value='Canada'),
html.Button(id='submit-button-state', n_clicks=0, children='Submit'),
html.Div(id='output-state')
])
@app.callback(Output('output-state', 'children'),
Input('submit-button-state', 'n_clicks'),
State('input-1-state', 'value'),
State('input-2-state', 'value'))
def update_output(n_clicks, input1, input2):
return u'''
The Button has been pressed {} times,
Input 1 is "{}",
and Input 2 is "{}"
'''.format(n_clicks, input1, input2)
if __name__ == '__main__':
app.run_server(debug=True) | [
"946883098@qq.com"
] | 946883098@qq.com |
20afee652131381186c1524e777baef2bc3b5d6f | 058f6cf55de8b72a7cdd6e592d40243a91431bde | /tests/llvm/dynamic/test_warning_dont_abort/test.py | 09517e76e3243740f057aabb1099d60f3b7704e8 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | LLNL/FPChecker | 85e8ebf1d321b3208acee7ddfda2d8878a238535 | e665ef0f050316f6bc4dfc64c1f17355403e771b | refs/heads/master | 2023-08-30T23:24:43.749418 | 2022-04-14T19:57:44 | 2022-04-14T19:57:44 | 177,033,795 | 24 | 6 | Apache-2.0 | 2022-09-19T00:09:50 | 2019-03-21T22:34:14 | Python | UTF-8 | Python | false | false | 2,790 | py | #!/usr/bin/env python
import subprocess
# returns: tuple (error, op, file, line)
#
#+-------------------------- FPChecker Warning Report --------------------------+
# Error : Underflow
# Operation : ADD
# File : dot_product.cu
# Line : 9
#+------------------------------------------------------------------------------+
#
def getFPCReport(lines):
ret = ("", "", "", "")
for i in range(len(lines)):
l = lines[i]
if "FPChecker" in l and "Report" in l and "+" in l:
err = lines[i+1].split()[2]
op = lines[i+2].split()[2]
f = lines[i+3].split()[2]
line = lines[i+4].split()[2]
ret = (err, op, f, line)
break
return ret
def compileAndRun(op_level):
# --- compile code ---
cmd = ["make -f Makefile." + op_level]
try:
cmdOutput = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
print e.output
exit()
# --- run code ---
cmd = ["./main"]
try:
cmdOutput = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
print e.output
exit()
ret = cmdOutput.split("\n")
#print ret
return ret
# Check we get exactly 64 errors in the report (8 elems x 8 threads)
def checkForErrorReports(out):
ret = False
firstLine = False
secondLine = False
for l in out:
if "#FPCHECKER: Errors at dot_product.cu:8" in l and "#64" in l:
firstLine = True
if "#FPCHECKER: Errors at dot_product.cu:18" in l and "#64" in l:
secondLine = True
return (firstLine and secondLine)
def main():
op0_res = compileAndRun("0")
rep0 = getFPCReport(op0_res)
op1_res = compileAndRun("1")
rep1 = getFPCReport(op1_res)
op2_res = compileAndRun("2")
rep2 = getFPCReport(op2_res)
op3_res = compileAndRun("3")
rep3 = getFPCReport(op3_res)
no_aborts_are_seen = False
if rep0 == ("", "", "", "") and rep1 == ("", "", "", "") and rep2 == ("", "", "", "") and rep3 == ("", "", "", ""):
no_aborts_are_seen = True
error_report_is_correct = False
if checkForErrorReports(op0_res) == True and checkForErrorReports(op1_res) and checkForErrorReports(op2_res) and checkForErrorReports(op3_res):
error_report_is_correct = True
if no_aborts_are_seen==True and error_report_is_correct==True:
print "PASSED"
else:
print "failed"
main()
| [
"ilaguna@llnl.gov"
] | ilaguna@llnl.gov |
5eb0f81eef5b851f797d1d1d90714e03fb12a7bb | 5b08be996658aaae39306876d45c3a32ad8a0b43 | /roles/mbs/frontend/files/mbs.wsgi | e3c7bd5223607ce2abadb37107f3c5caaa926cc5 | [] | no_license | henrysher/fedora-infra-ansible | 84be23908bbbfee93da67b3bdaeb34f4efc193cb | 30418756785eb4079a7adb4a6d4f11ec59dd8e08 | refs/heads/master | 2021-01-23T17:07:45.846157 | 2019-12-13T14:38:07 | 2019-12-13T14:39:59 | 15,855,480 | 22 | 10 | null | 2017-07-26T10:47:23 | 2014-01-13T02:05:44 | JavaScript | UTF-8 | Python | false | false | 127 | wsgi | #-*- coding: utf-8 -*-
import logging
logging.basicConfig(level='DEBUG')
from module_build_service import app as application
| [
"rbean@redhat.com"
] | rbean@redhat.com |
8efcb3ae7a5a57096b4757b7a4f0fb639e5c59c8 | 1dacbf90eeb384455ab84a8cf63d16e2c9680a90 | /lib/python2.7/site-packages/odps/df/expr/errors.py | 7aa847a990aed1665fdf166912a16ec6059475df | [
"Python-2.0",
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-unknown"
] | permissive | wangyum/Anaconda | ac7229b21815dd92b0bd1c8b7ec4e85c013b8994 | 2c9002f16bb5c265e0d14f4a2314c86eeaa35cb6 | refs/heads/master | 2022-10-21T15:14:23.464126 | 2022-10-05T12:10:31 | 2022-10-05T12:10:31 | 76,526,728 | 11 | 10 | Apache-2.0 | 2022-10-05T12:10:32 | 2016-12-15T05:26:12 | Python | UTF-8 | Python | false | false | 689 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 1999-2017 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class ExpressionError(Exception):
pass | [
"noreply@github.com"
] | wangyum.noreply@github.com |
2c557f523e47a0593ccd02f8d8c509ceaf504d3a | 71460476c5f5ebdca719def124f1a0650861fdab | /mint_work/custom/client_plan_upgrade/models/__init__.py | 0280776e04b90ee44aff3b2999f96ed1c2b472d6 | [] | no_license | merdhah/dubai_work | fc3a70dc0b1db6df19c825a3bf1eef2a373d79c0 | e24eb12b276a4cd5b47a4bd5470d915179872a4f | refs/heads/master | 2022-01-07T11:22:07.628435 | 2018-10-17T13:37:24 | 2018-10-17T13:37:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 205 | py | # -*- coding: utf-8 -*-
# Copyright 2015 LasLabs Inc.
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html).
from . import res_user
from . import ir_module_module
# from . import models_new
| [
"asghar0517@gmail.com"
] | asghar0517@gmail.com |
87b18f2181d9c52a66be0587a9b7b20999510dae | df191de3e8c14e10c2a78318c987371a59f1465c | /sturgisbank/settings.py | 3ddf4b654f66cb06f5a09d5e445759ea81d4e601 | [] | no_license | daniel-kanchev/sturgisbank | 500c1cb6351b839bd5d30f6d914f1a1c2fada783 | eb0e1122374c675d00937fe9b4a3f2931b665497 | refs/heads/main | 2023-04-05T04:38:37.418610 | 2021-04-02T07:43:47 | 2021-04-02T07:43:47 | 353,945,270 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 371 | py | BOT_NAME = 'sturgisbank'
SPIDER_MODULES = ['sturgisbank.spiders']
NEWSPIDER_MODULE = 'sturgisbank.spiders'
USER_AGENT = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0',
ITEM_PIPELINES = {
'sturgisbank.pipelines.DatabasePipeline': 300,
}
FEED_EXPORT_ENCODING = 'utf-8'
ROBOTSTXT_OBEY = True
LOG_LEVEL = 'WARNING'
# LOG_LEVEL = 'DEBUG'
| [
"daniel.kanchev@adata.pro"
] | daniel.kanchev@adata.pro |
35eb78d6f9cd10f98b78a299d6e86811bd229b75 | b0cdec1a01255ca65da18433e9d2620c7fad181c | /manabot | d1182b224cb929bf6746b9839887d17a75dfaa00 | [
"MIT"
] | permissive | DronMDF/manabot | c7f05b90c1f0c125a3f30961c39dea642c64f8f1 | b412e8cb9b5247f05487bed4cbf4967f7b58327f | refs/heads/master | 2021-09-05T03:03:50.885632 | 2018-01-23T20:48:39 | 2018-01-23T20:48:39 | 112,320,984 | 1 | 0 | MIT | 2018-01-23T20:48:40 | 2017-11-28T10:27:35 | Python | UTF-8 | Python | false | false | 342 | #!/usr/bin/env python3
import sys
from tb import Application, ConfigFromArgs, ConfigFromFile, ConfigFile, ConfigDefault
app = Application(
ConfigFromArgs(
sys.argv,
ConfigFromFile(
ConfigFile(
ConfigFromArgs(
sys.argv,
ConfigDefault({'config': '/etc/manabot.conf'})
)
),
ConfigDefault({})
)
)
)
app.run()
| [
"dron.valyaev@gmail.com"
] | dron.valyaev@gmail.com | |
63cea6c8d102f66e4c4a8db82a3cb4d7ea41b7e5 | b5e4c4e3abb7f87bfd70ecd912810e2562cecdc5 | /section6/venv/Lib/site-packages/aniso8601/resolution.py | e118112f415bc40aef09eecf2ffc5985b075cfd2 | [] | no_license | chandshilpa/flaskapi | a89822707dc02f9c588af04f1f33f82a55b627b3 | 5f229d59d155e68e026566919d292c831ea00ed4 | refs/heads/master | 2022-12-09T10:59:14.563256 | 2019-01-08T17:33:46 | 2019-01-08T17:33:46 | 164,698,842 | 0 | 1 | null | 2022-12-07T16:24:53 | 2019-01-08T17:21:32 | Python | UTF-8 | Python | false | false | 424 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2018, Brandon Nielsen
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
from aniso8601 import compat
class DateResolution(object):
Year, Month, Week, Weekday, Day, Ordinal = list(compat.range(6))
class TimeResolution(object):
Seconds, Minutes, Hours = list(compat.range(3))
| [
"chandsandeep700@gmail.com"
] | chandsandeep700@gmail.com |
3d6124c3ce877101be4b17672474a2c7edb48eb4 | 8dcd3ee098b4f5b80879c37a62292f42f6b2ae17 | /venv/Lib/site-packages/pandas/tests/series/test_period.py | bf3096fd0a0d6d69bae147ecab6fa870afcc4eb3 | [] | no_license | GregVargas1999/InfinityAreaInfo | 53fdfefc11c4af8f5d2b8f511f7461d11a3f7533 | 2e4a7c6a2424514ca0ec58c9153eb08dc8e09a4a | refs/heads/master | 2022-12-01T20:26:05.388878 | 2020-08-11T18:37:05 | 2020-08-11T18:37:05 | 286,821,452 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,977 | py | import numpy as np
import pandas as pd
import pandas._testing as tm
import pytest
from pandas import DataFrame, Period, Series, period_range
from pandas.core.arrays import PeriodArray
class TestSeriesPeriod:
def setup_method(self, method):
self.series = Series(period_range("2000-01-01", periods=10, freq="D"))
def test_auto_conversion(self):
series = Series(list(period_range("2000-01-01", periods=10, freq="D")))
assert series.dtype == "Period[D]"
series = pd.Series(
[pd.Period("2011-01-01", freq="D"), pd.Period("2011-02-01", freq="D")]
)
assert series.dtype == "Period[D]"
def test_getitem(self):
assert self.series[1] == pd.Period("2000-01-02", freq="D")
result = self.series[[2, 4]]
exp = pd.Series(
[pd.Period("2000-01-03", freq="D"), pd.Period("2000-01-05", freq="D")],
index=[2, 4],
dtype="Period[D]",
)
tm.assert_series_equal(result, exp)
assert result.dtype == "Period[D]"
def test_isna(self):
# GH 13737
s = Series([pd.Period("2011-01", freq="M"), pd.Period("NaT", freq="M")])
tm.assert_series_equal(s.isna(), Series([False, True]))
tm.assert_series_equal(s.notna(), Series([True, False]))
def test_fillna(self):
# GH 13737
s = Series([pd.Period("2011-01", freq="M"), pd.Period("NaT", freq="M")])
res = s.fillna(pd.Period("2012-01", freq="M"))
exp = Series([pd.Period("2011-01", freq="M"), pd.Period("2012-01", freq="M")])
tm.assert_series_equal(res, exp)
assert res.dtype == "Period[M]"
def test_dropna(self):
# GH 13737
s = Series([pd.Period("2011-01", freq="M"), pd.Period("NaT", freq="M")])
tm.assert_series_equal(s.dropna(), Series([pd.Period("2011-01", freq="M")]))
def test_between(self):
left, right = self.series[[2, 7]]
result = self.series.between(left, right)
expected = (self.series >= left) & (self.series <= right)
tm.assert_series_equal(result, expected)
# ---------------------------------------------------------------------
# NaT support
@pytest.mark.xfail(reason="PeriodDtype Series not supported yet")
def test_NaT_scalar(self):
series = Series([0, 1000, 2000, pd._libs.iNaT], dtype="period[D]")
val = series[3]
assert pd.isna(val)
series[2] = val
assert pd.isna(series[2])
def test_NaT_cast(self):
result = Series([np.nan]).astype("period[D]")
expected = Series([pd.NaT], dtype="period[D]")
tm.assert_series_equal(result, expected)
def test_set_none(self):
self.series[3] = None
assert self.series[3] is pd.NaT
self.series[3:5] = None
assert self.series[4] is pd.NaT
def test_set_nan(self):
# Do we want to allow this?
self.series[5] = np.nan
assert self.series[5] is pd.NaT
self.series[5:7] = np.nan
assert self.series[6] is pd.NaT
def test_intercept_astype_object(self):
expected = self.series.astype("object")
df = DataFrame({"a": self.series, "b": np.random.randn(len(self.series))})
result = df.values.squeeze()
assert (result[:, 0] == expected.values).all()
df = DataFrame({"a": self.series, "b": ["foo"] * len(self.series)})
result = df.values.squeeze()
assert (result[:, 0] == expected.values).all()
def test_align_series(self, join_type):
rng = period_range("1/1/2000", "1/1/2010", freq="A")
ts = Series(np.random.randn(len(rng)), index=rng)
ts.align(ts[::2], join=join_type)
def test_truncate(self):
# GH 17717
idx1 = pd.PeriodIndex(
[pd.Period("2017-09-02"), pd.Period("2017-09-02"), pd.Period("2017-09-03")]
)
series1 = pd.Series([1, 2, 3], index=idx1)
result1 = series1.truncate(after="2017-09-02")
expected_idx1 = pd.PeriodIndex(
[pd.Period("2017-09-02"), pd.Period("2017-09-02")]
)
tm.assert_series_equal(result1, pd.Series([1, 2], index=expected_idx1))
idx2 = pd.PeriodIndex(
[pd.Period("2017-09-03"), pd.Period("2017-09-02"), pd.Period("2017-09-03")]
)
series2 = pd.Series([1, 2, 3], index=idx2)
result2 = series2.sort_index().truncate(after="2017-09-02")
expected_idx2 = pd.PeriodIndex([pd.Period("2017-09-02")])
tm.assert_series_equal(result2, pd.Series([2], index=expected_idx2))
@pytest.mark.parametrize(
"input_vals",
[
[Period("2016-01", freq="M"), Period("2016-02", freq="M")],
[Period("2016-01-01", freq="D"), Period("2016-01-02", freq="D")],
[
Period("2016-01-01 00:00:00", freq="H"),
Period("2016-01-01 01:00:00", freq="H"),
],
[
Period("2016-01-01 00:00:00", freq="M"),
Period("2016-01-01 00:01:00", freq="M"),
],
[
Period("2016-01-01 00:00:00", freq="S"),
Period("2016-01-01 00:00:01", freq="S"),
],
],
)
def test_end_time_timevalues(self, input_vals):
# GH 17157
# Check that the time part of the Period is adjusted by end_time
# when using the dt accessor on a Series
input_vals = PeriodArray._from_sequence(np.asarray(input_vals))
s = Series(input_vals)
result = s.dt.end_time
expected = s.apply(lambda x: x.end_time)
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("input_vals", [("2001"), ("NaT")])
def test_to_period(self, input_vals):
# GH 21205
expected = Series([input_vals], dtype="Period[D]")
result = Series([input_vals], dtype="datetime64[ns]").dt.to_period("D")
tm.assert_series_equal(result, expected)
| [
"44142880+GregVargas1999@users.noreply.github.com"
] | 44142880+GregVargas1999@users.noreply.github.com |
9058c8094f9714649dceace28ed2e34cb079ee65 | 86857aa31757eb76afbbb6e1f803ebfb09375dd9 | /leetcode/leetcode208.py | 3af676c22ac75436e566917418a9800d87fc0c44 | [] | no_license | jingxiufenghua/algorithm_homework | 075efb3122e20411141d64c8e25d97411a2c7a1c | 5cd8a6c99c463ce01f512379bcb265b7f0b99885 | refs/heads/master | 2023-05-14T15:59:20.272453 | 2021-06-04T02:43:53 | 2021-06-04T02:43:53 | 337,891,553 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 798 | py | import collections
class Node(object):
def __init__(self):
self.children = collections.defaultdict(Node)
self.isword = False
class Trie(object):
def __init__(self):
self.root = Node()
def insert(self, word):
current = self.root
for w in word:
current = current.children[w]
current.isword = True
def search(self, word):
current = self.root
for w in word:
current = current.children.get(w)
if current == None:
return False
return current.isword
def startsWith(self, prefix):
current = self.root
for w in prefix:
current = current.children.get(w)
if current == None:
return False
return True
| [
"2450392436@qq.com"
] | 2450392436@qq.com |
48c1a46f49c2ca8c340460b91ea52a5cd294d71c | 839fb68043bd3a827f6ed6d123844922419284e9 | /Chapter09/named_font_demo.py | 487217ec0f1a2a61dfcf1c7648af3d62d5a52c56 | [
"MIT"
] | permissive | LihengGong/Python-GUI-Programming-with-Tkinter-2E | 15220f2487686a04c82451fd212b6fc6095a888a | 9e9c7468982992d87358be09c11c2cfaaaecd615 | refs/heads/main | 2023-08-25T12:18:10.976232 | 2021-10-20T19:49:01 | 2021-10-20T19:49:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 605 | py | import tkinter as tk
from tkinter import font
root = tk.Tk()
for name in font.names():
font_obj = font.nametofont(name)
tk.Label(root, text=name, font=font_obj).pack()
namedfont = tk.StringVar()
family = tk.StringVar()
size = tk.IntVar()
tk.OptionMenu(root, namedfont, *font.names()).pack()
tk.OptionMenu(root, family, *font.families()).pack()
tk.Spinbox(root, textvariable=size, from_=6, to=128).pack()
def setFont():
font_obj = font.nametofont(namedfont.get())
font_obj.configure(family=family.get(), size=size.get())
tk.Button(root, text='Change', command=setFont).pack()
root.mainloop()
| [
"me@alandmoore.com"
] | me@alandmoore.com |
a0d98bf52dab4cccef405a7ad9db5e5e13ff8a44 | 1a54763c0774679bffa193db3f41781ca68b0e96 | /concurrence/day04/thread_server.py | 977cf317fd26cc11412820949f9c7163ac61e832 | [] | no_license | RRCHcc/python_net | 137e6e50b5cd1c71a9decdd1ba18509177ba2f4e | 795d4e56e49101c3c0a81230a1d928454ddd2544 | refs/heads/master | 2020-05-30T10:54:47.744245 | 2019-06-01T03:17:35 | 2019-06-01T03:17:35 | 189,685,307 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,169 | py | """
基于threading的多线程网络并发
重点代码
1. 创建监听套接字
2. 循环接收客户端连接请求
3. 当有新的客户端连接创建线程处理客户端请求
4. 主线程继续等待其他客户端连接
5. 当客户端退出,则对应分支线程退出
"""
from socket import *
from threading import Thread
import os, sys
def handle(c):
print("客户端:", c.getpeername())
while True:
data = c.recv(1024)
if not data:
break
print(data.decode())
c.send(b"OK")
c.close()
# 创建监听套接字
HOST = "0.0.0.0"
PORT = 44447
ADDR = (HOST, PORT)
s = socket()
s.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
s.bind(ADDR)
s.listen(3)
print("Listen the port 44447...")
# 循环等待客户端连接
while True:
try:
c, addr = s.accept()
except KeyboardInterrupt:
sys.exit("退出服务器") # 退出进程
except Exception as e:
print(e)
continue
# 创建新的线程处理客户端请求
t = Thread(target=handle, args=(c,))
t.setDaemon(True) # 分支线程随主线程退出
t.start()
| [
"2570629639@qq.com"
] | 2570629639@qq.com |
b64b5c88b20511547667b6a44d8a76558febd6fc | 2153a7ecfa69772797e379ff5642d52072a69b7c | /library/test/test_compiler/testcorpus/60_try_except2.py | 4e4922306ec4d45af73a96485697855a890c93e1 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause",
"Python-2.0"
] | permissive | KCreate/skybison | a3789c84541f39dc6f72d4d3eb9783b9ed362934 | d1740e08d8de85a0a56b650675717da67de171a0 | refs/heads/trunk | 2023-07-26T04:50:55.898224 | 2021-08-31T08:20:46 | 2021-09-02T19:25:08 | 402,908,053 | 1 | 0 | NOASSERTION | 2021-09-03T22:05:57 | 2021-09-03T22:05:57 | null | UTF-8 | Python | false | false | 125 | py | # Copyright (c) Facebook, Inc. and its affiliates. (http://www.facebook.com)
try:
a
except Exc:
b
except Exc2:
c
| [
"emacs@fb.com"
] | emacs@fb.com |
eb7d099aa8ea32a713245571c139a2c0b88358e4 | d05c946e345baa67e7894ee33ca21e24b8d26028 | /machine-learning/blur-faces/blur_faces.py | dfe4f59dbc9e72ed7e87a6080f10cc661b823695 | [
"MIT"
] | permissive | x4nth055/pythoncode-tutorials | 327255550812f84149841d56f2d13eaa84efd42e | d6ba5d672f7060ba88384db5910efab1768c7230 | refs/heads/master | 2023-09-01T02:36:58.442748 | 2023-08-19T14:04:34 | 2023-08-19T14:04:34 | 199,449,624 | 1,858 | 2,055 | MIT | 2023-08-25T20:41:56 | 2019-07-29T12:35:40 | Jupyter Notebook | UTF-8 | Python | false | false | 1,882 | py | import cv2
import numpy as np
import sys
# https://raw.githubusercontent.com/opencv/opencv/master/samples/dnn/face_detector/deploy.prototxt
prototxt_path = "weights/deploy.prototxt.txt"
# https://raw.githubusercontent.com/opencv/opencv_3rdparty/dnn_samples_face_detector_20180205_fp16/res10_300x300_ssd_iter_140000_fp16.caffemodel
model_path = "weights/res10_300x300_ssd_iter_140000_fp16.caffemodel"
# load Caffe model
model = cv2.dnn.readNetFromCaffe(prototxt_path, model_path)
# get the image file name from the command line
image_file = sys.argv[1]
# read the desired image
image = cv2.imread(image_file)
# get width and height of the image
h, w = image.shape[:2]
# gaussian blur kernel size depends on width and height of original image
kernel_width = (w // 7) | 1
kernel_height = (h // 7) | 1
# preprocess the image: resize and performs mean subtraction
blob = cv2.dnn.blobFromImage(image, 1.0, (300, 300), (104.0, 177.0, 123.0))
# set the image into the input of the neural network
model.setInput(blob)
# perform inference and get the result
output = np.squeeze(model.forward())
for i in range(0, output.shape[0]):
confidence = output[i, 2]
# get the confidence
# if confidence is above 40%, then blur the bounding box (face)
if confidence > 0.4:
# get the surrounding box cordinates and upscale them to original image
box = output[i, 3:7] * np.array([w, h, w, h])
# convert to integers
start_x, start_y, end_x, end_y = box.astype(np.int)
# get the face image
face = image[start_y: end_y, start_x: end_x]
# apply gaussian blur to this face
face = cv2.GaussianBlur(face, (kernel_width, kernel_height), 0)
# put the blurred face into the original image
image[start_y: end_y, start_x: end_x] = face
cv2.imshow("image", image)
cv2.waitKey(0)
cv2.imwrite("image_blurred.jpg", image) | [
"fullclip@protonmail.com"
] | fullclip@protonmail.com |
fd7413e2f751d25f991c8131197624b90234bd14 | 5fbdbbd4d1f5b0f7c729f355d3ab930d7b55a726 | /dataDriver_text2.py | b0e45f24e59e20f51e59722b2bc0a20a59bf8bcc | [] | no_license | Shuimoningxiang/untitled | 28fb6b4b87116899ba907cca830e0e2119671546 | b3a7ca3de754a0173ed52e47012c279a91a64763 | refs/heads/master | 2021-09-04T18:52:19.162715 | 2018-01-21T09:48:53 | 2018-01-21T09:48:53 | 118,321,047 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 527 | py | import csv
import os
# path = os.path.dirname(__file__)
# final_path=path+"\date\huiyuan.csv"
# print(final_path)
def readData():
path = os.path.dirname(__file__)
final_path = path + "\date\huiyuan.csv"
print(final_path)
result=[]
#file=open(final_path,'r')
with open(final_path,'r') as file:
table=csv.reader(file)
for i in table:
result.append(i)
# print(i)
#file.clse()
return result
abcd=readData()
for i in abcd:
for i2 in i:
print(i2)
| [
"51Testing"
] | 51Testing |
41307a9f906dff5abb6d4e40b6ba58cd075e4c69 | a7a54bd1ac92e6054a67f8db0bde14e7eb77c7d1 | /2-5a.py | 4dbf24765a421fe8c3f79e0e5a8dc157981384ae | [] | no_license | xjr7670/corePython | 88bc2f1f05e293b8ce0b7aa8acb8abcf37a8252a | 1692cfc327ea9a065ee83b6b63005a8bf53001ba | refs/heads/master | 2021-01-21T12:58:56.683048 | 2016-05-18T08:36:46 | 2016-05-18T08:36:46 | 49,134,716 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 89 | py | #!/usr/bin/env python
#-*- coding:utf-8 -*-
n = 0
while n <= 10:
print n
n += 1
| [
"xjr30226@126.com"
] | xjr30226@126.com |
f15daef86f2215d4497e0a7d238f99f873f1f3aa | 1a5a9bfa6ee62c328fc6ab828ad743c555b0f23a | /catagory/JianzhiOffer/stage-02/0365-count-1-in-binary.py | 02c5d9bde729f707f70f94ba181e303c223f577a | [] | no_license | zzy1120716/my-nine-chapter | 04b3e4d43a0d8086e5c958b81a3dc4356622d65f | c7bf3eed366b91d6bdebb79d0f11680cf7c18344 | refs/heads/master | 2020-03-30T03:07:14.748145 | 2019-05-15T13:07:44 | 2019-05-15T13:07:44 | 150,670,072 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 783 | py | """
365. 二进制中有多少个1
中文English
计算在一个 32 位的整数的二进制表示中有多少个 1。
样例
样例 1:
输入:32
输出:1
解释:
32(100000),返回 1。
样例 2:
输入:5
输出:2
解释:
5(101),返回 2。
挑战
如果整数有 n 位,并且有 m 位个 1。你能在 O(m) 的时间内解决它吗?
"""
class Solution:
"""
@param: num: An integer
@return: An integer
"""
def countOnes(self, num):
# write your code here
ones = 0
for i in range(32):
# % 2
ones += num & 1
# // 2
num >>= 1
return ones
if __name__ == '__main__':
# 32
print(Solution().countOnes(-1))
# 1
print(Solution().countOnes(256))
| [
"zzy1120716@126.com"
] | zzy1120716@126.com |
e3ac173a9d20800dfaa66659a5124a52142bea7e | 9405aa570ede31a9b11ce07c0da69a2c73ab0570 | /aliyun-python-sdk-drds/aliyunsdkdrds/request/v20190123/DescribeBroadcastTablesRequest.py | d33911f1b7888ef609601e7b7c689d0667b53a31 | [
"Apache-2.0"
] | permissive | liumihust/aliyun-openapi-python-sdk | 7fa3f5b7ea5177a9dbffc99e73cf9f00e640b72b | c7b5dd4befae4b9c59181654289f9272531207ef | refs/heads/master | 2020-09-25T12:10:14.245354 | 2019-12-04T14:43:27 | 2019-12-04T14:43:27 | 226,002,339 | 1 | 0 | NOASSERTION | 2019-12-05T02:50:35 | 2019-12-05T02:50:34 | null | UTF-8 | Python | false | false | 1,841 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class DescribeBroadcastTablesRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Drds', '2019-01-23', 'DescribeBroadcastTables','drds')
def get_DbName(self):
return self.get_query_params().get('DbName')
def set_DbName(self,DbName):
self.add_query_param('DbName',DbName)
def get_Query(self):
return self.get_query_params().get('Query')
def set_Query(self,Query):
self.add_query_param('Query',Query)
def get_PageSize(self):
return self.get_query_params().get('PageSize')
def set_PageSize(self,PageSize):
self.add_query_param('PageSize',PageSize)
def get_CurrentPage(self):
return self.get_query_params().get('CurrentPage')
def set_CurrentPage(self,CurrentPage):
self.add_query_param('CurrentPage',CurrentPage)
def get_DrdsInstanceId(self):
return self.get_query_params().get('DrdsInstanceId')
def set_DrdsInstanceId(self,DrdsInstanceId):
self.add_query_param('DrdsInstanceId',DrdsInstanceId) | [
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
dc4be56ccb5ec62054aa914932f6e87516dcd00d | 0d734e7ad1f230d7a93aedd80abf7edb1aa31231 | /backend/manage.py | 8107d7ff914cb8f357397d2a87da2a4de9151c6c | [] | no_license | crowdbotics-apps/tuto-22970 | cda398d8b4ff71f5dee6683afd35eb37fac1813a | 3f8cc18d5e2e36aa98a097ec46d32f7dafb3dae0 | refs/heads/master | 2023-01-24T05:47:50.562325 | 2020-11-25T08:16:04 | 2020-11-25T08:16:04 | 315,871,655 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 630 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tuto_22970.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == "__main__":
main()
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
372099bae0f1de2a220ba354ff3740ff7033ab95 | c138dd6db6969112188ff330dd99dcabcc1718bb | /flap/views.py | 2ff8048b3b37b5ec221ef1fc2fc3e21e96778ddf | [] | no_license | mnebuerquo/flip-flap | 946c3ac8226fa42bce2578048d6e29fcef862690 | 436fd42b80d5c27c426cc887192696854b849703 | refs/heads/master | 2021-01-23T04:33:56.600476 | 2017-03-26T16:37:29 | 2017-03-26T16:37:29 | 86,210,312 | 0 | 0 | null | 2017-03-26T05:38:29 | 2017-03-26T05:38:29 | null | UTF-8 | Python | false | false | 107 | py | from django.shortcuts import render
def index(request):
return render (request, "flap/index.html")
| [
"jasonemize@gmail.com"
] | jasonemize@gmail.com |
f66168c7f02a410dbb138535e1f3375b0ccbae9d | 87bd02d63966ed1539d107497b8fdbf931b02121 | /2018/07/aoc2018_07_part1.py | 1d03f024bb78b9ff72538dc5542137e8fd5f4757 | [] | no_license | kajott/adventofcode | c4764d97d4ad4045a7f055862a11077c7e155ea3 | 60f51bce5de5e94eb3763970f0524d281bc1978b | refs/heads/master | 2023-01-06T16:27:23.716873 | 2022-12-29T13:56:47 | 2022-12-29T13:56:47 | 161,079,423 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 246 | py | import re,collections as C
D=C.defaultdict
l,g,r=D(int),D(set),""
for a,b in(re.findall(r'\b\w\b',x)for x in open("input.txt")):g[a]|={b};l[a]+=0;l[b]+=1
while l:
x=min((c,x)for x,c in l.items())[1]
for y in g[x]:l[y]-=1
r+=x;del l[x]
print r
| [
"keyj@emphy.de"
] | keyj@emphy.de |
ffc10c964ddda8442bd5f414c795f7f8c76c2c05 | 9c3765dba0b249eb0a8da92076d2ae01291fc0e7 | /not_done/py_not_started/euler_306.py | 391ec906144e93d97cc62d77050ca4dc0c03555d | [] | no_license | saetar/pyEuler | 3a021f95a1856775bef87b38c753049b04282b80 | f0af7092e16c2109028b4b1aa5bed7a0057d3fe9 | refs/heads/master | 2020-03-21T12:05:15.430454 | 2018-06-15T03:50:50 | 2018-06-15T03:50:50 | 138,535,115 | 0 | 0 | null | 2018-06-25T02:40:43 | 2018-06-25T02:40:42 | null | UTF-8 | Python | false | false | 1,279 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# ~ Jesse Rubin ~ project Euler ~
"""
Paper-strip Game
http://projecteuler.net/problem=306
The following game is a classic example of Combinatorial Game Theory:
Two players start with a strip of n white squares and they take alternate turns.
On each turn, a player picks two contiguous white squares and paints them black.
The first player who cannot make a move loses.
If n = 1, there are no valid moves, so the first player loses automatically.
If n = 2, there is only one valid move, after which the second player loses.
If n = 3, there are two valid moves, but both leave a situation where the second player loses.
If n = 4, there are three valid moves for the first player; she can win the game by painting the two middle squares.
If n = 5, there are four valid moves for the first player (shown below in red); but no matter what she does, the second player (blue) wins.
So, for 1 ≤ n ≤ 5, there are 3 values of n for which the first player can force a win.
Similarly, for 1 ≤ n ≤ 50, there are 40 values of n for which the first player can force a win.
For 1 ≤ n ≤ 1 000 000, how many values of n are there for which the first player can force a win?
"""
def p306():
    """Project Euler 306 (Paper-strip Game): solution not yet implemented."""
    pass

if __name__ == '__main__':
    p306()
"jessekrubin@gmail.com"
] | jessekrubin@gmail.com |
c0f66b8b7bbe8922ed6c8bcc3fa84e62fd9fccf7 | 90df3cbdea7146a62e55cd74366aac8601d5da27 | /test_geweke.py | e89b12df62af6dd4defe1db8231750c4075a3e1f | [] | no_license | mattjj/gslrandom | fd7d13fea77640078b6a40b510136507399097f8 | 98eee548bdd14680393ac53cfc2536dd67e0efb3 | refs/heads/master | 2021-01-21T20:16:24.504708 | 2015-06-10T21:26:30 | 2015-06-10T21:47:29 | 36,503,828 | 0 | 0 | null | 2015-05-29T12:45:05 | 2015-05-29T12:45:05 | null | UTF-8 | Python | false | false | 2,136 | py | """
A simple Geweke test. I'm afraid there might be a slight bias toward
sampling the first entry in the multinomial.
"""
import os
import numpy as np
np.random.seed(1234)
from scipy.stats import probplot, beta
import matplotlib.pyplot as plt
from pybasicbayes.distributions import Multinomial
from pybasicbayes.util.text import progprint_xrange
from gslrandom import multinomial_par, multinomial,PyRNG, get_omp_num_threads
# Thread count: honor an explicit OMP_NUM_THREADS override, otherwise ask
# gslrandom for the OpenMP default.
if "OMP_NUM_THREADS" in os.environ:
    num_threads = int(os.environ["OMP_NUM_THREADS"])
else:
    num_threads = get_omp_num_threads()
assert num_threads > 0
# Fix: removed a leftover `import ipdb; ipdb.set_trace()` breakpoint here --
# it halted every non-interactive run of this script.

# Choose random seeds, one RNG per worker thread
seeds = np.random.randint(2**16, size=num_threads)
pyrngs = [PyRNG(seed) for seed in seeds]

# Test dimensions: K categories, N multinomial rows with Poisson(10) counts
alpha = 1.
K = 3
N = 100
Ns = np.random.poisson(10, size=N).astype(np.uint32)
# Ns = np.ones(N).astype(np.uint32)

# Sample model
dirichlet = Multinomial(alphav_0=alpha*np.ones(K), K=K)
X = np.zeros((N, K), dtype=np.uint32)
multinomial_par(pyrngs, Ns, dirichlet.weights * np.ones((N,K)), X)
# Run the same Gibbs chain once per sampler implementation and Q-Q-plot the
# sampled Dirichlet weight marginals against the analytic
# Beta(alpha, (K-1)*alpha) marginal.
# NOTE: this file is Python 2 (print statements, xrange).
N_iter = 50000
samplers = ["numpy", "multinomial", "multinomial_par"]
fig = plt.figure()
for i,sampler in enumerate(samplers):
    print "Testing ", sampler
    ps = []
    for itr in progprint_xrange(N_iter, perline=50):
        # Resample the dirichlet
        dirichlet.resample(X)
        # Resample X with the implementation under test
        if sampler == "numpy":
            for n,x in zip(Ns,X):
                x[:] = np.random.multinomial(n, dirichlet.weights)
        elif sampler == "multinomial":
            multinomial(pyrngs[0], Ns, dirichlet.weights * np.ones((N,K)), out=X)
        elif sampler == "multinomial_par":
            multinomial_par(pyrngs, Ns, dirichlet.weights * np.ones((N,K)), X)
        else:
            raise Exception("invalid sampler")
        # Get sample
        ps.append(dirichlet.weights.copy())
    ps = np.array(ps)
    print np.mean(ps, axis=0)
    print np.std(ps, axis=0)
    # one probability plot per component, arranged sampler-by-sampler
    for k in xrange(K):
        ax = fig.add_subplot(K,len(samplers),i*K+k+1)
        marg_p = beta(alpha, (K-1)*alpha)
        probplot(ps[:,k], dist=marg_p, plot=ax)
        ax.set_title(sampler + "_%d" % k)
plt.show()
"scott.linderman@gmail.com"
] | scott.linderman@gmail.com |
6c78ccfccb734995304f041d3de5d2726e9d1b63 | 1a220abd21c56728aa3368534506bfc9ced8ad46 | /95.COS/2급/모의고사 3회/03.py | 2a1980856327f4a9abae0ae30e2455e9771fbf18 | [] | no_license | JeonJe/Algorithm | 0ff0cbf47900e7877be077e1ffeee0c1cd50639a | 6f8da6dbeef350f71b7c297502a37f87eb7d0823 | refs/heads/main | 2023-08-23T11:08:17.781953 | 2023-08-23T08:31:41 | 2023-08-23T08:31:41 | 197,085,186 | 0 | 0 | null | 2023-02-21T03:26:41 | 2019-07-15T23:22:55 | Python | UTF-8 | Python | false | false | 1,515 | py | # 체조 경기를 진행하고 있습니다. 지금 연기한 선수의 연기 완성도를 채점하는 E점수를 결정하려고
# 합니다. E심판은 모두 6명이며, 각 심판들은 100점 만점에서 시작하여 실수에 따라 점수를 감점합
# 니다. E심판의 점수 중 최고점과 최저점을 제외하고 나머지 심판들의 점수 평균을 최종 E점수로
# 정합니다. 단, 이때 소수점 이하는 버립니다.
# 예를 들어 6명의 E심판이 채점한 점수가 [90, 80, 70, 85, 100, 90]라면, 가장 높은 점수인 100점
# 과 가장 낮은 점수인 70점을 제외하고 나머지 점수의 평균을 구하게 되면 85점입니다. 소수점 이
# 하를 버리게 되면 85점이 최종 점수가 됩니다.
# E심판이 채점한 점수가 담긴 리스트 scores가 매개변수로 주어질 때 최종 E점수를 return하도록
# solution 함수를 작성해 주세요.
#다음과 같이 import를 사용할 수 있습니다.
#import math
def solution(scores):
    """Return the final E-score.

    Drop exactly one highest and one lowest judge mark, average the remaining
    marks, and truncate the decimals (floor), as the problem statement asks.

    scores -- list of the six judges' marks (non-negative ints); must have
              more than two entries.
    """
    size = len(scores)
    sscores = sorted(scores)
    # Fixes over the original: (1) precedence bug -- it computed
    # `total / size - 2` instead of `total / (size - 2)`; (2) it always
    # returned the unused `answer = 0`; (3) leftover debug prints removed.
    # NOTE(review): the problem text's example claims 85, but
    # (90+80+85+90) // 4 == 86 -- the statement's arithmetic looks off.
    answer = (sum(sscores) - sscores[0] - sscores[-1]) // (size - 2)
    return answer
# Below is driver code for trying the sample test case.
scores = [90, 80, 70, 85, 100, 90]
ret = solution(scores)
# Press [Run] to see the output.
print("solution 함수의 반환 값은", ret, "입니다.")
| [
"43032391+JeonJe@users.noreply.github.com"
] | 43032391+JeonJe@users.noreply.github.com |
d6fa91da5175175095b83b4070163dce36509ec6 | cc08e9349a14620409dee0bdcf1420976352cf0d | /04_algorithm/06day/걍 품/시간개념.py | 88a829bdbc1897644a4272fafeda435eef97a762 | [] | no_license | Nyapy/TIL | b3f611177d3c54d224c9983b5bedc62abddeeaae | c3c52ad33963628674de4c1dcf8aed53f67af177 | refs/heads/master | 2023-01-11T19:37:02.693660 | 2020-07-04T04:31:41 | 2020-07-04T04:31:41 | 195,938,004 | 2 | 2 | null | 2023-01-07T11:25:27 | 2019-07-09T05:22:47 | Python | UTF-8 | Python | false | false | 879 | py | import sys
sys.stdin = open('시간개념.txt')

T = int(input())
for tc in range(T):
    # Each test case: a start time and an end time, both "HH:MM:SS".
    start = list(map(int, input().split(':')))
    end = list(map(int, input().split(':')))
    start_sec = 3600 * start[0] + 60 * start[1] + start[2]
    end_sec = 3600 * end[0] + 60 * end[1] + end[2]
    # Elapsed seconds from start to end; when end is not strictly later,
    # assume the clock wrapped past midnight (equal times therefore count as
    # a full 24 h, exactly as the original branches behaved).
    if start_sec < end_sec:
        elapsed = end_sec - start_sec
    else:
        elapsed = 24 * 3600 - start_sec + end_sec
    # Fixes: both branches duplicated the hour/minute/sec computation, and a
    # `time` list was built but never used -- dead code removed.
    hour, rem = divmod(elapsed, 3600)
    minute, sec = divmod(rem, 60)
    print("{:02}:{:02}:{:02}".format(hour, minute, sec))
| [
"nyapy@naver.com"
] | nyapy@naver.com |
df7b984513c24df772aa36fd5577fb62ddef4f6b | 317d199d36556ecf5da06c660cb5cb655a86ea09 | /Challenges/rock_paper_scissors/rps.py | 2c085d2ad3e8be9dc50b3dedb02c4919fd2764ed | [] | no_license | baubrun/Challenges-PY | e109126a64a20128202e03c2ed359c179f523dcd | e2ca45cbca264f5790ce303807e25810a5d8d977 | refs/heads/master | 2022-12-17T03:24:43.308680 | 2020-09-14T12:37:24 | 2020-09-14T12:37:24 | 262,485,260 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,297 | py | """
Rock, Paper, Scissors
Create a function which takes two strings
(p1 and p2 — which represent player 1 and )
as arguments and returns a string stating the winner
in a game of Rock, Paper, Scissors.
Each argument will contain a single string:
"Rock", "Paper", or "Scissors".
Return the winner according to the following rules:
Rock beats Scissors
Scissors beats Paper
Paper beats Rock
If p1 wins, return the string
"The winner is p1". If p2 wins,
return the string "The winner is p2" and if p1 and p2
are the same, return "It's a draw".
Examples
rps("Rock", "Paper") ➞ "The winner is p2"
rps("Scissors", "Paper") ➞ "The winner is p1"
rps("Paper", "Paper") ➞ "It's a draw"
Notes
All inputs will be valid strings.
"""
def rps(p1, p2):
    """Decide one round of Rock-Paper-Scissors.

    p1, p2 -- "Rock", "Paper" or "Scissors" for players 1 and 2.
    Returns "The winner is p1", "The winner is p2", or "It's a draw".
    """
    if p1 == p2:
        return "It's a draw"
    order = ["Paper", "Rock", "Scissors"]
    me = order.index(p1)
    opponent = order.index(p2)
    # In this ordering every hand beats its immediate successor (mod 3):
    # Paper beats Rock, Rock beats Scissors, Scissors beats Paper.
    if (opponent - me) % 3 == 1:
        return "The winner is p1"
    return "The winner is p2"
| [
"baubelf@gmail.com"
] | baubelf@gmail.com |
dd1a5f7306470c09c8a4e5d4fe2278049dc1ce9d | 22bf910b64283b3c15cc4d80542e83fa89e9f09d | /monero_glue/messages/WipeDevice.py | 57631812883ce5dcd80f84abc1ac8053d0c0c93c | [
"MIT"
] | permissive | ph4r05/monero-agent | 24ed1aa17d6616b2ae6bcdb7b9997f982f8b7b5d | 0bac0e6f33142b2bb885565bfd1ef8ac04559280 | refs/heads/master | 2022-10-18T06:30:43.550133 | 2021-07-01T16:27:56 | 2021-07-01T16:27:56 | 126,215,119 | 24 | 5 | MIT | 2022-09-23T22:53:44 | 2018-03-21T17:18:21 | Python | UTF-8 | Python | false | false | 309 | py | # Automatically generated by pb2py
# fmt: off
from .. import protobuf as p
if __debug__:
try:
from typing import Dict, List # noqa: F401
from typing_extensions import Literal # noqa: F401
except ImportError:
pass
class WipeDevice(p.MessageType):
    """Generated protobuf message stub ``WipeDevice``: wire type 5, no fields."""
    MESSAGE_WIRE_TYPE = 5
| [
"dusan.klinec@gmail.com"
] | dusan.klinec@gmail.com |
0622bc92b933929fb78f1de42eaa6d2f4aabd814 | 5529b621f65eb855d381932d313c3ca3ed7090f6 | /process.py | 3fefdbc5e4572e2687c390407c0f36aa6bf92646 | [
"MIT"
] | permissive | isabella232/tennis-ages | c39f2d84414890f2ff27537ef0fa1c34ac4476e3 | 8334101e2b1a7484c540be3650d36e7e04a1e40b | refs/heads/master | 2021-06-01T06:03:52.412498 | 2016-04-12T21:43:34 | 2016-04-12T21:43:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 651 | py | #!/usr/bin/env python
from dateutil.parser import parse
from datetime import date
def process():
    """Print "birthdate,age" CSV rows for each date-like line in the file.

    Lines that do not parse as a date, or whose year falls outside
    1941..2004, are skipped silently.  The literal line '16 March 2016'
    (presumably the rankings' publication date -- TODO confirm) is excluded.
    """
    today = date.today()
    with open('USA_-_womens_national_rankings.txt') as f:
        content = f.readlines()
    for line in content:
        line = line.strip()
        try:
            if line != '16 March 2016':
                born = parse(line)
                if born.year > 1940 and born.year < 2005:
                    # subtract 1 if this year's birthday hasn't happened yet
                    age = today.year - born.year - ((today.month, today.day) < (born.month, born.day))
                    # Fix: `print '%s,%s' % ...` was Python 2-only syntax;
                    # this form works on both Python 2 and 3.
                    print('%s,%s' % (born, age))
        except ValueError:
            # non-date line: deliberate best-effort skip
            pass
if __name__ == '__main__':
process()
| [
"davideads@gmail.com"
] | davideads@gmail.com |
a35149d6c702cf84d3536ab6faea773b9a9352c4 | f483545d7765c25d1b315027726dbd74bc77b98a | /pkgTest/test01.py | 6b2ec89adba360cdc59828357cf5ee48a105cf72 | [] | no_license | niceman5/pythonProject | e51b44a50776100a63443d7da850ba4b8b00f5eb | 3589fd200b56f68b856d2b4d2031c2a1135168a0 | refs/heads/master | 2023-07-10T16:12:57.756944 | 2023-06-27T08:13:54 | 2023-06-27T08:13:54 | 135,047,965 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 93 | py | import tomllib
import os
import sys
from ucrmpkg import database
from ucrmpkg import log
| [
"niceman555@gmail.com"
] | niceman555@gmail.com |
60176f9b42f14ac88d509ed03458e028dfa605e5 | 2caa47f0bdb2f03469a847c3ba39496de315d992 | /Contest/Tenka1-2018-Beginner/c/main.py | ce4eaf625bb1b68df7eb761a8a5387da31561e15 | [
"CC0-1.0"
] | permissive | mpses/AtCoder | 9023e44885dc67c4131762281193c24b69d3b6da | 9c101fcc0a1394754fcf2385af54b05c30a5ae2a | refs/heads/master | 2023-03-23T17:00:11.646508 | 2021-03-20T12:21:19 | 2021-03-20T12:21:19 | 287,489,233 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 170 | py | #!/usr/bin/env python3
# Tenka1 2018 Beginner C.  Reads all of stdin at once: the first integer is
# N, the remaining N integers are the sequence.
N, *a = map(int, open(0))
n = N//2 - 1
a.sort()
# Closed-form answer computed from the sorted array: the largest elements
# contribute twice positively, the smallest twice negatively, the two middle
# elements once each, with a correction term for odd N choosing the cheaper
# center.  NOTE(review): formula appears to maximize the sum of absolute
# adjacent differences over all orderings -- intricate; verify before editing.
print(2*sum(a[n+2:]) - 2*sum(a[:n]) + a[n+1] - a[n] - (min(a[n]+a[n+2], 2*a[n+1]) if N%2 else 0))
| [
"nsorangepv@gmail.com"
] | nsorangepv@gmail.com |
4efca2e0d72033c9a358908f0fefa68c97baabe1 | 6f97d4e47b4e8bceb6a43ffe417656c06c077d3e | /remoting/curl.py | 8cd4f1dda3a1ea9c22c0862d599245cf83853dda | [] | no_license | getwingm/remoting | b35a47317e7d8c0b728a4462628663242eac6381 | e5a8dc483ffc0935fde49a75ccf13160f2d7f9a8 | refs/heads/master | 2020-04-03T08:08:07.362362 | 2014-05-22T19:29:20 | 2014-05-22T19:29:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 680 | py | import pycurl
import StringIO
def curl(url, headers=None, socks_host=None, socks_port=None):
    """Fetch `url` with pycurl and return the response body as a string.

    headers    -- optional dict of extra HTTP headers
    socks_host -- optional SOCKS5 proxy host (defaults to 'localhost' when
                  only socks_port is given)
    socks_port -- optional SOCKS5 proxy port; if given, it should be an
                  integer (defaults to 5090 when only socks_host is given)
    """
    c = pycurl.Curl()
    output = StringIO.StringIO()
    try:
        c.setopt(pycurl.URL, url)
        if socks_host or socks_port:
            c.setopt(pycurl.PROXY, socks_host or 'localhost')
            c.setopt(pycurl.PROXYPORT, socks_port or 5090)
            c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
        if headers:
            header_list = ['%s: %s' % (key, val) for key, val in headers.items()]
            c.setopt(pycurl.HTTPHEADER, header_list)
        c.setopt(pycurl.WRITEFUNCTION, output.write)
        c.perform()
        return output.getvalue()
    finally:
        # Fix: the Curl handle was never closed; release the libcurl
        # resources even when perform() raises.
        c.close()
| [
"io@henrian.com"
] | io@henrian.com |
2b7ec4e81348d282b660f2e7f30fc016dede1ddd | 4aa7a4d0525095725eb99843c83827ba4806ceb1 | /my_mini_project/Deep_Learning/Dense/cpi_Dense.py | 762335816729d70a0ee09cf85ececcf612a71eb4 | [] | no_license | seonukim/Study | 65a70f5bdfad68f643abc3086d5c7484bb2439d4 | a5f2538f9ae8b5fc93b5149dd51704e8881f0a80 | refs/heads/master | 2022-12-04T17:04:31.489771 | 2020-08-21T00:35:15 | 2020-08-21T00:35:15 | 260,144,755 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,010 | py | import pandas as pd
import numpy as np
import warnings
warnings.filterwarnings('ignore')
from keras.models import Sequential, load_model
from keras.layers import Dense, Dropout
from keras.layers import LeakyReLU
from keras.callbacks import EarlyStopping
from sklearn.preprocessing import StandardScaler, RobustScaler
from sklearn.preprocessing import MinMaxScaler, MaxAbsScaler
from sklearn.model_selection import train_test_split
# Pick a feature scaler (alternatives left for experimentation):
# scaler = StandardScaler()
# scaler = RobustScaler()
scaler = MinMaxScaler()
# scaler = MaxAbsScaler()
leaky = LeakyReLU(alpha = 0.3)
# stop training once the loss plateaus for 10 epochs
es = EarlyStopping(monitor = 'loss',
                   mode = 'min',
                   patience = 10)
## Data (Korean CPI series, EUC-KR encoded CSVs with a date index column)
train = pd.read_csv('C:/Users/bitcamp/Downloads/'
                    '/cpi_train(1975.01 - 2002.09).csv',
                    index_col = 0, header = 0,
                    encoding = 'cp949')
test = pd.read_csv('C:/Users/bitcamp/Downloads/'
                   '/cpi_test(2002.10 - 2020.05).csv',
                   index_col = 0, header = 0,
                   encoding = 'cp949')
print(train.shape)                  # (213, 13)
print(test.shape)                   # (213, 13)
## Convert to plain NumPy arrays
train = train.values
test = test.values
print(type(train))                  # <class 'numpy.ndarray'>
print(type(test))                   # <class 'numpy.ndarray'>
## 데이터 분할하기
def split_xy(data, time, y_column):
    """Slice a 2-D series into rolling windows plus next-step targets.

    data     -- 2-D array (rows = time steps, cols = features)
    time     -- window length in rows for each input sample
    y_column -- number of subsequent column-0 values used as the target
    Returns (x, y): x has shape (samples, time, features) and y has shape
    (samples, y_column).  Windows that would run past the end are dropped.
    """
    xs, ys = [], []
    n_rows = len(data)
    for start in range(n_rows):
        x_end = start + time
        y_end = x_end + y_column
        if y_end > n_rows:
            break
        xs.append(data[start:x_end, :])
        ys.append(data[x_end:y_end, 0])
    return np.array(xs), np.array(ys)
# Build supervised samples: 5-month windows predicting the next month's CPI.
x, y = split_xy(train, 5, 1)
print(x.shape)                      # (208, 5, 13)
print(y.shape)                      # (208, 1)
## Data preprocessing: chronological 80/20 split (shuffle=False keeps order)
x_train, x_test, y_train, y_test = train_test_split(
    x, y, test_size = 0.2,
    shuffle = False)
print(x_train.shape)                # (166, 5, 13)
print(x_test.shape)                 # (42, 5, 13)
print(y_train.shape)                # (166, 1)
print(y_test.shape)                 # (42, 1)
## Flatten each (5, 13) window to one row for the Dense model
x_train = x_train.reshape(x_train.shape[0], x_train.shape[1] * x_train.shape[2])
x_test = x_test.reshape(x_test.shape[0], x_test.shape[1] * x_test.shape[2])
print(x_train.shape)                # (166, 65)
print(x_test.shape)                 # (42, 65)
## Scaling (fit on the training portion only)
scaler.fit(x_train)
x_train = scaler.transform(x_train)
x_test = scaler.transform(x_test)
print(x_train[0])
# The two quoted-out string literals below are the author's original
# model-building/training code and its sample output, kept for reference.
'''
## 모델링
model = Sequential()
model.add(Dense(100, input_shape = (65, ),
                activation = 'relu'))
model.add(Dropout(rate = 0.1))
model.add(Dense(1, activation = 'relu'))
model.summary()
model.save('./my_mini_project/Dense/Dense_model.h5')
## 컴파일 및 훈련
model.compile(loss = 'mse',
              optimizer = 'rmsprop',
              metrics = ['mse'])
model.fit(x_train, y_train,
          epochs = 1000, batch_size = 1,
          callbacks = [es], verbose = 1)
## 모델 평가 및 예측
res = model.evaluate(x_test, y_test)
print("loss : ", res[0])    # loss : 1.1412532769498371
print("mse : ", res[1])     # mse : 1.1412532329559326
pred = model.predict(x_test)
for i in range(5):
    print('실제값 : ', y_test[i], '예측값 : ', pred[i])
'''
'''
실제값 : [65.164] 예측값 : [66.05616]
실제값 : [65.055] 예측값 : [66.54069]
실제값 : [64.672] 예측값 : [66.896965]
실제값 : [64.452] 예측값 : [66.94108]
실제값 : [65.111] 예측값 : [66.851395]
'''
## Split the test-period data the same way as the training data
a, b = split_xy(test, 5, 1)
print(a.shape)                      # (208, 5, 13)
print(b.shape)                      # (208, 1)
## Preprocess: chronological 80/20 split
a_train, a_test, b_train, b_test = train_test_split(
    a, b, test_size = 0.2, shuffle = False)
print(a_train.shape)                # (166, 5, 13)
print(a_test.shape)                 # (42, 5, 13)
print(b_train.shape)                # (166, 1)
print(b_test.shape)                 # (42, 1)
## Flatten the windows for the Dense model
a_train = a_train.reshape(a_train.shape[0], a_train.shape[1] * a_train.shape[2])
a_test = a_test.reshape(a_test.shape[0], a_test.shape[1] * a_test.shape[2])
print(a_train.shape)                # (166, 65)
print(a_test.shape)                 # (42, 65)
## Scaling
scaler.fit(a_train)
a_train = scaler.transform(a_train)
a_test = scaler.transform(a_test)
print(a_train[0])
## Load the previously saved model
model = load_model('./my_mini_project/Dense/Dense_model.h5')
model.summary()
## Compile and fine-tune on the test-period training split
model.compile(loss = 'mse',
              optimizer = 'adam',
              metrics = ['mse'])
model.fit(a_train, b_train,
          epochs = 1000, batch_size = 2,
          callbacks = [es], verbose = 1)
## Evaluate and print actual-vs-predicted values
res = model.evaluate(a_test, b_test)
print("loss : ", res[0])    # loss :  26.6597063654945
print("mse : ", res[1])     # mse :  26.659706115722656
pred_2 = model.predict(a_test)
for i in range(42):
    print('실제값 : ', b_test[i], '예측값 : ', pred_2[i])
# Sample run output (actual vs predicted CPI), kept by the author as a
# quoted-out string literal for reference.
'''
실제값 : [102.64] 예측값 : [105.10877]
실제값 : [102.92] 예측값 : [105.93016]
실제값 : [102.85] 예측값 : [106.56531]
실제값 : [102.72] 예측값 : [106.543686]
실제값 : [102.83] 예측값 : [107.598434]
실제값 : [102.61] 예측값 : [108.56856]
실제값 : [102.78] 예측값 : [108.38743]
실제값 : [103.37] 예측값 : [109.004524]
실제값 : [103.49] 예측값 : [110.387726]
실제값 : [103.39] 예측값 : [110.835754]
실제값 : [102.62] 예측값 : [111.340324]
실제값 : [102.99] 예측값 : [111.924095]
실제값 : [103.42] 예측값 : [111.57676]
실제값 : [104.21] 예측값 : [110.89941]
실제값 : [104.1] 예측값 : [110.65243]
실제값 : [104.29] 예측값 : [110.05317]
실제값 : [104.34] 예측값 : [110.82879]
실제값 : [104.13] 예측값 : [111.53039]
실제값 : [103.93] 예측값 : [112.151]
실제값 : [104.85] 예측값 : [111.760124]
실제값 : [105.65] 예측값 : [112.12572]
실제값 : [105.46] 예측값 : [111.91189]
실제값 : [104.71] 예측값 : [111.7032]
실제값 : [104.35] 예측값 : [111.20279]
실제값 : [104.24] 예측값 : [110.55778]
실제값 : [104.69] 예측값 : [109.36505]
실제값 : [104.49] 예측값 : [107.22308]
실제값 : [104.87] 예측값 : [104.85908]
실제값 : [105.05] 예측값 : [103.84239]
실제값 : [104.88] 예측값 : [103.84634]
실제값 : [104.56] 예측값 : [103.85918]
실제값 : [104.81] 예측값 : [103.73224]
실제값 : [105.2] 예측값 : [104.46961]
실제값 : [105.46] 예측값 : [104.5829]
실제값 : [104.87] 예측값 : [104.55163]
실제값 : [105.12] 예측값 : [103.92594]
실제값 : [105.79] 예측값 : [103.70546]
실제값 : [105.8] 예측값 : [103.69801]
실제값 : [105.54] 예측값 : [103.21263]
실제값 : [104.95] 예측값 : [102.21549]
실제값 : [104.71] 예측값 : [101.95478]
실제값 : [104.71] 예측값 : [101.35535] <- 2020.07월의 CPI 총 지수
'''
| [
"92.seoonooo@gmail.com"
] | 92.seoonooo@gmail.com |
7d6ed1ced3438d158b7dcef576862fc4d9ad0ad7 | 17f527d6936397270183a35d7097e0a99de16cb5 | /rasen_book/basic_2/rooted_tree.py | 547f143bb94346f4c29666f256d70fd860307d0a | [] | no_license | ryosuke071111/algorithms | e942f043d08c7c7e2c926ed332ee2b8c44bdf0c5 | 867764450cc0f2a709fa2f743d9a0d95001e9296 | refs/heads/master | 2020-05-14T17:14:39.314064 | 2019-04-17T12:58:12 | 2019-04-17T12:58:12 | 181,888,623 | 11 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,520 | py | NIL = -1
class Node:
    """A tree node in left-child / right-sibling representation."""
    def __init__(self):
        # NIL (-1) marks "no parent / no child / no sibling"
        self.parent = self.left = self.right = NIL
def getDepth(u):
    """Return the depth of node u (root = 0) by climbing parent links."""
    depth = 0
    node = u
    # walk upward until a node with no parent is reached
    while T[node].parent != NIL:
        node = T[node].parent
        depth += 1
    return depth
def getChildren(u):
    """Return the ids of u's children, left to right."""
    children = []
    # the first child is u.left; its siblings are chained via .right
    child = T[u].left
    while child != NIL:
        children.append(child)
        child = T[child].right
    return children
# Read a rooted tree (left-child / right-sibling form) and report each
# node's parent, depth, kind (root/leaf/internal) and child list.
n = int(input())
T = [0]*n
for i in range(n):
    T[i] = Node()  # allocate the node list
for i in range(n):
    tmp = list(map(int, input().split()))  # [id, number of children, child ids...]
    id = tmp.pop(0)
    k = tmp.pop(0)
    c = tmp  # what remains is the list of children
    if k != 0:
        for j in range(len(c)):
            T[c[j]].parent = id
        T[id].left = c[0]  # first child becomes this node's left child
        for j in range(len(c)-1):
            T[c[j]].right = c[j+1]  # remaining children chained as right siblings
for i in range(n):
    d = getDepth(i)
    c = getChildren(i)
    if d == 0:
        t = 'root'
    elif c == []:
        t = 'leaf'
    else:
        t = 'internal node'
    print('node ',i,': ','parent = ',T[i].parent,', depth = ',d,', ',t,', ',c,sep = '')
| [
"ryosuke0711993@gmail.com"
] | ryosuke0711993@gmail.com |
11bd35fefb742454ba0670f53928bb4eff176cef | c8a04384030c3af88a8e16de4cedc4ef8aebfae5 | /stubs/pandas/core/groupby/base.pyi | 40914d2704f18b8f4cd75c1dd593da5818d0e209 | [
"MIT"
] | permissive | Accern/accern-xyme | f61fce4b426262b4f67c722e563bb4297cfc4235 | 6ed6c52671d02745efabe7e6b8bdf0ad21f8762c | refs/heads/master | 2023-08-17T04:29:00.904122 | 2023-05-23T09:18:09 | 2023-05-23T09:18:09 | 226,960,272 | 3 | 2 | MIT | 2023-07-19T02:13:18 | 2019-12-09T20:21:59 | Python | UTF-8 | Python | false | false | 775 | pyi | # Stubs for pandas.core.groupby.base (Python 3)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
# pylint: disable=unused-argument,redefined-outer-name,no-self-use,invalid-name
# pylint: disable=relative-beyond-top-level,line-too-long,arguments-differ
# pylint: disable=no-member,too-few-public-methods,keyword-arg-before-vararg
# pylint: disable=super-init-not-called,abstract-method,redefined-builtin
# pylint: disable=unused-import,useless-import-alias,signature-differs
# pylint: disable=blacklisted-name,c-extension-no-member
from typing import Any
class GroupByMixin:
    # Empty structural stub for type checkers (see generated-stub header).
    ...
# Module-level method-name collections used by groupby; typed only as Any here.
plotting_methods: Any
common_apply_whitelist: Any
series_apply_whitelist: Any
dataframe_apply_whitelist: Any
cython_transforms: Any
cython_cast_blacklist: Any
| [
"josua.krause@gmail.com"
] | josua.krause@gmail.com |
e3baf2fee82699caed0d28245f9d8a0a4b2a00e3 | e1b2b4215a08c1ef8df03d68a933f538bcab1176 | /projects/migrations/0001_initial.py | d8d64fe140786f7801ed16527fb13b74f355dde3 | [
"MIT"
] | permissive | wanguinjoka/Awwards | 4ed1d232dbe49167e3b15c3854c2d21455966673 | 23218076075601cb899a8ed28c11c2bd561e8f1c | refs/heads/master | 2020-04-01T20:37:55.332604 | 2018-10-19T14:45:54 | 2018-10-19T14:45:54 | 153,613,092 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,127 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-10-18 16:08
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the ``Site`` model.
    initial = True
    dependencies = [
        # Site.developer is a FK to AUTH_USER_MODEL, so that app migrates first.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Site',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('description', models.TextField()),
                ('site_image', models.ImageField(upload_to='projects/')),
                ('date_posted', models.DateTimeField(default=django.utils.timezone.now)),
                ('site_url', models.CharField(blank=True, max_length=100)),
                ('developer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"wangui.njoka@gmail.com"
] | wangui.njoka@gmail.com |
c57020eb0fb14e7f7a51cb39ffeac16321b07756 | 01fdd206c8c825b30870bdd3f6e75f0aa113b849 | /test/record/parser/test_response_whois_co_ug_property_status_unconfirmed.py | 44fbcad55857c7be4ea983289a09e1f7bed4becc | [
"MIT"
] | permissive | huyphan/pyyawhois | 0fbc5a7d64a53ae6e3393fdc1c7ff0d0ac5f22b5 | 77fb2f73a9c67989f1d41d98f37037406a69d136 | refs/heads/master | 2021-01-23T22:42:55.989651 | 2015-09-19T16:40:06 | 2015-09-19T16:40:06 | 23,335,785 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 926 | py |
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# spec/fixtures/responses/whois.co.ug/property_status_unconfirmed
#
# and regenerate the tests with the following script
#
# $ scripts/generate_tests.py
#
from nose.tools import *
from dateutil.parser import parse as time_parse
import yawhois
class TestWhoisCoUgPropertyStatusUnconfirmed(object):
    """Auto-generated test (see file header): parse a stored whois.co.ug
    response fixture and check the derived status properties."""
    def setUp(self):
        # Build a Record from the raw fixture text for host whois.co.ug.
        fixture_path = "spec/fixtures/responses/whois.co.ug/property_status_unconfirmed.txt"
        host = "whois.co.ug"
        part = yawhois.record.Part(open(fixture_path, "r").read(), host)
        self.record = yawhois.record.Record(None, [part])
    def test_status(self):
        eq_(self.record.status, 'registered')
    def test_available(self):
        eq_(self.record.available, False)
    def test_registered(self):
        eq_(self.record.registered, True)
| [
"dachuy@gmail.com"
] | dachuy@gmail.com |
8fbeb9f6b161fe0dfa306a2b9f8b05576bf7ffa5 | 1dacbf90eeb384455ab84a8cf63d16e2c9680a90 | /lib/python2.7/site-packages/astropy/nddata/tests/test_nddata_base.py | 68b51c55334f9432d9bc581bd6d936d30d2a03ae | [
"Python-2.0",
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-unknown"
] | permissive | wangyum/Anaconda | ac7229b21815dd92b0bd1c8b7ec4e85c013b8994 | 2c9002f16bb5c265e0d14f4a2314c86eeaa35cb6 | refs/heads/master | 2022-10-21T15:14:23.464126 | 2022-10-05T12:10:31 | 2022-10-05T12:10:31 | 76,526,728 | 11 | 10 | Apache-2.0 | 2022-10-05T12:10:32 | 2016-12-15T05:26:12 | Python | UTF-8 | Python | false | false | 1,463 | py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# Tests of NDDataBase
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from ..nddata_base import NDDataBase
from ...tests.helper import pytest
class MinimalSubclass(NDDataBase):
    """Smallest concrete NDDataBase subclass: every property either returns
    None directly or defers to the NDDataBase default implementation."""
    def __init__(self):
        super(MinimalSubclass, self).__init__()
    @property
    def data(self):
        return None
    @property
    def mask(self):
        # defer to the base-class default
        return super(MinimalSubclass, self).mask
    @property
    def unit(self):
        return super(MinimalSubclass, self).unit
    @property
    def wcs(self):
        return super(MinimalSubclass, self).wcs
    @property
    def meta(self):
        return super(MinimalSubclass, self).meta
class MinimalUncertainty(object):
    """Smallest object that quacks like an uncertainty: it stores a value and
    exposes an ``uncertainty_type`` property."""

    _TYPE_LABEL = "totally and completely fake"

    def __init__(self, value):
        self._uncertainty = value

    @property
    def uncertainty_type(self):
        return self._TYPE_LABEL
def test_nddata_base_subclass():
    # The minimal subclass should report the base-class defaults (None) for
    # every passthrough property.
    a = MinimalSubclass()
    assert a.meta is None
    assert a.data is None
    assert a.mask is None
    assert a.unit is None
    assert a.wcs is None
    # An object with an `uncertainty_type` attribute is accepted as-is...
    good_uncertainty = MinimalUncertainty(5)
    a.uncertainty = good_uncertainty
    assert a.uncertainty is good_uncertainty
    # ...while a bare int is rejected -- the TypeError presumably comes from
    # NDDataBase's uncertainty setter (not visible here; confirm upstream).
    bad_uncertainty = 5
    with pytest.raises(TypeError):
        a.uncertainty = bad_uncertainty
| [
"wgyumg@mgail.com"
] | wgyumg@mgail.com |
58d5dde0bff26df07a3835833b1fc51873e23df9 | f3baf8b850c896231b4c254a22567fd5d7a5035c | /Aula 16/web.py | 414a87d388f65424a1221b6bf05f757182fcd896 | [
"MIT"
] | permissive | Katakhan/TrabalhosPython2 | e1c23119ef582038ceea0004c872c00778fd326e | ab47af0ff3c00922857578e58a1a149d9e65e229 | refs/heads/master | 2020-09-21T02:15:04.505791 | 2020-03-19T13:23:41 | 2020-03-19T13:23:41 | 224,650,620 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 221 | py | from flask import Flask, render_template
from Faixa import ler
app = Flask(__name__)
# Render the track-list page.  NOTE(review): `lista = ler` passes the imported
# `ler` object itself, not its result -- confirm whether the template calls it
# or whether `ler()` was intended.
@app.route('/lista')
def listar_faixas():
    return render_template("lista.html", nome = 'Lista de Faixas', lista = ler)
app.run()
"antoniorafaelgastaldi@hotmail.com"
] | antoniorafaelgastaldi@hotmail.com |
46ab140e843abff2a8e2d248dbce509b67ef8b61 | 76fa4bc242502bcd9dfe1053c964318b94acc6d8 | /numpy/append.py | bcd3aa63ba9ec11c6e64df626596ef2678090f2e | [] | no_license | phani-1995/Week3-python_libraries | 720156098ccab5301a58e39a4dd7af5a19a08008 | 1347b8dfd4980b37471a54ce991c967fdcb32e2b | refs/heads/master | 2021-04-01T17:42:54.855954 | 2020-03-23T06:50:18 | 2020-03-23T06:50:18 | 248,204,612 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 134 | py | import numpy as np
# Demonstrate numpy.append: extend the list `a` with six more values,
# producing a new ndarray `x` (the original list is left untouched).
a = [10, 20, 30]
print("The original array is: ", a)
x = np.append(a, list(range(40, 100, 10)))
print("The new array is : ", x)
| [
"phanindrajallavaram@gmail.com"
] | phanindrajallavaram@gmail.com |
c580d300800cfbb77da8334c268c8ffd851841c6 | ad13583673551857615498b9605d9dcab63bb2c3 | /output/instances/msData/datatypes/Facets/NCName/NCName_enumeration004.py | 63150c8aeb21fe8810124ca42d4b34c1600604f2 | [
"MIT"
] | permissive | tefra/xsdata-w3c-tests | 397180205a735b06170aa188f1f39451d2089815 | 081d0908382a0e0b29c8ee9caca6f1c0e36dd6db | refs/heads/main | 2023-08-03T04:25:37.841917 | 2023-07-29T17:10:13 | 2023-07-30T12:11:13 | 239,622,251 | 2 | 0 | MIT | 2023-07-25T14:19:04 | 2020-02-10T21:59:47 | Python | UTF-8 | Python | false | false | 267 | py | from output.models.ms_data.datatypes.facets.ncname.ncname_enumeration004_xsd.ncname_enumeration004 import FooTypeFoo
from output.models.ms_data.datatypes.facets.ncname.ncname_enumeration004_xsd.ncname_enumeration004 import Test
# Instantiate the generated Test element with its single enum member value.
obj = Test(
    foo=FooTypeFoo.FOO
)
| [
"tsoulloftas@gmail.com"
] | tsoulloftas@gmail.com |
d87578492b072196777372c73ee2c551e194668f | 6bcf8b136d45b53e75c0a6a75d8545188acb8190 | /sourmash_lib/fig.py | f07bef297252769c2b0b82042e37022426f2bab7 | [
"LicenseRef-scancode-public-domain",
"BSD-3-Clause"
] | permissive | swamidass/sourmash | 2dab03a86842f868242c18f4b1b6307891eeb389 | fb7a6c1ac1a86ef4adc8b7385c664e947ed1b365 | refs/heads/master | 2021-01-21T06:30:18.211742 | 2017-03-01T05:01:57 | 2017-03-01T05:01:57 | 83,241,616 | 0 | 0 | NOASSERTION | 2022-08-20T14:57:30 | 2017-02-26T21:21:56 | Standard ML | UTF-8 | Python | false | false | 1,900 | py | #! /usr/bin/env python
"""
Make plots using the distance matrix+labels output by ``sourmash compare``.
"""
try:
import numpy
import scipy
import pylab
import scipy.cluster.hierarchy as sch
except (RuntimeError, ImportError):
pass
def load_matrix_and_labels(basefile):
    """Load the comparison matrix and associated labels.

    basefile -- path of the saved matrix; the labels are read from
                ``basefile + '.labels.txt'``, one stripped label per line.
    Returns a square numpy matrix & list of labels.
    """
    # Fix: the original leaked both file handles (open() with no close());
    # context managers guarantee they are released.
    with open(basefile, 'rb') as mat_fp:
        D = numpy.load(mat_fp)
    with open(basefile + '.labels.txt') as label_fp:
        labeltext = [x.strip() for x in label_fp]
    return (D, labeltext)
def plot_composite_matrix(D, labeltext, show_labels=True, show_indices=True,
                          vmax=1.0, vmin=0.0):
    """Build a composite plot showing dendrogram + distance matrix/heatmap.

    D          -- square comparison matrix
    labeltext  -- one label per row/column of D
    vmin, vmax -- color scale bounds for the heatmap
    Returns a matplotlib figure."""
    if show_labels:
        # indices are implied whenever full labels are requested
        show_indices = True
    fig = pylab.figure(figsize=(11, 8))
    ax1 = fig.add_axes([0.09, 0.1, 0.2, 0.6])
    # plot dendrogram from single-linkage clustering of D
    Y = sch.linkage(D, method='single')  # centroid
    dendrolabels = labeltext
    if not show_labels:
        # fall back to numeric indices as leaf labels
        dendrolabels = [str(i) for i in range(len(labeltext))]
    Z1 = sch.dendrogram(Y, orientation='left', labels=dendrolabels,
                        no_labels=not show_indices)
    ax1.set_xticks([])
    # heatmap geometry; shift left when no label column is needed
    xstart = 0.45
    width = 0.45
    if not show_labels:
        xstart = 0.315
    scale_xstart = xstart + width + 0.01
    # plot matrix
    axmatrix = fig.add_axes([xstart, 0.1, width, 0.6])
    # (this reorders D by the clustering in Z1)
    idx1 = Z1['leaves']
    D = D[idx1, :]
    D = D[:, idx1]
    # show matrix
    im = axmatrix.matshow(D, aspect='auto', origin='lower',
                          cmap=pylab.cm.YlGnBu, vmin=vmin, vmax=vmax)
    axmatrix.set_xticks([])
    axmatrix.set_yticks([])
    # Plot colorbar.
    axcolor = fig.add_axes([scale_xstart, 0.1, 0.02, 0.6])
    pylab.colorbar(im, cax=axcolor)
    return fig
| [
"titus@idyll.org"
] | titus@idyll.org |
4b9b537852edb55ab74e630264c5c984a186c436 | d0a84d97aaa8dcc2dff4a6b33ce98dee6d474496 | /com.CheckProofing/2020/Test_w_42_TV_Deals/test_w_42_TV_Deals_EPP_QLEDTV8K_url_segment.py | 8cf211361608f5a284cf6ec475af2d53c5196aa7 | [] | no_license | ahmed-test001/python | 21a27248c4571a13c0ed4dccab256aede1beea3a | eab59b9a54fae1a51fbc18c391599eb3b0e28b3d | refs/heads/master | 2023-03-10T21:00:54.634028 | 2021-02-27T05:31:58 | 2021-02-27T05:31:58 | 342,778,794 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,124 | py | import json
from urllib.parse import urlparse, parse_qs
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
import unittest
import sys
import os
import logging
sys.path.append(os.path.join(os.path.dirname(__file__),"."))
from Utility_Files import ReadConfig
from Utility_Files.HTMLTestRunner import stdout_redirector
# Route this module's log records to the HTMLTestRunner stdout redirector so
# they appear in the generated report.
logger = logging.getLogger(__name__)
out_hdlr = logging.StreamHandler(stdout_redirector)
# NOTE(review): the format string has no separators between asctime,
# levelname and message, so fields run together -- confirm before changing.
out_hdlr.setFormatter(logging.Formatter('%(asctime)s%(levelname)s%(message)s'))
out_hdlr.setLevel(logging.INFO)
logger.addHandler(out_hdlr)
logger.setLevel(logging.INFO)
class URLSegment_W_42_TV_Deals_EPP_QLEDTV8K_Test(unittest.TestCase):
def test_UrlSegmentvalidation(self):
logger.info(': ' + self.test_UrlSegmentvalidation.__name__ + "\n ##### Starting TEST ##### ")
final_json = {}
final_array = []
bpid_array = []
final_bpid = {}
dir_name = "../OutputT/"
test = os.listdir(dir_name)
for item in test:
if item.endswith(".json"):
os.remove(os.path.join(dir_name, item))
with open('../TextFolder/TestIn_UniqueURL_List.txt')as f:
# urls = f.readline().split()
urls = f.read().splitlines()
for url in urls:
if ReadConfig.read_w42_TVDeals_configData('WEBLink', 'qled8k') in url:
try:
option = webdriver.ChromeOptions()
option.add_experimental_option('excludeSwitches', ['enable-logging'])
self.driver = webdriver.Chrome(executable_path='../Drivers/chromedriver_01.exe', options=option)
self.driver.maximize_window()
self.driver.get(url)
txt = WebDriverWait(self.driver, 10).until(
EC.element_to_be_clickable((By.XPATH, "(//div[@class ='device-label'])[2]"))).text
txt1 = WebDriverWait(self.driver, 10).until(
EC.element_to_be_clickable((By.XPATH, "//p[@class ='title']"))).text
self.driver.quit()
except:
try:
txt = WebDriverWait(self.driver, 10).until(EC.element_to_be_clickable((By.XPATH, "(//div[@class='title'])[2]"))).text
if len(txt) == 0:
txt = "No Device Present"
txt1 = WebDriverWait(self.driver, 10).until(
EC.element_to_be_clickable((By.XPATH, "//p[@class ='title']"))).text
except:
txt = "No Device Present"
txt1 = "Title Promo not Available"
parsed_url = urlparse(url)
pair = parse_qs(parsed_url.query)
bpidValue = pair.get('bpid')
pair['tradeIn_ModelName'] = txt.split(',')
pair['preorder_text_banner'] = txt1.split(',')
pair['url_deep_link'] = url.split()
bpid_array.append(bpidValue)
final_array.append(pair)
self.driver.quit()
final_json['check_list'] = final_array
final_bpid['bpid_list'] = bpid_array
final_json = json.dumps(final_json, indent=4, sort_keys=False)
final_bpid = json.dumps(final_bpid, indent=4, sort_keys=False)
f = open("../OutputT/OutResult.json", "w")
f.write(final_json)
logger.info(": Printing URL Segment values:" + final_json)
f.close()
f = open("../OutputT/bpIdList.json", "w")
f.write(final_bpid)
logger.info(": Printing BPID:" + final_bpid)
f.close()
logger.info('#### TEST Complete ####')
def test_segment_validation(self):
logger.info(': ' + self.test_segment_validation.__name__ + "\n ##### Starting TEST ##### ")
with open('../OutputT/OutResult.json', 'r')as jsonfile:
readdata=json.load(jsonfile)
# if ReadConfig.read_w41_S20FE_configData('TVDataEPP', 'offerCID') in readdata['check_list'][0]['offerCID']:
# logger.info(": offerCID matched")
# else:
# logger.info(": offerCID NOT matched")
# if ReadConfig.read_w41_S20FE_configData('TVDataEPP', 'promoCode') in readdata['check_list'][0]['promoCode']:
# logger.info(": promoCode matched")
# else:
# logger.info(": promoCode NOT matched")
# if ReadConfig.read_w41_S20FE_configData('TVDataEPP', 'skipOffer') in readdata['check_list'][0]['skipOffer']:
# logger.info(": skipOffer matched")
# else:
# logger.info(": skipOffer NOT matched")
# if ReadConfig.read_w41_S20FE_configData('TVDataEPP', 'utm_source') in readdata['check_list'][0]['utm_source']:
# logger.info(": utm_source matched")
# else:
# logger.info(": utm_source NOT matched")
# if ReadConfig.read_w41_S20FE_configData('TVDataEPP', 'utm_medium') in readdata['check_list'][0]['utm_medium']:
# logger.info(": utm_medium matched")
# else:
# logger.info(": utm_medium NOT matched")
# if ReadConfig.read_w42_TVDeals_configData('TVDataEPP', 'utm_campaign') in readdata['check_list'][0]['utm_campaign']:
# logger.info(": utm_campaign matched")
# else:
# logger.info(": utm_campaign NOT matched")
# if ReadConfig.read_w42_TVDeals_configData('TVDataEPP', 'marsLinkCategory') in readdata['check_list'][0]['marsLinkCategory']:
# logger.info(": marsLinkCategory matched")
# else:
# logger.info(": marsLinkCategory NOT matched")
# if ReadConfig.read_w41_S20FE_configData('TVDataEPP', 'MKM_RID') in readdata['check_list'][0]['MKM_RID']:
# logger.info(": MKM_RID matched")
# else:
# logger.info(": MKM_RID NOT matched")
# if ReadConfig.read_w41_S20FE_configData('TVDataEPP', 'MKM_MID') in readdata['check_list'][0]['MKM_MID']:
# logger.info(": MKM_MID matched")
# else:
# logger.info(": MKM_MID NOT matched")
if ReadConfig.read_w42_TVDeals_configData('TVDataEPP', 'cid') in readdata['check_list'][0]['cid']:
logger.info(": cid matched")
else:
logger.info(": cid NOT matched")
if ReadConfig.read_w42_TVDeals_configData('TVDataEPP', 'bpid') in readdata['check_list'][0]['bpid']:
logger.info(": bpid matched")
else:
logger.info(": bpid NOT matched")
logger.info('#### TEST Complete ####')
# Allow running this test module directly with the unittest CLI runner.
if __name__ == '__main__':
    unittest.main()
| [
"ahmedu.ferdous@gmail.com"
] | ahmedu.ferdous@gmail.com |
e06905c6d21fcca68df17ac398d343813c1d928f | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_bustles.py | 9306899ef63702f12adbd82507903dfabaf4bc2a | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 238 | py |
from xai.brain.wordbase.nouns._bustle import _BUSTLE
# class header
class _BUSTLES(_BUSTLE):
    """Plural noun entry for "bustle"; reuses the singular entry's behaviour."""

    def __init__(self):
        # Initialise the base word entry, then override the identifying fields.
        super().__init__()
        self.name = "BUSTLES"       # display/lookup name (upper-case by convention)
        self.specie = 'nouns'       # part-of-speech bucket
        self.basic = "bustle"       # singular base form
        self.jsondata = {}          # extra word data, empty for this entry
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
b2a86de7c43392511773de33a9a6735fbd37638c | 7761628d276bc2fd6e17ac5ccc244ca4a6d97f6d | /day12/modules/myssh.py | a53f9f92035f890a126219862dd0205c55df215f | [] | no_license | cooshko/PythonHomework | 252306b0392667ae019a291f1bfec4a96a24f9eb | 187a4924697e2e5d23bb00e7ec62987c48fe80dc | refs/heads/master | 2021-01-11T03:03:41.165085 | 2017-06-12T04:32:21 | 2017-06-12T04:32:21 | 70,857,475 | 7 | 3 | null | null | null | null | UTF-8 | Python | false | false | 8,981 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Author : Coosh
import os, sys, datetime, paramiko, socket, logging, json, types
# Project layout: this module lives in <APP_ROOT>/modules/, so the app root is
# two directories up; make it importable and anchor the log/data directories.
APP_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(APP_ROOT)
LOG_DIR = os.path.join(APP_ROOT, 'log')
DATA_DIR = os.path.join(APP_ROOT, 'data')
# windows does not have termios...
try:
    import termios
    import tty
    HAS_TERMIOS = True
except ImportError:
    HAS_TERMIOS = False
# Access logger used by the log_access decorator: records who invoked which
# SFTP operation on which host, written to log/access.log.
FF = logging.Formatter(fmt="%(asctime)s - %(levelname)s - %(message)s", datefmt="%F %H:%M:%S")
FH = logging.FileHandler(filename=os.path.join(LOG_DIR, "access.log"), encoding="utf-8")
FH.setLevel(logging.DEBUG)
FH.setFormatter(FF)
LOGGER = logging.getLogger('ACCESS-LOG')
LOGGER.setLevel(logging.DEBUG)
LOGGER.addHandler(FH)
def log_access(func):
    """Decorator for MySSH methods: log who ran what on which host.

    Before delegating to ``func``, writes an access-log line containing the
    application user, the target hostname, the method name, and its
    JSON-serialised positional/keyword arguments.  Function-valued arguments
    (e.g. result callbacks) are excluded from the log.
    """
    from functools import wraps  # local import: avoids touching module deps

    @wraps(func)  # preserve func.__name__/__doc__ on the wrapper
    def wrapper(*args, **kwargs):
        myssh = args[0]  # bound method: first positional arg is the instance
        args_for_log = [arg for arg in args[1:]
                        if not isinstance(arg, types.FunctionType)]
        kwargs_for_log = {key: value for key, value in kwargs.items()
                          if not isinstance(value, types.FunctionType)}
        # default=str keeps logging from raising TypeError on arguments that
        # are not natively JSON-serialisable.
        args_str = json.dumps(args_for_log, default=str)
        kwargs_str = json.dumps(kwargs_for_log, default=str)
        log_msg = "%s on %s %s %s" % (myssh.user, myssh.hostname, func.__name__, args_str + kwargs_str)
        LOGGER.info(log_msg)
        return func(*args, **kwargs)

    return wrapper
class MySSH(object):
    """SSH helper built on paramiko.

    Supports password- or key-based login, an interactive shell that logs
    every command the user types, and SFTP upload/download with access
    logging via the ``log_access`` decorator.
    """

    # Class-level command logger: every command typed in the interactive
    # shell is appended to log/cmd.log (separate from the access log).
    FF = logging.Formatter(fmt="%(asctime)s - %(levelname)s - %(message)s", datefmt="%F %H:%M:%S")
    FH = logging.FileHandler(filename=os.path.join(LOG_DIR, "cmd.log"), encoding="utf-8")
    FH.setLevel(logging.DEBUG)
    FH.setFormatter(FF)
    LOGGER = logging.getLogger('CMD-LOG')
    LOGGER.setLevel(logging.DEBUG)
    LOGGER.addHandler(FH)

    def __init__(self, user, hostname, port, ssh_user, using_key, passwd, pkey_file):
        """Connect immediately: open the SSH session, one interactive
        channel, and one SFTP client over the same transport.

        :param user: local (application) user name, used for logging only
        :param hostname: remote host to connect to
        :param port: SSH port
        :param ssh_user: remote login name
        :param using_key: if truthy, authenticate with ``pkey_file``;
            otherwise with ``passwd``
        :param passwd: password for password authentication
        :param pkey_file: path to an RSA private key file
        """
        self.user = user
        self.hostname = hostname
        self.port = port
        self.ssh_user = ssh_user
        self.ssh_pass = passwd
        self.ssh_key = pkey_file
        self.using_key = using_key
        self.client = paramiko.SSHClient()
        # Auto-accept unknown host keys (convenient, but no MITM protection).
        self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        if self.using_key:
            try:
                pkey = paramiko.RSAKey.from_private_key_file(self.ssh_key)
            except paramiko.ssh_exception.PasswordRequiredException as e:
                # Key file is passphrase-protected: keep prompting until the
                # passphrase decrypts it.
                while True:
                    try:
                        key_pass = input("KEY文件加密了,请输入密码:")
                        pkey = paramiko.RSAKey.from_private_key_file(self.ssh_key, key_pass)
                        break
                    except paramiko.ssh_exception.SSHException as e:
                        print("密码错,请重新输入")
            self.client.connect(hostname=self.hostname,
                                port=self.port,
                                username=self.ssh_user,
                                pkey=pkey)
        else:
            self.client.connect(hostname=self.hostname,
                                port=self.port,
                                username=ssh_user,
                                password=self.ssh_pass)
        self.chan = self.client.invoke_shell()
        t = self.client.get_transport()
        self.sftp_client = paramiko.SFTPClient.from_transport(t)

    def excute_command(self, cmd, callback):
        """Run ``cmd`` remotely and hand the decoded output to
        ``callback(hostname, text)``.

        Uses stdout if it is non-empty, otherwise stderr.  NOTE(review):
        the name is a typo for "execute_command" but is kept for
        backward compatibility with existing callers.
        """
        stdin, stdout, stderr = self.client.exec_command(cmd)
        out = stdout.read()
        err = stderr.read()
        b_ret = out if out else err
        # chardet (third-party) guesses the remote output's encoding.
        import chardet
        charset = chardet.detect(b_ret)['encoding']
        ret = b_ret.decode(encoding=charset)
        callback(self.hostname, ret)

    def interactive_shell(self):
        """Start the platform-appropriate interactive shell loop."""
        if HAS_TERMIOS:
            self.posix_shell()
        else:
            self.windows_shell()

    def posix_shell(self):
        """Interactive shell for POSIX terminals.

        Puts the local tty into raw mode, multiplexes the SSH channel and
        stdin with select(), and accumulates keystrokes into ``cmd`` so each
        command can be logged when the user presses Enter.
        """
        import select
        # chan = self.client.invoke_shell()
        oldtty = termios.tcgetattr(sys.stdin)
        try:
            tty.setraw(sys.stdin.fileno())
            tty.setcbreak(sys.stdin.fileno())
            self.chan.settimeout(0.0)
            cmd = ""
            tab_flag = False
            while True:
                r, w, e = select.select([self.chan, sys.stdin], [], [])
                if self.chan in r:
                    try:
                        x = self.chan.recv(10240).decode()
                        if len(x) == 0:
                            sys.stdout.write('\r\n*** EOF\r\n')
                            break
                        if tab_flag:
                            # After a Tab keypress the shell echoes the
                            # completion; fold it into the command buffer
                            # (presumably unless the echo contains a newline)
                            # -- TODO confirm against real shells.
                            if "\r" not in x:
                                cmd += x
                            tab_flag = False
                        sys.stdout.write(x)
                        sys.stdout.flush()
                    except socket.timeout:
                        pass
                if sys.stdin in r:
                    x = sys.stdin.read(1)
                    if len(x) == 0:
                        break
                    elif x == "\r":
                        # Enter: record the accumulated command, then reset.
                        self.log_cmd(cmd)
                        cmd = ""
                    elif x == "\b":
                        # Backspace: drop the last buffered character.
                        cmd = cmd[:-1]
                    elif x == "\t":
                        tab_flag = True
                    else:
                        cmd += x
                    self.chan.send(x)
        finally:
            # Always restore the terminal settings, even on error.
            termios.tcsetattr(sys.stdin, termios.TCSADRAIN, oldtty)

    def log_cmd(self, cmd):
        # Record the command the user executed.
        # print("-->", cmd)
        log_msg = "%s: %s" % (self.user, str(cmd))
        MySSH.LOGGER.info(log_msg)

    def windows_shell(self):
        """Interactive shell fallback for platforms without termios.

        A background thread copies channel output to stdout while the main
        thread forwards stdin one character at a time.  Note: commands are
        not logged on this path.
        """
        import threading
        sys.stdout.write("Line-buffered terminal emulation. Press F6 or ^Z to send EOF.\r\n\r\n")

        def writeall(sock):
            while True:
                data = sock.recv(256).decode()
                if not data:
                    sys.stdout.write('\r\n*** EOF ***\r\n\r\n')
                    sys.stdout.flush()
                    break
                sys.stdout.write(data)
                sys.stdout.flush()

        writer = threading.Thread(target=writeall, args=(self.chan,))
        writer.start()
        try:
            while True:
                d = sys.stdin.read(1)
                if not d:
                    break
                self.chan.send(d)
        except EOFError:
            # user hit ^Z or F6
            pass
        except OSError:
            pass

    @log_access
    def upload_file(self, local_file, remote_path, callback):
        """Upload ``local_file`` into the remote directory ``remote_path``.

        Error conditions (missing local file, unwritable remote path) are
        reported through ``callback(hostname, message)`` instead of raising.

        :param local_file: path of the local file to send
        :param remote_path: remote target directory
        :param callback: callable(hostname, message) for error reporting
        """
        # t = self.client.get_transport()
        # sftp_client = paramiko.SFTPClient.from_transport(t)
        filename = os.path.basename(local_file)
        if r"/" in remote_path:
            # POSIX-style remote path
            if remote_path[-1] == r"/":
                remote_file = remote_path + filename
            else:
                remote_file = remote_path + r"/" + filename
        else:
            # Windows-style remote path
            if remote_path[-1] == "\\":
                remote_file = remote_path + filename
            else:
                remote_file = remote_path + "\\" + filename
        if os.path.isfile(local_file):
            try:
                ret = self.sftp_client.put(local_file, remote_file)
            except:
                # Bare except kept as-is: any SFTP failure is reported as
                # "remote path does not exist".
                callback(self.hostname, "远程路径不存在")
        else:
            callback(self.hostname, "本地文件不存在")

    @log_access
    def download_file(self, remote_file, callback, toplevel_dir="by_host", secondary_dir="."):
        """Download ``remote_file`` under the local data directory.

        Files land in DATA_DIR/<toplevel_dir>/<secondary_dir>/<basename>.

        :param remote_file: remote file path
        :param callback: callable(hostname, message) for error reporting
        :param toplevel_dir: distinguishes per-host vs per-group downloads
        :param secondary_dir: sub-directory (e.g. group name) for grouped downloads
        :return: True on success, None if the remote file was missing
        """
        filename = os.path.basename(remote_file)
        local_dir = os.path.join(DATA_DIR, toplevel_dir, secondary_dir)
        # if not os.path.isdir(local_dir):
        os.makedirs(name=local_dir, exist_ok=True)
        local_file = os.path.join(local_dir, filename)
        try:
            self.sftp_client.get(remote_file, local_file)
            return True
        except FileNotFoundError:
            callback(self.hostname, "文件不存在")
if __name__ == '__main__':
    # Ad-hoc manual smoke test -- requires a reachable SSH host; credentials
    # are hard-coded here for convenience only and must not ship.
    ms = MySSH("coosh", "192.168.5.138", 22, "root", False, "24559982", "")
    # ms = MySSH("192.168.5.41", 22, "coosh", True, "", r"C:\Users\Coosh\Documents\Identity")
    # ms.interactive_shell()
    # upload_file() requires a result callback; the original call omitted it
    # and raised TypeError before reaching the network.
    ms.upload_file(local_file=r"e:\test.txt", remote_path=r"/tmp",
                   callback=lambda host, msg: print(host, msg))
    # ms.download_file(r"/tmp/test.txt", callback=lambda host, msg: print(host, msg))
"coosh@qq.com"
] | coosh@qq.com |
b658a230e27e99b5d27f1fcc2463356dd690aff3 | 4173d689e0c4c6da971b4743dd58d5039b2d76ff | /tests/test_scrapbook_host.py | ec0fdb1c91232e05efce11c88165931d14bd06c8 | [
"MIT"
] | permissive | vsc55/PyWebScrapBook | a2a4443431630775bde17826d89947ef85dfe713 | 9fb8a744b1ced53ee401f3fdf14cf6993ab9f52a | refs/heads/master | 2023-01-31T16:23:31.489910 | 2023-01-23T19:23:59 | 2023-01-23T19:23:59 | 208,370,334 | 0 | 0 | MIT | 2023-01-23T18:31:48 | 2019-09-14T01:02:59 | Python | UTF-8 | Python | false | false | 24,083 | py | import os
import re
import tempfile
import time
import unittest
from unittest import mock
from webscrapbook import WSB_DIR, Config, util
from webscrapbook.scrapbook import host as wsb_host
from webscrapbook.scrapbook.host import Host
from . import TEMP_DIR
def setUpModule():
    """Set up a temp directory for testing."""
    global _tmpdir, tmpdir
    # Keep the TemporaryDirectory object alive for the whole module so it is
    # only cleaned up in tearDownModule().
    _tmpdir = tempfile.TemporaryDirectory(prefix='host-', dir=TEMP_DIR)
    tmpdir = os.path.realpath(_tmpdir.name)
    # mock out user config: point every user-level path constant at tmpdir so
    # tests never touch the real user's configuration.
    global mockings
    mockings = [
        mock.patch('webscrapbook.scrapbook.host.WSB_USER_DIR', os.path.join(tmpdir, 'wsb')),
        mock.patch('webscrapbook.WSB_USER_DIR', os.path.join(tmpdir, 'wsb')),
        mock.patch('webscrapbook.WSB_USER_CONFIG', tmpdir),
    ]
    for mocking in mockings:
        mocking.start()
def tearDownModule():
    """Cleanup the temp directory."""
    _tmpdir.cleanup()
    # stop mock patches started in setUpModule()
    for mocking in mockings:
        mocking.stop()
class TestBase(unittest.TestCase):
    """Shared fixture: gives each test its own scrapbook root directory."""

    @classmethod
    def setUpClass(cls):
        # Allow long diffs for the large dict/list equality assertions below.
        cls.maxDiff = 8192

    def setUp(self):
        """Set up a general temp test folder."""
        self.test_root = tempfile.mkdtemp(dir=tmpdir)
        self.test_wsbdir = os.path.join(self.test_root, WSB_DIR)
        self.test_config = os.path.join(self.test_root, WSB_DIR, 'config.ini')
        os.makedirs(self.test_wsbdir)
class TestHost(TestBase):
    """Tests for webscrapbook.scrapbook.host.Host: construction, theme/static
    file lookup, lock factory, and the backup/unbackup family."""

    def test_init01(self):
        """Check basic"""
        with open(self.test_config, 'w', encoding='UTF-8') as fh:
            fh.write("""[app]
name = myhost
theme = custom
root = public
backup_dir = mybackups
[book ""]
name = mybook
[book "id2"]
name = mybook2
""")
        host = Host(self.test_root)
        self.assertEqual(host.root, self.test_root)
        self.assertEqual(host.name, 'myhost')
        self.assertEqual(host.chroot, os.path.join(self.test_root, 'public'))
        self.assertEqual(host.backup_dir, os.path.join(self.test_root, 'mybackups'))
        # Lookup order: local (.wsb) themes, then user themes, then built-in.
        self.assertEqual([os.path.normcase(f) for f in host.themes], [
            os.path.normcase(os.path.join(self.test_root, WSB_DIR, 'themes', 'custom')),
            os.path.normcase(os.path.join(tmpdir, 'wsb', 'themes', 'custom')),
            os.path.normcase(os.path.abspath(os.path.join(wsb_host.__file__, '..', '..', 'themes', 'custom'))),
        ])
        self.assertEqual([os.path.normcase(f) for f in host.statics], [
            os.path.normcase(os.path.join(self.test_root, WSB_DIR, 'themes', 'custom', 'static')),
            os.path.normcase(os.path.join(tmpdir, 'wsb', 'themes', 'custom', 'static')),
            os.path.normcase(os.path.abspath(os.path.join(wsb_host.__file__, '..', '..', 'themes', 'custom', 'static'))),
        ])
        self.assertEqual([os.path.normcase(f) for f in host.templates], [
            os.path.normcase(os.path.join(self.test_root, WSB_DIR, 'themes', 'custom', 'templates')),
            os.path.normcase(os.path.join(tmpdir, 'wsb', 'themes', 'custom', 'templates')),
            os.path.normcase(os.path.abspath(os.path.join(wsb_host.__file__, '..', '..', 'themes', 'custom', 'templates'))),
        ])
        self.assertEqual(host.locks, os.path.join(self.test_root, WSB_DIR, 'locks'))
        self.assertEqual({i: host.books[i].name for i in host.books}, {
            '': 'mybook',
            'id2': 'mybook2',
        })

    def test_init02(self):
        """Check config param"""
        other_root = os.path.join(self.test_root, 'rootdir')
        os.makedirs(other_root)
        with open(self.test_config, 'w', encoding='UTF-8') as fh:
            fh.write("""[app]
name = myhost
theme = custom
root = public
backup_dir = mybackups
[book "id2"]
name = mybook2
""")
        conf = Config()
        conf.load(self.test_root)
        # An explicitly passed config overrides whatever is under other_root.
        host = Host(other_root, config=conf)
        self.assertEqual(host.root, other_root)
        self.assertEqual(host.name, 'myhost')
        self.assertEqual(host.chroot, os.path.join(other_root, 'public'))
        self.assertEqual(host.backup_dir, os.path.join(other_root, 'mybackups'))
        self.assertEqual([os.path.normcase(f) for f in host.themes], [
            os.path.normcase(os.path.join(other_root, WSB_DIR, 'themes', 'custom')),
            os.path.normcase(os.path.join(tmpdir, 'wsb', 'themes', 'custom')),
            os.path.normcase(os.path.abspath(os.path.join(wsb_host.__file__, '..', '..', 'themes', 'custom'))),
        ])
        self.assertEqual([os.path.normcase(f) for f in host.statics], [
            os.path.normcase(os.path.join(other_root, WSB_DIR, 'themes', 'custom', 'static')),
            os.path.normcase(os.path.join(tmpdir, 'wsb', 'themes', 'custom', 'static')),
            os.path.normcase(os.path.abspath(os.path.join(wsb_host.__file__, '..', '..', 'themes', 'custom', 'static'))),
        ])
        self.assertEqual([os.path.normcase(f) for f in host.templates], [
            os.path.normcase(os.path.join(other_root, WSB_DIR, 'themes', 'custom', 'templates')),
            os.path.normcase(os.path.join(tmpdir, 'wsb', 'themes', 'custom', 'templates')),
            os.path.normcase(os.path.abspath(os.path.join(wsb_host.__file__, '..', '..', 'themes', 'custom', 'templates'))),
        ])
        self.assertEqual(host.locks, os.path.join(other_root, WSB_DIR, 'locks'))
        self.assertEqual({i: host.books[i].name for i in host.books}, {
            '': 'scrapbook',
            'id2': 'mybook2',
        })

    def test_init03(self):
        """Validate theme name to avoid a potential bad path."""
        # Path-separator and dot-only names must be sanitised to '_' forms.
        for theme, theme_fixed in [
            ('', '_'),
            ('.', '_'),
            ('..', '_'),
            ('foo/bar', 'foo_bar'),
            ('foo\\bar', 'foo_bar'),
        ]:
            with self.subTest(theme=theme):
                with open(self.test_config, 'w', encoding='UTF-8') as fh:
                    fh.write(f'[app]\ntheme = {theme}')
                host = Host(self.test_root)
                self.assertEqual([os.path.normcase(f) for f in host.themes], [
                    os.path.normcase(os.path.join(self.test_root, WSB_DIR, 'themes', theme_fixed)),
                    os.path.normcase(os.path.join(tmpdir, 'wsb', 'themes', theme_fixed)),
                    os.path.normcase(os.path.abspath(os.path.join(wsb_host.__file__, '..', '..', 'themes', theme_fixed))),
                ])

    def test_get_static_file01(self):
        """Lookup static file from built-in themes"""
        host = Host(self.test_root)
        self.assertEqual(
            os.path.normcase(host.get_static_file('index.css')),
            os.path.normcase(os.path.abspath(os.path.join(wsb_host.__file__, '..', '..', 'themes', 'default', 'static', 'index.css'))),
        )

    def test_get_static_file02(self):
        """Lookup static file from user themes"""
        user_dir = os.path.join(self.test_root, 'wsb')
        other_static = os.path.join(user_dir, 'themes', 'default', 'static', 'test.txt')
        os.makedirs(os.path.dirname(other_static))
        with open(other_static, 'w'):
            pass
        with mock.patch('webscrapbook.scrapbook.host.WSB_USER_DIR', user_dir):
            host = Host(self.test_root)
        self.assertEqual(host.get_static_file('test.txt'), other_static)

    def test_get_static_file03(self):
        """Lookup static file from local themes"""
        other_static = os.path.join(self.test_root, WSB_DIR, 'themes', 'default', 'static', 'test.txt')
        os.makedirs(os.path.dirname(other_static))
        with open(other_static, 'w'):
            pass
        host = Host(self.test_root)
        self.assertEqual(host.get_static_file('test.txt'), other_static)

    @mock.patch('webscrapbook.scrapbook.host.FileLock')
    def test_get_lock01(self, mock_filelock):
        host = Host(self.test_root)
        host.get_lock('test')
        mock_filelock.assert_called_once_with(host, 'test')

    @mock.patch('webscrapbook.scrapbook.host.FileLock')
    def test_get_lock02(self, mock_filelock):
        """With parameters"""
        host = Host(self.test_root)
        host.get_lock(
            'test',
            timeout=10, stale=120, poll_interval=0.3, assume_acquired=True,
        )
        mock_filelock.assert_called_once_with(
            host, 'test',
            timeout=10, stale=120, poll_interval=0.3, assume_acquired=True,
        )

    def test_backup01(self):
        """A common case."""
        test_backup_dir = os.path.join(self.test_root, 'backup')
        os.makedirs(test_backup_dir)
        test_file = os.path.join(self.test_root, 'tree', 'meta.js')
        os.makedirs(os.path.dirname(test_file))
        with open(test_file, 'w', encoding='UTF-8') as fh:
            fh.write('abc')
        host = Host(self.test_root)
        host.backup(test_file, test_backup_dir)
        with open(os.path.join(test_backup_dir, 'tree', 'meta.js'), encoding='UTF-8') as fh:
            self.assertEqual(fh.read(), 'abc')

    def test_backup02(self):
        """A common directory case."""
        test_backup_dir = os.path.join(self.test_root, 'backup')
        os.makedirs(test_backup_dir)
        test_dir = os.path.join(self.test_root, 'tree')
        os.makedirs(test_dir)
        with open(os.path.join(test_dir, 'meta.js'), 'w', encoding='UTF-8') as fh:
            fh.write('abc')
        with open(os.path.join(test_dir, 'toc.js'), 'w', encoding='UTF-8') as fh:
            fh.write('def')
        host = Host(self.test_root)
        host.backup(test_dir, test_backup_dir)
        with open(os.path.join(test_backup_dir, 'tree', 'meta.js'), encoding='UTF-8') as fh:
            self.assertEqual(fh.read(), 'abc')
        with open(os.path.join(test_backup_dir, 'tree', 'toc.js'), encoding='UTF-8') as fh:
            self.assertEqual(fh.read(), 'def')

    def test_backup03(self):
        """Pass if file not exist."""
        test_backup_dir = os.path.join(self.test_root, 'backup')
        os.makedirs(test_backup_dir)
        test_file = os.path.join(self.test_wsbdir, 'icon', 'nonexist.txt')
        host = Host(self.test_root)
        host.backup(test_file, test_backup_dir)
        self.assertFalse(os.path.lexists(os.path.join(test_backup_dir, WSB_DIR, 'icon', 'nonexist.txt')))

    def test_backup04(self):
        """Pass if file outside the host root."""
        test_backup_dir = os.path.join(self.test_root, 'backup')
        os.makedirs(test_backup_dir)
        host = Host(self.test_root)
        # __file__ (this test module) lives outside the host root.
        host.backup(__file__, test_backup_dir)
        self.assertListEqual(os.listdir(test_backup_dir), [])

    def test_backup05(self):
        """Test base param."""
        test_backup_dir = os.path.join(self.test_root, 'backup')
        os.makedirs(test_backup_dir)
        test_base_dir = os.path.join(self.test_root, 'backup_base')
        os.makedirs(test_base_dir)
        test_file = os.path.join(test_base_dir, 'test.txt')
        with open(test_file, 'w', encoding='UTF-8') as fh:
            fh.write('ABC123')
        host = Host(self.test_root)
        host.backup(os.path.join(test_base_dir, 'test.txt'), test_backup_dir, base=test_base_dir)
        with open(os.path.join(test_backup_dir, 'test.txt'), encoding='UTF-8') as fh:
            self.assertEqual(fh.read(), 'ABC123')

    def test_backup06(self):
        """Test move param."""
        test_backup_dir = os.path.join(self.test_root, 'backup')
        os.makedirs(test_backup_dir)
        test_dir = os.path.join(self.test_root, 'tree')
        os.makedirs(test_dir)
        with open(os.path.join(test_dir, 'meta.js'), 'w', encoding='UTF-8') as fh:
            fh.write('abc')
        with open(os.path.join(test_dir, 'toc.js'), 'w', encoding='UTF-8') as fh:
            fh.write('def')
        host = Host(self.test_root)
        host.backup(test_dir, test_backup_dir, move=True)
        # move=True: source must be gone, content must be in the backup.
        self.assertFalse(os.path.lexists(test_dir))
        with open(os.path.join(test_backup_dir, 'tree', 'meta.js'), encoding='UTF-8') as fh:
            self.assertEqual(fh.read(), 'abc')
        with open(os.path.join(test_backup_dir, 'tree', 'toc.js'), encoding='UTF-8') as fh:
            self.assertEqual(fh.read(), 'def')

    def test_backup07(self):
        """A common case."""
        test_file = os.path.join(self.test_root, 'tree', 'meta.js')
        os.makedirs(os.path.dirname(test_file))
        with open(test_file, 'w', encoding='UTF-8') as fh:
            fh.write('abc')
        host = Host(self.test_root)
        # No explicit backup dir: a timestamped dir under .wsb/backup is used.
        host.backup(test_file)
        backup_dirname = os.listdir(os.path.join(self.test_wsbdir, 'backup'))[0]
        self.assertRegex(backup_dirname, r'^\d{17}$')
        with open(os.path.join(self.test_wsbdir, 'backup', backup_dirname, 'tree', 'meta.js'), encoding='UTF-8') as fh:
            self.assertEqual(fh.read(), 'abc')

    def test_unbackup01(self):
        """A common case."""
        test_backup_dir = os.path.join(self.test_root, 'backup')
        os.makedirs(test_backup_dir)
        host = Host(self.test_root)
        host.unbackup(test_backup_dir)
        self.assertFalse(os.path.lexists(test_backup_dir))

    def test_unbackup02(self):
        """Pass if backup dir not exist."""
        test_backup_dir = os.path.join(self.test_root, 'backup')
        host = Host(self.test_root)
        host.unbackup(test_backup_dir)
        self.assertFalse(os.path.lexists(test_backup_dir))

    def test_init_backup01(self):
        """Test ts param."""
        host = Host(self.test_root)
        # True: generate a new timestamp-named backup dir.
        host.init_backup(True)
        self.assertRegex(
            host._backup_dir,
            r'^' + re.escape(os.path.join(self.test_root, WSB_DIR, 'backup', '')) + r'\d{17}$',
        )
        # Explicit timestamp string: used verbatim.
        ts = util.datetime_to_id()
        host.init_backup(ts)
        self.assertEqual(
            host._backup_dir,
            os.path.join(self.test_root, WSB_DIR, 'backup', ts),
        )
        # False: disable backups.
        host.init_backup(False)
        self.assertIsNone(host._backup_dir)

    def test_init_backup02(self):
        """Test note param."""
        host = Host(self.test_root)
        host.init_backup(True, 'foo~bar')
        self.assertRegex(
            host._backup_dir,
            r'^' + re.escape(os.path.join(self.test_root, WSB_DIR, 'backup', '')) + r'\d{17}-foo~bar',
        )
        # Unsafe filename characters in the note are replaced with '_'.
        ts = util.datetime_to_id()
        host.init_backup(ts, note='foo:bar:中文?')
        self.assertEqual(
            host._backup_dir,
            os.path.join(self.test_root, WSB_DIR, 'backup', ts + '-foo_bar_中文_'),
        )

    def test_auto_backup01(self):
        """A common case."""
        test_file = os.path.join(self.test_root, 'tree', 'meta.js')
        os.makedirs(os.path.dirname(test_file))
        with open(test_file, 'w', encoding='UTF-8') as fh:
            fh.write('abc')
        host = Host(self.test_root)
        host.init_backup()
        host.auto_backup(test_file)
        with open(os.path.join(host._backup_dir, 'tree', 'meta.js'), encoding='UTF-8') as fh:
            self.assertEqual(fh.read(), 'abc')

    def test_auto_backup02(self):
        """A common directory case."""
        test_dir = os.path.join(self.test_root, 'tree')
        os.makedirs(test_dir)
        with open(os.path.join(test_dir, 'meta.js'), 'w', encoding='UTF-8') as fh:
            fh.write('abc')
        with open(os.path.join(test_dir, 'toc.js'), 'w', encoding='UTF-8') as fh:
            fh.write('def')
        host = Host(self.test_root)
        host.init_backup()
        host.auto_backup(test_dir)
        with open(os.path.join(host._backup_dir, 'tree', 'meta.js'), encoding='UTF-8') as fh:
            self.assertEqual(fh.read(), 'abc')
        with open(os.path.join(host._backup_dir, 'tree', 'toc.js'), encoding='UTF-8') as fh:
            self.assertEqual(fh.read(), 'def')

    def test_auto_backup03(self):
        """Pass if _backup_dir not set."""
        test_file = os.path.join(self.test_wsbdir, 'icon', 'test.txt')
        os.makedirs(os.path.dirname(test_file))
        with open(test_file, 'w', encoding='UTF-8') as fh:
            fh.write('abc')
        host = Host(self.test_root)
        # Without init_backup(), auto_backup() must be a no-op.
        host.auto_backup(test_file)
        self.assertListEqual(os.listdir(self.test_wsbdir), ['icon'])
class TestFileLock(TestBase):
    """Tests for wsb_host.FileLock: construction/persistence, acquire/extend/
    release semantics, and the auto-extending keep() helper."""

    def test_init01(self):
        """Normal"""
        # '098f6bcd...' is the md5 of 'test', used as the lock filename.
        lock_file = os.path.join(self.test_root, WSB_DIR, 'locks', '098f6bcd4621d373cade4e832627b4f6.lock')
        host = Host(self.test_root)
        lock = wsb_host.FileLock(host, 'test')
        self.assertEqual(lock.host, host)
        self.assertEqual(lock.name, 'test')
        self.assertEqual(lock.timeout, 5)
        self.assertEqual(lock.stale, 60)
        self.assertEqual(lock.file, lock_file)
        self.assertIsInstance(lock.id, str)
        self.assertEqual(lock._lock, False)

    def test_init02(self):
        """Parameters."""
        lock_file = os.path.join(self.test_root, WSB_DIR, 'locks', '098f6bcd4621d373cade4e832627b4f6.lock')
        host = Host(self.test_root)
        lock = wsb_host.FileLock(host, 'test', timeout=2, stale=120)
        self.assertEqual(lock.host, host)
        self.assertEqual(lock.name, 'test')
        self.assertEqual(lock.timeout, 2)
        self.assertEqual(lock.stale, 120)
        self.assertEqual(lock.file, lock_file)
        self.assertIsInstance(lock.id, str)
        self.assertEqual(lock._lock, False)

    def test_persist01(self):
        """Normal case."""
        lock_file = os.path.join(self.test_root, WSB_DIR, 'locks', '098f6bcd4621d373cade4e832627b4f6.lock')
        os.makedirs(os.path.dirname(lock_file))
        with open(lock_file, 'w', encoding='UTF-8') as fh:
            fh.write('oldid')
        host = Host(self.test_root)
        # persist= adopts an existing lock whose file content matches the id.
        lock = wsb_host.FileLock(host, 'test', persist='oldid')
        self.assertEqual(lock.id, 'oldid')
        self.assertEqual(lock._lock, True)

    def test_persist02(self):
        """Wrong ID."""
        lock_file = os.path.join(self.test_root, WSB_DIR, 'locks', '098f6bcd4621d373cade4e832627b4f6.lock')
        os.makedirs(os.path.dirname(lock_file))
        with open(lock_file, 'w', encoding='UTF-8') as fh:
            fh.write('oldid')
        host = Host(self.test_root)
        with self.assertRaises(wsb_host.LockPersistUnmatchError):
            wsb_host.FileLock(host, 'test', persist='dummy')

    def test_persist03(self):
        """Lock file missing (or inaccessible)."""
        host = Host(self.test_root)
        with self.assertRaises(wsb_host.LockPersistOSError):
            wsb_host.FileLock(host, 'test', persist='dummy')

    def test_acquire01(self):
        """Normal case"""
        lock = Host(self.test_root).get_lock('test')
        lock.acquire()
        with open(lock.file) as fh:
            self.assertTrue(fh.read(), lock.id)
        self.assertTrue(lock.locked)

    def test_acquire02(self):
        """Already exists"""
        lock = Host(self.test_root).get_lock('test', timeout=0)
        os.makedirs(os.path.dirname(lock.file))
        with open(lock.file, 'w'):
            pass
        with self.assertRaises(wsb_host.LockTimeoutError):
            lock.acquire()

    def test_acquire03(self):
        """Already exists, timeout as acquire param"""
        lock = Host(self.test_root).get_lock('test')
        os.makedirs(os.path.dirname(lock.file))
        with open(lock.file, 'w'):
            pass
        with self.assertRaises(wsb_host.LockTimeoutError):
            lock.acquire(timeout=0)

    def test_acquire04(self):
        """Stale lock should be regenerated"""
        lock = Host(self.test_root).get_lock('test', timeout=1, stale=0)
        os.makedirs(os.path.dirname(lock.file))
        with open(lock.file, 'w') as fh:
            fh.write('oldid')
        lock.acquire()
        with open(lock.file) as fh:
            self.assertTrue(fh.read(), lock.id)
        self.assertNotEqual(lock.id, 'oldid')
        self.assertTrue(lock.locked)

    def test_acquire05(self):
        """Unable to generate upper directory"""
        lock = Host(self.test_root).get_lock('test')
        # Occupy the locks directory path with a file to force the failure.
        with open(os.path.join(self.test_root, WSB_DIR, 'locks'), 'wb'):
            pass
        with self.assertRaises(wsb_host.LockGenerateError):
            lock.acquire()

    def test_acquire06(self):
        """Occupied by a directory"""
        lock = Host(self.test_root).get_lock('test')
        os.makedirs(lock.file)
        with self.assertRaises(wsb_host.LockGenerateError):
            lock.acquire()

    def test_acquire_with(self):
        """Lock should be released after an with statement."""
        lock = Host(self.test_root).get_lock('test')
        with lock.acquire() as lh:
            self.assertTrue(os.path.isfile(lock.file))
            self.assertTrue(lock.locked)
            self.assertEqual(lh, lock)
        self.assertFalse(os.path.exists(lock.file))
        self.assertFalse(lock.locked)

    def test_extend01(self):
        """Nnormal case"""
        lock = Host(self.test_root).get_lock('test')
        lock.acquire()
        prev_time = os.stat(lock.file).st_mtime
        time.sleep(0.05)
        # extend() refreshes the lock file's mtime.
        lock.extend()
        cur_time = os.stat(lock.file).st_mtime
        self.assertGreater(cur_time, prev_time)
        self.assertTrue(lock.locked)

    def test_extend02(self):
        """Not acquired"""
        lock = Host(self.test_root).get_lock('test')
        with self.assertRaises(wsb_host.LockExtendNotAcquiredError):
            lock.extend()

    def test_extend03(self):
        """File not exist"""
        lock = Host(self.test_root).get_lock('test')
        lock.acquire()
        os.remove(lock.file)
        with self.assertRaises(wsb_host.LockExtendNotFoundError):
            lock.extend()

    def test_release01(self):
        """Nnormal case"""
        lock = Host(self.test_root).get_lock('test')
        lock.acquire()
        lock.release()
        self.assertFalse(os.path.lexists(lock.file))
        self.assertFalse(lock.locked)

    def test_release02(self):
        """Not acquired"""
        lock = Host(self.test_root).get_lock('test')
        with self.assertRaises(wsb_host.LockReleaseNotAcquiredError):
            lock.release()

    def test_release03(self):
        """File not exist"""
        lock = Host(self.test_root).get_lock('test')
        lock.acquire()
        os.remove(lock.file)
        with self.assertRaises(wsb_host.LockReleaseNotFoundError):
            lock.release()

    def test_keep01(self):
        """Lock should be auto-extended until released."""
        lock_file = os.path.join(self.test_root, WSB_DIR, 'locks', '098f6bcd4621d373cade4e832627b4f6.lock')
        lock = Host(self.test_root).get_lock('test', stale=0.01)
        lock.acquire()
        try:
            lock.keep()
            mtime = os.stat(lock_file).st_mtime
            # poll up to 0.5 seconds in case thread delay due to busyness
            start = time.time()
            while True:
                time.sleep(0.005)
                try:
                    self.assertGreater(os.stat(lock_file).st_mtime, mtime)
                except AssertionError as exc:
                    if time.time() - start > 0.5:
                        raise exc
                else:
                    break
        finally:
            lock.release()

    def test_keep02(self):
        """Lock should be auto-extended until released."""
        lock_file = os.path.join(self.test_root, WSB_DIR, 'locks', '098f6bcd4621d373cade4e832627b4f6.lock')
        lock = Host(self.test_root).get_lock('test', stale=0.01)
        # The context-manager form must start the keeper automatically.
        with lock.acquire():
            mtime = os.stat(lock_file).st_mtime
            # poll up to 0.5 seconds in case thread delay due to busyness
            start = time.time()
            while True:
                time.sleep(0.005)
                try:
                    self.assertGreater(os.stat(lock_file).st_mtime, mtime)
                except AssertionError as exc:
                    if time.time() - start > 0.5:
                        raise exc
                else:
                    break
# Allow running this test module directly with the unittest CLI runner.
if __name__ == '__main__':
    unittest.main()
| [
"danny0838@gmail.com"
] | danny0838@gmail.com |
12a4375cf4891bb40aac4e72c72a86695597729b | 48460db1a6fdc6c09845c86cf5fa257f1a32f08a | /leetcode/medium/1041_Robot_Bounded_In_Circle.py | 6384889f67c7af592d411faf5355890bbe147bed | [] | no_license | MichalBrzozowski91/algorithms | 9d0b085621ed94b1aff5473663fbdc686463cd8d | ae57535b574a800c6300eae7d55b21f2432c3baa | refs/heads/master | 2022-12-20T08:00:59.385002 | 2020-09-30T16:32:33 | 2020-09-30T16:32:33 | 290,835,098 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 747 | py | class Solution:
def isRobotBounded(self, instructions) -> bool:
# We calculate composition of all instructions
direction = 0 # 0: North, 1: West, 2: South, 3: East
movement = {0: [0,1],1: [-1,0],2: [0,-1],3: [1,0]}
position = [0,0]
for letter in instructions:
if letter == 'L':
direction = (direction - 1) % 4
elif letter == 'R':
direction = (direction + 1) % 4
elif letter == 'G':
position[0] += movement[direction][0]
position[1] += movement[direction][1]
if direction == 0 and position != [0,0]: # Robot moved but did not rotate
return False
else:
return True
| [
"noreply@github.com"
] | MichalBrzozowski91.noreply@github.com |
c2fa30fea223b2b9791202b3d3ab0002c0d0074c | e41f4530175b15edfe52b4f3cf9b5c8c4d93ce17 | /veriloggen/core/module.py | 5ab6d6f32de4c82e192566aac76e5a3237426f89 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | foreverstream/veriloggen | ae12caa27531efdfd9cbc8d38e0d0275901b0157 | e61f28e19a6edd5a6485ba0dc541c6185d3482c2 | refs/heads/master | 2022-12-03T13:51:27.965735 | 2020-05-03T12:59:21 | 2020-05-03T12:59:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 50,823 | py | from __future__ import absolute_import
from __future__ import print_function
import os
import sys
import collections
import copy
import re
import veriloggen.core.vtypes as vtypes
import veriloggen.core.function as function
import veriloggen.core.task as task
import veriloggen.core.rename_visitor as rename_visitor
class Module(vtypes.VeriloggenNode):
    """ Verilog Module class """
    def __init__(self, name=None, tmp_prefix='_tmp'):
        # Fall back to the subclass name, so `class MyMod(Module)` yields a
        # Verilog module named "MyMod" when no explicit name is passed.
        vtypes.VeriloggenNode.__init__(self)
        self.name = name if name is not None else self.__class__.__name__
        # Per-category identifier maps; insertion order is preserved so the
        # emitted Verilog follows the Python-side definition order.
        self.io_variable = collections.OrderedDict()  # Input/Output/Inout ports
        self.variable = collections.OrderedDict()  # Reg/Wire/Integer/Real/Genvar
        self.global_constant = collections.OrderedDict()  # Parameter objects
        self.local_constant = collections.OrderedDict()  # Localparam objects
        self.function = collections.OrderedDict()
        self.task = collections.OrderedDict()
        self.assign = []
        self.always = []
        self.initial = []
        self.instance = collections.OrderedDict()  # instance name -> Instance
        self.submodule = collections.OrderedDict()  # module name -> child module
        self.generate = collections.OrderedDict()  # scope name -> generate block(s)
        self.items = []  # every registered item, in definition order
        self.tmp_prefix = tmp_prefix  # name prefix used by the Tmp* factories
        self.tmp_count = 0  # counter consumed by get_tmp()
        self.hook = []  # (method, args, kwargs) triples applied before to_verilog()
        self.used = False
#-------------------------------------------------------------------------
# User interface for variables
#-------------------------------------------------------------------------
def Input(self, name, width=None, dims=None, signed=False, value=None):
t = vtypes.Input(width, dims, signed, value, name=name, module=self)
self.check_existing_identifier(name, vtypes.Wire)
self.io_variable[name] = t
self.items.append(t)
return t
def Output(self, name, width=None, dims=None, signed=False, value=None):
t = vtypes.Output(width, dims, signed, value, name=name, module=self)
self.check_existing_identifier(name, vtypes.Wire, vtypes.Reg)
self.io_variable[name] = t
self.items.append(t)
return t
def OutputReg(self, name, width=None, dims=None, signed=False, value=None,
initval=None):
t = vtypes.Output(width, dims, signed, value, name=name, module=self)
self.check_existing_identifier(name)
self.io_variable[name] = t
self.items.append(t)
t = vtypes.Reg(width, dims, signed, value,
initval, name=name, module=self)
self.variable[name] = t
self.items.append(t)
return t
def Inout(self, name, width=None, dims=None, signed=False, value=None):
t = vtypes.Inout(width, dims, signed, value, name=name, module=self)
self.check_existing_identifier(name, vtypes.Wire)
self.io_variable[name] = t
self.items.append(t)
return t
def Wire(self, name, width=None, dims=None, signed=False, value=None):
t = vtypes.Wire(width, dims, signed, value, name=name, module=self)
self.check_existing_identifier(name, vtypes.Input, vtypes.Output)
if self.is_reg(name):
raise ValueError("Object '%s' is already defined." % name)
self.variable[name] = t
self.items.append(t)
return t
def TmpWire(self, width=None, dims=None, signed=False, value=None,
prefix=None):
if prefix is None:
prefix = self.tmp_prefix
name = '_'.join([prefix, str(self.get_tmp())])
return self.Wire(name, width, dims, signed, value)
def Reg(self, name, width=None, dims=None, signed=False, value=None,
initval=None):
t = vtypes.Reg(width, dims, signed, value,
initval, name=name, module=self)
self.check_existing_identifier(name, vtypes.Output)
self.variable[name] = t
self.items.append(t)
return t
def TmpReg(self, width=None, dims=None, signed=False, value=None,
initval=None, prefix=None):
if prefix is None:
prefix = self.tmp_prefix
name = '_'.join([prefix, str(self.get_tmp())])
return self.Reg(name, width, dims, signed, value, initval)
def Integer(self, name, width=None, dims=None, signed=False, value=None,
initval=None):
t = vtypes.Integer(width, dims, signed, value,
initval, name=name, module=self)
self.check_existing_identifier(name)
self.variable[name] = t
self.items.append(t)
return t
def TmpInteger(self, width=None, dims=None, signed=False, value=None,
initval=None, prefix=None):
if prefix is None:
prefix = self.tmp_prefix
name = '_'.join([prefix, str(self.get_tmp())])
return self.Integer(name, width, dims, signed, value, initval)
def Real(self, name, width=None, dims=None, signed=False, value=None,
initval=None):
t = vtypes.Real(width, dims, signed, value,
initval, name=name, module=self)
self.check_existing_identifier(name)
self.variable[name] = t
self.items.append(t)
return t
def TmpReal(self, width=None, dims=None, signed=False, value=None,
initval=None, prefix=None):
if prefix is None:
prefix = self.tmp_prefix
name = '_'.join([prefix, str(self.get_tmp())])
return self.Real(name, width, dims, signed, value, initval)
def Genvar(self, name, width=None, dims=None, signed=False, value=None):
t = vtypes.Genvar(width, dims, signed, value, name=name, module=self)
self.check_existing_identifier(name)
self.variable[name] = t
self.items.append(t)
return t
def TmpGenvar(self, width=None, dims=None, signed=False, value=None,
prefix=None):
if prefix is None:
prefix = self.tmp_prefix
name = '_'.join([prefix, str(self.get_tmp())])
return self.Genvar(name, width, dims, signed, value)
def Parameter(self, name, value, width=None, signed=False, dims=None):
t = vtypes.Parameter(value, width, signed, name=name, module=self)
self.check_existing_identifier(name)
self.global_constant[name] = t
self.items.append(t)
return t
def Localparam(self, name, value, width=None, signed=False, dims=None):
t = vtypes.Localparam(value, width, signed, name=name, module=self)
self.check_existing_identifier(name)
self.local_constant[name] = t
self.items.append(t)
return t
def TmpLocalparam(self, value, width=None, signed=False, dims=None,
prefix=None):
if prefix is None:
prefix = self.tmp_prefix
name = '_'.join([prefix, str(self.get_tmp())])
return self.Localparam(name, value, width, signed, dims)
#-------------------------------------------------------------------------
def InputLike(self, src, name=None, width=None, dims=None,
signed=None, value=None):
if name is None:
name = src.name
if width is None:
width = src.width
if dims is None:
dims = src.dims
if signed is None:
signed = src.signed
if value is None:
value = src.value
return self.Input(name, width, dims, signed, value)
def OutputLike(self, src, name=None, width=None, dims=None,
signed=None, value=None):
if name is None:
name = src.name
if width is None:
width = src.width
if dims is None:
dims = src.dims
if signed is None:
signed = src.signed
if value is None:
value = src.value
return self.Output(name, width, dims, signed, value)
def OutputRegLike(self, src, name=None, width=None, dims=None,
signed=None, value=None, initval=None):
if name is None:
name = src.name
if width is None:
width = src.width
if dims is None:
dims = src.dims
if signed is None:
signed = src.signed
if value is None:
value = src.value
if initval is None:
initval = src.initval
return self.OutputReg(name, width, dims, signed, value, initval)
def InoutLike(self, src, name=None, width=None, dims=None,
signed=None, value=None):
if name is None:
name = src.name
if width is None:
width = src.width
if dims is None:
dims = src.dims
if signed is None:
signed = src.signed
if value is None:
value = src.value
return self.Inout(name, width, dims, signed, value)
def WireLike(self, src, name=None, width=None, dims=None,
signed=None, value=None):
if name is None:
name = src.name
if width is None:
width = src.width
if dims is None:
dims = src.dims
if signed is None:
signed = src.signed
if value is None:
value = src.value
return self.Wire(name, width, dims, signed, value)
def TmpWireLike(self, src, width=None, dims=None,
signed=None, value=None, prefix=None):
if width is None:
width = src.width
if dims is None:
dims = src.dims
if signed is None:
signed = src.signed
if value is None:
value = src.value
return self.TmpWire(width, dims, signed, value, prefix)
def RegLike(self, src, name=None, width=None, dims=None,
signed=None, value=None, initval=None):
if name is None:
name = src.name
if width is None:
width = src.width
if dims is None:
dims = src.dims
if signed is None:
signed = src.signed
if value is None:
value = src.value
if initval is None:
initval = src.initval
return self.Reg(name, width, dims, signed, value, initval)
def TmpRegLike(self, src, width=None, dims=None,
signed=None, value=None, initval=None, prefix=None):
if width is None:
width = src.width
if dims is None:
dims = src.dims
if signed is None:
signed = src.signed
if value is None:
value = src.value
if initval is None:
initval = src.initval
return self.TmpReg(width, dims, signed, value, initval, prefix)
def IntegerLike(self, src, name=None, width=None, dims=None,
signed=None, value=None, initval=None):
if name is None:
name = src.name
if width is None:
width = src.width
if dims is None:
dims = src.dims
if signed is None:
signed = src.signed
if value is None:
value = src.value
if initval is None:
initval = src.initval
return self.Integer(name, width, dims, signed, value, initval)
def TmpIntegerLike(self, src, width=None, dims=None,
signed=None, value=None, initval=None, prefix=None):
if width is None:
width = src.width
if dims is None:
dims = src.dims
if signed is None:
signed = src.signed
if value is None:
value = src.value
if initval is None:
initval = src.initval
return self.TmpInteger(width, dims, signed, value, initval, prefix)
def RealLike(self, src, name=None, width=None, dims=None,
signed=None, value=None, initval=None):
if name is None:
name = src.name
if width is None:
width = src.width
if dims is None:
dims = src.dims
if signed is None:
signed = src.signed
if value is None:
value = src.value
if initval is None:
initval = src.initval
return self.Real(name, width, dims, signed, value, initval)
def TmpRealLike(self, src, width=None, dims=None,
signed=None, value=None, initval=None, prefix=None):
if width is None:
width = src.width
if dims is None:
dims = src.dims
if signed is None:
signed = src.signed
if value is None:
value = src.value
if initval is None:
initval = src.initval
return self.TmpReal(width, dims, signed, value, initval, prefix)
def GenvarLike(self, src, name=None, width=None, dims=None,
signed=None, value=None):
if name is None:
name = src.name
if width is None:
width = src.width
if dims is None:
dims = src.dims
if signed is None:
signed = src.signed
if value is None:
value = src.value
return self.Genvar(name, width, dims, signed, value)
def TmpGenvarLike(self, src, width=None, dims=None,
signed=None, value=None, prefix=None):
if width is None:
width = src.width
if dims is None:
dims = src.dims
if signed is None:
signed = src.signed
if value is None:
value = src.value
return self.TmpGenvar(width, dims, signed, value, prefix)
def ParameterLike(self, src, name=None, value=None, width=None,
signed=False, dims=None):
if name is None:
name = src.name
if value is None:
value = src.value
if width is None:
width = src.width
if signed is None:
signed = src.signed
if dims is None:
dims = src.dims
return self.Parameter(name, value, width, signed, dims)
def LocalparamLike(self, src, name=None, value=None, width=None,
signed=False, dims=None):
if name is None:
name = src.name
if value is None:
value = src.value
if width is None:
width = src.width
if signed is None:
signed = src.signed
if dims is None:
dims = src.dims
return self.Localparam(name, value, width, signed, dims)
def TmpLocalparamLike(self, src, value=None, width=None,
signed=False, dims=None, prefix=None):
if value is None:
value = src.value
if width is None:
width = src.width
if signed is None:
signed = src.signed
if dims is None:
dims = src.dims
return self.TmpLocalparam(value, width, signed, dims, prefix)
    #-------------------------------------------------------------------------
    # User interface for control statements
    #-------------------------------------------------------------------------
    def Always(self, *sensitivity):
        """Add an always block triggered by the given sensitivity list."""
        t = vtypes.Always(*sensitivity)
        self.always.append(t)
        self.items.append(t)
        return t
    def Assign(self, statement):
        """Add a continuous assignment (Verilog `assign`)."""
        t = vtypes.Assign(statement)
        self.assign.append(t)
        self.items.append(t)
        return t
    def Initial(self, *statement):
        """Add an initial block containing the given statements."""
        t = vtypes.Initial(*statement)
        self.initial.append(t)
        self.items.append(t)
        return t
    def Function(self, name, width=1):
        """Add a function definition with result width `width`."""
        t = function.Function(name, width)
        self.check_existing_identifier(name)
        self.function[name] = t
        self.items.append(t)
        return t
    def Task(self, name):
        """Add a task definition."""
        t = task.Task(name)
        self.check_existing_identifier(name)
        self.task[name] = t
        self.items.append(t)
        return t
    #-------------------------------------------------------------------------
    def GenerateFor(self, pre, cond, post, scope=None):
        """Add a generate-for block.  Named scopes are registered uniquely
        under `scope`; anonymous blocks accumulate in the None entry."""
        t = GenerateFor(self, pre, cond, post, scope)
        if scope is None:
            if None not in self.generate:
                self.generate[None] = []
            self.generate[None].append(t)
            self.items.append(t)
            return t
        self.check_existing_identifier(scope)
        if scope in self.generate:
            raise ValueError("scope '%s' is already defined." % scope)
        self.generate[scope] = t
        self.items.append(t)
        return t
    def GenerateIf(self, cond, scope=None):
        """Add a generate-if block; same scope bookkeeping as GenerateFor."""
        t = GenerateIf(self, cond, scope)
        if scope is None:
            if None not in self.generate:
                self.generate[None] = []
            self.generate[None].append(t)
            self.items.append(t)
            return t
        self.check_existing_identifier(scope)
        if scope in self.generate:
            raise ValueError("scope '%s' is already defined." % scope)
        self.generate[scope] = t
        self.items.append(t)
        return t
    #-------------------------------------------------------------------------
    def Instance(self, module, instname, params=None, ports=None):
        """Instantiate `module` (a Module, StubModule, or module-name str)
        inside this module as instance `instname`."""
        if isinstance(module, str):
            # A bare string names an externally defined module.
            module = StubModule(module)
        if not isinstance(module, (Module, StubModule, str)):
            raise TypeError('"module" of Instance must be Module,'
                            ' StubModule, or str, not %s' % type(module))
        self.check_existing_identifier(instname)
        t = Instance(module, instname, params, ports)
        self.instance[instname] = t
        self.items.append(t)
        mod = self.find_module(module.name)
        if mod is None:
            self.submodule[module.name] = module
        # Name collision with a *different* module object: keep appending
        # '_' to the name until it is free or resolves to this very module.
        while mod is not None:
            if mod == module:
                break
            module.name = module.name + '_'
            self.submodule[module.name] = module
            mod = self.find_module(module.name)
        return t
    #-------------------------------------------------------------------------
    def EmbeddedCode(self, code):
        """Insert raw Verilog text verbatim at this point of the module."""
        t = vtypes.EmbeddedCode(code)
        self.items.append(t)
        return t
    #-------------------------------------------------------------------------
    # User interface for reset assignments
    #-------------------------------------------------------------------------
    def make_reset(self):
        """Collect the reset statements of all variables that define one."""
        ret = []
        for vname, var in self.variable.items():
            r = var.reset()
            # Variables without a reset value return None and are skipped.
            if r is not None:
                ret.append(r)
        return ret
#-------------------------------------------------------------------------
# User interface for accessing internal information
#-------------------------------------------------------------------------
def get_params(self):
return self.global_constant
def get_localparams(self):
return self.local_constant
def get_ports(self):
return self.io_variable
def get_vars(self):
return self.variable
def __getitem__(self, r):
if isinstance(r, slice):
raise TypeError("Index must be str.")
if not isinstance(r, str):
raise TypeError("Index must be str.")
v = self.find_identifier(r)
if v is None:
raise NameError("No such variable '%s'" % r)
return v
#-------------------------------------------------------------------------
def copy_params(self, src, prefix=None, postfix=None,
include=None, exclude=None, rename_exclude=None,
use_fullmatch=False):
if prefix is None:
prefix = ''
if postfix is None:
postfix = ''
if include is None:
include = ()
if isinstance(include, str):
include = [include]
if exclude is None:
exclude = ()
if isinstance(exclude, str):
exclude = [exclude]
if rename_exclude is None:
rename_exclude = ()
if isinstance(rename_exclude, str):
rename_exclude = [rename_exclude]
visitor = rename_visitor.RenameVisitor(prefix, postfix, rename_exclude)
ret = collections.OrderedDict()
for key, obj in src.global_constant.items():
if not include:
skip = False
else:
skip = True
for inc in include:
if use_fullmatch:
inc = ''.join(('^', inc, '$'))
if re.match(inc, key):
skip = False
for ex in exclude:
if use_fullmatch:
ex = ''.join(('^', ex, '$'))
if re.match(ex, key):
skip = True
if skip:
continue
copy_obj = copy.deepcopy(obj)
copy_obj.name = ''.join([prefix, copy_obj.name, postfix])
copy_obj.value = visitor.visit(copy_obj.value)
copy_obj.width = visitor.visit(copy_obj.width)
self.add_object(copy_obj)
ret[copy_obj.name] = copy_obj
return ret
def copy_params_as_localparams(self, src, prefix=None, postfix=None,
include=None, exclude=None, rename_exclude=None,
use_fullmatch=False):
if prefix is None:
prefix = ''
if postfix is None:
postfix = ''
if include is None:
include = ()
if isinstance(include, str):
include = [include]
if exclude is None:
exclude = ()
if isinstance(exclude, str):
exclude = [exclude]
if rename_exclude is None:
rename_exclude = ()
if isinstance(rename_exclude, str):
rename_exclude = [rename_exclude]
visitor = rename_visitor.RenameVisitor(prefix, postfix, rename_exclude)
ret = collections.OrderedDict()
for key, obj in src.global_constant.items():
if not include:
skip = False
else:
skip = True
for inc in include:
if use_fullmatch:
inc = ''.join(('^', inc, '$'))
if re.match(inc, key):
skip = False
for ex in exclude:
if use_fullmatch:
ex = ''.join(('^', ex, '$'))
if re.match(ex, key):
skip = True
if skip:
continue
name = ''.join([prefix, obj.name, postfix])
value = visitor.visit(obj.value)
width = visitor.visit(obj.width)
signed = obj.signed
copy_obj = vtypes.Localparam(value, width, signed, name)
self.add_object(copy_obj)
ret[copy_obj.name] = copy_obj
return ret
def copy_localparams(self, src, prefix=None, postfix=None,
include=None, exclude=None, rename_exclude=None,
use_fullmatch=False):
if prefix is None:
prefix = ''
if postfix is None:
postfix = ''
if include is None:
include = ()
if isinstance(include, str):
include = [include]
if exclude is None:
exclude = ()
if isinstance(exclude, str):
exclude = [exclude]
if rename_exclude is None:
rename_exclude = ()
if isinstance(rename_exclude, str):
rename_exclude = [rename_exclude]
visitor = rename_visitor.RenameVisitor(prefix, postfix, rename_exclude)
ret = collections.OrderedDict()
for key, obj in src.local_constant.items():
if not include:
skip = False
else:
skip = True
for inc in include:
if use_fullmatch:
inc = ''.join(('^', inc, '$'))
if re.match(inc, key):
skip = False
for ex in exclude:
if use_fullmatch:
ex = ''.join(('^', ex, '$'))
if re.match(ex, key):
skip = True
if skip:
continue
copy_obj = copy.deepcopy(obj)
copy_obj.name = ''.join([prefix, copy_obj.name, postfix])
copy_obj.value = visitor.visit(copy_obj.value)
copy_obj.width = visitor.visit(copy_obj.width)
self.add_object(copy_obj)
ret[copy_obj.name] = copy_obj
return ret
def copy_ports(self, src, prefix=None, postfix=None,
include=None, exclude=None, rename_exclude=None,
use_fullmatch=False):
if prefix is None:
prefix = ''
if postfix is None:
postfix = ''
if include is None:
include = ()
if isinstance(include, str):
include = [include]
if exclude is None:
exclude = ()
if isinstance(exclude, str):
exclude = [exclude]
if rename_exclude is None:
rename_exclude = ()
if isinstance(rename_exclude, str):
rename_exclude = [rename_exclude]
visitor = rename_visitor.RenameVisitor(prefix, postfix, rename_exclude)
ret = collections.OrderedDict()
for key, obj in src.io_variable.items():
if not include:
skip = False
else:
skip = True
for inc in include:
if use_fullmatch:
inc = ''.join(('^', inc, '$'))
if re.match(inc, key):
skip = False
for ex in exclude:
if use_fullmatch:
ex = ''.join(('^', ex, '$'))
if re.match(ex, key):
skip = True
if skip:
continue
copy_obj = copy.deepcopy(obj)
copy_obj.name = ''.join([prefix, copy_obj.name, postfix])
copy_obj.width = visitor.visit(copy_obj.width)
copy_obj.signed = obj.signed
self.add_object(copy_obj)
ret[copy_obj.name] = copy_obj
return ret
def copy_ports_as_vars(self, src, prefix=None, postfix=None,
include=None, exclude=None, rename_exclude=None,
use_fullmatch=False, use_wire=False):
if prefix is None:
prefix = ''
if postfix is None:
postfix = ''
if include is None:
include = ()
if isinstance(include, str):
include = [include]
if exclude is None:
exclude = ()
if isinstance(exclude, str):
exclude = [exclude]
if rename_exclude is None:
rename_exclude = ()
if isinstance(rename_exclude, str):
rename_exclude = [rename_exclude]
visitor = rename_visitor.RenameVisitor(prefix, postfix, rename_exclude)
ret = collections.OrderedDict()
for key, obj in src.io_variable.items():
if not include:
skip = False
else:
skip = True
for inc in include:
if use_fullmatch:
inc = ''.join(('^', inc, '$'))
if re.match(inc, key):
skip = False
for ex in exclude:
if use_fullmatch:
ex = ''.join(('^', ex, '$'))
if re.match(ex, key):
skip = True
if skip:
continue
copy_obj = self.get_opposite_variable(obj, use_wire)(
name=key, width=copy.deepcopy(obj.width),
initval=obj.initval, signed=obj.signed, module=self)
copy_obj.name = ''.join([prefix, copy_obj.name, postfix])
copy_obj.width = visitor.visit(copy_obj.width)
copy_obj.initval = visitor.visit(copy_obj.initval)
copy_obj.signed = obj.signed
self.add_object(copy_obj)
ret[copy_obj.name] = copy_obj
return ret
def copy_vars(self, src, prefix=None, postfix=None,
include=None, exclude=None, rename_exclude=None,
use_fullmatch=False):
if prefix is None:
prefix = ''
if postfix is None:
postfix = ''
if include is None:
include = ()
if isinstance(include, str):
include = [include]
if exclude is None:
exclude = ()
if isinstance(exclude, str):
exclude = [exclude]
if rename_exclude is None:
rename_exclude = ()
if isinstance(rename_exclude, str):
rename_exclude = [rename_exclude]
visitor = rename_visitor.RenameVisitor(prefix, postfix, rename_exclude)
ret = collections.OrderedDict()
for key, obj in src.variable.items():
if not include:
skip = False
else:
skip = True
for inc in include:
if use_fullmatch:
inc = ''.join(('^', inc, '$'))
if re.match(inc, key):
skip = False
for ex in exclude:
if use_fullmatch:
ex = ''.join(('^', ex, '$'))
if re.match(ex, key):
skip = True
if skip:
continue
copy_obj = copy.deepcopy(obj)
copy_obj.name = ''.join([prefix, copy_obj.name, postfix])
copy_obj.width = visitor.visit(copy_obj.width)
copy_obj.signed = obj.signed
self.add_object(copy_obj)
ret[copy_obj.name] = copy_obj
return ret
def copy_sim_ports(self, src, prefix=None, postfix=None,
include=None, exclude=None, rename_exclude=None,
use_fullmatch=False, use_wire=False):
return self.copy_ports_as_vars(src, prefix, postfix,
include, exclude, rename_exclude,
use_fullmatch, use_wire)
#-------------------------------------------------------------------------
def connect_params(self, targ, prefix=None, postfix=None,
include=None, exclude=None, strict=False):
if prefix is None:
prefix = ''
if postfix is None:
postfix = ''
if include is None:
include = ()
if isinstance(include, str):
include = [include]
if exclude is None:
exclude = ()
if isinstance(exclude, str):
exclude = [exclude]
ret = []
for key, obj in targ.global_constant.items():
if not include:
skip = False
else:
skip = True
for inc in include:
if use_fullmatch:
inc = ''.join(('^', inc, '$'))
if re.match(inc, key):
skip = False
for ex in exclude:
if use_fullmatch:
ex = ''.join(('^', ex, '$'))
if re.match(ex, key):
skip = True
if skip:
continue
my_key = ''.join([prefix, key, postfix])
if (strict and (my_key not in self.global_constant) and
(my_key not in self.local_constant)):
raise IndexError(
"No such constant '%s' in module '%s'" % (key, self.name))
if my_key in self.global_constant:
ret.append((key, self.global_constant[my_key]))
elif my_key in self.local_constant:
ret.append((key, self.local_constant[my_key]))
return ret
def connect_ports(self, targ, prefix=None, postfix=None,
include=None, exclude=None, strict=False):
if prefix is None:
prefix = ''
if postfix is None:
postfix = ''
if include is None:
include = ()
if isinstance(include, str):
include = [include]
if exclude is None:
exclude = ()
if isinstance(exclude, str):
exclude = [exclude]
ret = []
for key, obj in targ.io_variable.items():
if not include:
skip = False
else:
skip = True
for inc in include:
if use_fullmatch:
inc = ''.join(('^', inc, '$'))
if re.match(inc, key):
skip = False
for ex in exclude:
if use_fullmatch:
ex = ''.join(('^', ex, '$'))
if re.match(ex, key):
skip = True
if skip:
continue
my_key = ''.join([prefix, key, postfix])
if (strict and (my_key not in self.io_variable) and
(my_key not in self.variable)):
raise IndexError("No such IO '%s' in module '%s'" %
(key, self.name))
if my_key in self.io_variable:
ret.append((key, self.io_variable[my_key]))
elif my_key in self.variable:
ret.append((key, self.variable[my_key]))
return ret
    #-------------------------------------------------------------------------
    # User interface for Verilog code generation
    #-------------------------------------------------------------------------
    def to_verilog(self, filename=None, for_verilator=False):
        """Convert this module (with all hooks applied) to Verilog source
        text; also write it to `filename` when given."""
        import veriloggen.verilog.to_verilog as to_verilog
        obj = self.to_hook_resolved_obj()
        return to_verilog.write_verilog(obj, filename, for_verilator)
    def add_hook(self, method, args=None, kwargs=None):
        """ add a hooked method to 'to_verilog()' """
        # Hooks run on a deep copy inside to_hook_resolved_obj(), so the
        # original module tree is left untouched.
        self.hook.append((method, args, kwargs))
    #-------------------------------------------------------------------------
    def add_object(self, obj):
        """Register an already-constructed veriloggen object into the proper
        namespace of this module, dispatching on its type.
        NOTE: the isinstance order matters -- AnyType is tested before the
        concrete port classes, and Reg/Wire before the other variables."""
        if isinstance(obj, vtypes._Variable) and obj.name is None:
            raise ValueError("Object must have a name.")
        self.items.append(obj)
        if isinstance(obj, vtypes.AnyType):
            # AnyType placeholders are kept in the port namespace only.
            self.io_variable[obj.name] = obj
            #self.variable[obj.name] = obj
            #self.global_constant[obj.name] = obj
            #self.local_constant[obj.name] = obj
            return
        if isinstance(obj, (vtypes.Input, vtypes.Output, vtypes.Inout)):
            self.io_variable[obj.name] = obj
            return
        if isinstance(obj, (vtypes.Reg, vtypes.Wire)):
            self.variable[obj.name] = obj
            return
        if isinstance(obj, (vtypes.Integer, vtypes.Real, vtypes.Genvar)):
            self.variable[obj.name] = obj
            return
        if isinstance(obj, vtypes.Parameter):
            self.global_constant[obj.name] = obj
            return
        if isinstance(obj, vtypes.Localparam):
            self.local_constant[obj.name] = obj
            return
        if isinstance(obj, function.Function):
            self.function[obj.name] = obj
            return
        if isinstance(obj, task.Task):
            self.task[obj.name] = obj
            return
        if isinstance(obj, vtypes.Assign):
            self.assign.append(obj)
            return
        if isinstance(obj, vtypes.Always):
            self.always.append(obj)
            return
        if isinstance(obj, vtypes.Initial):
            self.initial.append(obj)
            return
        # Generate blocks: anonymous ones accumulate in the None entry,
        # named ones are registered under their scope name.
        if isinstance(obj, GenerateFor):
            if obj.scope is None:
                if None not in self.generate:
                    self.generate[None] = []
                self.generate[None].append(obj)
                return
            self.generate[obj.scope] = obj
            return
        if isinstance(obj, GenerateIf):
            if obj.true_scope is None:
                if None not in self.generate:
                    self.generate[None] = []
                self.generate[None].append(obj)
                return
            self.generate[obj.true_scope] = obj
            return
        if isinstance(obj, GenerateIfElse):
            if obj.false_scope is None:
                if None not in self.generate:
                    self.generate[None] = []
                self.generate[None].append(obj)
                return
            self.generate[obj.false_scope] = obj
            return
        if isinstance(obj, Instance):
            # String-named instances reference external modules, so only
            # real Module objects are tracked as submodules.
            if isinstance(obj.module, Module):
                self.instance[obj.instname] = obj
                self.submodule[obj.module.name] = obj.module
            elif isinstance(obj.module, str):
                self.instance[obj.instname] = obj
            return
        raise TypeError("Object type '%s' is not supported." % str(type(obj)))
    #-------------------------------------------------------------------------
    def add_function(self, t):
        """Register an externally constructed Function object."""
        if not isinstance(t, function.Function):
            raise TypeError(
                "add_function requires a Function, not %s" % type(t))
        name = t.name
        self.function[name] = t
        self.items.append(t)
        return t
    def add_task(self, t):
        """Register an externally constructed Task object."""
        if not isinstance(t, task.Task):
            raise TypeError("add_task requires a Task, not %s" % type(t))
        name = t.name
        self.task[name] = t
        self.items.append(t)
        return t
    #-------------------------------------------------------------------------
    def remove(self, v):
        """Remove `v` from the emission list, matching by identity.
        NOTE(review): entries in the category dicts (variable, function,
        ...) are left untouched -- presumably intentional; verify callers."""
        vid = id(v)
        for i, item in enumerate(self.items):
            if vid == id(item):
                del self.items[i]
                return
    def append(self, v):
        """Append an arbitrary item to the emission list as-is."""
        self.items.append(v)
#-------------------------------------------------------------------------
def find_identifier(self, name):
if name in self.io_variable:
return self.io_variable[name]
if name in self.variable:
return self.variable[name]
if name in self.global_constant:
return self.global_constant[name]
if name in self.local_constant:
return self.local_constant[name]
if name in self.function:
return self.function[name]
if name in self.task:
return self.task[name]
if name in self.instance:
return self.instance[name]
if name in self.generate:
return self.generate[name]
return None
#-------------------------------------------------------------------------
def get_tmp(self):
ret = self.tmp_count
self.tmp_count += 1
return ret
#-------------------------------------------------------------------------
def is_input(self, name):
if name not in self.io_variable:
return False
if isinstance(self.io_variable[name], vtypes.Input):
return True
return False
def is_output(self, name):
if name not in self.io_variable:
return False
if isinstance(self.io_variable[name], vtypes.Output):
return True
return False
def is_inout(self, name):
if name not in self.io_variable:
return False
if isinstance(self.io_variable[name], vtypes.Inout):
return True
return False
def is_reg(self, name):
if name not in self.variable:
return False
if isinstance(self.variable[name], vtypes.Reg):
return True
return False
def is_wire(self, name):
if name not in self.variable and name not in self.io_variable:
return False
if name in self.variable and isinstance(self.variable[name], vtypes.Wire):
return True
if name in self.variable and isinstance(self.variable[name], vtypes.Reg):
return False
if name in self.io_variable:
return True
return False
#-------------------------------------------------------------------------
def get_opposite_variable(self, var, use_wire=False):
if isinstance(var, vtypes.Input):
if use_wire:
return vtypes.Wire
return vtypes.Reg
if isinstance(var, vtypes.Output):
return vtypes.Wire
if isinstance(var, vtypes.Inout):
return vtypes.Wire
raise TypeError('No corresponding IO type for %s' % str(type(var)))
#-------------------------------------------------------------------------
def to_hook_resolved_obj(self):
# if there is no hooked method, object copy is not required.
if not self.has_hook():
return self
copied = copy.deepcopy(self)
copied.resolve_hook()
return copied
def resolve_hook(self):
for method, args, kwargs in self.hook:
if args is None:
args = ()
if kwargs is None:
kwargs = {}
method(*args, **kwargs)
for sub in self.submodule.values():
sub.resolve_hook()
def has_hook(self):
if self.hook:
return True
for sub in self.submodule.values():
if sub.has_hook():
return True
return False
#-------------------------------------------------------------------------
def find_module(self, name):
if name in self.submodule:
return self.submodule[name]
for gen in self.generate.values():
if isinstance(gen, (tuple, list)):
for g in gen:
r = g.find_module(name)
if r is not None:
return r
else:
r = gen.find_module(name)
if r is not None:
return r
for sub in self.submodule.values():
r = sub.find_module(name)
if r is not None:
return r
return None
def get_modules(self):
modules = collections.OrderedDict()
modules[self.name] = self
for gen in self.generate.values():
if isinstance(gen, (tuple, list)):
for g in gen:
modules.update(g.get_modules())
else:
modules.update(gen.get_modules())
for sub in self.submodule.values():
modules.update(sub.get_modules())
return modules
def check_existing_identifier(self, name, *types):
s = self.find_identifier(name)
if s is None:
return
if isinstance(s, vtypes.AnyType):
return
if len(types) == 0:
raise ValueError("Object '%s' is already defined." % name)
if not isinstance(s, types):
raise ValueError("Object '%s' is already defined." % name)
class StubModule(vtypes.VeriloggenNode):
    """Module placeholder defined by raw Verilog source text instead of
    veriloggen objects; used for externally provided modules."""

    def __init__(self, name=None, code=''):
        vtypes.VeriloggenNode.__init__(self)
        self.name = self.__class__.__name__ if name is None else name
        self.code = code
        self.used = False

    def set_code(self, code):
        """Replace the raw Verilog code of this stub."""
        self.code = code

    def get_code(self):
        """Return the raw Verilog code of this stub."""
        return self.code

    def to_verilog(self, filename=None):
        """Emit this stub's code, optionally writing it to `filename`."""
        import veriloggen.verilog.to_verilog as to_verilog
        return to_verilog.write_verilog(self, filename)

    def resolve_hook(self):
        """Stubs carry no hooks; nothing to do."""
        pass

    def has_hook(self):
        """Stubs never have hooks."""
        return False

    def find_module(self, name):
        """A stub contains no submodules."""
        return None

    def get_modules(self):
        """Return an OrderedDict containing only this stub."""
        only_self = collections.OrderedDict()
        only_self[self.name] = self
        return only_self
class Instance(vtypes.VeriloggenNode):
    """A Verilog module instantiation.

    ``params`` and ``ports`` each accept three spellings:
      * a dict mapping name -> value (named association),
      * a sequence of (name, value) pairs (named association),
      * a flat sequence of values (positional association); names are then
        taken from the module's declaration order when the target is a real
        Module, or left as None for stubs/unknown modules.
    Both are normalized to sequences of (name, value) pairs, and named
    associations against a real Module are validated against its
    parameter / I/O declarations.
    """
    def __init__(self, module, instname, params=None, ports=None):
        vtypes.VeriloggenNode.__init__(self)
        if params is None:
            params = ()
        if ports is None:
            ports = ()
        self._type_check_params(params)
        self._type_check_ports(ports)
        # NOTE(review): _type_check_module() is defined below but never
        # called here, so 'module' itself is not validated on construction.
        self.module = module
        self.instname = instname
        # Mark the target module as used so code generation emits it.
        if hasattr(self.module, 'used'):
            self.module.used = True
        if not params:
            self.params = ()
        elif isinstance(params, dict): # named: {name: value}
            self.params = [(k, v) for k, v in params.items()]
        elif isinstance(params[0], (tuple, list)): # named: [(name, value), ...]
            for param in params:
                if not isinstance(param, (tuple, list)) or len(param) != 2:
                    raise ValueError("Illegal parameter argument")
            self.params = params
        else: # noname: positional values only
            for param in params:
                if not isinstance(param, vtypes.numerical_types):
                    raise ValueError("Illegal parameter argument")
            if not isinstance(module, Module) or isinstance(module, StubModule):
                self.params = [(None, p) for p in params]
            else:
                # Positional params are matched to the module's global
                # constants in declaration order.
                self.params = [(v.name, p) for v, p in zip(
                    module.global_constant.values(), params)]
        if isinstance(module, Module) and not isinstance(module, StubModule):
            # Every named parameter must exist on the target module.
            for name, port in self.params:
                if name is None:
                    continue
                if not isinstance(module.find_identifier(name), vtypes.Parameter):
                    raise ValueError("No such parameter '%s' in module '%s'" %
                                     (name, module.name))
        if not ports:
            self.ports = ()
        elif isinstance(ports, dict): # named: {name: value}
            self.ports = [(k, v) for k, v in ports.items()]
        elif isinstance(ports[0], (tuple, list)): # named: [(name, value), ...]
            for port in ports:
                if not isinstance(port, (tuple, list)) or len(port) != 2:
                    raise ValueError("Illegal port argument")
            self.ports = ports
        else: # noname: positional values; None leaves a port unconnected
            for port in ports:
                if port is not None and not isinstance(port, vtypes.numerical_types):
                    raise ValueError("Illegal port argument")
            if not isinstance(module, Module) or isinstance(module, StubModule):
                self.ports = [(None, p) for p in ports]
            else:
                # Positional ports are matched to the module's I/O
                # variables in declaration order.
                self.ports = [(v.name, p) for v, p in zip(
                    module.io_variable.values(), ports)]
        if isinstance(module, Module) and not isinstance(module, StubModule):
            # Every named port must exist on the target module as I/O.
            for name, port in self.ports:
                if name is None:
                    continue
                if not isinstance(module.find_identifier(name),
                                  (vtypes.Input, vtypes.Output, vtypes.Inout)):
                    raise ValueError("No such port '%s' in module '%s'" %
                                     (name, module.name))
    def _type_check_module(self, module):
        if not isinstance(module, (Module, StubModule)):
            raise TypeError("module of Instance must be Module or StubModule, not %s" %
                            type(module))
    def _type_check_params(self, params):
        if not isinstance(params, (tuple, list, dict)):
            raise TypeError(
                "params of Instance require tuple, list, or dict, not %s." % type(params))
    def _type_check_ports(self, ports):
        if not isinstance(ports, (tuple, list, dict)):
            raise TypeError(
                "ports of Instance require tuple, list, or dict, not %s." % type(ports))
class Generate(Module):
    """Common base class for Verilog generate constructs.

    A Generate owns its own item collections (inherited from Module) but
    delegates identifier lookups to the parent module *m* when a name is
    not found locally.  Declaring I/O ports inside a generate block is
    forbidden.
    """
    def __init__(self, m):
        Module.__init__(self)
        self.m = m  # parent module hosting this generate block
    def Input(self, name, width=None, dims=None, signed=False, value=None):
        raise TypeError("Input port is not allowed in generate statement")
    def Output(self, name, width=None, dims=None, signed=False, value=None):
        raise TypeError("Output port is not allowed in generate statement")
    def OutputReg(self, name, width=None, dims=None, signed=False, value=None,
                  initval=None):
        raise TypeError("OutputReg port is not allowed in generate statement")
    def Inout(self, name, width=None, dims=None, signed=False, value=None):
        raise TypeError("Inout port is not allowed in generate statement")
    def find_identifier(self, name):
        """Look up *name* locally first, then in the parent module."""
        local = Module.find_identifier(self, name)
        if local is not None:
            return local
        return self.m.find_identifier(name)
    def get_modules(self):
        """Collect modules defined inside this generate (excluding self)."""
        modules = collections.OrderedDict()
        for gen in self.generate.values():
            members = gen if isinstance(gen, (tuple, list)) else (gen,)
            for member in members:
                modules.update(member.get_modules())
        for sub in self.submodule.values():
            modules.update(sub.get_modules())
        return modules
    def _type_check_scope(self, scope):
        """Scope names must be str; None means an anonymous scope."""
        if scope is not None and not isinstance(scope, str):
            raise TypeError("Scope name should be str, not %s." % type(scope))
class GenerateFor(Generate):
    """A 'generate for' construct with C-style (pre, cond, post) loop control."""
    def __init__(self, m, pre, cond, post, scope=None):
        Generate.__init__(self, m)
        self.pre = pre      # loop initialization statement
        self.cond = cond    # loop continuation condition
        self.post = post    # loop update statement
        self.scope = scope  # optional scope label for references into the loop
        self._type_check_scope(scope)
    def __getitem__(self, index):
        # Indexing a named generate-for yields a scoped reference into it.
        return vtypes.ScopeIndex(self.scope, index)
class GenerateIf(Generate):
    """A 'generate if' construct; the paired else branch lives in self.Else."""
    def __init__(self, m, cond, true_scope=None):
        Generate.__init__(self, m)
        self.cond = cond
        self.true_scope = true_scope
        # The else branch is always created; it stays empty unless used.
        self.Else = GenerateIfElse(m)
        self._type_check_scope(true_scope)
class GenerateIfElse(Generate):
    """The else branch of a GenerateIf; calling it assigns the scope name."""
    def __init__(self, m, false_scope=None):
        Generate.__init__(self, m)
        self.false_scope = false_scope
        self._type_check_scope(false_scope)
    def __call__(self, false_scope):
        # Allows the fluent form: if_obj.Else('scope_name')
        self.false_scope = false_scope
        return self
def connect_same_name(*args):
    """Build (name, variable) pairs for ports connected by matching names.

    Each argument may be a single Variable or a list/tuple of Variables;
    anything else raises TypeError.
    """
    pairs = []
    for arg in args:
        if isinstance(arg, (list, tuple)):
            pairs.extend((item.name, item) for item in arg)
        elif isinstance(arg, vtypes._Variable):
            pairs.append((arg.name, arg))
        else:
            raise TypeError(
                'connect_same_name supports Variables, lists and tuples of them.')
    return pairs
| [
"shta.ky1018@gmail.com"
] | shta.ky1018@gmail.com |
16eba7e91dddfc9fd58fd0bdfcb6fc0faaaa7bc4 | 89e6c3548fbdd06178aae712de1ff19004bc2faa | /my_django/contrib/staticfiles/utils.py | 5a69ad055a37f4d7a0a0c72ea6dd79f935e9a6a7 | [] | no_license | bhgv/ublog_git.hg.repo-django.python-engine | a3f3cdcbacc95ec98f022f9719d3b300dd6541d4 | 74cdae100bff5e8ab8fb9c3e8ba95623333c2d43 | refs/heads/master | 2020-03-23T01:04:07.431749 | 2018-07-25T12:59:21 | 2018-07-25T12:59:21 | 140,899,479 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,979 | py | import os
import fnmatch
from my_django.conf import settings
from my_django.core.exceptions import ImproperlyConfigured
def matches_patterns(path, patterns=None):
    """
    Return True if ``path`` matches any of the fnmatch-style ``patterns``
    (case-sensitive), False otherwise.  ``patterns`` may be None.
    """
    return any(fnmatch.fnmatchcase(path, pattern)
               for pattern in (patterns or []))
def get_files(storage, ignore_patterns=None, location=''):
    """
    Recursively walk the storage directories, yielding the paths of all
    files that should be copied (i.e. those not matching ``ignore_patterns``).
    Paths are joined relative to ``location`` as the walk descends.
    """
    patterns = [] if ignore_patterns is None else ignore_patterns
    directories, files = storage.listdir(location)
    for filename in files:
        if matches_patterns(filename, patterns):
            continue
        yield os.path.join(location, filename) if location else filename
    for directory in directories:
        if matches_patterns(directory, patterns):
            continue
        subdir = os.path.join(location, directory) if location else directory
        for path in get_files(storage, patterns, subdir):
            yield path
def check_settings(base_url=None):
    """
    Checks if the staticfiles settings have sane values; raises
    ImproperlyConfigured when STATIC_URL is missing or when the static
    and media settings collide.
    """
    if base_url is None:
        base_url = settings.STATIC_URL
    if not base_url:
        raise ImproperlyConfigured(
            "You're using the staticfiles app "
            "without having set the required STATIC_URL setting.")
    if settings.MEDIA_URL == base_url:
        raise ImproperlyConfigured("The MEDIA_URL and STATIC_URL "
                                   "settings must have different values")
    media_root = settings.MEDIA_ROOT
    static_root = settings.STATIC_ROOT
    if media_root and static_root and media_root == static_root:
        raise ImproperlyConfigured("The MEDIA_ROOT and STATIC_ROOT "
                                   "settings must have different values")
| [
"bhgv.empire@gmail.com"
] | bhgv.empire@gmail.com |
def countBlackCells(n, m):
    """Count the lattice cells an n x m grid diagonal passes through.

    The diagonal touches n + m + gcd(n, m) - 2 cells, which is the same
    quantity the original s - g + 2*(g - 1) expression computed.
    """
    a, b = n, m
    while b:  # Euclid's algorithm; gcd ends up in a
        a, b = b, a % b
    return n + m + a - 2
"noreply@github.com"
] | amararora07.noreply@github.com |
cdd92af8c583df98a026b684150ffdee3db66b54 | c56ee3cf2a97ae7fc043bd90e26ad5e34b87328f | /.venv/Lib/site-packages/pip/_internal/commands/wheel.py | c36acd229ba85ef1b3b4f6b228713e369f56bb42 | [
"MIT"
] | permissive | LuckJMG/ImprovedReplace | a88cab845ab894e3e8cb9591bc4e5611b43d403e | e59ad89c43f901d409215353a7403781fb689c7e | refs/heads/main | 2023-02-27T07:40:26.746185 | 2021-02-02T03:04:18 | 2021-02-02T03:04:18 | 235,675,235 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,377 | py | # -*- coding: utf-8 -*-
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
from __future__ import absolute_import
import logging
import os
import shutil
from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
from pip._internal.exceptions import CommandError
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.utils.misc import ensure_dir, normalize_path
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.wheel_builder import build, should_build_for_wheel_command
if MYPY_CHECK_RUNNING:
from optparse import Values
from typing import Any, List
logger = logging.getLogger(__name__)
class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.
    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/
    Requirements: setuptools>=0.8, and wheel.
    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
    package to build individual wheels.
    """
    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""
    def __init__(self, *args, **kw):
        # Register wheel-specific options first, then the shared
        # requirement/index option groups inherited from pip's cmdoptions.
        super(WheelCommand, self).__init__(*args, **kw)
        cmd_opts = self.cmd_opts
        cmd_opts.add_option(
            "-w",
            "--wheel-dir",
            dest="wheel_dir",
            metavar="dir",
            default=os.curdir,
            help=(
                "Build wheels into <dir>, where the default is the "
                "current working directory."
            ),
        )
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.prefer_binary())
        cmd_opts.add_option(
            "--build-option",
            dest="build_options",
            metavar="options",
            action="append",
            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
        )
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.use_pep517())
        cmd_opts.add_option(cmdoptions.no_use_pep517())
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.progress_bar())
        cmd_opts.add_option(
            "--global-option",
            dest="global_options",
            action="append",
            metavar="options",
            help="Extra global options to be supplied to the setup.py "
            "call before the 'bdist_wheel' command.",
        )
        cmd_opts.add_option(
            "--pre",
            action="store_true",
            default=False,
            help=(
                "Include pre-release and development versions. By default, "
                "pip only finds stable versions."
            ),
        )
        cmd_opts.add_option(cmdoptions.require_hashes())
        index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser,)
        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)
    @with_cleanup
    def run(self, options, args):
        # type: (Values, List[Any]) -> None
        # Resolve the requested requirements, build wheels into the wheel
        # cache, then copy the built wheels into options.wheel_dir.
        cmdoptions.check_install_build_global(options)
        session = self.get_default_session(options)
        finder = self._build_package_finder(options, session)
        build_delete = not (options.no_clean or options.build_dir)
        wheel_cache = WheelCache(options.cache_dir, options.format_control)
        options.wheel_dir = normalize_path(options.wheel_dir)
        ensure_dir(options.wheel_dir)
        req_tracker = self.enter_context(get_requirement_tracker())
        directory = TempDirectory(
            options.build_dir, delete=build_delete, kind="wheel", globally_managed=True,
        )
        reqs = self.get_requirements(args, options, finder, session)
        preparer = self.make_requirement_preparer(
            temp_build_dir=directory,
            options=options,
            req_tracker=req_tracker,
            session=session,
            finder=finder,
            wheel_download_dir=options.wheel_dir,
            use_user_site=False,
        )
        resolver = self.make_resolver(
            preparer=preparer,
            finder=finder,
            options=options,
            wheel_cache=wheel_cache,
            ignore_requires_python=options.ignore_requires_python,
            use_pep517=options.use_pep517,
        )
        self.trace_basic_info(finder)
        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
        reqs_to_build = [
            r
            for r in requirement_set.requirements.values()
            if should_build_for_wheel_command(r)
        ]
        # build wheels
        build_successes, build_failures = build(
            reqs_to_build,
            wheel_cache=wheel_cache,
            build_options=options.build_options or [],
            global_options=options.global_options or [],
        )
        for req in build_successes:
            assert req.link and req.link.is_wheel
            assert req.local_file_path
            # copy from cache to target directory; a copy failure demotes
            # the requirement to the failure list rather than aborting.
            try:
                shutil.copy(req.local_file_path, options.wheel_dir)
            except OSError as e:
                logger.warning(
                    "Building wheel for %s failed: %s", req.name, e,
                )
                build_failures.append(req)
        if len(build_failures) != 0:
            raise CommandError("Failed to build one or more wheels")
| [
"lucas.mosquera13@gmail.com"
] | lucas.mosquera13@gmail.com |
8b405f00dab70862c334598b6c1021a7e457cf2c | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/6/pkt.py | 634432923d2acf078394fbc0fcdf3e231639cda8 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    """Print the tokens between a pair of quote tokens, joined by spaces.

    A bare pair of quote tokens (just '"' and '"') prints an empty line.
    Tokens not wrapped in quotes are silently ignored.
    NOTE: this file uses Python 2 print statements.
    """
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            # Strip the opening and closing quote tokens before printing.
            lineRemaining = lineRemaining[1:-1]
            print ' '.join(lineRemaining)
        else:
            print
def main(fileName):
    """Interpret *fileName* line by line.

    Lines whose first token is 'pKT' are handed to printFunction();
    any other line prints 'ERROR' and stops processing.
    NOTE(review): an empty line would raise IndexError on data[0].
    """
    with open(fileName) as f:
        for line in f:
            data = line.split()
            if data[0] == 'pKT':
                printFunction(data[1:])
            else:
                print 'ERROR'
                return
# Script entry point: the first CLI argument is the program file to run.
if __name__ == '__main__':
    main(sys.argv[1])
"juliettaylorswift@gmail.com"
] | juliettaylorswift@gmail.com |
b80e40afc308d64a20780da4e4481dcccaa2cb8a | f36856f1fe47f66d7181d4bc026bfb6fc9a215e2 | /code/train.py | 723b3bac4a2c9802ecf1f3c0294dc2934b62cfb6 | [] | no_license | TrendingTechnology/ROS | 5776bc7faa419c74164703d486092dc4ac9a7bce | bceef4d9dc505f55322a4c25fb8071f49e7a5671 | refs/heads/master | 2023-01-10T15:39:48.464872 | 2020-08-07T13:48:54 | 2020-08-07T13:48:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,169 | py | import sys
import argparse
from steps_separation_adaptation import Trainer
import numpy as np
import torch
import os
def get_args():
    """Parse and return the command-line arguments for a training run."""
    parser = argparse.ArgumentParser(description="Script to launch training",formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # Source / target domains
    parser.add_argument("--source", help="Source")
    parser.add_argument("--target", help="Target")
    parser.add_argument("--batch_size", type=int, default=32, help="Batch size")
    parser.add_argument("--learning_rate", type=float, default=0.003, help="Learning rate")
    parser.add_argument("--divison_learning_rate_backbone", type=float, default=10.0, help="Scaling factor of the learning rate used for the part pf the backbone not freezed")
    # Epochs for step1 and step2
    parser.add_argument("--epochs_step1", type=int, default=80, help="Epochs of step1")
    parser.add_argument("--epochs_step2", type=int, default=80,help="Epochs of step2")
    # Number of classes: known, known+unknown, and self-supervised task classes
    parser.add_argument("--n_classes", type=int, default=25, help="Number of classes of source domain -- known classes")
    parser.add_argument("--n_classes_target", type=int, default=65,help="Number of classes of target domain -- known+unknown classes")
    parser.add_argument("--ss_classes", "-rc", type=int, default=4, help="Number of classes for the self-supervised task")
    # Loss weights used during training
    parser.add_argument("--ss_weight_source", type=float, default=3.0, help="Weight of the source domain for the ss task (it acts in step1)")
    parser.add_argument("--ss_weight_target", type=float, default=3.0, help="Weight of the target domain for the ss task (it acts in step2)")
    parser.add_argument("--cls_weight_source", type=float, default=1.0, help="Weight for the cls task (it acts in step1 and step2)")
    parser.add_argument("--entropy_weight", type=float, default=0.1, help="Weight for the ss task (it acts in step2)")
    parser.add_argument("--weight_center_loss", type=float, default=0.0, help="Weight of the center loss for the ss task (it acts in step1)")
    parser.add_argument("--weight_class_unknown", type=float, default=1.0, help="Power of learning of the unknown class (it acts in step2)")
    # Paths of the folders used
    parser.add_argument("--folder_dataset",default=None, help="Path to the dataset")
    parser.add_argument("--folder_txt_files", default='/.../ROS/data/',help="Path to the txt files of the dataset")
    parser.add_argument("--folder_txt_files_saving", default='/.../ROS/data/',help="Path where to save the new txt files")
    parser.add_argument("--folder_log", default=None, help="Path of the log folder")
    # GPU selection / number of data-loader workers
    parser.add_argument("--gpu", type=int, default=0, help="gpu chosen for the training")
    parser.add_argument("--n_workers", type=int, default=4, help="num of worker used")
    parser.add_argument("--use_VGG", action='store_true', default=False, help="If use VGG")
    parser.add_argument("--use_weight_net_first_part", action='store_true', default=False, help="If use the weight computed in the step1 for step2")
    parser.add_argument("--only_4_rotations", action='store_true', default=False,help="If not use rotation for class")
    return parser.parse_args()
args = get_args()
orig_stdout = sys.stdout
# Random run id used to keep outputs of concurrent runs separate.
rand = np.random.randint(200000)
# Derive the project root from the txt-files path (expects '/ROS/' in it).
words = args.folder_txt_files.split('/ROS/')
args.folder_log = words[0]+'/'+'ROS/outputs/logs/' + str(rand)
args.folder_name = words[0]+'/'+'ROS/outputs/' + str(rand)
args.folder_txt_files_saving = args.folder_txt_files + str(rand)
gpu = str(args.gpu)
device = torch.device("cuda:"+gpu)
if not os.path.exists(args.folder_name):
    os.makedirs(args.folder_name)
print('\n')
print('TRAIN START!')
print('\n')
print('THE OUTPUT IS SAVED IN A TXT FILE HERE -------------------------------------------> ', args.folder_name)
print('\n')
# Redirect stdout into a per-run log file for the duration of training.
f = open(args.folder_name + '/out.txt', 'w')
sys.stdout = f
print("\n%s to %s - %d ss classes" % (args.source, args.target, args.ss_classes))
trainer = Trainer(args, device, rand)
trainer._do_train()
print(args)
# Restore stdout and close the log file.
sys.stdout = orig_stdout
f.close()
| [
"noreply@github.com"
] | TrendingTechnology.noreply@github.com |
d49ee13b8846f6305e8ae4ead7f5b99135202e48 | b1b77bb1ed47586f96d8f2554a65bcbd0c7162cc | /SPOTIFY/crtauth/crtauth/ldap_key_provider.py | d89c6427c32275e73dd6181b9b384991f5fb2e4a | [
"Apache-2.0"
] | permissive | DanHefrman/stuff | b3624d7089909972ee806211666374a261c02d08 | b98a5c80cfe7041d8908dcfd4230cf065c17f3f6 | refs/heads/master | 2023-07-10T09:47:04.780112 | 2021-08-13T09:55:17 | 2021-08-13T09:55:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,057 | py | # Copyright (c) 2011-2017 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from crtauth import exceptions, rsa, key_provider
import ldap
from ldap import filter
class LDAPKeyProvider(key_provider.KeyProvider):
    """
    Provides a PubKey instance based on a lookup in an LDAP directory.
    User entries are expected to be of class posixAccount living directly under
    base_dn in the directory information tree, have an uid matching the
    username parameter and one sshPublicKey string representation
    of the ssh public key of the user.
    Group entries are expected to be of class posixGroup and be located under
    cn=groups under the base_dn in the directory information tree. The group
    string parameter corresponds to the cn attribute of the posixGroup entry
    """
    def __init__(self, uri, auth_user, auth_password, base_dn, group=None):
        """
        Constructs and binds an LDAPKeyProvider instance to the server
        identified by the uri using auth_user and auth_password for
        authentication.
        When users are looked up, it is verified that they belong to the
        provided group.
        """
        self.group = None
        if group:
            # Escape early so the group name is always safe to embed in DNs.
            self.group = filter.escape_filter_chars(group)
        self.base_dn = base_dn
        # I know, this is not functionality the ldap module straightforwardly
        # exposes, but it seems to work.
        self.conn = ldap.ldapobject.ReconnectLDAPObject(uri)
        self.conn.simple_bind(auth_user, auth_password)
    def get_key(self, username):
        """
        Returns a PubKey instance based on LDAP lookup. If group is specified
        in the constructor, the user needs to be a member for the lookup to
        succeed.
        Throws NoSuchUserException, InsufficientPrivilegesException,
        MissingKeyException when appropriate.
        """
        user = filter.escape_filter_chars(username)
        # One search fetches both the user entry and (if the user is a
        # member) the matching group entry; membership is then checked by
        # the group DN appearing among the result keys.
        f = ("(|(&(uid=%s)(objectClass=posixAccount))"
             "(&(memberUid=%s)(objectClass=posixGroup)))" % (user, user))
        # We don't care about looking for a group if self.group is not set
        group_dn = None
        if self.group:
            group_dn = "cn=%s,cn=groups,%s" % (self.group, self.base_dn)
        result = dict(self.conn.search_s(self.base_dn, ldap.SCOPE_SUBTREE, f,
                                         ['sshPublicKey']))
        attributes = result.get("uid=%s,cn=users,%s" % (user, self.base_dn))
        if attributes is None:
            raise exceptions.NoSuchUserException("User '%s' not found" % user)
        key_list = attributes.get("sshPublicKey")
        if key_list is None:
            raise exceptions.MissingKeyException("User '%s' does not have "
                                                 "her key in LDAP" % user)
        if len(key_list) > 1:
            raise RuntimeError("Can't handle multiple sshPublicKey values "
                               "for an LDAP user")
        if group_dn and group_dn not in result:
            s = ("User '%s' not member of required group '%s'" %
                 (user, self.group))
            raise exceptions.InsufficientPrivilegesException(s)
        return rsa.RSAPublicKey(key_list[0])
class HybridKeyProvider(key_provider.KeyProvider):
    """
    A KeyProvider that behaves as an LDAP KeyProvider; if there is no LDAP
    data for the user it falls back to a FileKeyProvider.

    Useful for not mixing real LDAP users with service-specific non-human
    users.
    """

    def __init__(self, dir, uri, auth_user, auth_password, base_dn, group=None):
        """
        Constructs a FileKeyProvider based on the directory dir, and a
        LDAPKeyProvider based on the remaining arguments.
        """
        self.file_key_provider = key_provider.FileKeyProvider(dir)
        self.ldap_key_provider = LDAPKeyProvider(uri, auth_user, auth_password,
                                                 base_dn, group)

    def get_key(self, username):
        """
        Returns the user's public key if it can be found in LDAP, otherwise
        tries to find it in the key directory, or fails with
        NoSuchUserException.
        """
        try:
            return self.ldap_key_provider.get_key(username)
        except exceptions.NoSuchUserException:
            try:
                return self.file_key_provider.get_key(username)
            # 'except X as e' works on Python 2.6+ and Python 3; the legacy
            # 'except X, e' form previously used here is a hard syntax error
            # on Python 3.
            except Exception as e:
                raise exceptions.NoSuchUserException(
                    "User %s not in ldap, defaulted to pubkey dir and got "
                    "exception %s" % (username, e))
| [
"bryan.guner@gmail.com"
] | bryan.guner@gmail.com |
f65d9423059940465bbccc546b180b8afb0b29bf | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02730/s566992327.py | fed7821775b5c54933a78f29905ba5d357c3433d | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 397 | py | s = input()
n = len(s)
# Check whether the whole string is a palindrome.
if s != s[::-1]:
    print('No')
# Check whether the substring from the 1st through the (N-1)/2-th
# character is itself a palindrome.
elif s[:(n-1)//2] != s[:(n-1)//2][::-1]:
    print('No')
# Check whether the substring from the (N+3)/2-th through the N-th
# character is itself a palindrome.
elif s[(n+3)//2-1:] != s[(n+3)//2-1:][::-1]:
    print('No')
else:
    print('Yes')
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
d77758ec0bb4fa4a66f45609ff6dbd6ec4ca67cb | 41f39d013ae3cb2b3ca4230c77b9037cc9c894f6 | /sandbox/ex2/algos/batch_polopt.py | 5e0c8f8994dab4f854a29f37542f4bce4f3f98ce | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | sokol1412/rllab_hierarchical_rl | 162aec9bb06e271d12333fa072fb44d692c26301 | 6d46c02e32c3d7e9ac55d753d6a3823ff86c5a57 | refs/heads/master | 2020-03-07T07:37:39.510301 | 2018-08-19T11:54:56 | 2018-08-19T11:54:56 | 127,353,660 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,997 | py | from rllab.algos.base import RLAlgorithm
from rllab.sampler import parallel_sampler
from rllab.sampler.base import BaseSampler
import rllab.misc.logger as logger
import rllab.plotter as plotter
from rllab.policies.base import Policy
import numpy as np
class BatchSampler(BaseSampler):
    """Default sampler: collects rollouts via rllab's parallel_sampler."""
    def __init__(self, algo):
        """
        :type algo: BatchPolopt
        """
        self.algo = algo
    def start_worker(self):
        # Distribute the environment and policy to the worker processes.
        parallel_sampler.populate_task(self.algo.env, self.algo.policy, scope=self.algo.scope)
    def shutdown_worker(self):
        parallel_sampler.terminate_task(scope=self.algo.scope)
    def obtain_samples(self, itr):
        """Collect up to batch_size transitions with the current policy."""
        cur_params = self.algo.policy.get_param_values()
        paths = parallel_sampler.sample_paths(
            policy_params=cur_params,
            max_samples=self.algo.batch_size,
            max_path_length=self.algo.max_path_length,
            scope=self.algo.scope,
        )
        if self.algo.whole_paths:
            return paths
        else:
            # Trim the collected paths down to exactly batch_size samples.
            paths_truncated = parallel_sampler.truncate_paths(paths, self.algo.batch_size)
            return paths_truncated
class BatchPolopt(RLAlgorithm):
"""
Base class for batch sampling-based policy optimization methods.
This includes various policy gradient methods like vpg, npg, ppo, trpo, etc.
"""
def __init__(
self,
env,
policy,
baseline,
scope=None,
n_itr=500,
start_itr=0,
batch_size=5000,
max_path_length=500,
discount=0.99,
gae_lambda=1,
plot=False,
exemplar_cls=None,
exemplar_args=None,
bonus_coeff=0,
pause_for_plot=False,
center_adv=True,
positive_adv=False,
store_paths=False,
whole_paths=True,
sampler_cls=None,
sampler_args=None,
eval_first=False,
**kwargs
):
"""
:param env: Environment
:param policy: Policy
:type policy: Policy
:param baseline: Baseline
:param scope: Scope for identifying the algorithm. Must be specified if running multiple algorithms
simultaneously, each using different environments and policies
:param n_itr: Number of iterations.
:param start_itr: Starting iteration.
:param batch_size: Number of samples per iteration.
:param max_path_length: Maximum length of a single rollout.
:param discount: Discount.
:param gae_lambda: Lambda used for generalized advantage estimation.
:param plot: Plot evaluation run after each iteration.
:param pause_for_plot: Whether to pause before contiuing when plotting.
:param center_adv: Whether to rescale the advantages so that they have mean 0 and standard deviation 1.
:param positive_adv: Whether to shift the advantages so that they are always positive. When used in
conjunction with center_adv the advantages will be standardized before shifting.
:param store_paths: Whether to save all paths data to the snapshot.
"""
self.env = env
self.policy = policy
self.baseline = baseline
self.scope = scope
self.n_itr = n_itr
self.current_itr = start_itr
self.batch_size = batch_size
self.max_path_length = max_path_length
self.discount = discount
self.gae_lambda = gae_lambda
self.plot = plot
self.pause_for_plot = pause_for_plot
self.center_adv = center_adv
self.positive_adv = positive_adv
self.store_paths = store_paths
self.whole_paths = whole_paths
if sampler_cls is None:
sampler_cls = BatchSampler
if sampler_args is None:
sampler_args = dict()
self.sampler = sampler_cls(self, **sampler_args)
self.exemplar = None
self.exemplar_cls = exemplar_cls
self.exemplar_args = exemplar_args
self.bonus_coeff = bonus_coeff
self.eval_first = eval_first
def start_worker(self):
self.sampler.start_worker()
if self.plot:
plotter.init_plot(self.env, self.policy)
def shutdown_worker(self):
self.sampler.shutdown_worker()
def process_paths(self, paths):
if self.eval_first:
for path in paths:
path["raw_rewards"] = np.copy(path["rewards"])
if self.exemplar is not None:
path["bonus_rewards"] = self.exemplar.predict(path)
if self.exemplar is not None:
self.exemplar.fit(paths)
else:
if self.exemplar is not None:
self.exemplar.fit(paths)
for path in paths:
path["raw_rewards"] = np.copy(path["rewards"])
if self.exemplar is not None:
path["bonus_rewards"] = self.exemplar.predict(path)
if self.exemplar is not None:
bonus_rewards = np.concatenate([path["bonus_rewards"].ravel() for path in paths])
median_bonus = np.median(bonus_rewards)
mean_discrim = np.mean(1 / (bonus_rewards + 1))
for path in paths:
path["bonus_rewards"] -= median_bonus
path["rewards"] = path["rewards"] + self.bonus_coeff * path["bonus_rewards"]
logger.record_tabular('Median Bonus', median_bonus)
logger.record_tabular('Mean Discrim', mean_discrim)
def train(self):
if self.exemplar_cls is not None:
self.exemplar = self.exemplar_cls(**self.exemplar_args)
self.exemplar.init_rank(0)
self.start_worker()
self.init_opt()
for itr in range(self.current_itr, self.n_itr):
with logger.prefix('itr #%d | ' % itr):
paths = self.sampler.obtain_samples(itr)
self.process_paths(paths)
samples_data = self.sampler.process_samples(itr, paths)
self.log_diagnostics(paths)
self.optimize_policy(itr, samples_data)
logger.log("saving snapshot...")
params = self.get_itr_snapshot(itr, samples_data)
self.current_itr = itr + 1
params["algo"] = self
if self.store_paths:
params["paths"] = samples_data["paths"]
logger.save_itr_params(itr, params)
logger.log("saved")
logger.dump_tabular(with_prefix=False)
if self.plot:
self.update_plot()
if self.pause_for_plot:
input("Plotting evaluation run: Press Enter to "
"continue...")
self.shutdown_worker()
def log_diagnostics(self, paths):
self.env.log_diagnostics(paths)
self.policy.log_diagnostics(paths)
self.baseline.log_diagnostics(paths)
undiscounted_returns = [sum(path["rewards"]) for path in paths]
undiscounted_raw_returns = [sum(path["raw_rewards"]) for path in paths]
num_traj = len(undiscounted_returns)
sum_return = np.sum(undiscounted_returns)
min_return = np.min(undiscounted_returns)
max_return = np.max(undiscounted_returns)
sum_raw_return = np.sum(undiscounted_raw_returns)
min_raw_return = np.min(undiscounted_raw_returns)
max_raw_return = np.max(undiscounted_raw_returns)
average_return = sum_return / num_traj
average_raw_return = sum_raw_return / num_traj
logger.record_tabular('ReturnAverage', average_return)
logger.record_tabular('ReturnMax', max_return)
logger.record_tabular('ReturnMin', min_return)
logger.record_tabular('RawReturnAverage', average_raw_return)
logger.record_tabular('RawReturnMax', max_raw_return)
logger.record_tabular('RawReturnMin', min_raw_return)
if self.exemplar is not None:
bonuses = np.concatenate([path["bonus_rewards"] for path in paths])
logger.record_tabular('BonusRewardMax', bonuses.max())
logger.record_tabular('BonusRewardMin', bonuses.min())
logger.record_tabular('BonusRewardAverage', bonuses.mean())
def init_opt(self):
"""
Initialize the optimization procedure. If using theano / cgt, this may
include declaring all the variables and compiling functions
"""
raise NotImplementedError
def get_itr_snapshot(self, itr, samples_data):
"""
Returns all the data that should be saved in the snapshot for this
iteration.
"""
raise NotImplementedError
def optimize_policy(self, itr, samples_data):
    # Abstract hook: perform one policy-optimization step on the batch
    # collected for iteration ``itr``; must be overridden by subclasses.
    raise NotImplementedError
def update_plot(self):
    # Refresh the live rollout visualisation; a no-op unless plotting was
    # requested via ``self.plot``.
    if self.plot:
        plotter.update_plot(self.policy, self.max_path_length)
| [
"wlasek1412@gmail.com"
] | wlasek1412@gmail.com |
cfe96d0459f4c3746b821a72c6a44b9327591521 | 77fdfa980f6d923d8fccb7eefdcadadad6f7cdcc | /blog_project/settings.py | 100e3cc9aa7b6ed04e934d16820cd2d42f3d71f2 | [] | no_license | joegotflow83/tdd_blog | fe72657a361a6203bcebc1ff64a831c3c307e871 | 254d44de3037bfaeee4495c6a1620afbfe87c7fb | refs/heads/master | 2021-01-18T18:42:11.594528 | 2016-07-31T00:20:44 | 2016-07-31T00:20:44 | 63,269,446 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,371 | py | """
Django settings for blog_project project.
Generated by 'django-admin startproject' using Django 1.9.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ws0o)ej_8!m*%nnwo89n$h%1=zjs6(k2)x9_#_m%)@g%3de$ky'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# Local apps
'main',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'blog_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'blog_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
# STATICFILES_DIRS must be a list/tuple of paths. The original used
# parentheses without a trailing comma, which evaluates to a plain string
# rather than a one-element tuple.
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'static'),
]
LOGIN_URL = '/accounts/login/'
LOGIN_REDIRECT_URL = '/home/'
| [
"joe@absolutod.com"
] | joe@absolutod.com |
fe41008bb2ab32968ed7dc245a83ccd70bb5c1db | cd0a284c47fb03121e05284b6d5f2940ea6457ba | /fb/dfs-bfs/207-course-schedule.py | 7b8325d06c0f32426434b8845cabd8b291aed9f0 | [] | no_license | franktank/py-practice | 5803933c07c07a06670f83b059806385d0d029fa | 1dec441f1975d402d093031569cfd301eb71d465 | refs/heads/master | 2021-03-22T04:33:20.818891 | 2017-11-14T03:40:54 | 2017-11-14T03:40:54 | 101,592,046 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,031 | py | """
There are a total of n courses you have to take, labeled from 0 to n - 1.
Some courses may have prerequisites, for example to take course 0 you have to first take course 1, which is expressed as a pair: [0,1]
Given the total number of courses and a list of prerequisite pairs, is it possible for you to finish all courses?
For example:
2, [[1,0]]
There are a total of 2 courses to take. To take course 1 you should have finished course 0. So it is possible.
2, [[1,0],[0,1]]
There are a total of 2 courses to take. To take course 1 you should have finished course 0, and to take course 0 you should also have finished course 1. So it is impossible.
Note:
The input prerequisites is a graph represented by a list of edges, not adjacency matrices. Read more about how a graph is represented.
You may assume that there are no duplicate edges in the input prerequisites.
"""
class Solution(object):
def canFinish(self, numCourses, prerequisites):
"""
:type numCourses: int
:type prerequisites: List[List[int]]
:rtype: bool
"""
# How to handle loops?
# To take a course, we must have its prerequisite done
# Brute Force
# For each course we see, try to find if its preqrequisite can be done
# Iterate through rest of prerequisites:
# If it is NOT in preqrequisites
# The preqrequisite can be finished, and then numCourses -= 1
# If it is in prerequisites, repeat and make it None in prerequisites if it can be done, and then numCourses -= 1
for pr in prerequisites:
if pr == None:
continue
take = self.helper(pr, prerequisites)
if take:
numCourses -= 1
if numCourses < 0:
return False
else:
return True
def helper(self, pr, prerequisites):
"""
:rtype: bool
"""
for pr in prerequisites
| [
"fliangz96@gmail.com"
] | fliangz96@gmail.com |
bbf45a6ca2e4e02d5dc77888323c239edcc5f744 | fce5eda4745578557f7120104188c2437529b98f | /listas_tipos/dicionario/muitos_usuarios.py | 1558b8ec7959ad7e558fc063ca69369b76826a28 | [] | no_license | weguri/python | 70e61584e8072125a4b4c57e73284ee4eb10f33b | d5195f82428104d85b0e6215b75e31ee260e5370 | refs/heads/master | 2022-12-01T08:26:36.248787 | 2020-08-23T03:30:46 | 2020-08-23T03:30:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 623 | py | """
Dicionário em um dicionário
Podemos aninhar um dicionário em outro dicionário
"""
usuarios = {
'alberteinstein': {
'nome': 'albert',
'sobrenome': 'einstein',
'pais': 'alemanha'
},
'stephenhawking': {
'nome': 'stephen',
'sobrenome': 'hawking',
'pais': 'reino unido'
}
}
for username, info_user in usuarios.items():
print("\nUsuario:", username)
nome_completo = info_user['nome'] + " " + info_user['sobrenome']
localizacao = info_user['pais']
print("\tNome:", nome_completo.title())
print("\tPais:", localizacao.title())
| [
"welguri@gmail.com"
] | welguri@gmail.com |
a7569d7922515885e8dd93423d4e71b1c36dbd34 | 2a1b8a671aceda6bc446f8ce26400aa84fa444a6 | /Packs/NCSCCyberAsssessmentFramework/Scripts/EntryWidgetNCSCResultsD/EntryWidgetNCSCResultsD.py | 173f34e55128e5321c9d1b28478b0aade2266374 | [
"MIT"
] | permissive | demisto/content | 6d4722d46f0ff0beea2748e9f7de585bf91a78b4 | 890def5a0e0ae8d6eaa538148249ddbc851dbb6b | refs/heads/master | 2023-09-04T00:02:25.618032 | 2023-09-03T21:56:22 | 2023-09-03T21:56:22 | 60,525,392 | 1,023 | 1,921 | MIT | 2023-09-14T20:55:24 | 2016-06-06T12:17:02 | Python | UTF-8 | Python | false | false | 1,005 | py | import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
# Entry widget: renders the number of unachieved ("non-compliant") NCSC CAF
# objective-D items for the current incident as a colored number widget.
incident = demisto.incidents()

cafdresult = incident[0].get("CustomFields", {}).get("cafdresultraw", {})
# The custom field may arrive either already deserialized or as a JSON
# string; only parse when it is not a mapping yet.
# (isinstance replaces the non-idiomatic ``type(x) != dict`` check.)
if not isinstance(cafdresult, dict):
    cafdresult = json.loads(cafdresult)

total = len(cafdresult)
# None (rather than 0) when there is no assessment data at all.
# NOTE(review): the iteration below assumes each entry is a mapping with a
# "Result" key -- confirm the shape of ``cafdresultraw``.
non_compliant_count = (
    len([x for x in cafdresult if x["Result"] != "Achieved"]) if cafdresult else None
)
# Color thresholds: green below one third of all items, orange up to two
# thirds, red above that.
medium = int(round(total / 3, 0))
high = int(round(total / 3 * 2, 0))

data = {
    "Type": 17,
    "ContentsFormat": "number",
    "Contents": {
        "stats": non_compliant_count,
        "params": {
            "layout": "horizontal",
            "name": "Unachieved items",
            "sign": "",
            "colors": {
                "items": {
                    "#00CD33": {"value": -1},
                    "#FF9000": {"value": medium},
                    "#FF1744": {"value": high},
                }
            },
            "type": "above",
        },
    },
}

demisto.results(data)
| [
"noreply@github.com"
] | demisto.noreply@github.com |
22ff77002f20b413ed20ffcd59834f91fd70bff6 | 3aa334fe55b2d618726395167cd75dd37ae9ec27 | /testapp/management/commands/bootstrap.py | 08950d431e442356eac5db34e6b440b6ef3ea179 | [
"BSD-3-Clause"
] | permissive | nishitchittora/django-saml-sp | cd143539c83258d6fd62059f8efb89323b91e282 | 879036f1f90febdf4eed1ef74b2288074736fbec | refs/heads/master | 2023-05-29T03:28:37.768564 | 2021-03-27T02:43:46 | 2021-03-27T02:43:46 | 372,801,911 | 0 | 0 | BSD-3-Clause | 2021-06-01T11:15:20 | 2021-06-01T11:15:19 | null | UTF-8 | Python | false | false | 1,872 | py | from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from sp.models import IdP
class Command(BaseCommand):
    """Management command that seeds a development SP installation.

    Creates a default superuser (only when no users exist yet) and a
    "local" IdP record pointing at a SimpleSAML instance on localhost
    (only when no IdPs exist yet), so the command is safe to re-run.
    """

    help = 'Bootstraps the SP with a default "admin" user and a local test IdP.'

    def handle(self, *args, **options):
        User = get_user_model()
        if User.objects.count() == 0:
            # Development convenience account; the printed warning tells the
            # operator to change the password immediately.
            print(
                'Creating default "admin" account with password "letmein" '
                "-- change this immediately!"
            )
            User.objects.create_superuser(
                "admin",
                "admin@example.com",
                "letmein",
                first_name="Admin",
                last_name="User",
            )
        if IdP.objects.count() == 0:
            print('Creating "local" IdP for http://localhost:8000')
            idp = IdP.objects.create(
                name="Local SimpleSAML Provider",
                url_params={"idp_slug": "local"},
                base_url="http://localhost:8000",
                contact_name="Admin User",
                contact_email="admin@example.com",
                metadata_url="http://localhost:8080/simplesaml/saml2/idp/metadata.php",
                respect_expiration=True,
                logout_triggers_slo=True,
            )
            idp.generate_certificate()
            # The local IdP sends an email address, but it isn't the nameid. Override it
            # to be our nameid, AND set the email field on User.
            idp.attributes.create(
                saml_attribute="email", mapped_name="email", is_nameid=True
            )
            try:
                idp.import_metadata()
            except Exception:
                # Best-effort: metadata import fails when the local IdP is
                # not running; report rather than abort the bootstrap.
                print(
                    "Could not import IdP metadata; "
                    "make sure your local IdP exposes {}".format(idp.metadata_url)
                )
| [
"dcwatson@gmail.com"
] | dcwatson@gmail.com |
263e182dab5e386359c971120c177a398de757ba | d6d874fe9e1607a859e9484fdc5bce09b3f76472 | /Pipeline/the_LATEST/latest_MAYA/maya_SCRIPTS/pickrunner - Copy (3)/controller/engine_0001.py | 9fbf2e97230181255c7175805dee4d28ab0d535b | [] | no_license | tws0002/pop2-project | c80095cc333195ebb9ffa2199e2c3a3446d0df0c | 6886f05d54ec77b66d13b4eaafe8a66ac49f2f41 | refs/heads/master | 2021-01-11T20:53:19.982950 | 2016-03-10T10:31:29 | 2016-03-10T10:31:29 | 79,202,989 | 1 | 1 | null | 2017-01-17T07:56:09 | 2017-01-17T07:56:09 | null | UTF-8 | Python | false | false | 4,399 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
An enhanced, lightwight, user-defined hierarchy between nodes in Maya
"""
# IMPORT STANDARD LIBRARIES
import os
import sys
sys.path.append("F:\transfer\to_monty\the_LATEST\latest_MAYA\maya_SCRIPTS\pickrunner")
# sys.path.append("F:\transfer\to_monty\the_LATEST\sys_PY\py_MODULES")
# IMPORT THIRD-PARTY LIBRARIES
try:
import PyQt4.QtCore as QtCore
import PyQt4.QtGui as QtGui
except ImportError:
import PySide.QtCore as QtCore
import PySide.QtGui as QtGui
except:
raise
# IMPORT LOCAL LIBRARIES
import controller.membership as membership
import controller.engine as engine
import view.basegui as basegui
class Window(QtGui.QMainWindow, basegui.Ui_MainWindow):
    """Main pickrunner window.

    Lets the user load a Maya node and assign directional "pickwalk"
    targets (up/left/down/right) to it through the four arrow buttons.
    """

    def __init__(self):
        super(Window, self).__init__()
        self.setupUi(self)
        # init_defaults must run before init_ui: it defines the stylesheet
        # strings that init_ui applies to the buttons.
        self.init_defaults()
        self.init_ui()
        self.triggers()
    # end __init__

    def init_ui(self):
        """
        Inits the default behavior of the GUI
        """
        # directional buttons
        self.up_pb.setStyleSheet(self.buttonStyle)
        self.left_pb.setStyleSheet(self.buttonStyle)
        self.down_pb.setStyleSheet(self.buttonStyle)
        self.right_pb.setStyleSheet(self.buttonStyle)

        # lower lineEdits start disabled until a node is loaded
        self.up_le.setEnabled(False)
        self.left_le.setEnabled(False)
        self.down_le.setEnabled(False)
        self.right_le.setEnabled(False)
    # end init_ui

    def init_defaults(self):
        # Internal state: assignment mapping, toggle flag, and the currently
        # loaded node (None until the user loads a selection).
        self.db = {}
        self.assignDirections = False
        self.loadedNode = None
        # Qt stylesheet snippets for the different button states.
        self.buttonStyle = "background-color: #e6ffff; border: 1px solid black; padding: 4px;"
        self.buttonStyleOn = "background-color: #ccffcc;"
        self.buttonAssignOff = "background-color: #ffcccc;"
        self.buttonError = "background-color: red;"
        # Bridge to Maya for selections / attribute edits.
        self.motor = engine.MotorMaya()
    # end init_defaults

    def triggers(self):
        """
        Creates the interactivity functionality of the GUI
        """
        self.assignLayout_pb.clicked.connect(self.toggle_pickwalk_directions)
        # NOTE(review): ``functools`` is used here but never imported in this
        # module, and ``addAttrLeft`` is assigned without being used.
        addAttrLeft = functools.partial(engine.MotorMaya, direction="left")
        # NOTE(review): ``add_attr`` is not defined on this class, so this
        # connect call would raise AttributeError -- confirm intended slot.
        self.left_pb.clicked.connect(self.add_attr)
        QtGui.QShortcut(QtGui.QKeySequence("Ctrl+Q"), self, self.reject)
    # end triggers

    def reject(self):
        """
        Executes when the user wants to close the current window class
        """
        sys.exit()
    # end reject

    def toggle_pickwalk_directions(self):
        """
        Turns on/off the ability to add buttons to the current node loaded
        """
        if not self.assignDirections:
            # enable the buttons
            self.assignDirections = True
            self.assignLayout_pb.setStyleSheet(self.buttonStyleOn)
            self.up_pb.setStyleSheet(self.buttonStyleOn)
            self.left_pb.setStyleSheet(self.buttonStyleOn)
            self.down_pb.setStyleSheet(self.buttonStyleOn)
            self.right_pb.setStyleSheet(self.buttonStyleOn)

            currentSelection = self.motor.get_selection()
            # presumably get_selection() returns 0 when nothing is selected
            # -- TODO confirm against engine.MotorMaya.
            if currentSelection == 0:
                self.loadSelection_pb.setStyleSheet(self.buttonError)
                self.loadedNode = None
            else:
                self.loadedNode = currentSelection[-1]
        else:
            # disable the buttons
            self.assignDirections = False
            self.assignLayout_pb.setStyleSheet(self.buttonAssignOff)
            self.up_pb.setStyleSheet(self.buttonStyle)
            self.left_pb.setStyleSheet(self.buttonStyle)
            self.down_pb.setStyleSheet(self.buttonStyle)
            self.right_pb.setStyleSheet(self.buttonStyle)
            self.loadSelection_pb.setStyleSheet(self.buttonStyle)
    # end toggle_pickwalk_directions
# end Windows
def show_gui():
    """
    Shows the main GUI to the application
    """
    # Reuse an existing Qt application when one is already running (e.g.
    # inside Maya); otherwise create a standalone one.
    # NOTE(review): QApplication.activeWindow() returns a widget, not an
    # application instance -- the fallback chain below relies on it being
    # None outside an active Qt session; confirm this is intended.
    app = QtGui.QApplication.activeWindow()
    if app is None:
        app = QtGui.QApplication.instance()
    if app is None:
        app = QtGui.QApplication(sys.argv)
    window = Window()
    window.show()
    sys.exit(app.exec_())
# end show_gui
def main():
    """Entry point: build and display the pickrunner window."""
    show_gui()
# end main


if __name__ == "__main__":
    main()
"colinvfx@gmail.com"
] | colinvfx@gmail.com |
100f0ca4721ee11c998081da3d0f5e77aa5f7323 | 81539aba88c22cf75bd2e14f5e0e92f2bf54e962 | /DarkMatterMap2017/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV_madgraph_mcatnlo_pythia8/TTbarDMJets_Inclusive_pseudoscalar_LO_Mchi-51_Mphi-100_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV_madgraph_mcatnlo_pythia8_230000_1_cff.py | 2db28a4584cc1a9ea955ef9cf2afefed22984ff8 | [] | no_license | nistefan/RandomizedParametersSeparator | ad35b48b95e9745814c0bf9d8d8b6eb8aa479177 | 66a0e291b59113c6b5301768f1c10e36cf23d3c3 | refs/heads/master | 2021-01-03T00:41:17.415005 | 2020-02-19T13:30:54 | 2020-02-19T13:30:54 | 239,838,928 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,067 | py | import FWCore.ParameterSet.Config as cms
maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
readFiles = cms.untracked.vstring()
source = cms.Source ("PoolSource",fileNames = readFiles, lumisToProcess = cms.untracked.VLuminosityBlockRange(*('1:53147', '1:53395', '1:53244', '1:53534', '1:54333', '1:58243', '1:3316', '1:4240', '1:4624', '1:6109', '1:6455', '1:7465', '1:12246', '1:13083', '1:13096', '1:13312', '1:13985', '1:12627', '1:12917', '1:18281', '1:18379', '1:18819', '1:18660', '1:18887', '1:18952', '1:18733', '1:19944', '1:19809', '1:19993', '1:27837', '1:18695', '1:18778', '1:18835', '1:22198', '1:22371', '1:28556', '1:28857', '1:28987', '1:8011', '1:8352', '1:14144', '1:51316', '1:51580', '1:53040', '1:53098', '1:54175', '1:42361', '1:42574', '1:42631', '1:86810', '1:103193', '1:103328', '1:72581', '1:57385', '1:82224', '1:82432', '1:2566', '1:2476', '1:6487', '1:7708', '1:8181', '1:8805', '1:10359', '1:10680', '1:11138', '1:11521', '1:10693', '1:11420', '1:14049', '1:14492', '1:13920', '1:13936', '1:58403', '1:58411', '1:58200', '1:58347', '1:7365', '1:7702', '1:8314', '1:8404', '1:9430', '1:16413', '1:5886', '1:6249', '1:6370', '1:12927', '1:15766', '1:17534', '1:41551', '1:41711', '1:18897', '1:18913', '1:41653', '1:43064', '1:43795', '1:46065', '1:80084', '1:80220', '1:78481', '1:20724', '1:20886', '1:20921', '1:21039', '1:21452', '1:98092', '1:98382', '1:98928', '1:103189', '1:103224', '1:103438', '1:5235', '1:6885', '1:39141', '1:44313', '1:44362', '1:56960', '1:58129', '1:91544', '1:92216', '1:16564', '1:16597', '1:16871', '1:21585', '1:26687', '1:80563', '1:56670', '1:59839', '1:59136', '1:60471', '1:57980', '1:60505', '1:62573', '1:96077', '1:1217', '1:44643', '1:55489', '1:51724', '1:15171', '1:14347', '1:23968', '1:24413', '1:31741', '1:31813', '1:77539', '1:49888', '1:95484', '1:95377', ))
)
readFiles.extend( ['/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/230000/90043704-20FC-E911-8078-0CC47AFCC3D2.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/230000/58F7196A-76FC-E911-8198-0025905C96E8.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/230000/B65A1C1B-7FFC-E911-92D1-0CC47AFCC392.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/230000/30FB16E9-BC12-EA11-A843-7CD30AC03722.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/230000/70BD48D5-3CF5-E911-BBCD-D4856445E5A4.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/230000/8E131ABC-63F2-E911-BBAD-441EA157ADE4.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/230000/00610B13-6BF2-E911-832F-98039B3B01B2.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/230000/2E996FF0-01F5-E911-BFCE-D4856444A744.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/230000/501EAB60-EC02-EA11-9994-0CC47AFCC6B2.root', 
'/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/230000/3AF7C78E-7106-EA11-9305-0025905C3E68.root']); | [
"Nicole.Stefanov@cern.ch"
] | Nicole.Stefanov@cern.ch |
548d7e455a7fbe253bb7e27b83098b05012f1446 | 402537ee248b91a127772f7ce00a4b2f93fe1d06 | /chapter10/src/src/allocation/domain/commands.py | cb656444bf546659c1bd27074c8bdb424ac020c7 | [] | no_license | babjo/architecture-patterns-with-python | 56ac7b1801cf658fc912ffa7b22398d015d8ee8f | 705a68b34b2c11e2eb18b11444819f964ab6fce9 | refs/heads/master | 2023-07-14T23:01:51.246828 | 2021-08-25T13:25:06 | 2021-08-25T13:25:06 | 394,334,509 | 6 | 1 | null | null | null | null | UTF-8 | Python | false | false | 382 | py | from typing import Optional
from dataclasses import dataclass
from datetime import date
class Command:
    """Marker base class for all command messages."""
    pass
@dataclass
class Allocate(Command):
    """Command: allocate order line ``orderid`` (``qty`` of ``sku``)."""
    orderid: str
    sku: str
    qty: int
@dataclass
class CreateBatch(Command):
    """Command: create a stock batch ``ref`` of ``qty`` units of ``sku``.

    ``eta`` is the expected arrival date; presumably None means the stock
    is already on hand -- TODO confirm against the allocation logic.
    """
    ref: str
    sku: str
    qty: int
    eta: Optional[date] = None
@dataclass
class ChangeBatchQuantity(Command):
    """Command: change the quantity of batch ``ref`` to ``qty``."""
    ref: str
    qty: int
| [
"kd980311@naver.com"
] | kd980311@naver.com |
302247df466f9f53facf141a2738300d61ef8a04 | 584ce08fd638b2481e61b00da22ae70290cb0e2d | /main/forms.py | e54ec2919a4ddecb88435e72ec1d36138d69f316 | [] | no_license | CHIRAG202/Tutoria-Project | dedef5581ea72f47be5965f5c783a7176aa17fb2 | 133520fbd8b4154d3b1b777a13e179dfd062c438 | refs/heads/master | 2021-08-23T02:10:20.378763 | 2017-12-02T11:42:28 | 2017-12-02T11:42:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 910 | py | from django import forms
from django.contrib.auth.models import User
from main.models import Student, Sessions, Tutor
class UserForm(forms.ModelForm):
    """Sign-up form for the built-in ``User`` model; the password field is
    rendered as a masked input."""
    password = forms.CharField(widget=forms.PasswordInput())

    class Meta():
        model = User
        fields = ('username', 'password', 'email')

# TODO(review): an earlier note said "remove email" -- confirm whether the
# email field should be dropped from this form.
class StudentInfoForm(forms.ModelForm):
    """Profile-detail form for a Student."""

    class Meta():
        model = Student
        fields = ('firstName', 'lastName', 'avatar', 'phoneNo')
class TutorInfoForm(forms.ModelForm):
    """Profile-detail form for a Tutor (courses, rate, intro, etc.)."""

    class Meta():
        model = Tutor
        fields = ('firstName', 'lastName', 'courses',
                  'university_name', 'hourly_rate', 'tutor_intro', 'isStudent', 'phoneNo', 'searchTags', 'avatar', 'tutorType')
class BookingForm(forms.ModelForm):
    """Form for booking a tutoring session between a student and a tutor."""

    class Meta():
        model = Sessions
        fields = ('tutorID', 'studentID', 'bookedDate', 'bookedStartTime', 'bookedEndTime')
| [
"rohaksinghal14@gmail.com"
] | rohaksinghal14@gmail.com |
7e1027a626b96efa1718ab2aad192f93f8db7e12 | 8981fd540c4857edbaf4162e9ca08e86c5625b80 | /capital.py | 792693b711ad1a9c52b0f3f748a65d7a48a526fb | [] | no_license | DylanQiu/CheckioProject | c37a149795b076665b6b05ff1b2d4af9f701c840 | 386fd5aee694ddc7efe7dab1aa1a1f4610a0fb0b | refs/heads/master | 2021-07-19T13:15:07.708474 | 2017-10-24T04:57:13 | 2017-10-24T04:57:13 | 104,953,411 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 555 | py | def find_message(text):
"""Find a secret message"""
uppers = [l for l in text if l.isupper()]
s = ''.join(uppers)
return s
if __name__ == '__main__':
    # These asserts are for self-checking only and are not required for
    # the site's automated testing.
    assert find_message("How are you? Eh, ok. Low or Lower? Ohhh.") == "HELLO", "hello"
    assert find_message("hello world!") == "", "Nothing"
    assert find_message("HELLO WORLD!!!") == "HELLOWORLD", "Capitals"
    print("Coding complete? Click 'Check' to review your tests and earn cool rewards!")
"32313210+DylanQiu@users.noreply.github.com"
] | 32313210+DylanQiu@users.noreply.github.com |
3185c772f0e736ae4fc5a2c5fa54f50793bfac2a | ef14d37fc87a191b36b5b70c39b02b0d193f9fe0 | /futuregreen/people/urls.py | 5cef8b0730140b238ffbcfa32a611ae77ea5c14b | [] | no_license | dmeehan/futuregreen | 9f608b69255761011a525e349fb583669e8dacaa | 835c455503a75658d8b744df643158ac6575b737 | refs/heads/master | 2020-12-19T01:19:08.975895 | 2017-06-21T13:21:03 | 2017-06-21T13:21:03 | 2,144,649 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 423 | py | # people/urls.py
from django.conf import settings
from django.conf.urls.defaults import *
from django.views.generic import ListView, DetailView, TemplateView
from futuregreen.people.views import EmployeeDetailView, EmployeeListView
# URL routes for the people app: a list view at the root and per-person
# detail pages keyed by slug.
urlpatterns = patterns('',
    url(r'^$', EmployeeListView.as_view(), name = 'person_list'),
    url(r'^(?P<slug>[-\w]+)/$', EmployeeDetailView.as_view(), name = 'people_person_detail'),
)
) | [
"dmeehan@gmail.com"
] | dmeehan@gmail.com |
2751b3b5e76bad7aaf1ba7884f8ec09cb869c56d | ecff4b18a49ce5952c5f9125dc027cebdecf10a8 | /azure-mgmt-logic/azure/mgmt/logic/models/workflow_secret_keys.py | ab2f480de04cdc5157f1b5b10bfb7d0ed53826c9 | [
"Apache-2.0"
] | permissive | jehine-MSFT/azure-sdk-for-python | a56c18020ecd5f4c245c093fd6a33e1b1d7c95e1 | 6d0f94b39406eab374906c683bd2150217132a9c | refs/heads/master | 2020-12-06T19:17:38.153819 | 2016-04-08T21:03:16 | 2016-04-08T21:03:16 | 55,809,131 | 0 | 0 | null | 2016-04-08T20:54:00 | 2016-04-08T20:54:00 | null | UTF-8 | Python | false | false | 1,608 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft and contributors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class WorkflowSecretKeys(Model):
    """WorkflowSecretKeys

    :param primary_secret_key: Gets the primary secret key.
    :type primary_secret_key: str
    :param secondary_secret_key: Gets the secondary secret key.
    :type secondary_secret_key: str
    """

    _attribute_map = {
        'primary_secret_key': {'key': 'primarySecretKey', 'type': 'str'},
        'secondary_secret_key': {'key': 'secondarySecretKey', 'type': 'str'},
    }

    def __init__(self, primary_secret_key=None, secondary_secret_key=None, **kwargs):
        # Forward extra keyword arguments to the msrest base Model so its
        # standard initialisation runs; the original silently dropped
        # **kwargs without ever calling the base initializer.
        super(WorkflowSecretKeys, self).__init__(**kwargs)
        self.primary_secret_key = primary_secret_key
        self.secondary_secret_key = secondary_secret_key
| [
"lmazuel@microsoft.com"
] | lmazuel@microsoft.com |
38bdd4206acfc59ea9b3adf745d3103f4eed2c66 | b3d4198406ec727b29eb3429433aa3eec0c80ead | /CBF/membership/admin.py | dd61fd70bef33ea626b5e552b20d0d4d0d3be629 | [] | no_license | aqt01/CBF | 7c9148aa1e5eed9524082cecef74f9571e1f5889 | 4769b11d26dad1a1dfff718e042f78564b13f671 | refs/heads/master | 2020-06-12T18:00:26.528230 | 2018-03-22T00:34:25 | 2018-03-22T00:34:25 | 75,782,721 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 380 | py | from django.contrib import admin
from .models import SocialMedia, Member
class SocialMediaInLine(admin.TabularInline):
    """Inline editor so a member's social-media links can be managed
    directly on the member admin page."""
    model = SocialMedia
    extra = 1  # number of blank link rows shown by default


class MemberAdmin(admin.ModelAdmin):
    """Admin for members, with their social-media links edited inline."""
    inlines = [
        SocialMediaInLine,
    ]
    list_display = ['name', 'email', 'role_description']


admin.site.register(Member, MemberAdmin)
admin.site.register(SocialMedia)
| [
"lowell.abbott@gmail.com"
] | lowell.abbott@gmail.com |
071427cf0bd1245c7ad847bbd11ee1d6c3f23146 | 4971e12aabbc3f80b18a558ef3c4dec4cc847b7b | /tools/benchmark/wsgiapp.py | 138d53e036808cb8575a827326398aea9e30a507 | [
"MIT"
] | permissive | AmesianX/skitai | 3d6a10f8d5a64ea3174d31dfbe19562fca3f30b1 | d8ed2a02986f8bb0013a593c083a2ca97818f6d2 | refs/heads/master | 2020-05-18T09:42:58.026212 | 2019-02-27T06:39:38 | 2019-02-27T06:39:38 | 184,334,658 | 1 | 0 | null | 2019-04-30T21:34:09 | 2019-04-30T21:34:09 | null | UTF-8 | Python | false | false | 125 | py |
DEBUG = True
def app(env, start_response):
    """Minimal WSGI benchmark app: replies "pong" to every request.

    ``env`` is the WSGI environ dict and ``start_response`` the standard
    WSGI callable.
    """
    start_response("200 OK", [("Content-Type", "text/plain")])
    # PEP 3333 requires byte strings in the response iterable; the original
    # returned a str, which Python 3 WSGI servers reject.
    return [b'pong']
| [
"hansroh@gmail.com"
] | hansroh@gmail.com |
c32b16b42a4384da77bdc4d6e8b0b0fe32ef1331 | c50e7eb190802d7849c0d0cea02fb4d2f0021777 | /src/support/azext_support/_utils.py | 9429828d972a0884f8f6269b4854ae1ae5c694b5 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | Azure/azure-cli-extensions | c1615b19930bba7166c282918f166cd40ff6609c | b8c2cf97e991adf0c0a207d810316b8f4686dc29 | refs/heads/main | 2023-08-24T12:40:15.528432 | 2023-08-24T09:17:25 | 2023-08-24T09:17:25 | 106,580,024 | 336 | 1,226 | MIT | 2023-09-14T10:48:57 | 2017-10-11T16:27:31 | Python | UTF-8 | Python | false | false | 2,174 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import re
from azure.cli.core._profile import Profile
from azure.cli.core.azclierror import UnauthorizedError
from knack.util import CLIError
from knack.log import get_logger
logger = get_logger(__name__)
def is_billing_ticket(service_name):
    """Return True when *service_name* refers to the Billing support service."""
    return service_name.find("517f2da6-78fd-0498-4e22-ad26996b1dfc") != -1
def is_quota_ticket(service_name):
    """Return True when *service_name* refers to the Quota support service."""
    return service_name.find("06bfd9d3-516b-d5c6-5802-169c800dec89") != -1
def is_subscription_mgmt_ticket(service_name):
    """Return True when *service_name* refers to Subscription Management."""
    return service_name.find("f3dc5421-79ef-1efa-41a5-42bf3cbb52c6") != -1
def is_technical_ticket(service_name):
    """Return True when the service is none of billing / quota /
    subscription management (i.e. a technical support ticket)."""
    return not (
        is_billing_ticket(service_name)
        or is_quota_ticket(service_name)
        or is_subscription_mgmt_ticket(service_name)
    )
def parse_support_area_path(problem_classification_id):
    """Split a problem-classification ARM id into its component GUIDs.

    Returns ``{"service_name": <guid>, "problem_classifications_name": <guid>}``
    (lower-cased) when the id matches
    ``/providers/Microsoft.Support/services/<guid>/problemClassifications/<guid>``,
    otherwise None.
    """
    service_id_prefix = "/providers/Microsoft.Support/services/".lower()
    guid_regex = "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}"
    # re.escape the literal prefix: previously the unescaped "." in
    # "Microsoft.Support" matched any character, accepting malformed ids.
    sap_regex = re.compile('^{0}({1})/problemclassifications/({1})$'.format(
        re.escape(service_id_prefix), guid_regex))
    match = sap_regex.search(problem_classification_id.lower())
    if match is not None and len(match.groups()) == 2:
        return {"service_name": match.group(1), "problem_classifications_name": match.group(2)}
    return None
def get_bearer_token(cmd, tenant_id):
    """Return an Authorization header value ("Bearer <token>") for the given
    tenant, using the Azure CLI's cached credentials.

    Raises UnauthorizedError (with a hint to run ``az login``) when no
    cached credential exists for *tenant_id*.
    """
    client = Profile(cli_ctx=cmd.cli_ctx)
    try:
        logger.debug("Retrieving access token for tenant %s", tenant_id)
        creds, _, _ = client.get_raw_token(tenant=tenant_id)
    except CLIError as unauthorized_error:
        raise UnauthorizedError("Can't find authorization for {0}. ".format(tenant_id) +
                                "Run \'az login -t <tenant_name> --allow-no-subscriptions\' and try again.") from \
            unauthorized_error
    # creds is a (token_type, access_token, ...) tuple; index 1 is the token.
    return "Bearer " + creds[1]
| [
"noreply@github.com"
] | Azure.noreply@github.com |
04abc664f35460d59c8be0e6ce737af05ee1140d | d7b9b490c954c7a9160b69f8ce2c907ef4681ecb | /sponsors/migrations/0017_sponsorbenefit_added_by_user.py | f304cd76bece2385330c80321b7225cdc2430663 | [
"Apache-2.0"
] | permissive | python/pythondotorg | 00db93a4b1789a4d438806d106d9cee3349ad78c | c4ee749942227ca75c8e670546afe67232d647b2 | refs/heads/main | 2023-08-28T20:04:24.735314 | 2023-08-03T19:12:29 | 2023-08-03T19:12:29 | 6,127,047 | 1,131 | 646 | Apache-2.0 | 2023-08-24T15:57:04 | 2012-10-08T16:00:15 | Python | UTF-8 | Python | false | false | 435 | py | # Generated by Django 2.0.13 on 2020-11-20 14:43
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds the boolean ``added_by_user`` flag to
    SponsorBenefit (default False)."""

    dependencies = [
        ("sponsors", "0016_auto_20201119_1448"),
    ]

    operations = [
        migrations.AddField(
            model_name="sponsorbenefit",
            name="added_by_user",
            field=models.BooleanField(default=False, verbose_name="Added by user?"),
        ),
    ]
| [
"noreply@github.com"
] | python.noreply@github.com |
cd6217740f6dc93ee83304e31a2062ebd5bf0370 | 9499922b6d2e2652a5beccafdb57ea35e7f58970 | /templates/openwisp2/urls.py | aa3a020b690d1659b6c1439f03afdb449ac142dc | [
"BSD-3-Clause"
] | permissive | stepura/ansible-openwisp2 | 2d49fe3804df0427cf8006e4346acc7e889d52ce | 1c11882bed03e4f11be15b4d0395c8e9bd30492e | refs/heads/master | 2020-05-21T14:41:34.562597 | 2017-03-10T15:34:50 | 2017-03-10T15:34:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 846 | py | from django.conf.urls import include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django_netjsonconfig.admin_theme.admin import admin, openwisp_admin
# Apply the OpenWISP admin-site theme before the URL patterns reference
# the admin instance.
openwisp_admin()

urlpatterns = [
    url(r'^admin/', include(admin.site.urls)),
    # controller URLs
    # used by devices to download/update their configuration
    # keep the namespace argument unchanged
    url(r'^', include('django_netjsonconfig.controller.urls', namespace='controller')),
    # common URLs
    # shared among django-netjsonconfig components
    # keep the namespace argument unchanged
    url(r'^', include('django_netjsonconfig.urls', namespace='netjsonconfig')),
    # django-x509 urls
    # keep the namespace argument unchanged
    url(r'^', include('django_x509.urls', namespace='x509')),
]

# Serve static files via Django's staticfiles helper (development use).
urlpatterns += staticfiles_urlpatterns()
"nemesis@ninux.org"
] | nemesis@ninux.org |
f7384a4409b3afbb1d03580ef40cd8a2c46553f3 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/1/bSx.py | 57c10c6a7f431e5af471cd44dd45dae84ca55d69 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'bSX':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"juliettaylorswift@gmail.com"
] | juliettaylorswift@gmail.com |
0ad12506076a948181b82d4c9316d29569a3451a | 36132d1a4a2669775fbf1f86f6c4b1f341c6a85e | /aliyun-python-sdk-iot/aliyunsdkiot/request/v20180120/ListThingModelVersionRequest.py | 8177b8219b21fcea3e5f60edbf5cd5b9577252e2 | [
"Apache-2.0"
] | permissive | ghosthgy/aliyun-openapi-python-sdk | 0f676e47d0df51d9e0727a0ae00ed9c86fe756f8 | eb809a296864f29f8fce6e82adf29fdeedb41c0a | refs/heads/master | 2023-03-26T00:49:11.347883 | 2021-03-25T09:25:14 | 2021-03-25T09:25:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,838 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkiot.endpoint import endpoint_data
class ListThingModelVersionRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Iot', '2018-01-20', 'ListThingModelVersion')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceGroupId(self):
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self,ResourceGroupId):
self.add_query_param('ResourceGroupId',ResourceGroupId)
def get_IotInstanceId(self):
return self.get_query_params().get('IotInstanceId')
def set_IotInstanceId(self,IotInstanceId):
self.add_query_param('IotInstanceId',IotInstanceId)
def get_ProductKey(self):
return self.get_query_params().get('ProductKey')
def set_ProductKey(self,ProductKey):
self.add_query_param('ProductKey',ProductKey) | [
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
c8f86134a84120155ebc8043bfd218ea17981057 | a2098c9c8d39cc9e392f21de64c7ced0549d6f1f | /custom/blog/feeds.py | e022cd27429659435e03ba70731e061570513524 | [] | no_license | dmitryro/divorcesus | 23fe394b0d065f635ecb11eed945cc4fcb9bb829 | 8ecedb2b8a019e63f37702888dd12e994a75105e | refs/heads/master | 2022-12-11T17:20:13.348413 | 2020-10-01T17:27:57 | 2020-10-01T17:27:57 | 56,432,086 | 0 | 1 | null | 2022-12-08T02:22:29 | 2016-04-17T11:05:27 | JavaScript | UTF-8 | Python | false | false | 910 | py | from django.contrib.syndication.views import Feed
from django.utils import feedgenerator
from custom.blog.models import Post
from django.utils.feedgenerator import Atom1Feed
from django.core.urlresolvers import reverse
import datetime
class RssSiteNewsFeed(Feed):
title = "Divorces U.S. Feed"
link = "/blog/"
description = "Updates to Divorces U.S. blog."
def items(self):
return Post.objects.order_by('-time_published')[:5]
# def link(self, obj):
# return obj.get_absolute_url()
def item_title(self, item):
return item.title
def item_description(self, item):
return item.body
# item_link is only needed if NewsItem has no get_absolute_url method.
# def item_link(self, item):
# return reverse('posts', args=[item.pk])
class AtomSiteNewsFeed(RssSiteNewsFeed):
feed_type = Atom1Feed
subtitle = RssSiteNewsFeed.description
| [
"dmitryro@gmail.com"
] | dmitryro@gmail.com |
af2569704f85afa754bf1a09f1bb6e3bf339a63d | 021c96f56992bfb58da4973a3b0067ca3298585c | /branch/sqlgen2/sample/toolbox/rtorrent/infopart.py | 15f881db4c5295f6f9c6933408c87f500c3172f7 | [] | no_license | BackupTheBerlios/useless-svn | 3818ec28f74be9ad4b43f7261ebbe50c4efea3d7 | a38ecbb06063d09bf50c284e9fd3f7d9c0e5f3a1 | refs/heads/master | 2021-01-25T10:30:05.919994 | 2012-11-13T20:25:26 | 2012-11-13T20:25:26 | 40,749,119 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,049 | py | from qt import QWidget
from qt import PYSIGNAL, SIGNAL
from kdecore import KURL
from kdeui import KMessageBox
from useless.kdebase.htmlpart import BaseInfoPart
from base import MyUrl
from infodoc import RtorrentDocument
class RtorrentInfoPart(BaseInfoPart):
def __init__(self, parent, name='RtorrentInfoPart'):
BaseInfoPart.__init__(self, parent, name=name)
self.clear_view()
self.doc = RtorrentDocument()
def set_info(self, infohash):
self.clear_view()
self.app.processEvents()
self.begin()
self.doc.set_torrent(self.app.rtorrent.torrents[infohash])
self.infohash = infohash
self.write(unicode(self.doc.generate()))
self.end()
#self.emit(PYSIGNAL('EntityInfoUpdated'), (entityid,))
####################################################
# the methods in this section map url's to actions #
####################################################
def urlSelected(self, url, button, state, target, args):
print url
return
if url.find('||') > -1:
self._perform_url_action(url)
else:
self.openURL(KURL(url))
def _perform_url_action(self, url):
parsed = myurl.parse(str(url))
print parsed
action, atype, ident = parsed
if ident.isdigit():
ident = int(ident)
if action == 'edit':
if self._update_entity_dlg is None:
dlg = MainEntityDialog(self.dialog_parent, dtype='update', entityid=ident)
dlg.show()
elif action == 'delete':
print 'delete selected'
elif action == 'addtag':
dlg = AddTagsDialog(self.dialog_parent, ident)
dlg.show()
elif action == 'deltag':
dlg = RemoveTagsDialog(self.dialog_parent, ident)
dlg.show()
else:
KMessageBox.error(self.dialog_parent,
'Unknown action: %s' % action)
| [
"umeboshi@70758ab2-d2f7-0310-a994-9f7f813c4004"
] | umeboshi@70758ab2-d2f7-0310-a994-9f7f813c4004 |
ebbd242f0378e77644f277eea227a316fa7e5f7b | 0729e5a36e75e938b04570ad1515bc9958088a50 | /kopipasta/migrations/0002_auto_20210420_1015.py | 49d94aa250b85cf1d1c0f8d38e416480aaab9ca1 | [] | no_license | multiscripter/kopipasta-django-sqlite | 42c17f0815e807349025ae99222a76ec23e0b0aa | b25cabebc8fdf323ff4535cb921450b8faa427ec | refs/heads/master | 2023-04-06T13:51:15.007595 | 2021-04-24T20:27:47 | 2021-04-24T20:27:47 | 360,644,148 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 876 | py | # Generated by Django 3.2 on 2021-04-20 10:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('kopipasta', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.SmallAutoField(primary_key=True, serialize=False, verbose_name='ИД')),
('name', models.CharField(max_length=128, verbose_name='Название')),
],
options={
'verbose_name': 'Категория',
'verbose_name_plural': 'Категории',
},
),
migrations.RemoveField(
model_name='item',
name='next',
),
migrations.RemoveField(
model_name='item',
name='prev',
),
]
| [
"ILL-JAH@yandex.ru"
] | ILL-JAH@yandex.ru |
c889e40298be02aa6d96c57ddc5d869b79642c8f | 39486c9763e9955c6f0d2bc8459ce8151d2611eb | /python/ThirteenTeV/RPVStop_M_1300_LDQ331_TuneCUETP8M1_tauola_13TeV_pythia8_cfi.py | 573e9ed13d59e9dac0cf2ba87bfbda5a6b98d094 | [] | no_license | rafaellopesdesa/genproductions | 299220881fb8e464fb10ce076d8a21af5d082ae6 | efbcffdf67970eaa06a2f54ae91bb7b585928ffb | refs/heads/master | 2021-01-24T04:43:36.562016 | 2017-01-20T13:27:13 | 2017-01-20T13:27:13 | 46,142,369 | 0 | 0 | null | 2015-11-13T19:23:28 | 2015-11-13T19:23:28 | null | UTF-8 | Python | false | false | 18,609 | py | SLHA_TABLE="""
# SOFTSUSY3.2.4
# B.C. Allanach, Comput. Phys. Commun. 143 (2002) 305-331, hep-ph/0104145
# B.C. Allanach and M.A. Bernhardt, arXiv:0903.1805
# B.C. Allanach, M. Hanussek and C.H. Kom, arXiv:1109.3735
Block SPINFO # Program information
1 SOFTSUSY # spectrum calculator
2 3.2.4 # version number
Block MODSEL # Select model
1 1 # sugra
4 1 # R-parity violating
Block SMINPUTS # Standard Model inputs
1 1.27925000e+02 # alpha_em^(-1)(MZ) SM MSbar
2 1.16637000e-05 # G_Fermi
3 1.17600000e-01 # alpha_s(MZ)MSbar
4 9.11876000e+01 # MZ(pole)
5 4.20000000e+00 # mb(mb)
6 1.73300000e+02 # Mtop(pole)
7 1.77699000e+00 # Mtau(pole)
Block MINPAR # SUSY breaking input parameters
3 2.50000000e+01 # tanb
4 1.00000000e+00 # sign(mu)
1 0.00000000e+00 # m0
2 1.04000000e+03 # m12
5 0.00000000e+00 # A0
Block RVLAMLLEIN # GUT scale LLE couplings
1 2 1 3.20000000e-02 # lambda_{121}
2 1 1 -3.20000000e-02 # lambda_{211}
# Low energy data in SOFTSUSY: MIXING=1 TOLERANCE=1.00000000e-03
# mgut=1.19751896e+16 GeV
Block MASS # Mass spectrum
#PDG code mass particle
12 0.00000000e+00 # Mnu1(pole) normal hierarchy output
14 -6.42418814e-49 # Mnu2(pole) normal hierarchy output
16 -8.53937218e-12 # Mnu3(pole) normal hierarchy output
24 8.05658637e+01 # MW
25 1.19947771e+02 # h0
35 1.12839107e+03 # H0
36 1.12827155e+03 # A0
37 1.13127113e+03 # H+
1000001 2.05257742e+03 # ~d_L
1000002 2.05117612e+03 # ~u_L
1000003 2.05256092e+03 # ~s_L
1000004 2.05115961e+03 # ~c_L
1000005 1.86347413e+03 # ~b_1
1000006 1.30000000e+03 # ~t_1
1000011 6.89348255e+02 # ~e_L
1000012 6.84513161e+02 # ~nue_L
1000013 6.89297086e+02 # ~mu_L
1000014 6.84481384e+02 # ~numu_L
1000015 3.37046647e+02 # ~stau_1
1000016 6.72974252e+02 # ~nu_tau_L
1000021 2.25970059e+03 # ~g
1000022 4.43273800e+02 # ~neutralino(1)
1000023 8.33326197e+02 # ~neutralino(2)
1000024 8.33511959e+02 # ~chargino(1)
1000025 -1.16506398e+03 # ~neutralino(3)
1000035 1.17431114e+03 # ~neutralino(4)
1000037 1.17426378e+03 # ~chargino(2)
2000001 1.95725891e+03 # ~d_R
2000002 1.96643438e+03 # ~u_R
2000003 1.95722882e+03 # ~s_R
2000004 1.96642936e+03 # ~c_R
2000005 1.91226273e+03 # ~b_2
2000006 5.89630362e+03 # ~t_2
2000011 3.89848754e+02 # ~e_R
2000013 3.90994434e+02 # ~mu_R
2000015 6.83829650e+02 # ~stau_2
1000039 1.00000000e+19 # ~gravitino
# Higgs mixing
Block alpha # Effective Higgs mixing parameter
-4.19274059e-02 # alpha
Block stopmix # stop mixing matrix
1 1 2.79281177e-01 # O_{11}
1 2 9.60209365e-01 # O_{12}
2 1 9.60209365e-01 # O_{21}
2 2 -2.79281177e-01 # O_{22}
Block sbotmix # sbottom mixing matrix
1 1 9.20381430e-01 # O_{11}
1 2 3.91021767e-01 # O_{12}
2 1 -3.91021767e-01 # O_{21}
2 2 9.20381430e-01 # O_{22}
Block staumix # stau mixing matrix
1 1 1.51237634e-01 # O_{11}
1 2 9.88497435e-01 # O_{12}
2 1 9.88497435e-01 # O_{21}
2 2 -1.51237634e-01 # O_{22}
Block nmix # neutralino mixing matrix
1 1 9.98752152e-01 # N_{1,1}
1 2 -4.17675711e-03 # N_{1,2}
1 3 4.58882321e-02 # N_{1,3}
1 4 -1.92604001e-02 # N_{1,4}
2 1 1.24061780e-02 # N_{2,1}
2 2 9.85198015e-01 # N_{2,2}
2 3 -1.37960495e-01 # N_{2,3}
2 4 1.00984452e-01 # N_{2,4}
3 1 -1.86861773e-02 # N_{3,1}
3 2 2.65932151e-02 # N_{3,2}
3 3 7.06044732e-01 # N_{3,3}
3 4 7.07420995e-01 # N_{3,4}
4 1 -4.46212022e-02 # N_{4,1}
4 2 1.69293315e-01 # N_{4,2}
4 3 6.93081531e-01 # N_{4,3}
4 4 -6.99275849e-01 # N_{4,4}
Block Umix # chargino U mixing matrix
1 1 9.81030146e-01 # U_{1,1}
1 2 -1.93855234e-01 # U_{1,2}
2 1 1.93855234e-01 # U_{2,1}
2 2 9.81030146e-01 # U_{2,2}
Block Vmix # chargino V mixing matrix
1 1 9.89698462e-01 # V_{1,1}
1 2 -1.43167573e-01 # V_{1,2}
2 1 1.43167573e-01 # V_{2,1}
2 2 9.89698462e-01 # V_{2,2}
Block RVLAMLLE Q= 9.11876000e+01 # R-Parity violating LLE couplings
1 1 1 0.00000000e+00 # lambda_{111}
1 1 2 0.00000000e+00 # lambda_{112}
1 1 3 0.00000000e+00 # lambda_{113}
1 2 1 0.00000000e+02 # lambda_{121}
1 2 2 0.00000000e+00 # lambda_{122}
1 2 3 0.00000000e+00 # lambda_{123}
1 3 1 0.00000000e+00 # lambda_{131}
1 3 2 0.00000000e+00 # lambda_{132}
1 3 3 0.000000000+00 # lambda_{133}
2 1 1 0.00000000e+00 # lambda_{211}
2 1 2 0.00000000e+00 # lambda_{212}
2 1 3 0.00000000e+00 # lambda_{213}
2 2 1 0.00000000e+00 # lambda_{221}
2 2 2 0.00000000e+00 # lambda_{222}
2 2 3 0.00000000e+00 # lambda_{223}
2 3 1 0.00000000e+00 # lambda_{231}
2 3 2 0.00000000e+00 # lambda_{232}
2 3 3 0.000000000+00 # lambda_{233}
3 1 1 0.00000000e+00 # lambda_{311}
3 1 2 0.00000000e+00 # lambda_{312}
3 1 3 0.00000000e+00 # lambda_{313}
3 2 1 0.00000000e+00 # lambda_{321}
3 2 2 0.00000000e+00 # lambda_{322}
3 2 3 0.00000000e+00 # lambda_{323}
3 3 1 0.00000000e+00 # lambda_{331}
3 3 2 0.00000000e+00 # lambda_{332}
3 3 3 0.00000000e+00 # lambda_{333}
Block RVLAMLQD Q= 9.11876000e+01 # R-Parity violating LQD couplings
1 1 1 0.00000000e+00 # lambda'_{111}
1 1 2 0.00000000e+00 # lambda'_{112}
1 1 3 0.00000000e+00 # lambda'_{113}
1 2 1 0.00000000e+00 # lambda'_{121}
1 2 2 0.000000000+00 # lambda'_{122}
1 2 3 0.00000000e+00 # lambda'_{123}
1 3 1 0.00000000e+00 # lambda'_{131}
1 3 2 0.00000000e+00 # lambda'_{132}
1 3 3 0.00000000e+00 # lambda'_{133}
2 1 1 0.00000000e+00 # lambda'_{211}
2 1 2 0.00000000e+00 # lambda'_{212}
2 1 3 0.00000000e+00 # lambda'_{213}
2 2 1 0.00000000e+00 # lambda'_{221}
2 2 2 0.00000000e+00 # lambda'_{222}
2 2 3 0.00000000e+00 # lambda'_{223}
2 3 1 0.00000000e+00 # lambda'_{231}
2 3 2 0.00000000e+00 # lambda'_{232}
2 3 3 0.00000000e+00 # lambda'_{233}
3 1 1 0.00000000e+00 # lambda'_{311}
3 1 2 0.00000000e+00 # lambda'_{312}
3 1 3 0.00000000e+00 # lambda'_{313}
3 2 1 0.00000000e+00 # lambda'_{321}
3 2 2 0.00000000e+00 # lambda'_{322}
3 2 3 0.00000000e+00 # lambda'_{323}
3 3 1 1.00000000e+00 # lambda'_{331}
3 3 2 0.00000000e+00 # lambda'_{332}
3 3 3 0.00000000e+00 # lambda'_{333}
Block RVLAMUDD Q= 9.11876000e+01 # R-Parity violating UDD couplings
1 1 1 0.00000000e+00 # lambda''_{111}
1 1 2 0.00000000e+00 # lambda''_{112}
1 1 3 0.00000000e+00 # lambda''_{113}
1 2 1 0.00000000e+00 # lambda''_{121}
1 2 2 0.00000000e+00 # lambda''_{122}
1 2 3 0.00000000e+00 # lambda''_{123}
1 3 1 0.00000000e+00 # lambda''_{131}
1 3 2 0.00000000e+00 # lambda''_{132}
1 3 3 0.00000000e+00 # lambda''_{133}
2 1 1 0.00000000e+00 # lambda''_{211}
2 1 2 0.00000000e+00 # lambda''_{212}
2 1 3 0.00000000e+00 # lambda''_{213}
2 2 1 0.00000000e+00 # lambda''_{221}
2 2 2 0.00000000e+00 # lambda''_{222}
2 2 3 0.00000000e+00 # lambda''_{223}
2 3 1 0.00000000e+00 # lambda''_{231}
2 3 2 0.00000000e+00 # lambda''_{232}
2 3 3 0.00000000e+00 # lambda''_{233}
3 1 1 0.00000000e+00 # lambda''_{311}
3 1 2 0.00000000e+00 # lambda''_{312}
3 1 3 0.00000000e+00 # lambda''_{313}
3 2 1 0.00000000e+00 # lambda''_{321}
3 2 2 0.00000000e+00 # lambda''_{322}
3 2 3 0.00000000e+00 # lambda''_{323}
3 3 1 0.00000000e+00 # lambda''_{331}
3 3 2 0.00000000e+00 # lambda''_{332}
3 3 3 0.00000000e+00 # lambda''_{333}
Block RVT Q= 9.11876000e+01 # R-Parity violating LLE soft terms
1 1 1 0.00000000e+00 # T_{111}
1 1 2 0.00000000e+00 # T_{112}
1 1 3 0.00000000e+00 # T_{113}
1 2 1 -3.13417017e+01 # T_{121}
1 2 2 4.53713224e-23 # T_{122}
1 2 3 0.00000000e+00 # T_{123}
1 3 1 0.00000000e+00 # T_{131}
1 3 2 0.00000000e+00 # T_{132}
1 3 3 2.48753355e-22 # T_{133}
2 1 1 3.13417017e+01 # T_{211}
2 1 2 -4.53713224e-23 # T_{212}
2 1 3 0.00000000e+00 # T_{213}
2 2 1 0.00000000e+00 # T_{221}
2 2 2 0.00000000e+00 # T_{222}
2 2 3 0.00000000e+00 # T_{223}
2 3 1 0.00000000e+00 # T_{231}
2 3 2 0.00000000e+00 # T_{232}
2 3 3 -1.72180354e-04 # T_{233}
3 1 1 0.00000000e+00 # T_{311}
3 1 2 0.00000000e+00 # T_{312}
3 1 3 -2.48753355e-22 # T_{313}
3 2 1 0.00000000e+00 # T_{321}
3 2 2 0.00000000e+00 # T_{322}
3 2 3 1.72180354e-04 # T_{323}
3 3 1 0.00000000e+00 # T_{331}
3 3 2 0.00000000e+00 # T_{332}
3 3 3 0.00000000e+00 # T_{333}
Block RVTP Q= 9.11876000e+01 # R-Parity violating LQD soft terms
1 1 1 1.11961414e-24 # T'_{111}
1 1 2 3.82100494e-50 # T'_{112}
1 1 3 0.00000000e+00 # T'_{113}
1 2 1 1.77606747e-51 # T'_{121}
1 2 2 2.45132647e-23 # T'_{122}
1 2 3 0.00000000e+00 # T'_{123}
1 3 1 0.00000000e+00 # T'_{131}
1 3 2 0.00000000e+00 # T'_{132}
1 3 3 9.11150487e-22 # T'_{133}
2 1 1 -7.74941233e-07 # T'_{211}
2 1 2 -2.64470962e-32 # T'_{212}
2 1 3 0.00000000e+00 # T'_{213}
2 2 1 -1.22930558e-33 # T'_{221}
2 2 2 -1.69668629e-05 # T'_{222}
2 2 3 0.00000000e+00 # T'_{223}
2 3 1 0.00000000e+00 # T'_{231}
2 3 2 0.00000000e+00 # T'_{232}
2 3 3 -6.30653937e-04 # T'_{233}
3 1 1 0.00000000e+00 # T'_{311}
3 1 2 0.00000000e+00 # T'_{312}
3 1 3 0.00000000e+00 # T'_{313}
3 2 1 0.00000000e+00 # T'_{321}
3 2 2 0.00000000e+00 # T'_{322}
3 2 3 0.00000000e+00 # T'_{323}
3 3 1 0.00000000e+00 # T'_{331}
3 3 2 0.00000000e+00 # T'_{332}
3 3 3 0.00000000e+00 # T'_{333}
Block RVTPP Q= 9.11876000e+01 # R-Parity violating UDD soft terms
1 1 1 0.00000000e+00 # T''_{111}
1 1 2 0.00000000e+00 # T''_{112}
1 1 3 0.00000000e+00 # T''_{113}
1 2 1 0.00000000e+00 # T''_{121}
1 2 2 0.00000000e+00 # T''_{122}
1 2 3 0.00000000e+00 # T''_{123}
1 3 1 0.00000000e+00 # T''_{131}
1 3 2 0.00000000e+00 # T''_{132}
1 3 3 0.00000000e+00 # T''_{133}
2 1 1 0.00000000e+00 # T''_{211}
2 1 2 0.00000000e+00 # T''_{212}
2 1 3 0.00000000e+00 # T''_{213}
2 2 1 0.00000000e+00 # T''_{221}
2 2 2 0.00000000e+00 # T''_{222}
2 2 3 0.00000000e+00 # T''_{223}
2 3 1 0.00000000e+00 # T''_{231}
2 3 2 0.00000000e+00 # T''_{232}
2 3 3 0.00000000e+00 # T''_{233}
3 1 1 0.00000000e+00 # T''_{311}
3 1 2 0.00000000e+00 # T''_{312}
3 1 3 0.00000000e+00 # T''_{313}
3 2 1 0.00000000e+00 # T''_{321}
3 2 2 0.00000000e+00 # T''_{322}
3 2 3 0.00000000e+00 # T''_{323}
3 3 1 0.00000000e+00 # T''_{331}
3 3 2 0.00000000e+00 # T''_{332}
3 3 3 0.00000000e+00 # T''_{333}
Block RVKAPPA Q= 9.11876000e+01 # R-Parity violating kappa
1 8.32106790e-22 # kappa_{1}
2 -5.75933628e-04 # kappa_{2}
3 0.00000000e+00 # kappa_{3}
Block RVD Q= 9.11876000e+01 # R-Parity violating D
1 -4.88333045e-19 # D_{1}
2 3.38030551e-01 # D_{2}
3 0.00000000e+00 # D_{3}
Block RVSNVEV Q= 1.70273821e+03 # sneutrino VEVs D
1 1.14230019e-22 # SneutrinoVev_{1}
2 1.98508447e-04 # SneutrinoVev_{2}
3 0.00000000e+00 # SneutrinoVev_{3}
Block RVM2LH1 Q= 9.11876000e+01 # M2LH1
1 9.72084737e-19 # M2LH1_{1}
2 -6.72929399e-01 # M2LH1_{2}
3 0.00000000e+00 # M2LH1_{3}
Block RVNMIX Q= 9.11876000e+01 # neutrino-neutralino mixing matrix
1 1 0.00000000e+00 # N_{11}
1 2 1.00000000e+00 # N_{12}
1 3 4.28960393e-19 # N_{13}
1 4 -1.71942418e-26 # N_{14}
1 5 -4.08813208e-26 # N_{15}
1 6 4.29464036e-25 # N_{16}
1 7 -4.28801436e-25 # N_{17}
2 1 0.00000000e+00 # N_{21}
2 2 -4.28960393e-19 # N_{22}
2 3 1.00000000e+00 # N_{23}
2 4 -1.00292285e-07 # N_{24}
2 5 1.33303664e-07 # N_{25}
2 6 -3.00144791e-07 # N_{26}
2 7 2.82091479e-07 # N_{27}
3 1 1.00000000e+00 # N_{31}
3 2 0.00000000e+00 # N_{32}
3 3 0.00000000e+00 # N_{33}
3 4 0.00000000e+00 # N_{34}
3 5 0.00000000e+00 # N_{35}
3 6 0.00000000e+00 # N_{36}
3 7 0.00000000e+00 # N_{37}
4 1 0.00000000e+00 # N_{41}
4 2 1.04394519e-26 # N_{42}
4 3 8.02858604e-08 # N_{43}
4 4 9.98747776e-01 # N_{44}
4 5 1.24491057e-02 # N_{45}
4 6 -1.86931555e-02 # N_{46}
4 7 4.47041917e-02 # N_{47}
5 1 0.00000000e+00 # N_{51}
5 2 -1.09872363e-26 # N_{52}
5 3 -7.61318948e-08 # N_{53}
5 4 -4.22292405e-03 # N_{54}
5 5 9.85264268e-01 # N_{55}
5 6 2.66016901e-02 # N_{56}
5 7 -1.68904820e-01 # N_{57}
6 1 0.00000000e+00 # N_{61}
6 2 -7.89902027e-25 # N_{62}
6 3 4.30405542e-07 # N_{63}
6 4 4.59557286e-02 # N_{64}
6 5 -1.37690051e-01 # N_{65}
6 6 7.06044508e-01 # N_{66}
6 7 -6.93131065e-01 # N_{67}
7 1 0.00000000e+00 # N_{71}
7 2 -2.94724002e-29 # N_{72}
7 3 -3.03419619e-10 # N_{73}
7 4 -1.93162359e-02 # N_{74}
7 5 1.00701494e-01 # N_{75}
7 6 7.07420715e-01 # N_{76}
7 7 6.99315396e-01 # N_{77}
Block gauge Q= 1.70273821e+03
1 3.64249287e-01 # g'(Q)MSSM DRbar
2 6.38859054e-01 # g(Q)MSSM DRbar
3 1.03204524e+00 # g3(Q)MSSM DRbar
Block yu Q= 1.70273821e+03
1 1 7.11571130e-06 # YU_{11} (Q)MSSM DRbar
2 2 3.25224982e-03 # YU_{22} (Q)MSSM DRbar
3 3 8.30688405e-01 # YU_{33} (Q)MSSM DRbar
Block yd Q= 1.70273821e+03
1 1 3.42365706e-04 # YD_{11} (Q)MSSM DRbar
2 2 7.49603532e-03 # YD_{22} (Q)MSSM DRbar
3 3 3.07818157e-01 # YD_{33} (Q)MSSM DRbar
Block ye Q= 1.70273821e+03
1 1 7.01703518e-05 # YE_{11} (Q)MSSM DRbar
2 2 1.43194064e-02 # YE_{22} (Q)MSSM DRbar
3 3 2.56942413e-01 # YE_{33} (Q)MSSM DRbar
Block UPMNS Q= 1.70273821e+03 # neutrino mixing matrix:
1 1 0.00000000e+00 # UPMNS_{11} matrix element
1 2 1.00000000e+00 # UPMNS_{12} matrix element
1 3 1.87375058e-18 # UPMNS_{13} matrix element
2 1 0.00000000e+00 # UPMNS_{21} matrix element
2 2 1.87375058e-18 # UPMNS_{22} matrix element
2 3 -1.00000000e+00 # UPMNS_{23} matrix element
3 1 1.00000000e+00 # UPMNS_{31} matrix element
3 2 0.00000000e+00 # UPMNS_{32} matrix element
3 3 0.00000000e+00 # UPMNS_{33} matrix element
Block hmix Q= 1.70273821e+03 # Higgs mixing parameters
1 1.15893883e+03 # mu(Q)MSSM DRbar
2 2.41572131e+01 # tan beta(Q)MSSM DRbar
3 2.44007782e+02 # higgs vev(Q)MSSM DRbar
4 1.49059154e+06 # mA^2(Q)MSSM DRbar
Block msoft Q= 1.70273821e+03 # MSSM DRbar SUSY breaking parameters
1 4.52211639e+02 # M_1(Q)
2 8.21799298e+02 # M_2(Q)
3 2.20692798e+03 # M_3(Q)
21 1.48682242e+05 # mH1^2(Q)
22 -1.34464042e+06 # mH2^2(Q)
31 6.76764511e+02 # meL(Q)
32 6.76732474e+02 # mmuL(Q)
33 6.66857111e+02 # mtauL(Q)
34 3.78074193e+02 # meR(Q)
35 3.79253489e+02 # mmuR(Q)
36 3.39978037e+02 # mtauR(Q)
41 1.98518187e+03 # mqL1(Q)
42 1.98516509e+03 # mqL2(Q)
43 1.82006542e+03 # mqL3(Q)
44 1.90322814e+03 # muR(Q)
45 1.90322305e+03 # mcR(Q)
46 1.58879849e+03 # mtR(Q)
47 1.89289941e+03 # mdR(Q)
48 1.89286890e+03 # msR(Q)
49 1.84364589e+03 # mbR(Q)
Block au Q= 1.70273821e+03
1 1 -2.21574740e+03 # Au(Q)MSSM DRbar
2 2 -2.21572733e+03 # Ac(Q)MSSM DRbar
3 3 -1.72697589e+03 # At(Q)MSSM DRbar
Block ad Q= 1.70273821e+03
1 1 -2.60972596e+03 # Ad(Q)MSSM DRbar
2 2 -2.60968470e+03 # As(Q)MSSM DRbar
3 3 -2.39067424e+03 # Ab(Q)MSSM DRbar
Block ae Q= 1.70273821e+03
1 1 -5.34812142e+02 # Ae(Q)MSSM DRbar
2 2 -5.35386053e+02 # Amu(Q)MSSM DRbar
3 3 -5.15700586e+02 # Atau(Q)MSSM DRbar
DECAY 1000006 5.19649555E+01 # stop1
# stop1 2-body decays
# BR NDA ID1 ID2
1.00000000E-00 2 -15 1
#
"""
import FWCore.ParameterSet.Config as cms
from GeneratorInterface.ExternalDecays.TauolaSettings_cff import *
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.Pythia8CUEP8M1Settings_cfi import *
generator = cms.EDFilter("Pythia8GeneratorFilter",
comEnergy = cms.double(13000.0),
filterEfficiency = cms.untracked.double(1.),
maxEventsToPrint = cms.untracked.int32(1),
pythiaHepMCVerbosity = cms.untracked.bool(False),
pythiaPylistVerbosity = cms.untracked.int32(1),
SLHAFileForPythia8 = cms.string('%s' % SLHA_TABLE),
ExternalDecays = cms.PSet(Tauola = cms.untracked.PSet(TauolaPolar, TauolaDefaultInputCards ),
parameterSets = cms.vstring('Tauola')
),
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CUEP8M1SettingsBlock,
processParameters = cms.vstring(
'SUSY:all off',
'SUSY:gg2squarkantisquark = on',
'SUSY:qqbar2squarkantisquark = on',
'SUSY:idA = 1000006',
'SUSY:idB = 1000006',
),
parameterSets = cms.vstring(
'pythia8CommonSettings',
'pythia8CUEP8M1Settings',
'processParameters'
)
)
)
ProductionFilterSequence = cms.Sequence(generator)
| [
"saptaparna.bhattacharya@cern.ch"
] | saptaparna.bhattacharya@cern.ch |
c723e01098140dae38ba2781b5263766148e056c | 6ac2631c256f156d4ddf169e6c67f1fe66ebcaaf | /081/pyteacher/app_base/models.py | bfd1736a59fa47f89e025fc3e617a5e1ee89ba2d | [] | no_license | kasaiee/how-to-pyteacher | 101f106aeeed1b34756cecf502337ff8ee584ff5 | 074a57533f53fd1b8c7f37cd11dbc3b32ab8a08f | refs/heads/master | 2022-12-10T23:50:46.851784 | 2019-07-15T19:31:03 | 2019-07-15T19:31:03 | 187,372,111 | 6 | 4 | null | 2022-12-08T01:55:05 | 2019-05-18T15:08:03 | null | UTF-8 | Python | false | false | 6,187 | py | from django.db import models
from ckeditor_uploader.fields import RichTextUploadingField
import jdatetime
from django.utils.timezone import localtime
from django.db import models
from django.urls import reverse
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.contenttypes.fields import GenericForeignKey
from app_chat.models import Chat
from app_social.models import Like, Bookmark, Comment
from django.contrib.auth import get_user_model
User = get_user_model()
def course_image_path(instance, filename):
return instance.title
def attachment_path(instance, filename):
return instance.title
class Course(models.Model):
slug = models.SlugField(null=True, allow_unicode=True, blank=True)
image = models.ImageField(upload_to=course_image_path, null=True)
title = models.CharField(max_length=100, null=True)
description = RichTextUploadingField(null=True)
chats = GenericRelation(Chat)
likes = GenericRelation(Like)
bookmarks = GenericRelation(Bookmark)
comments = GenericRelation(Comment)
def price(self):
return sum([se.price for se in self.coursesession_set.all()])
def get_absolute_url(self):
params = {'slug': self.slug}
return reverse('app-base:course-detail', kwargs=params)
def save(self, *args, **kwargs):
self.slug = self.title.replace(' ', '-')
super().save(*args, **kwargs)
def __str__(self):
return self.title
def get_upload_path(instance, filename):
return 'session/private-videos/%s/%s' % (instance.id, filename)
class CourseSession(models.Model):
slug = models.SlugField(null=True, allow_unicode=True, blank=True)
course = models.ForeignKey(Course, on_delete=models.CASCADE, null=True)
title = models.CharField(max_length=100, null=True)
description = models.TextField(null=True)
aparat_video = models.TextField(null=True, blank=True)
next_session = models.ForeignKey(
'CourseSession', on_delete=models.SET_NULL, null=True, related_name='next', blank=True)
video = models.FileField(upload_to=get_upload_path, null=True, blank=True)
attachment_files = GenericRelation('AttachmentFiles')
chats = GenericRelation(Chat)
likes = GenericRelation(Like)
bookmarks = GenericRelation(Bookmark)
comments = GenericRelation(Comment)
price = models.PositiveIntegerField(null=True, default=0)
def prev_session(self):
return CourseSession.objects.get(next_session=self)
@property
def has_price(self):
return boll(self.price)
def image(self):
return self.course.image
def save(self, *args, **kwargs):
self.slug = self.title.replace(' ', '-')
super().save(*args, **kwargs)
def get_absolute_url(self):
params = {'course_slug': self.course.slug, 'session_slug': self.slug}
return reverse('app-base:course-session-detail', kwargs=params)
def __str__(self):
return self.title
class CourseSessionExercise(models.Model):
slug = models.SlugField(null=True, allow_unicode=True, blank=True)
course_session = models.ForeignKey(CourseSession, on_delete=models.CASCADE, null=True)
title = models.CharField(max_length=100, null=True)
description = RichTextUploadingField(null=True)
aparat_video = models.TextField(null=True, blank=True)
attachment_files = GenericRelation('AttachmentFiles')
chats = GenericRelation(Chat)
likes = GenericRelation(Like)
bookmarks = GenericRelation(Bookmark)
comments = GenericRelation(Comment)
def image(self):
return self.course_session.course.image
def save(self, *args, **kwargs):
self.slug = self.title.replace(' ', '-')
super().save(*args, **kwargs)
def get_absolute_url(self):
params = {'course_slug': self.course_session.course.slug,
'session_slug': self.course_session.slug, 'exercise_slug': self.slug}
return reverse('app-base:course-session-exercise-detail', kwargs=params)
def user(self):
return [c.user for c in self.chats.all() if not c.user.is_superuser][0]
def __str__(self):
return self.title
class ExerciseByStudent(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
exercise = models.ForeignKey(CourseSessionExercise, on_delete=models.CASCADE, null=True)
# rate = models.PositiveSmallIntegerField(null=True)
done = models.BooleanField(default=True)
code = models.TextField(null=True, blank=True)
done_datetime = models.DateTimeField(auto_now_add=True, null=True)
def jd_done_datetime(self):
self.done_datetime = localtime(self.done_datetime)
jdatetime.set_locale('fa_IR')
jdatetime.datetime.now().strftime('%A %B')
jd_datetime = jdatetime.datetime.fromgregorian(
year=self.done_datetime.year,
month=self.done_datetime.month,
day=self.done_datetime.day,
hour=self.done_datetime.hour,
minute=self.done_datetime.minute,
second=self.done_datetime.second,
)
return jd_datetime.strftime('%A, %d %B %y %H:%M:%S')
def __str__(self):
return self.user.username + ' ' + self.exercise.title
class AttachmentFiles(models.Model):
file = models.FileField(upload_to='attach-files/%y-%m-%d_%H:%M')
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
@property
def title(self):
return self.file.url.split('/')[-1]
@property
def color(self):
colors = {
'ppt': 'orange',
'pptx': 'orange',
'doc': 'light-blue darken-3',
'docx': 'light-blue darken-3',
'csv': 'green',
'xlsx': 'green',
'xls': 'green',
'py': 'yellow',
'pdf': 'pink',
}
file_format = self.title.split('.')[-1]
return colors.setdefault(file_format, 'grey')
def __str__(self):
return self.content_object.title
| [
"1tapciran@gmail.com"
] | 1tapciran@gmail.com |
721db38f5608aae8294e9b5c455423a9532f1398 | 250db406ad4a62e3d576e55b979bcfdc3407f226 | /Leetcode分类/7. LinkedList/Leetcode_86_Partition List/my_solution.py | 4eb4526fd86b4a98a47fac363c9b88d4e4013760 | [] | no_license | chenshanghao/Interview_preparation | 0830f0e461a2fe287b8ec24ae761974f50268767 | 4e7701d32990604c16ba18a8083c2108c0232306 | refs/heads/master | 2020-04-25T02:36:19.499364 | 2019-06-10T04:51:00 | 2019-06-10T04:51:00 | 172,446,284 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 827 | py | # Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
def partition(self, head, x):
"""
:type head: ListNode
:type x: int
:rtype: ListNode
"""
lowerList = lowerTail = ListNode(-1)
higherList = higherTail = ListNode(-1)
while head:
tmp = head
print(tmp.val)
head = head.next
if tmp.val < x:
lowerTail.next = tmp
lowerTail = lowerTail.next
else:
higherTail.next = tmp
higherTail = higherTail.next
higherTail.next = None
lowerTail.next = higherList.next
return lowerList.next | [
"21551021@zju.edu.cn"
] | 21551021@zju.edu.cn |
73f01ba1318b8cf9b7f36cc8844270b1e79095d8 | 35fd40fbc4cfa46272c4031b9ca0cb88572e3fa4 | /xmonitor/tests/functional/db/base.py | dc3dd29373804c9b3a5cd2422cc7817988fff656 | [
"Apache-2.0"
] | permissive | froyobin/xmonitor | 3d662541387226a4ff1c18ef450fdc77a769d0b8 | 092dcaa01f834353ffd8dd3c40edf9e97543bfe8 | refs/heads/master | 2020-12-23T22:33:15.758127 | 2016-06-30T06:18:05 | 2016-06-30T06:18:05 | 62,284,213 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 99,802 | py | # Copyright 2010-2012 OpenStack Foundation
# Copyright 2012 Justin Santa Barbara
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import uuid
import mock
# NOTE(jokke): simplified transition to py3, behaves like py2 xrange
from six.moves import range
from six.moves import reduce
from xmonitor.common import exception
from xmonitor.common import timeutils
from xmonitor import context
from xmonitor.tests import functional
import xmonitor.tests.functional.db as db_tests
from xmonitor.tests import utils as test_utils
# The default sort order of results is whatever sort key is specified,
# plus created_at and id for ties. When we're not specifying a sort_key,
# we get the default (created_at). Some tests below expect the fixtures to be
# returned in array-order, so if the created_at timestamps are the same,
# these tests rely on the UUID* values being in order
UUID1, UUID2, UUID3 = sorted([str(uuid.uuid4()) for x in range(3)])
def build_image_fixture(**kwargs):
    """Return an image record dict; keyword args override the defaults."""
    now = timeutils.utcnow()
    fixture = {
        'id': str(uuid.uuid4()),
        'name': 'fake image #2',
        'status': 'active',
        'disk_format': 'vhd',
        'container_format': 'ovf',
        'is_public': True,
        'created_at': now,
        'updated_at': now,
        'deleted_at': None,
        'deleted': False,
        'checksum': None,
        'min_disk': 5,
        'min_ram': 256,
        'size': 19,
        'locations': [{'url': "file:///tmp/xmonitor-tests/2",
                       'metadata': {}, 'status': 'active'}],
        'properties': {},
    }
    fixture.update(kwargs)
    return fixture
def build_task_fixture(**kwargs):
    """Return a task record dict; keyword args override the defaults."""
    now = timeutils.utcnow()
    fixture = {
        'id': str(uuid.uuid4()),
        'type': 'import',
        'status': 'pending',
        'input': {'ping': 'pong'},
        'owner': str(uuid.uuid4()),
        'message': None,
        'expires_at': None,
        'created_at': now,
        'updated_at': now,
    }
    fixture.update(kwargs)
    return fixture
class FunctionalInitWrapper(functional.FunctionalTest):
    """Functional-test base that registers the policy file with oslo_policy."""

    def setUp(self):
        """Run the standard functional setup, then point oslo_policy at
        this test's policy file (self.policy_file is provided by the base
        class — TODO confirm which ancestor sets it)."""
        super(FunctionalInitWrapper, self).setUp()
        self.config(policy_file=self.policy_file, group='oslo_policy')
class TestDriver(test_utils.BaseTestCase):
    """Base DB-driver test case: seeds a fresh DB with three image fixtures."""

    def setUp(self):
        """Create admin and plain contexts, reset the DB, insert fixtures."""
        super(TestDriver, self).setUp()
        make_context = context.RequestContext
        self.adm_context = make_context(is_admin=True,
                                        auth_token='user:user:admin')
        self.context = make_context(is_admin=False,
                                    auth_token='user:user:user')
        self.db_api = db_tests.get_db(self.config)
        db_tests.reset_db(self.db_api)
        self.fixtures = self.build_image_fixtures()
        self.create_images(self.fixtures)

    def build_image_fixtures(self):
        """Return three image fixtures: two created at t0, one 5µs later.

        The microsecond offset makes the default created_at ordering
        deterministic for the pagination/sort tests below.
        """
        t0 = timeutils.utcnow()
        t1 = t0 + datetime.timedelta(microseconds=5)
        overrides = [
            {'id': UUID1, 'created_at': t0, 'updated_at': t0,
             'properties': {'foo': 'bar', 'far': 'boo'}, 'size': 13},
            {'id': UUID2, 'created_at': t0, 'updated_at': t1, 'size': 17},
            {'id': UUID3, 'created_at': t1, 'updated_at': t1},
        ]
        return [build_image_fixture(**kw) for kw in overrides]

    def create_images(self, images):
        """Insert every fixture through the admin context."""
        for image in images:
            self.db_api.image_create(self.adm_context, image)
class DriverTests(object):
def test_image_create_requires_status(self):
fixture = {'name': 'mark', 'size': 12}
self.assertRaises(exception.Invalid,
self.db_api.image_create, self.context, fixture)
fixture = {'name': 'mark', 'size': 12, 'status': 'queued'}
self.db_api.image_create(self.context, fixture)
    @mock.patch.object(timeutils, 'utcnow')
    def test_image_create_defaults(self, mock_utcnow):
        """Attributes omitted at create time receive documented defaults.

        utcnow is pinned so created_at/updated_at can be compared exactly.
        """
        mock_utcnow.return_value = datetime.datetime.utcnow()
        create_time = timeutils.utcnow()
        values = {'status': 'queued',
                  'created_at': create_time,
                  'updated_at': create_time}
        image = self.db_api.image_create(self.context, values)
        # Nullable fields default to None, counters to 0, flags to False.
        self.assertIsNone(image['name'])
        self.assertIsNone(image['container_format'])
        self.assertEqual(0, image['min_ram'])
        self.assertEqual(0, image['min_disk'])
        self.assertIsNone(image['owner'])
        self.assertFalse(image['is_public'])
        self.assertIsNone(image['size'])
        self.assertIsNone(image['checksum'])
        self.assertIsNone(image['disk_format'])
        self.assertEqual([], image['locations'])
        self.assertFalse(image['protected'])
        self.assertFalse(image['deleted'])
        self.assertIsNone(image['deleted_at'])
        self.assertEqual([], image['properties'])
        # The timestamps we passed in are stored verbatim.
        self.assertEqual(create_time, image['created_at'])
        self.assertEqual(create_time, image['updated_at'])
        # Image IDs aren't predictable, but they should be populated
        self.assertTrue(uuid.UUID(image['id']))
        # NOTE(bcwaldon): the tags attribute should not be returned as a part
        # of a core image entity
        self.assertNotIn('tags', image)
def test_image_create_duplicate_id(self):
self.assertRaises(exception.Duplicate,
self.db_api.image_create,
self.context, {'id': UUID1, 'status': 'queued'})
def test_image_create_with_locations(self):
locations = [{'url': 'a', 'metadata': {}, 'status': 'active'},
{'url': 'b', 'metadata': {}, 'status': 'active'}]
fixture = {'status': 'queued',
'locations': locations}
image = self.db_api.image_create(self.context, fixture)
actual = [{'url': l['url'], 'metadata': l['metadata'],
'status': l['status']}
for l in image['locations']]
self.assertEqual(locations, actual)
def test_image_create_without_locations(self):
locations = []
fixture = {'status': 'queued',
'locations': locations}
self.db_api.image_create(self.context, fixture)
def test_image_create_with_location_data(self):
location_data = [{'url': 'a', 'metadata': {'key': 'value'},
'status': 'active'},
{'url': 'b', 'metadata': {},
'status': 'active'}]
fixture = {'status': 'queued', 'locations': location_data}
image = self.db_api.image_create(self.context, fixture)
actual = [{'url': l['url'], 'metadata': l['metadata'],
'status': l['status']}
for l in image['locations']]
self.assertEqual(location_data, actual)
def test_image_create_properties(self):
fixture = {'status': 'queued', 'properties': {'ping': 'pong'}}
image = self.db_api.image_create(self.context, fixture)
expected = [{'name': 'ping', 'value': 'pong'}]
actual = [{'name': p['name'], 'value': p['value']}
for p in image['properties']]
self.assertEqual(expected, actual)
def test_image_create_unknown_attributes(self):
fixture = {'ping': 'pong'}
self.assertRaises(exception.Invalid,
self.db_api.image_create, self.context, fixture)
def test_image_create_bad_name(self):
bad_name = u'A name with forbidden symbol \U0001f62a'
fixture = {'name': bad_name, 'size': 12, 'status': 'queued'}
self.assertRaises(exception.Invalid, self.db_api.image_create,
self.context, fixture)
def test_image_create_bad_checksum(self):
# checksum should be no longer than 32 characters
bad_checksum = "42" * 42
fixture = {'checksum': bad_checksum}
self.assertRaises(exception.Invalid, self.db_api.image_create,
self.context, fixture)
# if checksum is not longer than 32 characters but non-ascii ->
# still raise 400
fixture = {'checksum': u'\u042f' * 32}
self.assertRaises(exception.Invalid, self.db_api.image_create,
self.context, fixture)
def test_image_create_bad_int_params(self):
int_too_long = 2 ** 31 + 42
for param in ['min_disk', 'min_ram']:
fixture = {param: int_too_long}
self.assertRaises(exception.Invalid, self.db_api.image_create,
self.context, fixture)
def test_image_create_bad_property(self):
# bad value
fixture = {'status': 'queued',
'properties': {'bad': u'Bad \U0001f62a'}}
self.assertRaises(exception.Invalid, self.db_api.image_create,
self.context, fixture)
# bad property names are also not allowed
fixture = {'status': 'queued', 'properties': {u'Bad \U0001f62a': 'ok'}}
self.assertRaises(exception.Invalid, self.db_api.image_create,
self.context, fixture)
def test_image_create_bad_location(self):
location_data = [{'url': 'a', 'metadata': {'key': 'value'},
'status': 'active'},
{'url': u'Bad \U0001f60a', 'metadata': {},
'status': 'active'}]
fixture = {'status': 'queued', 'locations': location_data}
self.assertRaises(exception.Invalid, self.db_api.image_create,
self.context, fixture)
def test_image_update_core_attribute(self):
fixture = {'status': 'queued'}
image = self.db_api.image_update(self.adm_context, UUID3, fixture)
self.assertEqual('queued', image['status'])
self.assertNotEqual(image['created_at'], image['updated_at'])
def test_image_update_with_locations(self):
locations = [{'url': 'a', 'metadata': {}, 'status': 'active'},
{'url': 'b', 'metadata': {}, 'status': 'active'}]
fixture = {'locations': locations}
image = self.db_api.image_update(self.adm_context, UUID3, fixture)
self.assertEqual(2, len(image['locations']))
self.assertIn('id', image['locations'][0])
self.assertIn('id', image['locations'][1])
image['locations'][0].pop('id')
image['locations'][1].pop('id')
self.assertEqual(locations, image['locations'])
def test_image_update_with_location_data(self):
location_data = [{'url': 'a', 'metadata': {'key': 'value'},
'status': 'active'},
{'url': 'b', 'metadata': {}, 'status': 'active'}]
fixture = {'locations': location_data}
image = self.db_api.image_update(self.adm_context, UUID3, fixture)
self.assertEqual(2, len(image['locations']))
self.assertIn('id', image['locations'][0])
self.assertIn('id', image['locations'][1])
image['locations'][0].pop('id')
image['locations'][1].pop('id')
self.assertEqual(location_data, image['locations'])
def test_image_update(self):
fixture = {'status': 'queued', 'properties': {'ping': 'pong'}}
image = self.db_api.image_update(self.adm_context, UUID3, fixture)
expected = [{'name': 'ping', 'value': 'pong'}]
actual = [{'name': p['name'], 'value': p['value']}
for p in image['properties']]
self.assertEqual(expected, actual)
self.assertEqual('queued', image['status'])
self.assertNotEqual(image['created_at'], image['updated_at'])
def test_image_update_properties(self):
fixture = {'properties': {'ping': 'pong'}}
image = self.db_api.image_update(self.adm_context, UUID1, fixture)
expected = {'ping': 'pong', 'foo': 'bar', 'far': 'boo'}
actual = {p['name']: p['value'] for p in image['properties']}
self.assertEqual(expected, actual)
self.assertNotEqual(image['created_at'], image['updated_at'])
    def test_image_update_purge_properties(self):
        """purge_props=True soft-deletes the pre-existing properties.

        UUID1 starts with properties {'foo': 'bar', 'far': 'boo'} (see
        TestDriver.build_image_fixtures); after the purge-update only
        'ping' is live, while the originals remain with deleted=True.
        """
        fixture = {'properties': {'ping': 'pong'}}
        image = self.db_api.image_update(self.adm_context, UUID1,
                                         fixture, purge_props=True)
        properties = {p['name']: p for p in image['properties']}
        # New properties are set
        self.assertIn('ping', properties)
        self.assertEqual('pong', properties['ping']['value'])
        self.assertFalse(properties['ping']['deleted'])
        # Original properties still show up, but with deleted=True
        # TODO(markwash): db api should not return deleted properties
        self.assertIn('foo', properties)
        self.assertEqual('bar', properties['foo']['value'])
        self.assertTrue(properties['foo']['deleted'])
def test_image_update_bad_name(self):
fixture = {'name': u'A new name with forbidden symbol \U0001f62a'}
self.assertRaises(exception.Invalid, self.db_api.image_update,
self.adm_context, UUID1, fixture)
def test_image_update_bad_property(self):
# bad value
fixture = {'status': 'queued',
'properties': {'bad': u'Bad \U0001f62a'}}
self.assertRaises(exception.Invalid, self.db_api.image_update,
self.adm_context, UUID1, fixture)
# bad property names are also not allowed
fixture = {'status': 'queued', 'properties': {u'Bad \U0001f62a': 'ok'}}
self.assertRaises(exception.Invalid, self.db_api.image_update,
self.adm_context, UUID1, fixture)
def test_image_update_bad_location(self):
location_data = [{'url': 'a', 'metadata': {'key': 'value'},
'status': 'active'},
{'url': u'Bad \U0001f60a', 'metadata': {},
'status': 'active'}]
fixture = {'status': 'queued', 'locations': location_data}
self.assertRaises(exception.Invalid, self.db_api.image_update,
self.adm_context, UUID1, fixture)
    def test_update_locations_direct(self):
        """Check image_location_update validation when called directly.

        image_location_update can be invoked directly (not only through
        image_update), so verify it also rejects URLs containing 4-byte
        unicode characters.
        """
        # update locations correctly first to retrieve existing location id
        location_data = [{'url': 'a', 'metadata': {'key': 'value'},
                          'status': 'active'}]
        fixture = {'locations': location_data}
        image = self.db_api.image_update(self.adm_context, UUID1, fixture)
        self.assertEqual(1, len(image['locations']))
        self.assertIn('id', image['locations'][0])
        loc_id = image['locations'][0].pop('id')
        # Reuse the real location id but give it an invalid 4-byte URL;
        # the direct update must raise Invalid.
        bad_location = {'url': u'Bad \U0001f60a', 'metadata': {},
                        'status': 'active', 'id': loc_id}
        self.assertRaises(exception.Invalid,
                          self.db_api.image_location_update,
                          self.adm_context, UUID1, bad_location)
def test_image_property_delete(self):
fixture = {'name': 'ping', 'value': 'pong', 'image_id': UUID1}
prop = self.db_api.image_property_create(self.context, fixture)
prop = self.db_api.image_property_delete(self.context,
prop['name'], UUID1)
self.assertIsNotNone(prop['deleted_at'])
self.assertTrue(prop['deleted'])
def test_image_get(self):
image = self.db_api.image_get(self.context, UUID1)
self.assertEqual(self.fixtures[0]['id'], image['id'])
def test_image_get_disallow_deleted(self):
self.db_api.image_destroy(self.adm_context, UUID1)
self.assertRaises(exception.NotFound, self.db_api.image_get,
self.context, UUID1)
def test_image_get_allow_deleted(self):
self.db_api.image_destroy(self.adm_context, UUID1)
image = self.db_api.image_get(self.adm_context, UUID1)
self.assertEqual(self.fixtures[0]['id'], image['id'])
self.assertTrue(image['deleted'])
def test_image_get_force_allow_deleted(self):
self.db_api.image_destroy(self.adm_context, UUID1)
image = self.db_api.image_get(self.context, UUID1,
force_show_deleted=True)
self.assertEqual(self.fixtures[0]['id'], image['id'])
def test_image_get_not_owned(self):
TENANT1 = str(uuid.uuid4())
TENANT2 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
ctxt2 = context.RequestContext(is_admin=False, tenant=TENANT2,
auth_token='user:%s:user' % TENANT2)
image = self.db_api.image_create(
ctxt1, {'status': 'queued', 'owner': TENANT1})
self.assertRaises(exception.Forbidden,
self.db_api.image_get, ctxt2, image['id'])
def test_image_get_not_found(self):
UUID = str(uuid.uuid4())
self.assertRaises(exception.NotFound,
self.db_api.image_get, self.context, UUID)
def test_image_get_all(self):
images = self.db_api.image_get_all(self.context)
self.assertEqual(3, len(images))
def test_image_get_all_with_filter(self):
images = self.db_api.image_get_all(self.context,
filters={
'id': self.fixtures[0]['id'],
})
self.assertEqual(1, len(images))
self.assertEqual(self.fixtures[0]['id'], images[0]['id'])
def test_image_get_all_with_filter_user_defined_property(self):
images = self.db_api.image_get_all(self.context,
filters={'foo': 'bar'})
self.assertEqual(1, len(images))
self.assertEqual(self.fixtures[0]['id'], images[0]['id'])
def test_image_get_all_with_filter_nonexistent_userdef_property(self):
images = self.db_api.image_get_all(self.context,
filters={'faz': 'boo'})
self.assertEqual(0, len(images))
def test_image_get_all_with_filter_userdef_prop_nonexistent_value(self):
images = self.db_api.image_get_all(self.context,
filters={'foo': 'baz'})
self.assertEqual(0, len(images))
def test_image_get_all_with_filter_multiple_user_defined_properties(self):
images = self.db_api.image_get_all(self.context,
filters={'foo': 'bar',
'far': 'boo'})
self.assertEqual(1, len(images))
self.assertEqual(images[0]['id'], self.fixtures[0]['id'])
def test_image_get_all_with_filter_nonexistent_user_defined_property(self):
images = self.db_api.image_get_all(self.context,
filters={'foo': 'bar',
'faz': 'boo'})
self.assertEqual(0, len(images))
def test_image_get_all_with_filter_user_deleted_property(self):
fixture = {'name': 'poo', 'value': 'bear', 'image_id': UUID1}
prop = self.db_api.image_property_create(self.context,
fixture)
images = self.db_api.image_get_all(self.context,
filters={
'properties': {'poo': 'bear'},
})
self.assertEqual(1, len(images))
self.db_api.image_property_delete(self.context,
prop['name'], images[0]['id'])
images = self.db_api.image_get_all(self.context,
filters={
'properties': {'poo': 'bear'},
})
self.assertEqual(0, len(images))
def test_image_get_all_with_filter_undefined_property(self):
images = self.db_api.image_get_all(self.context,
filters={'poo': 'bear'})
self.assertEqual(0, len(images))
def test_image_get_all_with_filter_comparative_created_at(self):
anchor = timeutils.isotime(self.fixtures[0]['created_at'])
time_expr = 'lt:' + anchor
images = self.db_api.image_get_all(self.context,
filters={'created_at': time_expr})
self.assertEqual(0, len(images))
def test_image_get_all_with_filter_comparative_updated_at(self):
anchor = timeutils.isotime(self.fixtures[0]['updated_at'])
time_expr = 'lt:' + anchor
images = self.db_api.image_get_all(self.context,
filters={'updated_at': time_expr})
self.assertEqual(0, len(images))
def test_filter_image_by_invalid_operator(self):
self.assertRaises(exception.InvalidFilterOperatorValue,
self.db_api.image_get_all,
self.context, filters={'status': 'lala:active'})
def test_image_get_all_with_filter_in_status(self):
images = self.db_api.image_get_all(self.context,
filters={'status': 'in:active'})
self.assertEqual(3, len(images))
def test_image_get_all_with_filter_in_name(self):
data = 'in:%s' % self.fixtures[0]['name']
images = self.db_api.image_get_all(self.context,
filters={'name': data})
self.assertEqual(3, len(images))
def test_image_get_all_with_filter_in_container_format(self):
images = self.db_api.image_get_all(self.context,
filters={'container_format':
'in:ami,bare,ovf'})
self.assertEqual(3, len(images))
def test_image_get_all_with_filter_in_disk_format(self):
images = self.db_api.image_get_all(self.context,
filters={'disk_format':
'in:vhd'})
self.assertEqual(3, len(images))
def test_image_get_all_with_filter_in_id(self):
data = 'in:%s,%s' % (UUID1, UUID2)
images = self.db_api.image_get_all(self.context,
filters={'id': data})
self.assertEqual(2, len(images))
def test_image_get_all_with_quotes(self):
fixture = {'name': 'fake\\\"name'}
self.db_api.image_update(self.adm_context, UUID3, fixture)
fixture = {'name': 'fake,name'}
self.db_api.image_update(self.adm_context, UUID2, fixture)
fixture = {'name': 'fakename'}
self.db_api.image_update(self.adm_context, UUID1, fixture)
data = 'in:\"fake\\\"name\",fakename,\"fake,name\"'
images = self.db_api.image_get_all(self.context,
filters={'name': data})
self.assertEqual(3, len(images))
def test_image_get_all_with_invalid_quotes(self):
invalid_expr = ['in:\"name', 'in:\"name\"name', 'in:name\"dd\"',
'in:na\"me', 'in:\"name\"\"name\"']
for expr in invalid_expr:
self.assertRaises(exception.InvalidParameterValue,
self.db_api.image_get_all,
self.context, filters={'name': expr})
def test_image_get_all_size_min_max(self):
images = self.db_api.image_get_all(self.context,
filters={
'size_min': 10,
'size_max': 15,
})
self.assertEqual(1, len(images))
self.assertEqual(self.fixtures[0]['id'], images[0]['id'])
def test_image_get_all_size_min(self):
images = self.db_api.image_get_all(self.context,
filters={'size_min': 15})
self.assertEqual(2, len(images))
self.assertEqual(self.fixtures[2]['id'], images[0]['id'])
self.assertEqual(self.fixtures[1]['id'], images[1]['id'])
def test_image_get_all_size_range(self):
images = self.db_api.image_get_all(self.context,
filters={'size_max': 15,
'size_min': 20})
self.assertEqual(0, len(images))
def test_image_get_all_size_max(self):
images = self.db_api.image_get_all(self.context,
filters={'size_max': 15})
self.assertEqual(1, len(images))
self.assertEqual(self.fixtures[0]['id'], images[0]['id'])
def test_image_get_all_with_filter_min_range_bad_value(self):
self.assertRaises(exception.InvalidFilterRangeValue,
self.db_api.image_get_all,
self.context, filters={'size_min': 'blah'})
def test_image_get_all_with_filter_max_range_bad_value(self):
self.assertRaises(exception.InvalidFilterRangeValue,
self.db_api.image_get_all,
self.context, filters={'size_max': 'blah'})
def test_image_get_all_marker(self):
images = self.db_api.image_get_all(self.context, marker=UUID3)
self.assertEqual(2, len(images))
def test_image_get_all_marker_with_size(self):
# Use sort_key=size to test BigInteger
images = self.db_api.image_get_all(self.context, sort_key=['size'],
marker=UUID3)
self.assertEqual(2, len(images))
self.assertEqual(17, images[0]['size'])
self.assertEqual(13, images[1]['size'])
def test_image_get_all_marker_deleted(self):
"""Cannot specify a deleted image as a marker."""
self.db_api.image_destroy(self.adm_context, UUID1)
filters = {'deleted': False}
self.assertRaises(exception.NotFound, self.db_api.image_get_all,
self.context, marker=UUID1, filters=filters)
def test_image_get_all_marker_deleted_showing_deleted_as_admin(self):
"""Specify a deleted image as a marker if showing deleted images."""
self.db_api.image_destroy(self.adm_context, UUID3)
images = self.db_api.image_get_all(self.adm_context, marker=UUID3)
# NOTE(bcwaldon): an admin should see all images (deleted or not)
self.assertEqual(2, len(images))
def test_image_get_all_marker_deleted_showing_deleted(self):
"""Specify a deleted image as a marker if showing deleted images.
A non-admin user has to explicitly ask for deleted
images, and should only see deleted images in the results
"""
self.db_api.image_destroy(self.adm_context, UUID3)
self.db_api.image_destroy(self.adm_context, UUID1)
filters = {'deleted': True}
images = self.db_api.image_get_all(self.context, marker=UUID3,
filters=filters)
self.assertEqual(1, len(images))
def test_image_get_all_marker_null_name_desc(self):
"""Check an image with name null is handled
Check an image with name null is handled
marker is specified and order is descending
"""
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
UUIDX = str(uuid.uuid4())
self.db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'name': None,
'owner': TENANT1})
images = self.db_api.image_get_all(ctxt1, marker=UUIDX,
sort_key=['name'],
sort_dir=['desc'])
image_ids = [image['id'] for image in images]
expected = []
self.assertEqual(sorted(expected), sorted(image_ids))
def test_image_get_all_marker_null_disk_format_desc(self):
"""Check an image with disk_format null is handled
Check an image with disk_format null is handled when
marker is specified and order is descending
"""
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
UUIDX = str(uuid.uuid4())
self.db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'disk_format': None,
'owner': TENANT1})
images = self.db_api.image_get_all(ctxt1, marker=UUIDX,
sort_key=['disk_format'],
sort_dir=['desc'])
image_ids = [image['id'] for image in images]
expected = []
self.assertEqual(sorted(expected), sorted(image_ids))
def test_image_get_all_marker_null_container_format_desc(self):
"""Check an image with container_format null is handled
Check an image with container_format null is handled when
marker is specified and order is descending
"""
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
UUIDX = str(uuid.uuid4())
self.db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'container_format': None,
'owner': TENANT1})
images = self.db_api.image_get_all(ctxt1, marker=UUIDX,
sort_key=['container_format'],
sort_dir=['desc'])
image_ids = [image['id'] for image in images]
expected = []
self.assertEqual(sorted(expected), sorted(image_ids))
def test_image_get_all_marker_null_name_asc(self):
"""Check an image with name null is handled
Check an image with name null is handled when
marker is specified and order is ascending
"""
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
UUIDX = str(uuid.uuid4())
self.db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'name': None,
'owner': TENANT1})
images = self.db_api.image_get_all(ctxt1, marker=UUIDX,
sort_key=['name'],
sort_dir=['asc'])
image_ids = [image['id'] for image in images]
expected = [UUID3, UUID2, UUID1]
self.assertEqual(sorted(expected), sorted(image_ids))
def test_image_get_all_marker_null_disk_format_asc(self):
"""Check an image with disk_format null is handled
Check an image with disk_format null is handled when
marker is specified and order is ascending
"""
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
UUIDX = str(uuid.uuid4())
self.db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'disk_format': None,
'owner': TENANT1})
images = self.db_api.image_get_all(ctxt1, marker=UUIDX,
sort_key=['disk_format'],
sort_dir=['asc'])
image_ids = [image['id'] for image in images]
expected = [UUID3, UUID2, UUID1]
self.assertEqual(sorted(expected), sorted(image_ids))
def test_image_get_all_marker_null_container_format_asc(self):
"""Check an image with container_format null is handled
Check an image with container_format null is handled when
marker is specified and order is ascending
"""
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
UUIDX = str(uuid.uuid4())
self.db_api.image_create(ctxt1, {'id': UUIDX,
'status': 'queued',
'container_format': None,
'owner': TENANT1})
images = self.db_api.image_get_all(ctxt1, marker=UUIDX,
sort_key=['container_format'],
sort_dir=['asc'])
image_ids = [image['id'] for image in images]
expected = [UUID3, UUID2, UUID1]
self.assertEqual(sorted(expected), sorted(image_ids))
def test_image_get_all_limit(self):
images = self.db_api.image_get_all(self.context, limit=2)
self.assertEqual(2, len(images))
# A limit of None should not equate to zero
images = self.db_api.image_get_all(self.context, limit=None)
self.assertEqual(3, len(images))
# A limit of zero should actually mean zero
images = self.db_api.image_get_all(self.context, limit=0)
self.assertEqual(0, len(images))
def test_image_get_all_owned(self):
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False,
tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
UUIDX = str(uuid.uuid4())
image_meta_data = {'id': UUIDX, 'status': 'queued', 'owner': TENANT1}
self.db_api.image_create(ctxt1, image_meta_data)
TENANT2 = str(uuid.uuid4())
ctxt2 = context.RequestContext(is_admin=False,
tenant=TENANT2,
auth_token='user:%s:user' % TENANT2)
UUIDY = str(uuid.uuid4())
image_meta_data = {'id': UUIDY, 'status': 'queued', 'owner': TENANT2}
self.db_api.image_create(ctxt2, image_meta_data)
images = self.db_api.image_get_all(ctxt1)
image_ids = [image['id'] for image in images]
expected = [UUIDX, UUID3, UUID2, UUID1]
self.assertEqual(sorted(expected), sorted(image_ids))
def test_image_get_all_owned_checksum(self):
TENANT1 = str(uuid.uuid4())
ctxt1 = context.RequestContext(is_admin=False,
tenant=TENANT1,
auth_token='user:%s:user' % TENANT1)
UUIDX = str(uuid.uuid4())
CHECKSUM1 = '91264c3edf5972c9f1cb309543d38a5c'
image_meta_data = {
'id': UUIDX,
'status': 'queued',
'checksum': CHECKSUM1,
'owner': TENANT1
}
self.db_api.image_create(ctxt1, image_meta_data)
image_member_data = {
'image_id': UUIDX,
'member': TENANT1,
'can_share': False,
"status": "accepted",
}
self.db_api.image_member_create(ctxt1, image_member_data)
TENANT2 = str(uuid.uuid4())
ctxt2 = context.RequestContext(is_admin=False,
tenant=TENANT2,
auth_token='user:%s:user' % TENANT2)
UUIDY = str(uuid.uuid4())
CHECKSUM2 = '92264c3edf5972c9f1cb309543d38a5c'
image_meta_data = {
'id': UUIDY,
'status': 'queued',
'checksum': CHECKSUM2,
'owner': TENANT2
}
self.db_api.image_create(ctxt2, image_meta_data)
image_member_data = {
'image_id': UUIDY,
'member': TENANT2,
'can_share': False,
"status": "accepted",
}
self.db_api.image_member_create(ctxt2, image_member_data)
filters = {'visibility': 'shared', 'checksum': CHECKSUM2}
images = self.db_api.image_get_all(ctxt2, filters)
self.assertEqual(1, len(images))
self.assertEqual(UUIDY, images[0]['id'])
def test_image_get_all_with_filter_tags(self):
self.db_api.image_tag_create(self.context, UUID1, 'x86')
self.db_api.image_tag_create(self.context, UUID1, '64bit')
self.db_api.image_tag_create(self.context, UUID2, 'power')
self.db_api.image_tag_create(self.context, UUID2, '64bit')
images = self.db_api.image_get_all(self.context,
filters={'tags': ['64bit']})
self.assertEqual(2, len(images))
image_ids = [image['id'] for image in images]
expected = [UUID1, UUID2]
self.assertEqual(sorted(expected), sorted(image_ids))
def test_image_get_all_with_filter_multi_tags(self):
self.db_api.image_tag_create(self.context, UUID1, 'x86')
self.db_api.image_tag_create(self.context, UUID1, '64bit')
self.db_api.image_tag_create(self.context, UUID2, 'power')
self.db_api.image_tag_create(self.context, UUID2, '64bit')
images = self.db_api.image_get_all(self.context,
filters={'tags': ['64bit', 'power']
})
self.assertEqual(1, len(images))
self.assertEqual(UUID2, images[0]['id'])
def test_image_get_all_with_filter_tags_and_nonexistent(self):
self.db_api.image_tag_create(self.context, UUID1, 'x86')
images = self.db_api.image_get_all(self.context,
filters={'tags': ['x86', 'fake']
})
self.assertEqual(0, len(images))
def test_image_get_all_with_filter_deleted_tags(self):
tag = self.db_api.image_tag_create(self.context, UUID1, 'AIX')
images = self.db_api.image_get_all(self.context,
filters={
'tags': [tag],
})
self.assertEqual(1, len(images))
self.db_api.image_tag_delete(self.context, UUID1, tag)
images = self.db_api.image_get_all(self.context,
filters={
'tags': [tag],
})
self.assertEqual(0, len(images))
def test_image_get_all_with_filter_undefined_tags(self):
images = self.db_api.image_get_all(self.context,
filters={'tags': ['fake']})
self.assertEqual(0, len(images))
    def test_image_paginate(self):
        """Paginate through a list of images using limit and marker"""
        # Add two extra images with strictly increasing created_at so the
        # default created_at ordering is deterministic.
        now = timeutils.utcnow()
        extra_uuids = [(str(uuid.uuid4()),
                        now + datetime.timedelta(seconds=i * 5))
                       for i in range(2)]
        extra_images = [build_image_fixture(id=_id,
                                            created_at=_dt,
                                            updated_at=_dt)
                        for _id, _dt in extra_uuids]
        self.create_images(extra_images)
        # Reverse uuids to match default sort of created_at
        extra_uuids.reverse()
        # First page: the two newest images (the extras just created).
        page = self.db_api.image_get_all(self.context, limit=2)
        self.assertEqual([i[0] for i in extra_uuids], [i['id'] for i in page])
        # Second page: continue from the last id seen on the first page.
        last = page[-1]['id']
        page = self.db_api.image_get_all(self.context, limit=2, marker=last)
        self.assertEqual([UUID3, UUID2], [i['id'] for i in page])
        # Final page: only one fixture remains past UUID2.
        page = self.db_api.image_get_all(self.context, limit=2, marker=UUID2)
        self.assertEqual([UUID1], [i['id'] for i in page])
def test_image_get_all_invalid_sort_key(self):
self.assertRaises(exception.InvalidSortKey, self.db_api.image_get_all,
self.context, sort_key=['blah'])
def test_image_get_all_limit_marker(self):
images = self.db_api.image_get_all(self.context, limit=2)
self.assertEqual(2, len(images))
def test_image_get_all_with_tag_returning(self):
expected_tags = {UUID1: ['foo'], UUID2: ['bar'], UUID3: ['baz']}
self.db_api.image_tag_create(self.context, UUID1,
expected_tags[UUID1][0])
self.db_api.image_tag_create(self.context, UUID2,
expected_tags[UUID2][0])
self.db_api.image_tag_create(self.context, UUID3,
expected_tags[UUID3][0])
images = self.db_api.image_get_all(self.context, return_tag=True)
self.assertEqual(3, len(images))
for image in images:
self.assertIn('tags', image)
self.assertEqual(expected_tags[image['id']], image['tags'])
self.db_api.image_tag_delete(self.context, UUID1,
expected_tags[UUID1][0])
expected_tags[UUID1] = []
images = self.db_api.image_get_all(self.context, return_tag=True)
self.assertEqual(3, len(images))
for image in images:
self.assertIn('tags', image)
self.assertEqual(expected_tags[image['id']], image['tags'])
    def test_image_destroy(self):
        """Destroying an image soft-deletes its locations, properties,
        members and tags along with the image itself."""
        # Build an image that has every kind of child record attached.
        location_data = [{'url': 'a', 'metadata': {'key': 'value'},
                          'status': 'active'},
                         {'url': 'b', 'metadata': {},
                          'status': 'active'}]
        fixture = {'status': 'queued', 'locations': location_data}
        image = self.db_api.image_create(self.context, fixture)
        IMG_ID = image['id']
        fixture = {'name': 'ping', 'value': 'pong', 'image_id': IMG_ID}
        prop = self.db_api.image_property_create(self.context, fixture)
        TENANT2 = str(uuid.uuid4())
        fixture = {'image_id': IMG_ID, 'member': TENANT2, 'can_share': False}
        member = self.db_api.image_member_create(self.context, fixture)
        self.db_api.image_tag_create(self.context, IMG_ID, 'snarf')
        # Sanity-check that all child records exist before the destroy.
        self.assertEqual(2, len(image['locations']))
        self.assertIn('id', image['locations'][0])
        self.assertIn('id', image['locations'][1])
        image['locations'][0].pop('id')
        image['locations'][1].pop('id')
        self.assertEqual(location_data, image['locations'])
        self.assertEqual(('ping', 'pong', IMG_ID, False),
                         (prop['name'], prop['value'],
                          prop['image_id'], prop['deleted']))
        self.assertEqual((TENANT2, IMG_ID, False),
                         (member['member'], member['image_id'],
                          member['can_share']))
        self.assertEqual(['snarf'],
                         self.db_api.image_tag_get_all(self.context, IMG_ID))
        # Destroy the image, then verify the cascade.
        image = self.db_api.image_destroy(self.adm_context, IMG_ID)
        self.assertTrue(image['deleted'])
        self.assertTrue(image['deleted_at'])
        self.assertRaises(exception.NotFound, self.db_api.image_get,
                          self.context, IMG_ID)
        self.assertEqual([], image['locations'])
        prop = image['properties'][0]
        self.assertEqual(('ping', IMG_ID, True),
                         (prop['name'], prop['image_id'], prop['deleted']))
        # Even as the member tenant, no membership rows remain visible.
        self.context.auth_token = 'user:%s:user' % TENANT2
        members = self.db_api.image_member_find(self.context, IMG_ID)
        self.assertEqual([], members)
        tags = self.db_api.image_tag_get_all(self.context, IMG_ID)
        self.assertEqual([], tags)
    def test_image_destroy_with_delete_all(self):
        """Check the image child element's _image_delete_all methods.
        checks if all the image_delete_all methods deletes only the child
        elements of the image to be deleted.
        """
        TENANT2 = str(uuid.uuid4())
        location_data = [{'url': 'a', 'metadata': {'key': 'value'},
                          'status': 'active'},
                         {'url': 'b', 'metadata': {}, 'status': 'active'}]
        def _create_image_with_child_entries():
            # Helper: one image with two locations, a property, a member
            # for TENANT2 and a tag; returns the new image id.
            fixture = {'status': 'queued', 'locations': location_data}
            image_id = self.db_api.image_create(self.context, fixture)['id']
            fixture = {'name': 'ping', 'value': 'pong', 'image_id': image_id}
            self.db_api.image_property_create(self.context, fixture)
            fixture = {'image_id': image_id, 'member': TENANT2,
                       'can_share': False}
            self.db_api.image_member_create(self.context, fixture)
            self.db_api.image_tag_create(self.context, image_id, 'snarf')
            return image_id
        ACTIVE_IMG_ID = _create_image_with_child_entries()
        DEL_IMG_ID = _create_image_with_child_entries()
        # Destroy only the second image; the first must stay untouched.
        deleted_image = self.db_api.image_destroy(self.adm_context, DEL_IMG_ID)
        self.assertTrue(deleted_image['deleted'])
        self.assertTrue(deleted_image['deleted_at'])
        self.assertRaises(exception.NotFound, self.db_api.image_get,
                          self.context, DEL_IMG_ID)
        active_image = self.db_api.image_get(self.context, ACTIVE_IMG_ID)
        self.assertFalse(active_image['deleted'])
        self.assertFalse(active_image['deleted_at'])
        # All child records of the surviving image remain intact.
        self.assertEqual(2, len(active_image['locations']))
        self.assertIn('id', active_image['locations'][0])
        self.assertIn('id', active_image['locations'][1])
        active_image['locations'][0].pop('id')
        active_image['locations'][1].pop('id')
        self.assertEqual(location_data, active_image['locations'])
        self.assertEqual(1, len(active_image['properties']))
        prop = active_image['properties'][0]
        self.assertEqual(('ping', 'pong', ACTIVE_IMG_ID),
                         (prop['name'], prop['value'],
                          prop['image_id']))
        self.assertEqual((False, None),
                         (prop['deleted'], prop['deleted_at']))
        self.context.auth_token = 'user:%s:user' % TENANT2
        members = self.db_api.image_member_find(self.context, ACTIVE_IMG_ID)
        self.assertEqual(1, len(members))
        member = members[0]
        self.assertEqual((TENANT2, ACTIVE_IMG_ID, False),
                         (member['member'], member['image_id'],
                          member['can_share']))
        tags = self.db_api.image_tag_get_all(self.context, ACTIVE_IMG_ID)
        self.assertEqual(['snarf'], tags)
    def test_image_get_multiple_members(self):
        """member_status filtering controls which shared images are listed.

        A private image owned by TENANT1 is shared (status 'pending') with
        TENANT2; listing results for TENANT2 vary with member_status and
        the 'visibility' filter.  Counts assume 3 pre-existing visible
        images from the base fixtures.
        """
        TENANT1 = str(uuid.uuid4())
        TENANT2 = str(uuid.uuid4())
        ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
                                       auth_token='user:%s:user' % TENANT1,
                                       owner_is_tenant=True)
        ctxt2 = context.RequestContext(is_admin=False, user=TENANT2,
                                       auth_token='user:%s:user' % TENANT2,
                                       owner_is_tenant=False)
        UUIDX = str(uuid.uuid4())
        # We need private image and context.owner should not match image
        # owner
        self.db_api.image_create(ctxt1, {'id': UUIDX,
                                         'status': 'queued',
                                         'is_public': False,
                                         'owner': TENANT1})
        values = {'image_id': UUIDX, 'member': TENANT2, 'can_share': False}
        self.db_api.image_member_create(ctxt1, values)
        # Direct get succeeds for the member even while pending.
        image = self.db_api.image_get(ctxt2, UUIDX)
        self.assertEqual(UUIDX, image['id'])
        # by default get_all displays only images with status 'accepted'
        images = self.db_api.image_get_all(ctxt2)
        self.assertEqual(3, len(images))
        # filter by rejected
        images = self.db_api.image_get_all(ctxt2, member_status='rejected')
        self.assertEqual(3, len(images))
        # filter by visibility
        images = self.db_api.image_get_all(ctxt2,
                                           filters={'visibility': 'shared'})
        self.assertEqual(0, len(images))
        # filter by visibility
        images = self.db_api.image_get_all(ctxt2, member_status='pending',
                                           filters={'visibility': 'shared'})
        self.assertEqual(1, len(images))
        # filter by visibility
        images = self.db_api.image_get_all(ctxt2, member_status='all',
                                           filters={'visibility': 'shared'})
        self.assertEqual(1, len(images))
        # filter by status pending
        images = self.db_api.image_get_all(ctxt2, member_status='pending')
        self.assertEqual(4, len(images))
        # filter by status all
        images = self.db_api.image_get_all(ctxt2, member_status='all')
        self.assertEqual(4, len(images))
    def test_is_image_visible(self):
        """Membership grants visibility; deleting the member revokes it."""
        TENANT1 = str(uuid.uuid4())
        TENANT2 = str(uuid.uuid4())
        ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
                                       auth_token='user:%s:user' % TENANT1,
                                       owner_is_tenant=True)
        ctxt2 = context.RequestContext(is_admin=False, user=TENANT2,
                                       auth_token='user:%s:user' % TENANT2,
                                       owner_is_tenant=False)
        UUIDX = str(uuid.uuid4())
        # We need private image and context.owner should not match image
        # owner
        image = self.db_api.image_create(ctxt1, {'id': UUIDX,
                                                 'status': 'queued',
                                                 'is_public': False,
                                                 'owner': TENANT1})
        values = {'image_id': UUIDX, 'member': TENANT2, 'can_share': False}
        self.db_api.image_member_create(ctxt1, values)
        result = self.db_api.is_image_visible(ctxt2, image)
        self.assertTrue(result)
        # image should not be visible for a deleted member
        members = self.db_api.image_member_find(ctxt1, image_id=UUIDX)
        self.db_api.image_member_delete(ctxt1, members[0]['id'])
        result = self.db_api.is_image_visible(ctxt2, image)
        self.assertFalse(result)
def test_image_tag_create(self):
tag = self.db_api.image_tag_create(self.context, UUID1, 'snap')
self.assertEqual('snap', tag)
def test_image_tag_create_bad_value(self):
self.assertRaises(exception.Invalid,
self.db_api.image_tag_create, self.context,
UUID1, u'Bad \U0001f62a')
def test_image_tag_set_all(self):
tags = self.db_api.image_tag_get_all(self.context, UUID1)
self.assertEqual([], tags)
self.db_api.image_tag_set_all(self.context, UUID1, ['ping', 'pong'])
tags = self.db_api.image_tag_get_all(self.context, UUID1)
# NOTE(bcwaldon): tag ordering should match exactly what was provided
self.assertEqual(['ping', 'pong'], tags)
def test_image_tag_get_all(self):
self.db_api.image_tag_create(self.context, UUID1, 'snap')
self.db_api.image_tag_create(self.context, UUID1, 'snarf')
self.db_api.image_tag_create(self.context, UUID2, 'snarf')
# Check the tags for the first image
tags = self.db_api.image_tag_get_all(self.context, UUID1)
expected = ['snap', 'snarf']
self.assertEqual(expected, tags)
# Check the tags for the second image
tags = self.db_api.image_tag_get_all(self.context, UUID2)
expected = ['snarf']
self.assertEqual(expected, tags)
def test_image_tag_get_all_no_tags(self):
actual = self.db_api.image_tag_get_all(self.context, UUID1)
self.assertEqual([], actual)
def test_image_tag_get_all_non_existent_image(self):
bad_image_id = str(uuid.uuid4())
actual = self.db_api.image_tag_get_all(self.context, bad_image_id)
self.assertEqual([], actual)
def test_image_tag_delete(self):
self.db_api.image_tag_create(self.context, UUID1, 'snap')
self.db_api.image_tag_delete(self.context, UUID1, 'snap')
self.assertRaises(exception.NotFound, self.db_api.image_tag_delete,
self.context, UUID1, 'snap')
    @mock.patch.object(timeutils, 'utcnow')
    def test_image_member_create(self, mock_utcnow):
        """A new membership defaults to pending, non-sharing, not deleted."""
        # Pin utcnow so created_at/updated_at are deterministic per call.
        mock_utcnow.return_value = datetime.datetime.utcnow()
        memberships = self.db_api.image_member_find(self.context)
        self.assertEqual([], memberships)
        TENANT1 = str(uuid.uuid4())
        # NOTE(flaper87): Update auth token, otherwise
        # non visible members won't be returned.
        self.context.auth_token = 'user:%s:user' % TENANT1
        self.db_api.image_member_create(self.context,
                                        {'member': TENANT1, 'image_id': UUID1})
        memberships = self.db_api.image_member_find(self.context)
        self.assertEqual(1, len(memberships))
        actual = memberships[0]
        self.assertIsNotNone(actual['created_at'])
        self.assertIsNotNone(actual['updated_at'])
        # Strip driver-generated fields before comparing the literal record.
        actual.pop('id')
        actual.pop('created_at')
        actual.pop('updated_at')
        expected = {
            'member': TENANT1,
            'image_id': UUID1,
            'can_share': False,
            'status': 'pending',
            'deleted': False,
        }
        self.assertEqual(expected, actual)
    def test_image_member_update(self):
        """Updating can_share persists the change and bumps updated_at."""
        TENANT1 = str(uuid.uuid4())
        # NOTE(flaper87): Update auth token, otherwise
        # non visible members won't be returned.
        self.context.auth_token = 'user:%s:user' % TENANT1
        member = self.db_api.image_member_create(self.context,
                                                 {'member': TENANT1,
                                                  'image_id': UUID1})
        # Strip generated fields, keeping the id for the update call.
        member_id = member.pop('id')
        member.pop('created_at')
        member.pop('updated_at')
        expected = {'member': TENANT1,
                    'image_id': UUID1,
                    'status': 'pending',
                    'can_share': False,
                    'deleted': False}
        self.assertEqual(expected, member)
        member = self.db_api.image_member_update(self.context,
                                                 member_id,
                                                 {'can_share': True})
        # updated_at must have advanced past created_at after the update.
        self.assertNotEqual(member['created_at'], member['updated_at'])
        member.pop('id')
        member.pop('created_at')
        member.pop('updated_at')
        expected = {'member': TENANT1,
                    'image_id': UUID1,
                    'status': 'pending',
                    'can_share': True,
                    'deleted': False}
        self.assertEqual(expected, member)
        # The change is visible through a fresh find() as well.
        members = self.db_api.image_member_find(self.context,
                                                member=TENANT1,
                                                image_id=UUID1)
        member = members[0]
        member.pop('id')
        member.pop('created_at')
        member.pop('updated_at')
        self.assertEqual(expected, member)
    def test_image_member_update_status(self):
        """Updating the membership status persists and bumps updated_at."""
        TENANT1 = str(uuid.uuid4())
        # NOTE(flaper87): Update auth token, otherwise
        # non visible members won't be returned.
        self.context.auth_token = 'user:%s:user' % TENANT1
        member = self.db_api.image_member_create(self.context,
                                                 {'member': TENANT1,
                                                  'image_id': UUID1})
        # Strip generated fields, keeping the id for the update call.
        member_id = member.pop('id')
        member.pop('created_at')
        member.pop('updated_at')
        expected = {'member': TENANT1,
                    'image_id': UUID1,
                    'status': 'pending',
                    'can_share': False,
                    'deleted': False}
        self.assertEqual(expected, member)
        member = self.db_api.image_member_update(self.context,
                                                 member_id,
                                                 {'status': 'accepted'})
        # updated_at must have advanced past created_at after the update.
        self.assertNotEqual(member['created_at'], member['updated_at'])
        member.pop('id')
        member.pop('created_at')
        member.pop('updated_at')
        expected = {'member': TENANT1,
                    'image_id': UUID1,
                    'status': 'accepted',
                    'can_share': False,
                    'deleted': False}
        self.assertEqual(expected, member)
        # The change is visible through a fresh find() as well.
        members = self.db_api.image_member_find(self.context,
                                                member=TENANT1,
                                                image_id=UUID1)
        member = members[0]
        member.pop('id')
        member.pop('created_at')
        member.pop('updated_at')
        self.assertEqual(expected, member)
def test_image_member_find(self):
TENANT1 = str(uuid.uuid4())
TENANT2 = str(uuid.uuid4())
fixtures = [
{'member': TENANT1, 'image_id': UUID1},
{'member': TENANT1, 'image_id': UUID2, 'status': 'rejected'},
{'member': TENANT2, 'image_id': UUID1, 'status': 'accepted'},
]
for f in fixtures:
self.db_api.image_member_create(self.context, copy.deepcopy(f))
def _simplify(output):
return
def _assertMemberListMatch(list1, list2):
_simple = lambda x: set([(o['member'], o['image_id']) for o in x])
self.assertEqual(_simple(list1), _simple(list2))
# NOTE(flaper87): Update auth token, otherwise
# non visible members won't be returned.
self.context.auth_token = 'user:%s:user' % TENANT1
output = self.db_api.image_member_find(self.context, member=TENANT1)
_assertMemberListMatch([fixtures[0], fixtures[1]], output)
output = self.db_api.image_member_find(self.adm_context,
image_id=UUID1)
_assertMemberListMatch([fixtures[0], fixtures[2]], output)
# NOTE(flaper87): Update auth token, otherwise
# non visible members won't be returned.
self.context.auth_token = 'user:%s:user' % TENANT2
output = self.db_api.image_member_find(self.context,
member=TENANT2,
image_id=UUID1)
_assertMemberListMatch([fixtures[2]], output)
output = self.db_api.image_member_find(self.context,
status='accepted')
_assertMemberListMatch([fixtures[2]], output)
# NOTE(flaper87): Update auth token, otherwise
# non visible members won't be returned.
self.context.auth_token = 'user:%s:user' % TENANT1
output = self.db_api.image_member_find(self.context,
status='rejected')
_assertMemberListMatch([fixtures[1]], output)
output = self.db_api.image_member_find(self.context,
status='pending')
_assertMemberListMatch([fixtures[0]], output)
output = self.db_api.image_member_find(self.context,
status='pending',
image_id=UUID2)
_assertMemberListMatch([], output)
image_id = str(uuid.uuid4())
output = self.db_api.image_member_find(self.context,
member=TENANT2,
image_id=image_id)
_assertMemberListMatch([], output)
def test_image_member_count(self):
TENANT1 = str(uuid.uuid4())
self.db_api.image_member_create(self.context,
{'member': TENANT1,
'image_id': UUID1})
actual = self.db_api.image_member_count(self.context, UUID1)
self.assertEqual(1, actual)
def test_image_member_count_invalid_image_id(self):
TENANT1 = str(uuid.uuid4())
self.db_api.image_member_create(self.context,
{'member': TENANT1,
'image_id': UUID1})
self.assertRaises(exception.Invalid, self.db_api.image_member_count,
self.context, None)
def test_image_member_count_empty_image_id(self):
TENANT1 = str(uuid.uuid4())
self.db_api.image_member_create(self.context,
{'member': TENANT1,
'image_id': UUID1})
self.assertRaises(exception.Invalid, self.db_api.image_member_count,
self.context, "")
def test_image_member_delete(self):
TENANT1 = str(uuid.uuid4())
# NOTE(flaper87): Update auth token, otherwise
# non visible members won't be returned.
self.context.auth_token = 'user:%s:user' % TENANT1
fixture = {'member': TENANT1, 'image_id': UUID1, 'can_share': True}
member = self.db_api.image_member_create(self.context, fixture)
self.assertEqual(1, len(self.db_api.image_member_find(self.context)))
member = self.db_api.image_member_delete(self.context, member['id'])
self.assertEqual(0, len(self.db_api.image_member_find(self.context)))
class DriverQuotaTests(test_utils.BaseTestCase):
    """Tests for per-user storage usage accounting (quota queries)."""
    def setUp(self):
        super(DriverQuotaTests, self).setUp()
        self.owner_id1 = str(uuid.uuid4())
        self.context1 = context.RequestContext(
            is_admin=False, user=self.owner_id1, tenant=self.owner_id1,
            auth_token='%s:%s:user' % (self.owner_id1, self.owner_id1))
        self.db_api = db_tests.get_db(self.config)
        db_tests.reset_db(self.db_api)
        dt1 = timeutils.utcnow()
        dt2 = dt1 + datetime.timedelta(microseconds=5)
        # Three images owned by owner_id1 with sizes 13, 17 and 7 bytes.
        fixtures = [
            {
                'id': UUID1,
                'created_at': dt1,
                'updated_at': dt1,
                'size': 13,
                'owner': self.owner_id1,
            },
            {
                'id': UUID2,
                'created_at': dt1,
                'updated_at': dt2,
                'size': 17,
                'owner': self.owner_id1,
            },
            {
                'id': UUID3,
                'created_at': dt2,
                'updated_at': dt2,
                'size': 7,
                'owner': self.owner_id1,
            },
        ]
        self.owner1_fixtures = [
            build_image_fixture(**fixture) for fixture in fixtures]
        for fixture in self.owner1_fixtures:
            self.db_api.image_create(self.context1, fixture)
    def test_storage_quota(self):
        """Usage equals the sum of all image sizes owned by the user."""
        total = reduce(lambda x, y: x + y,
                       [f['size'] for f in self.owner1_fixtures])
        x = self.db_api.user_get_storage_usage(self.context1, self.owner_id1)
        self.assertEqual(total, x)
    def test_storage_quota_without_image_id(self):
        """Passing image_id excludes that image from the usage total."""
        total = reduce(lambda x, y: x + y,
                       [f['size'] for f in self.owner1_fixtures])
        total = total - self.owner1_fixtures[0]['size']
        x = self.db_api.user_get_storage_usage(
            self.context1, self.owner_id1,
            image_id=self.owner1_fixtures[0]['id'])
        self.assertEqual(total, x)
    def test_storage_quota_multiple_locations(self):
        """An image's size is counted once per location it has."""
        dt1 = timeutils.utcnow()
        sz = 53
        new_fixture_dict = {'id': str(uuid.uuid4()), 'created_at': dt1,
                            'updated_at': dt1, 'size': sz,
                            'owner': self.owner_id1}
        new_fixture = build_image_fixture(**new_fixture_dict)
        new_fixture['locations'].append({'url': 'file:///some/path/file',
                                         'metadata': {},
                                         'status': 'active'})
        self.db_api.image_create(self.context1, new_fixture)
        # Two locations -> the new image contributes sz * 2.
        total = reduce(lambda x, y: x + y,
                       [f['size'] for f in self.owner1_fixtures]) + (sz * 2)
        x = self.db_api.user_get_storage_usage(self.context1, self.owner_id1)
        self.assertEqual(total, x)
    def test_storage_quota_deleted_image(self):
        """Destroying an image removes its usage from the total."""
        # NOTE(flaper87): This needs to be tested for
        # soft deleted images as well. Currently there's no
        # good way to delete locations.
        dt1 = timeutils.utcnow()
        sz = 53
        image_id = str(uuid.uuid4())
        new_fixture_dict = {'id': image_id, 'created_at': dt1,
                            'updated_at': dt1, 'size': sz,
                            'owner': self.owner_id1}
        new_fixture = build_image_fixture(**new_fixture_dict)
        new_fixture['locations'].append({'url': 'file:///some/path/file',
                                         'metadata': {},
                                         'status': 'active'})
        self.db_api.image_create(self.context1, new_fixture)
        total = reduce(lambda x, y: x + y,
                       [f['size'] for f in self.owner1_fixtures])
        x = self.db_api.user_get_storage_usage(self.context1, self.owner_id1)
        self.assertEqual(total + (sz * 2), x)
        self.db_api.image_destroy(self.context1, image_id)
        x = self.db_api.user_get_storage_usage(self.context1, self.owner_id1)
        self.assertEqual(total, x)
class TaskTests(test_utils.BaseTestCase):
    """CRUD, listing and soft-delete tests for the task_* DB API."""
    def setUp(self):
        super(TaskTests, self).setUp()
        self.owner_id = str(uuid.uuid4())
        self.adm_context = context.RequestContext(is_admin=True,
                                                  auth_token='user:user:admin')
        self.context = context.RequestContext(
            is_admin=False, auth_token='user:user:user', user=self.owner_id)
        self.db_api = db_tests.get_db(self.config)
        self.fixtures = self.build_task_fixtures()
        db_tests.reset_db(self.db_api)
    def build_task_fixtures(self):
        """Return two 'import' tasks and one 'export' task for this owner."""
        self.context.tenant = str(uuid.uuid4())
        fixtures = [
            {
                'owner': self.context.owner,
                'type': 'import',
                'input': {'import_from': 'file:///a.img',
                          'import_from_format': 'qcow2',
                          'image_properties': {
                              "name": "GreatStack 1.22",
                              "tags": ["lamp", "custom"]
                          }},
            },
            {
                'owner': self.context.owner,
                'type': 'import',
                'input': {'import_from': 'file:///b.img',
                          'import_from_format': 'qcow2',
                          'image_properties': {
                              "name": "GreatStack 1.23",
                              "tags": ["lamp", "good"]
                          }},
            },
            {
                'owner': self.context.owner,
                "type": "export",
                "input": {
                    "export_uuid": "deadbeef-dead-dead-dead-beefbeefbeef",
                    "export_to":
                        "swift://cloud.foo/myaccount/mycontainer/path",
                    "export_format": "qcow2"
                }
            },
        ]
        return [build_task_fixture(**fixture) for fixture in fixtures]
    def test_task_get_all_with_filter(self):
        """Filtering by type returns only the matching tasks."""
        for fixture in self.fixtures:
            self.db_api.task_create(self.adm_context,
                                    build_task_fixture(**fixture))
        import_tasks = self.db_api.task_get_all(self.adm_context,
                                                filters={'type': 'import'})
        self.assertTrue(import_tasks)
        self.assertEqual(2, len(import_tasks))
        for task in import_tasks:
            self.assertEqual('import', task['type'])
            self.assertEqual(self.context.owner, task['owner'])
    def test_task_get_all_as_admin(self):
        """An admin listing returns every task regardless of owner."""
        tasks = []
        for fixture in self.fixtures:
            task = self.db_api.task_create(self.adm_context,
                                           build_task_fixture(**fixture))
            tasks.append(task)
        import_tasks = self.db_api.task_get_all(self.adm_context)
        self.assertTrue(import_tasks)
        self.assertEqual(3, len(import_tasks))
    def test_task_get_all_marker(self):
        """A marker skips everything up to and including that task id."""
        for fixture in self.fixtures:
            self.db_api.task_create(self.adm_context,
                                    build_task_fixture(**fixture))
        tasks = self.db_api.task_get_all(self.adm_context, sort_key='id')
        task_ids = [t['id'] for t in tasks]
        tasks = self.db_api.task_get_all(self.adm_context, sort_key='id',
                                         marker=task_ids[0])
        self.assertEqual(2, len(tasks))
    def test_task_get_all_limit(self):
        """limit caps result count; None means unlimited; 0 means none."""
        for fixture in self.fixtures:
            self.db_api.task_create(self.adm_context,
                                    build_task_fixture(**fixture))
        tasks = self.db_api.task_get_all(self.adm_context, limit=2)
        self.assertEqual(2, len(tasks))
        # A limit of None should not equate to zero
        tasks = self.db_api.task_get_all(self.adm_context, limit=None)
        self.assertEqual(3, len(tasks))
        # A limit of zero should actually mean zero
        tasks = self.db_api.task_get_all(self.adm_context, limit=0)
        self.assertEqual(0, len(tasks))
    def test_task_get_all_owned(self):
        """A non-admin listing only returns the caller's own tasks."""
        TENANT1 = str(uuid.uuid4())
        ctxt1 = context.RequestContext(is_admin=False,
                                       tenant=TENANT1,
                                       auth_token='user:%s:user' % TENANT1)
        task_values = {'type': 'import', 'status': 'pending',
                       'input': '{"loc": "fake"}', 'owner': TENANT1}
        self.db_api.task_create(ctxt1, task_values)
        TENANT2 = str(uuid.uuid4())
        ctxt2 = context.RequestContext(is_admin=False,
                                       tenant=TENANT2,
                                       auth_token='user:%s:user' % TENANT2)
        task_values = {'type': 'export', 'status': 'pending',
                       'input': '{"loc": "fake"}', 'owner': TENANT2}
        self.db_api.task_create(ctxt2, task_values)
        tasks = self.db_api.task_get_all(ctxt1)
        task_owners = set([task['owner'] for task in tasks])
        expected = set([TENANT1])
        self.assertEqual(sorted(expected), sorted(task_owners))
    def test_task_get(self):
        """task_get returns every field stored by task_create."""
        expires_at = timeutils.utcnow()
        image_id = str(uuid.uuid4())
        fixture = {
            'owner': self.context.owner,
            'type': 'import',
            'status': 'pending',
            'input': '{"loc": "fake"}',
            'result': "{'image_id': %s}" % image_id,
            'message': 'blah',
            'expires_at': expires_at
        }
        task = self.db_api.task_create(self.adm_context, fixture)
        self.assertIsNotNone(task)
        self.assertIsNotNone(task['id'])
        task_id = task['id']
        task = self.db_api.task_get(self.adm_context, task_id)
        self.assertIsNotNone(task)
        self.assertEqual(task_id, task['id'])
        self.assertEqual(self.context.owner, task['owner'])
        self.assertEqual('import', task['type'])
        self.assertEqual('pending', task['status'])
        self.assertEqual(fixture['input'], task['input'])
        self.assertEqual(fixture['result'], task['result'])
        self.assertEqual(fixture['message'], task['message'])
        self.assertEqual(expires_at, task['expires_at'])
    def test_task_get_all(self):
        """Listing returns summaries: detail keys must be stripped."""
        now = timeutils.utcnow()
        image_id = str(uuid.uuid4())
        fixture1 = {
            'owner': self.context.owner,
            'type': 'import',
            'status': 'pending',
            'input': '{"loc": "fake_1"}',
            'result': "{'image_id': %s}" % image_id,
            'message': 'blah_1',
            'expires_at': now,
            'created_at': now,
            'updated_at': now
        }
        fixture2 = {
            'owner': self.context.owner,
            'type': 'import',
            'status': 'pending',
            'input': '{"loc": "fake_2"}',
            'result': "{'image_id': %s}" % image_id,
            'message': 'blah_2',
            'expires_at': now,
            'created_at': now,
            'updated_at': now
        }
        task1 = self.db_api.task_create(self.adm_context, fixture1)
        task2 = self.db_api.task_create(self.adm_context, fixture2)
        self.assertIsNotNone(task1)
        self.assertIsNotNone(task2)
        task1_id = task1['id']
        task2_id = task2['id']
        task_fixtures = {task1_id: fixture1, task2_id: fixture2}
        tasks = self.db_api.task_get_all(self.adm_context)
        self.assertEqual(2, len(tasks))
        self.assertEqual(set((tasks[0]['id'], tasks[1]['id'])),
                         set((task1_id, task2_id)))
        for task in tasks:
            fixture = task_fixtures[task['id']]
            self.assertEqual(self.context.owner, task['owner'])
            self.assertEqual(fixture['type'], task['type'])
            self.assertEqual(fixture['status'], task['status'])
            self.assertEqual(fixture['expires_at'], task['expires_at'])
            self.assertFalse(task['deleted'])
            self.assertIsNone(task['deleted_at'])
            self.assertEqual(fixture['created_at'], task['created_at'])
            self.assertEqual(fixture['updated_at'], task['updated_at'])
            # Detail fields must not appear in list results.
            task_details_keys = ['input', 'message', 'result']
            for key in task_details_keys:
                self.assertNotIn(key, task)
    def test_task_create(self):
        """task_create stores the given values and default task info."""
        task_id = str(uuid.uuid4())
        self.context.tenant = self.context.owner
        values = {
            'id': task_id,
            'owner': self.context.owner,
            'type': 'export',
            'status': 'pending',
        }
        task_values = build_task_fixture(**values)
        task = self.db_api.task_create(self.adm_context, task_values)
        self.assertIsNotNone(task)
        self.assertEqual(task_id, task['id'])
        self.assertEqual(self.context.owner, task['owner'])
        self.assertEqual('export', task['type'])
        self.assertEqual('pending', task['status'])
        self.assertEqual({'ping': 'pong'}, task['input'])
    def test_task_create_with_all_task_info_null(self):
        """Null input/result/message are stored and round-trip as None."""
        task_id = str(uuid.uuid4())
        self.context.tenant = str(uuid.uuid4())
        values = {
            'id': task_id,
            'owner': self.context.owner,
            'type': 'export',
            'status': 'pending',
            'input': None,
            'result': None,
            'message': None,
        }
        task_values = build_task_fixture(**values)
        task = self.db_api.task_create(self.adm_context, task_values)
        self.assertIsNotNone(task)
        self.assertEqual(task_id, task['id'])
        self.assertEqual(self.context.owner, task['owner'])
        self.assertEqual('export', task['type'])
        self.assertEqual('pending', task['status'])
        self.assertIsNone(task['input'])
        self.assertIsNone(task['result'])
        self.assertIsNone(task['message'])
    def test_task_update(self):
        """task_update changes the given fields and bumps updated_at."""
        self.context.tenant = str(uuid.uuid4())
        result = {'foo': 'bar'}
        task_values = build_task_fixture(owner=self.context.owner,
                                         result=result)
        task = self.db_api.task_create(self.adm_context, task_values)
        task_id = task['id']
        fixture = {
            'status': 'processing',
            'message': 'This is a error string',
        }
        task = self.db_api.task_update(self.adm_context, task_id, fixture)
        self.assertEqual(task_id, task['id'])
        self.assertEqual(self.context.owner, task['owner'])
        self.assertEqual('import', task['type'])
        self.assertEqual('processing', task['status'])
        self.assertEqual({'ping': 'pong'}, task['input'])
        self.assertEqual(result, task['result'])
        self.assertEqual('This is a error string', task['message'])
        self.assertFalse(task['deleted'])
        self.assertIsNone(task['deleted_at'])
        self.assertIsNone(task['expires_at'])
        self.assertEqual(task_values['created_at'], task['created_at'])
        self.assertGreater(task['updated_at'], task['created_at'])
    def test_task_update_with_all_task_info_null(self):
        """Updating a task with null task info leaves the info as None."""
        self.context.tenant = str(uuid.uuid4())
        task_values = build_task_fixture(owner=self.context.owner,
                                         input=None,
                                         result=None,
                                         message=None)
        task = self.db_api.task_create(self.adm_context, task_values)
        task_id = task['id']
        fixture = {'status': 'processing'}
        task = self.db_api.task_update(self.adm_context, task_id, fixture)
        self.assertEqual(task_id, task['id'])
        self.assertEqual(self.context.owner, task['owner'])
        self.assertEqual('import', task['type'])
        self.assertEqual('processing', task['status'])
        self.assertIsNone(task['input'])
        self.assertIsNone(task['result'])
        self.assertIsNone(task['message'])
        self.assertFalse(task['deleted'])
        self.assertIsNone(task['deleted_at'])
        self.assertIsNone(task['expires_at'])
        self.assertEqual(task_values['created_at'], task['created_at'])
        self.assertGreater(task['updated_at'], task['created_at'])
    def test_task_delete(self):
        """A deleted task is no longer retrievable via task_get."""
        task_values = build_task_fixture(owner=self.context.owner)
        task = self.db_api.task_create(self.adm_context, task_values)
        self.assertIsNotNone(task)
        self.assertFalse(task['deleted'])
        self.assertIsNone(task['deleted_at'])
        task_id = task['id']
        self.db_api.task_delete(self.adm_context, task_id)
        self.assertRaises(exception.TaskNotFound, self.db_api.task_get,
                          self.context, task_id)
    def test_task_delete_as_admin(self):
        """An admin can still fetch a soft-deleted task with the flag."""
        task_values = build_task_fixture(owner=self.context.owner)
        task = self.db_api.task_create(self.adm_context, task_values)
        self.assertIsNotNone(task)
        self.assertFalse(task['deleted'])
        self.assertIsNone(task['deleted_at'])
        task_id = task['id']
        self.db_api.task_delete(self.adm_context, task_id)
        del_task = self.db_api.task_get(self.adm_context,
                                        task_id,
                                        force_show_deleted=True)
        self.assertIsNotNone(del_task)
        self.assertEqual(task_id, del_task['id'])
        self.assertTrue(del_task['deleted'])
        self.assertIsNotNone(del_task['deleted_at'])
class DBPurgeTests(test_utils.BaseTestCase):
    """Tests for purge_deleted_rows: only old soft-deleted rows are purged."""
    def setUp(self):
        super(DBPurgeTests, self).setUp()
        self.adm_context = context.get_admin_context(show_deleted=True)
        self.db_api = db_tests.get_db(self.config)
        db_tests.reset_db(self.db_api)
        self.image_fixtures, self.task_fixtures = self.build_fixtures()
        self.create_tasks(self.task_fixtures)
        self.create_images(self.image_fixtures)
    def build_fixtures(self):
        """Build matching image and task fixtures.

        One row soft-deleted days ago (purgeable), one soft-deleted just
        now (too recent to purge), and one live row.
        """
        dt1 = timeutils.utcnow() - datetime.timedelta(days=5)
        dt2 = dt1 + datetime.timedelta(days=1)
        dt3 = dt2 + datetime.timedelta(days=1)
        fixtures = [
            {
                'created_at': dt1,
                'updated_at': dt1,
                'deleted_at': dt3,
                'deleted': True,
            },
            {
                'created_at': dt1,
                'updated_at': dt2,
                'deleted_at': timeutils.utcnow(),
                'deleted': True,
            },
            {
                'created_at': dt2,
                'updated_at': dt2,
                'deleted_at': None,
                'deleted': False,
            },
        ]
        return (
            [build_image_fixture(**fixture) for fixture in fixtures],
            [build_task_fixture(**fixture) for fixture in fixtures],
        )
    def create_images(self, images):
        """Insert the given image fixtures as the admin context."""
        for fixture in images:
            self.db_api.image_create(self.adm_context, fixture)
    def create_tasks(self, tasks):
        """Insert the given task fixtures as the admin context."""
        for fixture in tasks:
            self.db_api.task_create(self.adm_context, fixture)
    def test_db_purge(self):
        """Purging rows deleted >1 day ago leaves two rows per table."""
        self.db_api.purge_deleted_rows(self.adm_context, 1, 5)
        images = self.db_api.image_get_all(self.adm_context)
        # (expected, observed) argument order to match the rest of this
        # module's assertEqual convention.
        self.assertEqual(2, len(images))
        tasks = self.db_api.task_get_all(self.adm_context)
        self.assertEqual(2, len(tasks))
class TestVisibility(test_utils.BaseTestCase):
    """Fixture base class: one public and one private image per owner.

    Owners are: no owner, the admin tenant, tenant 1 and tenant 2 —
    eight images total.  Subclasses mix in VisibilityTests to exercise
    listing rules against this data set.
    """
    def setUp(self):
        super(TestVisibility, self).setUp()
        self.db_api = db_tests.get_db(self.config)
        db_tests.reset_db(self.db_api)
        self.setup_tenants()
        self.setup_contexts()
        self.fixtures = self.build_image_fixtures()
        self.create_images(self.fixtures)
    def setup_tenants(self):
        """Generate fresh tenant ids for admin, tenant 1 and tenant 2."""
        self.admin_tenant = str(uuid.uuid4())
        self.tenant1 = str(uuid.uuid4())
        self.tenant2 = str(uuid.uuid4())
    def setup_contexts(self):
        """Build request contexts for each tenant plus tenant-less ones."""
        self.admin_context = context.RequestContext(
            is_admin=True, tenant=self.admin_tenant)
        self.admin_none_context = context.RequestContext(
            is_admin=True, tenant=None)
        self.tenant1_context = context.RequestContext(tenant=self.tenant1)
        self.tenant2_context = context.RequestContext(tenant=self.tenant2)
        self.none_context = context.RequestContext(tenant=None)
    def build_image_fixtures(self):
        """Return a public and a private image fixture for every owner."""
        owners = {
            'Unowned': None,
            'Admin Tenant': self.admin_tenant,
            'Tenant 1': self.tenant1,
            'Tenant 2': self.tenant2,
        }
        visibilities = {'public': True, 'private': False}
        return [build_image_fixture(name='%s, %s' % (owner_label, visibility),
                                    owner=owner,
                                    is_public=is_public)
                for owner_label, owner in owners.items()
                for visibility, is_public in visibilities.items()]
    def create_images(self, images):
        """Insert every fixture using the admin context."""
        for fixture in images:
            self.db_api.image_create(self.admin_context, fixture)
class VisibilityTests(object):
def test_unknown_admin_sees_all(self):
images = self.db_api.image_get_all(self.admin_none_context)
self.assertEqual(8, len(images))
def test_unknown_admin_is_public_true(self):
images = self.db_api.image_get_all(self.admin_none_context,
is_public=True)
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_unknown_admin_is_public_false(self):
images = self.db_api.image_get_all(self.admin_none_context,
is_public=False)
self.assertEqual(4, len(images))
for i in images:
self.assertFalse(i['is_public'])
def test_unknown_admin_is_public_none(self):
images = self.db_api.image_get_all(self.admin_none_context)
self.assertEqual(8, len(images))
def test_unknown_admin_visibility_public(self):
images = self.db_api.image_get_all(self.admin_none_context,
filters={'visibility': 'public'})
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_unknown_admin_visibility_private(self):
images = self.db_api.image_get_all(self.admin_none_context,
filters={'visibility': 'private'})
self.assertEqual(4, len(images))
for i in images:
self.assertFalse(i['is_public'])
def test_known_admin_sees_all(self):
images = self.db_api.image_get_all(self.admin_context)
self.assertEqual(8, len(images))
def test_known_admin_is_public_true(self):
images = self.db_api.image_get_all(self.admin_context, is_public=True)
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_known_admin_is_public_false(self):
images = self.db_api.image_get_all(self.admin_context,
is_public=False)
self.assertEqual(4, len(images))
for i in images:
self.assertFalse(i['is_public'])
def test_known_admin_is_public_none(self):
images = self.db_api.image_get_all(self.admin_context)
self.assertEqual(8, len(images))
def test_admin_as_user_true(self):
images = self.db_api.image_get_all(self.admin_context,
admin_as_user=True)
self.assertEqual(5, len(images))
for i in images:
self.assertTrue(i['is_public'] or i['owner'] == self.admin_tenant)
def test_known_admin_visibility_public(self):
images = self.db_api.image_get_all(self.admin_context,
filters={'visibility': 'public'})
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_known_admin_visibility_private(self):
images = self.db_api.image_get_all(self.admin_context,
filters={'visibility': 'private'})
self.assertEqual(4, len(images))
for i in images:
self.assertFalse(i['is_public'])
def test_what_unknown_user_sees(self):
images = self.db_api.image_get_all(self.none_context)
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_unknown_user_is_public_true(self):
images = self.db_api.image_get_all(self.none_context, is_public=True)
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_unknown_user_is_public_false(self):
images = self.db_api.image_get_all(self.none_context, is_public=False)
self.assertEqual(0, len(images))
def test_unknown_user_is_public_none(self):
images = self.db_api.image_get_all(self.none_context)
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_unknown_user_visibility_public(self):
images = self.db_api.image_get_all(self.none_context,
filters={'visibility': 'public'})
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_unknown_user_visibility_private(self):
images = self.db_api.image_get_all(self.none_context,
filters={'visibility': 'private'})
self.assertEqual(0, len(images))
def test_what_tenant1_sees(self):
images = self.db_api.image_get_all(self.tenant1_context)
self.assertEqual(5, len(images))
for i in images:
if not i['is_public']:
self.assertEqual(i['owner'], self.tenant1)
def test_tenant1_is_public_true(self):
images = self.db_api.image_get_all(self.tenant1_context,
is_public=True)
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_tenant1_is_public_false(self):
images = self.db_api.image_get_all(self.tenant1_context,
is_public=False)
self.assertEqual(1, len(images))
self.assertFalse(images[0]['is_public'])
self.assertEqual(images[0]['owner'], self.tenant1)
def test_tenant1_is_public_none(self):
images = self.db_api.image_get_all(self.tenant1_context)
self.assertEqual(5, len(images))
for i in images:
if not i['is_public']:
self.assertEqual(self.tenant1, i['owner'])
def test_tenant1_visibility_public(self):
images = self.db_api.image_get_all(self.tenant1_context,
filters={'visibility': 'public'})
self.assertEqual(4, len(images))
for i in images:
self.assertTrue(i['is_public'])
def test_tenant1_visibility_private(self):
images = self.db_api.image_get_all(self.tenant1_context,
filters={'visibility': 'private'})
self.assertEqual(1, len(images))
self.assertFalse(images[0]['is_public'])
self.assertEqual(self.tenant1, images[0]['owner'])
def _setup_is_public_red_herring(self):
values = {
'name': 'Red Herring',
'owner': self.tenant1,
'is_public': False,
'properties': {'is_public': 'silly'}
}
fixture = build_image_fixture(**values)
self.db_api.image_create(self.admin_context, fixture)
def test_is_public_is_a_normal_filter_for_admin(self):
self._setup_is_public_red_herring()
images = self.db_api.image_get_all(self.admin_context,
filters={'is_public': 'silly'})
self.assertEqual(1, len(images))
self.assertEqual('Red Herring', images[0]['name'])
def test_is_public_is_a_normal_filter_for_user(self):
self._setup_is_public_red_herring()
images = self.db_api.image_get_all(self.tenant1_context,
filters={'is_public': 'silly'})
self.assertEqual(1, len(images))
self.assertEqual('Red Herring', images[0]['name'])
# NOTE(markwash): the following tests are sanity checks to make sure
# visibility filtering and is_public=(True|False) do not interact in
# unexpected ways. However, using both of the filtering techniques
# simultaneously is not an anticipated use case.
def test_admin_is_public_true_and_visibility_public(self):
images = self.db_api.image_get_all(self.admin_context, is_public=True,
filters={'visibility': 'public'})
self.assertEqual(4, len(images))
def test_admin_is_public_false_and_visibility_public(self):
images = self.db_api.image_get_all(self.admin_context, is_public=False,
filters={'visibility': 'public'})
self.assertEqual(0, len(images))
def test_admin_is_public_true_and_visibility_private(self):
images = self.db_api.image_get_all(self.admin_context, is_public=True,
filters={'visibility': 'private'})
self.assertEqual(0, len(images))
def test_admin_is_public_false_and_visibility_private(self):
images = self.db_api.image_get_all(self.admin_context, is_public=False,
filters={'visibility': 'private'})
self.assertEqual(4, len(images))
def test_tenant1_is_public_true_and_visibility_public(self):
images = self.db_api.image_get_all(self.tenant1_context,
is_public=True,
filters={'visibility': 'public'})
self.assertEqual(4, len(images))
def test_tenant1_is_public_false_and_visibility_public(self):
images = self.db_api.image_get_all(self.tenant1_context,
is_public=False,
filters={'visibility': 'public'})
self.assertEqual(0, len(images))
def test_tenant1_is_public_true_and_visibility_private(self):
images = self.db_api.image_get_all(self.tenant1_context,
is_public=True,
filters={'visibility': 'private'})
self.assertEqual(0, len(images))
def test_tenant1_is_public_false_and_visibility_private(self):
images = self.db_api.image_get_all(self.tenant1_context,
is_public=False,
filters={'visibility': 'private'})
self.assertEqual(1, len(images))
class TestMembershipVisibility(test_utils.BaseTestCase):
def setUp(self):
super(TestMembershipVisibility, self).setUp()
self.db_api = db_tests.get_db(self.config)
db_tests.reset_db(self.db_api)
self._create_contexts()
self._create_images()
def _create_contexts(self):
self.owner1, self.owner1_ctx = self._user_fixture()
self.owner2, self.owner2_ctx = self._user_fixture()
self.tenant1, self.user1_ctx = self._user_fixture()
self.tenant2, self.user2_ctx = self._user_fixture()
self.tenant3, self.user3_ctx = self._user_fixture()
self.admin_tenant, self.admin_ctx = self._user_fixture(admin=True)
def _user_fixture(self, admin=False):
tenant_id = str(uuid.uuid4())
ctx = context.RequestContext(tenant=tenant_id, is_admin=admin)
return tenant_id, ctx
def _create_images(self):
self.image_ids = {}
for owner in [self.owner1, self.owner2]:
self._create_image('not_shared', owner)
self._create_image('shared-with-1', owner, members=[self.tenant1])
self._create_image('shared-with-2', owner, members=[self.tenant2])
self._create_image('shared-with-both', owner,
members=[self.tenant1, self.tenant2])
def _create_image(self, name, owner, members=None):
image = build_image_fixture(name=name, owner=owner, is_public=False)
self.image_ids[(owner, name)] = image['id']
self.db_api.image_create(self.admin_ctx, image)
for member in members or []:
member = {'image_id': image['id'], 'member': member}
self.db_api.image_member_create(self.admin_ctx, member)
class MembershipVisibilityTests(object):
def _check_by_member(self, ctx, member_id, expected):
members = self.db_api.image_member_find(ctx, member=member_id)
images = [self.db_api.image_get(self.admin_ctx, member['image_id'])
for member in members]
facets = [(image['owner'], image['name']) for image in images]
self.assertEqual(set(expected), set(facets))
def test_owner1_finding_user1_memberships(self):
"""Owner1 should see images it owns that are shared with User1."""
expected = [
(self.owner1, 'shared-with-1'),
(self.owner1, 'shared-with-both'),
]
self._check_by_member(self.owner1_ctx, self.tenant1, expected)
def test_user1_finding_user1_memberships(self):
"""User1 should see all images shared with User1 """
expected = [
(self.owner1, 'shared-with-1'),
(self.owner1, 'shared-with-both'),
(self.owner2, 'shared-with-1'),
(self.owner2, 'shared-with-both'),
]
self._check_by_member(self.user1_ctx, self.tenant1, expected)
def test_user2_finding_user1_memberships(self):
"""User2 should see no images shared with User1 """
expected = []
self._check_by_member(self.user2_ctx, self.tenant1, expected)
def test_admin_finding_user1_memberships(self):
"""Admin should see all images shared with User1 """
expected = [
(self.owner1, 'shared-with-1'),
(self.owner1, 'shared-with-both'),
(self.owner2, 'shared-with-1'),
(self.owner2, 'shared-with-both'),
]
self._check_by_member(self.admin_ctx, self.tenant1, expected)
def _check_by_image(self, context, image_id, expected):
members = self.db_api.image_member_find(context, image_id=image_id)
member_ids = [member['member'] for member in members]
self.assertEqual(set(expected), set(member_ids))
def test_owner1_finding_owner1s_image_members(self):
"""Owner1 should see all memberships of its image """
expected = [self.tenant1, self.tenant2]
image_id = self.image_ids[(self.owner1, 'shared-with-both')]
self._check_by_image(self.owner1_ctx, image_id, expected)
def test_admin_finding_owner1s_image_members(self):
"""Admin should see all memberships of owner1's image """
expected = [self.tenant1, self.tenant2]
image_id = self.image_ids[(self.owner1, 'shared-with-both')]
self._check_by_image(self.admin_ctx, image_id, expected)
def test_user1_finding_owner1s_image_members(self):
"""User1 should see its own membership of owner1's image """
expected = [self.tenant1]
image_id = self.image_ids[(self.owner1, 'shared-with-both')]
self._check_by_image(self.user1_ctx, image_id, expected)
def test_user2_finding_owner1s_image_members(self):
"""User2 should see its own membership of owner1's image """
expected = [self.tenant2]
image_id = self.image_ids[(self.owner1, 'shared-with-both')]
self._check_by_image(self.user2_ctx, image_id, expected)
def test_user3_finding_owner1s_image_members(self):
"""User3 should see no memberships of owner1's image """
expected = []
image_id = self.image_ids[(self.owner1, 'shared-with-both')]
self._check_by_image(self.user3_ctx, image_id, expected)
| [
"froyo.bin@gmail.com"
] | froyo.bin@gmail.com |
e2966c2a118b6034281b5b1b5ed6e51dfc5c0cf6 | a62fad21b7d00360e08a4c2666ced6e0a938d772 | /blood_finder_api/blood_finder_api/asgi.py | 56c9802576900480eb5889e63e5f485e18ea4841 | [] | no_license | NumanIbnMazid/blood-finder | 46153efc5094601d628c16f685fb3d4a68e259ac | 95ace66f7e9a5460389940cfc6341cfb218b7148 | refs/heads/master | 2023-07-29T16:47:30.723456 | 2021-09-13T11:23:00 | 2021-09-13T11:23:00 | 405,943,320 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 680 | py | """
ASGI config for blood_finder_api project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'blood_finder_api.settings')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'blood_finder_api.settings.development')
# os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'blood_finder_api.settings.pythonanywhere')
# os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'blood_finder_api.settings.heroku')
application = get_asgi_application()
| [
"numanibnmazid@gmail.com"
] | numanibnmazid@gmail.com |
d17f43149aafd5d87d8b4ef49f3a0806a3ccffcc | 04b2e1c38fc64b8fd1020dfd5232eb3034f6cc1a | /ui/LogPlotSwitcher.py | 4d37292d20ada961aa6c953beeab4ff74c2930c8 | [
"Apache-2.0"
] | permissive | Kevin2599/GRIPy | faac3c0ffb98fc26094349bcc40c3522bd53b76b | 9cd79dded7f57b52515410a9bcb0a8cead48bfdd | refs/heads/master | 2020-04-13T17:27:51.655974 | 2018-08-29T20:39:09 | 2018-08-29T20:39:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,939 | py | # -*- coding: utf-8 -*-
import wx
from wx.combo import BitmapComboBox
from collections import OrderedDict
from om.Manager import ObjectManager
class Dialog(wx.Dialog):
def __init__(self, parent, colors, color_names, i_color, welluid, lims, loguid, *args, **kwargs):
if 'on_ok_callback' in kwargs:
self.on_ok_callback = kwargs.pop('on_ok_callback')
else:
self.on_ok_callback = None
if 'on_cancel_callback' in kwargs:
self.on_cancel_callback = kwargs.pop('on_cancel_callback')
else:
self.on_cancel_callback = None
super(Dialog, self).__init__(parent, *args, **kwargs)
self._OM = ObjectManager(self)
self.cur_loguid = loguid
self.lims = OrderedDict()
for uid, lim in lims.items():
self.lims[uid] = [str(a) for a in lim]
button_sizer = self.CreateButtonSizer(wx.OK | wx.CANCEL)
self.Bind(wx.EVT_BUTTON, self.on_button)
self.SetTitle(u"Alterar Perfil")
fgs = wx.FlexGridSizer(3, 2, 4, 4)
color_label = wx.StaticText(self, label="Cor: ")
log_label = wx.StaticText(self, label="Perfil: ")
lim_label = wx.StaticText(self, label="Limites: ")
self.color_box = BitmapComboBox(self, style=wx.CB_READONLY)
for c, cn in zip(colors, color_names):
self.color_box.Append(cn, wx.EmptyBitmapRGBA(32, 2, c[0], c[1],
c[2], 255))
self.log_box = wx.Choice(self)
self.log_box.AppendItems([log.name for log in self._OM.list('log', welluid)])
self.loguidmap = [log.uid for log in self._OM.list('log', welluid)]
self.log_box.Bind(wx.EVT_CHOICE, self.on_log_select)
lim_sizer = wx.BoxSizer(wx.HORIZONTAL)
self.lim1_ctrl = wx.TextCtrl(self, style=wx.TE_RIGHT)
lim_sizer.Add(self.lim1_ctrl, 1, wx.EXPAND)
self.lim2_ctrl = wx.TextCtrl(self, style=wx.TE_RIGHT)
lim_sizer.Add(self.lim2_ctrl, 1, wx.EXPAND)
fgs.AddMany([(color_label), (self.color_box, 1, wx.EXPAND),
(log_label), (self.log_box, 1, wx.EXPAND),
(lim_label), (lim_sizer, 1, wx.EXPAND)])
fgs.AddGrowableCol(1, 1)
vbox = wx.BoxSizer(wx.VERTICAL)
vbox.Add(fgs, flag=wx.ALL | wx.EXPAND, border=8)
vbox.Add(button_sizer, flag=wx.ALIGN_RIGHT, border=8)
if i_color is not None:
self.color_box.SetSelection(i_color)
if loguid is not None:
idx = self.loguidmap.index(loguid)
self.log_box.SetSelection(idx)
self.lim1_ctrl.SetValue(self.lims[loguid][0])
self.lim2_ctrl.SetValue(self.lims[loguid][1])
self.SetSizerAndFit(vbox)
def on_log_select(self, event):
idx = event.GetSelection()
loguid = self.loguidmap[idx]
if loguid != self.cur_loguid:
l1 = self.lim1_ctrl.GetValue()
l2 = self.lim2_ctrl.GetValue()
if self.cur_loguid is not None:
self.lims[self.cur_loguid] = [l1, l2]
self.lim1_ctrl.SetValue(self.lims[loguid][0])
self.lim2_ctrl.SetValue(self.lims[loguid][1])
self.cur_loguid = loguid
event.Skip(True)
def on_button(self, event):
evt_id = event.GetId()
if evt_id == wx.ID_OK and self.on_ok_callback is not None:
self.on_ok_callback(event)
elif evt_id == wx.ID_CANCEL and self.on_cancel_callback is not None:
self.on_cancel_callback(event)
event.Skip(True)
def get_loguid(self):
idx = self.log_box.GetSelection()
loguid = self.loguidmap[idx]
return loguid
def get_i_color(self):
return self.color_box.GetSelection()
def get_lim(self):
return [float(self.lim1_ctrl.GetValue()),
float(self.lim2_ctrl.GetValue())]
| [
"adrianopaulo@gmail.com"
] | adrianopaulo@gmail.com |
3177575f51e0cd6e73acce8e9bef8aea053bfe42 | 9a38733c268e4a715c70c2bedba10433ddad3380 | /test_GCR_on_miniImagenet.py | 6e4d9a5131fb6b44729a971b08e179abb0b76661 | [] | no_license | lwj2018/few-shot | bca3d7d4148f607cc70e1a1c1e5847a0428ed53e | d2c5fc14f519f81e2e29e6abea6affe82e122b61 | refs/heads/master | 2021-05-17T21:52:10.277842 | 2020-04-08T11:26:38 | 2020-04-08T11:26:38 | 250,967,777 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,915 | py | import os.path as osp
import time
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
from datasets.mini_imagenet_drop500 import MiniImageNet2
from datasets.samplers import CategoriesSampler_train_100way, CategoriesSampler_val_100way
from models.GCR import GCR
from models.convnet import gcrConvnet
from utils.ioUtils import *
from utils.critUtils import loss_for_gcr
from utils.testUtils import test_100way
from utils.metricUtils import euclidean_metric
from torch.utils.tensorboard import SummaryWriter
from utils.dataUtils import getDataloader
from Arguments import Arguments
# Hyper params
epochs = 1000
learning_rate = 1e-3
# Options
shot = 5
dataset = 'miniImage'
store_name = 'test' + dataset + '_GCR' + '_%dshot'%(shot)
summary_name = 'runs/' + store_name
checkpoint = '/home/liweijie/projects/few-shot/checkpoint/20200401_miniImage_GCR_best.pth.tar'
log_interval = 20
device_list = '1'
num_workers = 8
model_path = "./checkpoint"
start_epoch = 0
best_acc = 0.00
# Get args
args = Arguments(shot,dataset)
# Use specific gpus
os.environ["CUDA_VISIBLE_DEVICES"]=device_list
# Device setting
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Use writer to record
writer = SummaryWriter(os.path.join(summary_name, time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))))
# Prepare dataset & dataloader
valset = MiniImageNet2('trainvaltest')
val_loader = DataLoader(dataset=valset, batch_size = 128,
num_workers=8, pin_memory=True, shuffle=True)
valset2 = MiniImageNet2('trainval')
val_loader2 = DataLoader(dataset=valset2, batch_size = 128,
num_workers=8, pin_memory=True, shuffle=True)
valset3 = MiniImageNet2('test')
val_loader3 = DataLoader(dataset=valset3, batch_size = 128,
num_workers=8, pin_memory=True, shuffle=True)
model_cnn = gcrConvnet().to(device)
model = GCR(model_cnn,train_way=args.train_way,\
test_way=args.test_way, shot=args.shot,query=args.query,query_val=args.query_val).to(device)
# Resume model
if checkpoint is not None:
start_epoch, best_acc = resume_gcr_model(model, checkpoint, args.n_base)
# Create loss criterion
criterion = nn.CrossEntropyLoss()
# Start Test
print("Test Started".center(60, '#'))
for epoch in range(start_epoch, start_epoch+1):
acc = test_100way(model,criterion,val_loader,device,epoch,log_interval,writer,args,euclidean_metric)
print('Batch accu_a on miniImagenet: {:.3f}'.format(acc))
acc = test_100way(model,criterion,val_loader2,device,epoch,log_interval,writer,args,euclidean_metric)
print('Batch accu_b on miniImagenet: {:.3f}'.format(acc))
acc = test_100way(model,criterion,val_loader3,device,epoch,log_interval,writer,args,euclidean_metric)
print('Batch accu_n on miniImagenet: {:.3f}'.format(acc))
print("Test Finished".center(60, '#')) | [
"lwj19970331@gmail.com"
] | lwj19970331@gmail.com |
fe5b66641f2aca7c783d08749756abe3c80ddca8 | 6fa3835d8193ed0ccf69804a142bc35aeb79c1ed | /pitcoin_mainnet_node_edition/pitcoin_modules/block/__init__.py | ebb5570be622e276d054aa7bcfd02b4d002babb9 | [] | no_license | akorunska/pitcoin_block_explorer | fca6dc81351946b6f9c88d2b962b0369df08ea49 | 73f5e3b380602c3ae12a370af9bd58f35ffbbd71 | refs/heads/master | 2021-07-01T06:34:18.772029 | 2020-10-25T10:20:43 | 2020-10-25T10:20:43 | 185,462,462 | 0 | 0 | null | 2020-10-25T10:20:44 | 2019-05-07T19:05:34 | Python | UTF-8 | Python | false | false | 61 | py | from .block import Block
from .merkle import get_merkle_root
| [
"akorunska@gmail.com"
] | akorunska@gmail.com |
898f4f336d98d93c12cdc2ef9a7995f33a80704b | c44904609923bbc20812ddc1f875ffb182f98518 | /Day 4 - Beginner - Randomisation and Python Lists/1. Random Exercise/main.py | 0df72c8cf8e418a612a922577f52721843005366 | [] | no_license | ceteongvanness/100-Days-of-Code-Python | a3f7b88a25aedbfe3cc5633cadf09fa746c2a2ec | 272b68c8d720f2d25e05245d41c7b8fff2851ddd | refs/heads/master | 2023-03-11T05:13:32.823152 | 2021-02-20T00:31:02 | 2021-02-20T00:31:02 | 310,734,202 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 181 | py | import random
test_seed = int(input("Create a seed number: "))
random.seed(test_seed)
randomSide = random.randint(0, 1)
if randomSide == 1:
print("Heads")
else:
print("Tails") | [
"ceteongvanness@hotmail.com"
] | ceteongvanness@hotmail.com |
3863c03fc4e7392c2e05fb2378a9748752bc107b | 2186fdd8350d6dc72340a65c2cc1d345c2c51377 | /Python/Django/NinjaGold_project/NinjaGold_project/settings.py | a9422c5e56fbce0a0d010f6279270ef269389415 | [] | no_license | umanav/Lab206 | 2b494712b59585493e74c51089223696729eb716 | 31f0b098aa6722bbf7d2ad6e619fa38f29cab4d5 | refs/heads/master | 2020-03-10T07:54:25.904503 | 2018-04-12T15:37:20 | 2018-04-12T15:37:20 | 129,273,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,156 | py | """
Django settings for NinjaGold_project project.
Generated by 'django-admin startproject' using Django 1.11.12.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '#5&0bo^_d1!^n%*bpya+85rrn+y00*!7ws!4^5rvc&uw=7-kcm'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'apps.NinjaGold_app',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'NinjaGold_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'NinjaGold_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
| [
"umanav@amazon.com"
] | umanav@amazon.com |
3cc902946a6b0a3ee3e399f506011de912eece59 | 07e396e3682465cd753d5c99ef06f9f70c374816 | /Test_scripts/homerest_rest_framework.py | 3de208f2c2b5c59ade5ec780ffc954b9710e9514 | [] | no_license | DavisDeco/Django-RestAPI | d1ac04346f32e3e9122a485e7cebd81c73253f2e | f88135437c3fe40f27fe214adca05905a1c80e92 | refs/heads/master | 2022-12-25T20:33:00.659364 | 2019-12-13T09:10:57 | 2019-12-13T09:10:57 | 227,798,733 | 0 | 1 | null | 2022-12-08T06:16:24 | 2019-12-13T09:03:55 | Python | UTF-8 | Python | false | false | 1,102 | py | import json
import requests
import os
ENDPOINT = "http://127.0.0.1:8000/api/status/"
image_path = os.path.join(os.getcwd(),"Arifu_Logo_Transparent.png")
def do_img(method='get', data={}, is_json=True, img_path=None):
header = {}
if is_json:
headers['content-type'] = 'application/json'
data = json.dumps(data)
if img_path is not None:
with open(image_path,'rb') as image:
file_data = {
'image' : image
}
r = request.request(method,ENDPOINT,data=data,files=file_data)
else:
r = request.request(method,ENDPOINT,data=data,headers=headers)
print(r.text)
print(r.status_code)
return r
do_img(method='post',data={'user':1,"content":""},is_json=False)
def do(method='get', data={}, is_json=True):
header = {}
if is_json:
headers['content-type'] = 'application/json'
data = json.dumps(data)
r = request.request(method,ENDPOINT,data=data,headers=headers)
print(r.text)
print(r.status_code)
return r | [
"you@example.com"
] | you@example.com |
bd0d602e44529f3602a633fba72b2687e55f1fa0 | e573b586a921084f29a36f8e2de5afcae2c65ff8 | /tasks/part_3/replacement_3.py | 7fbd0023900ac766dbda655780824a8c8f5546a5 | [] | no_license | HannaKulba/AdaptiveTraining_English | e69c8a0c444c1fa72b4783ba837cb3d9dc055d91 | 46497dc6827df37f4ebb69671912ef5b934ab6f0 | refs/heads/master | 2020-12-28T15:05:25.762072 | 2020-02-19T14:39:22 | 2020-02-19T14:39:22 | 238,381,636 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 141 | py | import sys
import re
for line in sys.stdin:
line = line.rstrip()
result = re.sub(r'\b(a|A+)+\b', 'argh', line, 1)
print(result)
| [
"anna.mirraza@gmail.com"
] | anna.mirraza@gmail.com |
2f39634fd8bf356dc61b2936c783b74ddf7e856d | e5b69a84c6e6a3e4b658dbfc07dda1394320dea5 | /config/includes.chroot/usr/lib/gedit/plugins/smart_highlight/smart_highlight.py | 8f9dc419979f841e57473a0867e52ddb2a333436 | [] | no_license | vikashpatty/kali | 2e384fad383ee643a19579ca8c5381d47e122ab9 | 13a91c2b2997d0db23d4b2f9c4f198768fa857af | refs/heads/master | 2020-03-08T21:54:34.399588 | 2014-02-08T02:02:28 | 2014-02-08T02:02:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,630 | py | # -*- encoding:utf-8 -*-
# smart_highlight.py is part of smart-highlighting-gedit.
#
#
# Copyright 2010-2012 swatch
#
# smart-highlighting-gedit is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from gi.repository import Gtk, Gdk, Gedit
import re
import os.path
#import pango
import shutil
import config_manager
from config_ui import ConfigUI
import gettext
APP_NAME = 'smart-highlight'
CONFIG_DIR = os.path.expanduser('~/.local/share/gedit/plugins/' + APP_NAME + '/config')
#LOCALE_DIR = '/usr/share/locale'
LOCALE_DIR = os.path.join(os.path.dirname(__file__), 'locale')
if not os.path.exists(LOCALE_DIR):
LOCALE_DIR = '/usr/share/locale'
try:
t = gettext.translation(APP_NAME, LOCALE_DIR)
_ = t.gettext
except:
pass
#gettext.install(APP_NAME, LOCALE_DIR, unicode=True)
ui_str = """<ui>
<menubar name="MenuBar">
<menu name="ToolsMenu" action="Tools">
<placeholder name="ToolsOps_0">
<separator/>
<menu name="SmartHighlightMenu" action="SmartHighlightMenu">
<placeholder name="SmartHighlightMenuHolder">
<menuitem name="smart_highlight_configure" action="smart_highlight_configure"/>
</placeholder>
</menu>
<separator/>
</placeholder>
</menu>
</menubar>
</ui>
"""
class SmartHighlightWindowHelper:
def __init__(self, plugin, window):
self._window = window
self._plugin = plugin
self.current_selection = ''
self.start_iter = None
self.end_iter = None
self.vadj_value = 0
views = self._window.get_views()
for view in views:
view.get_buffer().connect('mark-set', self.on_textbuffer_markset_event)
view.get_vadjustment().connect('value-changed', self.on_view_vadjustment_value_changed)
#view.connect('button-press-event', self.on_view_button_press_event)
self.active_tab_added_id = self._window.connect("tab-added", self.tab_added_action)
user_configfile = os.path.join(CONFIG_DIR, 'config.xml')
if not os.path.exists(user_configfile):
if not os.path.exists(os.path.dirname(user_configfile)):
os.makedirs(os.path.dirname(user_configfile))
shutil.copy2(os.path.dirname(__file__) + "/config/config.xml", os.path.dirname(user_configfile))
configfile = user_configfile
'''
user_configfile = os.path.join(os.path.expanduser('~/.local/share/gedit/plugins/' + 'smart_highlight'), 'config.xml')
if os.path.exists(user_configfile):
configfile = user_configfile
else:
configfile = os.path.join(os.path.dirname(__file__), "config.xml")
#'''
self.config_manager = config_manager.ConfigManager(configfile)
self.options = self.config_manager.load_configure('search_option')
self.config_manager.to_bool(self.options)
self.smart_highlight = self.config_manager.load_configure('smart_highlight')
self._insert_menu()
def deactivate(self):
# Remove any installed menu items
self._window.disconnect(self.active_tab_added_id)
self.config_manager.update_config_file(self.config_manager.config_file, 'search_option', self.options)
self.config_manager.update_config_file(self.config_manager.config_file, 'smart_highlight', self.smart_highlight)
def _insert_menu(self):
# Get the GtkUIManager
manager = self._window.get_ui_manager()
# Create a new action group
self._action_group = Gtk.ActionGroup("SmartHighlightActions")
self._action_group.add_actions( [("SmartHighlightMenu", None, _('Smart Highlighting'))] + \
[("smart_highlight_configure", None, _("Configuration"), None, _("Smart Highlighting Configure"), self.smart_highlight_configure)])
# Insert the action group
manager.insert_action_group(self._action_group, -1)
# Merge the UI
self._ui_id = manager.add_ui_from_string(ui_str)
def _remove_menu(self):
# Get the GtkUIManager
manager = self._window.get_ui_manager()
# Remove the ui
manager.remove_ui(self._ui_id)
# Remove the action group
manager.remove_action_group(self._action_group)
# Make sure the manager updates
manager.ensure_update()
def update_ui(self):
self._action_group.set_sensitive(self._window.get_active_document() != None)
'''
def show_message_dialog(self, text):
dlg = Gtk.MessageDialog(self._window,
Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
Gtk.MessageType.INFO,
Gtk.ButtonsType.CLOSE,
_(text))
dlg.run()
dlg.hide()
#'''
def create_regex(self, pattern, options):
if options['REGEX_SEARCH'] == False:
pattern = re.escape(unicode(r'%s' % pattern, "utf-8"))
else:
pattern = unicode(r'%s' % pattern, "utf-8")
if options['MATCH_WHOLE_WORD'] == True:
pattern = r'\b%s\b' % pattern
if options['MATCH_CASE'] == True:
regex = re.compile(pattern, re.MULTILINE)
else:
regex = re.compile(pattern, re.IGNORECASE | re.MULTILINE)
return regex
def smart_highlighting_action(self, doc, search_pattern, iter, clear_flg = True):
regex = self.create_regex(search_pattern, self.options)
if clear_flg == True:
self.smart_highlight_off(doc)
self.vadj_value = self._window.get_active_view().get_vadjustment().get_value()
current_line = iter.get_line()
start_line = current_line - 50
end_line = current_line + 50
if start_line <= 0:
self.start_iter = doc.get_start_iter()
else:
self.start_iter = doc.get_iter_at_line(start_line)
if end_line < doc.get_line_count():
self.end_iter = doc.get_iter_at_line(end_line)
else:
self.end_iter = doc.get_end_iter()
text = unicode(doc.get_text(self.start_iter, self.end_iter, True), 'utf-8')
match = regex.search(text)
while(match):
self.smart_highlight_on(doc, match.start()+self.start_iter.get_offset(), match.end() - match.start())
match = regex.search(text, match.end()+1)
def tab_added_action(self, action, tab):
view = tab.get_view()
view.get_buffer().connect('mark-set', self.on_textbuffer_markset_event)
view.get_vadjustment().connect('value-changed', self.on_view_vadjustment_value_changed)
#view.connect('button-press-event', self.on_view_button_press_event)
def on_textbuffer_markset_event(self, textbuffer, iter, textmark):
#print textmark.get_name()
if textmark.get_name() != 'selection_bound' and textmark.get_name() != 'insert':
return
if textbuffer.get_selection_bounds():
start, end = textbuffer.get_selection_bounds()
self.current_selection = textbuffer.get_text(start, end, True)
self.smart_highlighting_action(textbuffer, self.current_selection, iter)
else:
self.current_selection = ''
self.smart_highlight_off(textbuffer)
def smart_highlight_on(self, doc, highlight_start, highlight_len):
if doc.get_tag_table().lookup('smart_highlight') == None:
tag = doc.create_tag("smart_highlight", foreground=self.smart_highlight['FOREGROUND_COLOR'], background=self.smart_highlight['BACKGROUND_COLOR'])
doc.apply_tag_by_name('smart_highlight', doc.get_iter_at_offset(highlight_start), doc.get_iter_at_offset(highlight_start + highlight_len))
def smart_highlight_off(self, doc):
start, end = doc.get_bounds()
if doc.get_tag_table().lookup('smart_highlight') == None:
tag = doc.create_tag("smart_highlight", foreground=self.smart_highlight['FOREGROUND_COLOR'], background=self.smart_highlight['BACKGROUND_COLOR'])
doc.remove_tag_by_name('smart_highlight', start, end)
def smart_highlight_configure(self, action, data = None):
config_ui = ConfigUI(self._plugin)
def on_view_vadjustment_value_changed(self, object, data = None):
if self.current_selection == '':
return
if object.get_value() < self.vadj_value: #scroll up
self.smart_highlighting_action(self._window.get_active_document(), self.current_selection, self.start_iter, False)
else: #scroll down
self.smart_highlighting_action(self._window.get_active_document(), self.current_selection, self.end_iter, False)
'''
def auto_select_word_bounds(self, pattern=r'[_a-zA-Z][_a-zA-Z0-9]*'):
doc = self._window.get_active_document()
if doc.get_has_selection():
start, end = doc.get_selection_bounds()
return start, end
else:
current_iter = doc.get_iter_at_mark(doc.get_insert())
line_num = current_iter.get_line()
line_start = doc.get_iter_at_line(line_num)
line_text = doc.get_text(line_start, doc.get_iter_at_line(line_num + 1), True)
line_start_pos = line_start.get_offset()
matches = re.finditer(pattern, line_text)
for match in matches:
if current_iter.get_offset() in range(line_start_pos + match.start(), line_start_pos + match.end() + 1):
return doc.get_iter_at_offset(line_start_pos + match.start()), doc.get_iter_at_offset(line_start_pos+match.end())
return None
#'''
'''
def on_view_button_press_event(self, object, event):
#if event.button == 1 and event.type == Gdk.EventType.BUTTON_PRESS:
if event.button == 1 and event.type == 5: #EventType 2BUTTON_PRESS
print '2button press'
start, end = self.auto_select_word_bounds()
print self._window.get_active_document().get_text(start, end, True)
self._window.get_active_document().select_range(start, end)
#'''
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
8a0208ea6f4f05a60f00b54f6cdd4233171e6574 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_055/ch85_2020_05_09_19_47_41_223416.py | 40546593ba1c709f5bd32f0a8ecff9073cf49f93 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 450 | py | ocorrencias_banana = 0
# Count case-insensitive occurrences of "banana" in the text file.
#
# Bug fix: the original iterated `for string in texto` over single characters,
# so `range(len(string) - 1)` was always empty and the count stayed 0; its
# comparison `string[l] == 'b' or 'B'` was also always truthy, because the
# bare non-empty string 'B' is true on its own.
with open('macacos-me-mordam.txt', 'r') as arquivo:
    texto = arquivo.read()
ocorrencias_banana = texto.lower().count('banana')
print(ocorrencias_banana)
| [
"you@example.com"
] | you@example.com |
62c2725da0cb80c3f6d53c537923e3046ad1c8ad | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/6/p6k.py | 593c4262fcdfe6029b71f89aa8fa95c8bbd57ce8 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'p6K':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"juliettaylorswift@gmail.com"
] | juliettaylorswift@gmail.com |
2e0c46b2e3309b3b760e0fa11ae5d3a7644321bf | 19fdaccb11ba28ddf73ad88cfe7d158e6872b4a7 | /Projects_csld/common/redis_token.py | 1f1f7d87eaaab69d6792073dfc3bf03a931af22f | [] | no_license | basheGG/CSLD | 130a243780d65d596a146cb0894bf0355b5165fb | 286c5cbcab1487ecc36724fb1221bc07f021db45 | refs/heads/master | 2020-09-20T05:28:07.185000 | 2019-11-27T09:30:40 | 2019-11-27T09:30:40 | 224,388,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 572 | py | import redis
import json
r = redis.Redis(host='127.0.0.1', port=6379, password='', db=1, decode_responses=True)
class Token():
    """Session token hydrated from the Redis cache (key FaceMakeMoney:<token>)."""

    Token = ""
    Id = ""
    Type = ""
    Permissions = []

    def LoadToken(self, token):
        """Populate this object from the cached JSON blob and return self."""
        # NOTE(review): these prints leak the raw token into logs — confirm
        # they are intentional debug output before shipping.
        print('***********************************************')
        print(token)
        print('***********************************************')
        payload = r.get("FaceMakeMoney:{Token}".format(Token=token))
        parsed = json.loads(payload)
        for field in ("Token", "Id", "Type"):
            setattr(self, field, parsed[field])
        return self
"admin@example.com"
] | admin@example.com |
df401df878b31aa05c781a01902a20325731ffd7 | b99195cf2d181dec5c31aa7e58d747f474153802 | /MyScripts/while.py | 41f7991f03b0f4fdd67f02483f2687fc06f96852 | [] | no_license | eldadpuzach/MyPythonProjects | b1b4d56a822fd781c7c4c7a9e4bb5408c180c187 | 3a961a7c265caf1369067d98e94564f01f1bde74 | refs/heads/master | 2020-03-20T18:07:43.319331 | 2019-02-13T22:07:10 | 2019-02-13T22:07:10 | 137,570,971 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | x = 0
# Announce each counter value 0..9; the loop leaves x at 10, as before.
while True:
    if x >= 10:
        break
    print('Current counter is {0}'.format(x))
    x = x + 1
"eldadpuzach@gmail.com"
] | eldadpuzach@gmail.com |
c176e3f41b84dbe540dacb097d32f02880d5c6f0 | c4feb6227cc68e96c7454ee7682a91f6f6afd164 | /supervised_learning/0x08-deep_cnns/5-dense_block.py | 3c0f8e19dbbbc41427fb787ed218f4146de7e322 | [] | no_license | Karenahv/holbertonschool-machine_learning | 4b7ae5ad4cd1f06f8bae87a509d11b5c8069f8c9 | 884db3d605c2d0eee968f03ce7f525f2a557f261 | refs/heads/master | 2022-12-24T16:17:34.753055 | 2020-09-30T02:09:08 | 2020-09-30T02:09:08 | 255,319,995 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,051 | py | #!/usr/bin/env python3
"""builds a dense block as
described in Densely Connected
Convolutional Networks"""
import tensorflow.keras as K
def dense_block(X, nb_filters, growth_rate, layers):
    """Build a dense block as described in 'Densely Connected
    Convolutional Networks' (DenseNet-B, bottleneck variant).

    Each layer is BN -> ReLU -> 1x1 conv (4 * growth_rate filters) ->
    BN -> ReLU -> 3x3 conv (growth_rate filters), and its output is
    concatenated onto the running feature map.

    Returns the concatenated output tensor and the updated filter count.
    """
    init = K.initializers.he_normal(seed=None)
    for _ in range(layers):
        out = K.layers.BatchNormalization()(X)
        out = K.layers.Activation('relu')(out)
        # 1x1 bottleneck convolution.
        out = K.layers.Conv2D(filters=4 * growth_rate, kernel_size=1,
                              padding='same', kernel_initializer=init)(out)
        out = K.layers.BatchNormalization()(out)
        out = K.layers.Activation('relu')(out)
        # 3x3 convolution producing growth_rate new feature maps.
        out = K.layers.Conv2D(filters=growth_rate, kernel_size=3,
                              padding='same', kernel_initializer=init)(out)
        X = K.layers.concatenate([X, out])
        nb_filters += growth_rate
    return X, nb_filters
| [
"you@example.com"
] | you@example.com |
2c96ac1a2773232924361a5a429b20d26afb2287 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/130/usersdata/162/33622/submittedfiles/al8.py | f937fc1c073c5b47ba4a97a0d3fd006ae097d61d | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 134 | py | # -*- coding: utf-8 -*-
n = int(input('Digite o valor de n:'))
# Bug fix: `fatorial` was multiplied before ever being assigned, which raised
# NameError on the first iteration. The dead `i=1` pre-initialization was
# removed as well (the for-loop rebinds `i` immediately).
fatorial = 1
for i in range(1, n + 1, 1):
    fatorial = fatorial * i
    # Prints the running factorial i! at every step, matching the original layout.
    print(fatorial)
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
91ad66dae791a33f3cf1c674c9da232c59a763ab | a5eab1deb33a380a48444b836c871f0c93307e47 | /conkit/io/tests/test__iotools.py | d4ea7801c48c3147051277ccc5d876f640778c48 | [
"BSD-3-Clause"
] | permissive | xiangf/conkit | a425131bef2da193da0fe9100213b1cd5a40ce01 | 633978d4665dcf99f28c6e35e602e91c37c49229 | refs/heads/master | 2021-04-05T23:49:27.750815 | 2018-02-26T12:36:04 | 2018-02-26T12:36:04 | 125,077,426 | 0 | 0 | null | 2018-03-13T15:57:26 | 2018-03-13T15:57:26 | null | UTF-8 | Python | false | false | 3,868 | py | """Testing facility for conkit.io._iotools"""
__author__ = "Felix Simkovic"
__date__ = "21 Nov 2016"
import os
import unittest
from conkit.io import _iotools
class Test(unittest.TestCase):
    """Unit tests for the conkit.io._iotools helper functions."""

    def _tmp_f(self, *args, **kwargs):
        """Create a temp file via _iotools.create_tmp_f and schedule its removal.

        Using addCleanup fixes two leaks in the original tests: the trailing
        os.unlink() calls were skipped whenever an assertion failed mid-test,
        and test_open_f_handle_6 never deleted its file at all.
        """
        fname = _iotools.create_tmp_f(*args, **kwargs)
        self.addCleanup(os.unlink, fname)
        return fname

    def test_create_tmp_f_1(self):
        # No content: the helper must still create a real file.
        fname = self._tmp_f()
        self.assertTrue(os.path.isfile(fname))

    def test_create_tmp_f_2(self):
        # Text content round-trips through a 'w'-mode temp file.
        content = 'Hello, World!'
        fname = self._tmp_f(content=content, mode='w')
        self.assertTrue(os.path.isfile(fname))
        with open(fname, 'r') as f_in:
            self.assertEqual(content, f_in.read())

    def test_create_tmp_f_3(self):
        # Byte content round-trips through a 'wb'-mode temp file.
        content = 'Hello, World!'
        fname = self._tmp_f(content=content.encode('utf-8'), mode='wb')
        self.assertTrue(os.path.isfile(fname))
        with open(fname, 'rb') as f_in:
            self.assertEqual(content, f_in.read().decode('utf-8'))

    def test_is_str_like_1(self):
        self.assertTrue(_iotools.is_str_like('foo'))  # str
        # No other builtin container/number type qualifies as string-like.
        for value in (1, 1., [], (), {}, set()):
            self.assertFalse(_iotools.is_str_like(value))

    def test_open_f_handle_1(self):
        # 'append' yields an 'a'-mode handle for both path and handle inputs.
        fname = self._tmp_f()
        with _iotools.open_f_handle(fname, 'append') as fhandle:
            self.assertEqual('a', fhandle.mode)
        f_in_handle = _iotools.open_f_handle(fname, 'append')
        with _iotools.open_f_handle(f_in_handle, 'append') as fhandle:
            self.assertEqual('a', fhandle.mode)
        f_in_handle.close()

    def test_open_f_handle_2(self):
        # 'read' yields an 'r'-mode handle for both path and handle inputs.
        fname = self._tmp_f()
        with _iotools.open_f_handle(fname, 'read') as fhandle:
            self.assertEqual('r', fhandle.mode)
        f_in_handle = _iotools.open_f_handle(fname, 'read')
        with _iotools.open_f_handle(f_in_handle, 'read') as fhandle:
            self.assertEqual('r', fhandle.mode)
        f_in_handle.close()

    def test_open_f_handle_3(self):
        # 'write' yields a 'w'-mode handle for both path and handle inputs.
        fname = self._tmp_f()
        with _iotools.open_f_handle(fname, 'write') as fhandle:
            self.assertEqual('w', fhandle.mode)
        f_in_handle = _iotools.open_f_handle(fname, 'write')
        with _iotools.open_f_handle(f_in_handle, 'write') as fhandle:
            self.assertEqual('w', fhandle.mode)
        f_in_handle.close()

    def test_open_f_handle_4(self):
        # Data written through a 'write' handle is readable via a 'read' handle.
        fname = self._tmp_f()
        with _iotools.open_f_handle(fname, 'write') as fhandle:
            self.assertEqual('w', fhandle.mode)
            fhandle.write("hello world!")
        with _iotools.open_f_handle(fname, 'read') as fhandle:
            self.assertEqual('r', fhandle.mode)
            self.assertEqual("hello world!", fhandle.read().strip())

    def test_open_f_handle_5(self):
        # Non-string, non-handle inputs are rejected with TypeError.
        with self.assertRaises(TypeError):
            _iotools.open_f_handle(1, 'read')
        with self.assertRaises(TypeError):
            _iotools.open_f_handle(1.0, 'write')

    def test_open_f_handle_6(self):
        # Unknown mode strings are rejected with ValueError.
        fname = self._tmp_f()
        with self.assertRaises(ValueError):
            _iotools.open_f_handle(fname, 'foo')
        with self.assertRaises(ValueError):
            _iotools.open_f_handle(fname, 'bar')
if __name__ == "__main__":
unittest.main(verbosity=2)
| [
"felixsimkovic@me.com"
] | felixsimkovic@me.com |
153e0370ae6af4a26ea68a01c9d3909b89fac7dc | 2ca4d75dc00c20b886f16855d62d3cce444bb301 | /testsuits/test_baidu_search1.py | a928b1fbb78ffae2c522cce0168010df06d3edb8 | [] | no_license | christinecoco/python_automation | d81103aed00910595e662b581ef4350c16a9c316 | bad2e90d0ae23f3d34096d7cc6682b94702a8730 | refs/heads/master | 2020-05-23T22:37:47.718254 | 2019-05-16T07:43:15 | 2019-05-16T07:43:15 | 186,977,677 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,491 | py | #coding=utf-8
import time
import unittest
# import sys
# reload(sys)
# sys.setdefaultencoding('utf-8')
from framework.browser_engine import BrowserEngine
from pageobjects.baidu_homepage import HomePage
class BaiduSearch(unittest.TestCase):
    """Selenium UI tests for Baidu search, driven through the HomePage page object."""
    @classmethod
    def setUpClass(cls):
        """
        Test fixture setUp() code; mainly the preparation work required before testing.
        :return:
        """
        browse=BrowserEngine(cls)
        cls.driver=browse.open_browser(cls)
    @classmethod
    def tearDownClass(cls):
        """
        Actions performed after the tests finish; essentially just closing the browser.
        :return:
        """
        cls.driver.quit()
    def test_baidu_search(self):
        """
        The method name must start with `test`; wrap the test logic inside a
        test-prefixed method so the unittest runner picks it up.
        :return:
        """
        # reload(sys)
        # sys.setdefaultencoding('utf-8')
        homepage = HomePage(self.driver)
        homepage.type_search('selenium')
        homepage.send_submit_btn()
        time.sleep(2)
        homepage.get_window_img()
        try:
            assert 'selenium' in homepage.get_page_title()
            print('test pass')
        except Exception as e:
            print('test fail',format(e))
    def test_search1(self):
        """Search for 'python' and capture a screenshot of the results page."""
        homepage=HomePage(self.driver)
        homepage.type_search('python')
        homepage.send_submit_btn()
        time.sleep(2)
        homepage.get_window_img()
if __name__=='__main__':
unittest.main() | [
"tester@test.com"
] | tester@test.com |
f88ea05adf755edb13e91361742fe7015daeadda | a37c48267bfb8476476dad7219c4e3329f9e2991 | /Packs/GreyNoise/Integrations/GreyNoise/GreyNoise.py | 98eeec36b1c94bd69939479030148fc54d968818 | [
"MIT"
] | permissive | adambaumeister/content | 611ce9fba412a5eb28fbefa8a43282e98d3f9327 | 01b57f8c658c2faed047313d3034e8052ffa83ce | refs/heads/master | 2023-03-09T18:16:18.623380 | 2022-07-13T18:11:09 | 2022-07-13T18:11:09 | 274,290,989 | 2 | 0 | MIT | 2023-03-06T12:22:17 | 2020-06-23T02:36:53 | Python | UTF-8 | Python | false | false | 36,582 | py | from CommonServerPython import *
""" Imports """
import urllib3 # type: ignore
import traceback
import requests
import re
import copy
from typing import Tuple, Dict, Any
from greynoise import GreyNoise, exceptions, util # type: ignore
from greynoise.exceptions import RequestFailure, RateLimitError # type: ignore
# Disable insecure warnings
urllib3.disable_warnings()
util.LOGGER.warning = util.LOGGER.debug
""" CONSTANTS """
TIMEOUT = 10
PRETTY_KEY = {
"ip": "IP",
"first_seen": "First Seen",
"last_seen": "Last Seen",
"seen": "Seen",
"tags": "Tags",
"actor": "Actor",
"spoofable": "Spoofable",
"classification": "Classification",
"cve": "CVE",
"metadata": "MetaData",
"asn": "ASN",
"city": "City",
"country": "Country",
"country_code": "Country Code",
"organization": "Organization",
"category": "Category",
"tor": "Tor",
"rdns": "RDNS",
"os": "OS",
"region": "Region",
"vpn": "VPN",
"vpn_service": "VPN Service",
"raw_data": "raw_data",
"scan": "scan",
"port": "port",
"protocol": "protocol",
"web": "web",
"paths": "paths",
"useragents": "useragents",
"ja3": "ja3",
"fingerprint": "fingerprint",
"hassh": "hassh",
"bot": "BOT",
}
IP_CONTEXT_HEADERS = [
"IP",
"Classification",
"Actor",
"CVE",
"Tags",
"Spoofable",
"VPN",
"BOT",
"Tor",
"First Seen",
"Last Seen",
]
RIOT_HEADERS = ["IP", "Category", "Name", "Trust Level", "Description", "Last Updated"]
API_SERVER = util.DEFAULT_CONFIG.get("api_server")
IP_QUICK_CHECK_HEADERS = ["IP", "Noise", "RIOT", "Code", "Code Description"]
STATS_KEY = {
"classifications": "Classifications",
"spoofable": "Spoofable",
"organizations": "Organizations",
"actors": "Actors",
"countries": "Countries",
"tags": "Tags",
"operating_systems": "Operating Systems",
"categories": "Categories",
"asns": "ASNs",
}
STATS_H_KEY = {
"classification": "Classification",
"spoofable": "Spoofable",
"organization": "Organization",
"actor": "Actor",
"country": "Country",
"tag": "Tag",
"operating_system": "Operating System",
"category": "Category",
"asn": "ASN",
"count": "Count",
}
QUERY_OUTPUT_PREFIX: Dict[str, str] = {
"IP": "GreyNoise.IP(val.address && val.address == obj.address)",
"QUERY": "GreyNoise.Query(val.query && val.query == obj.query)",
}
EXCEPTION_MESSAGES = {
"API_RATE_LIMIT": "API Rate limit hit. Try after sometime.",
"UNAUTHENTICATED": "Unauthenticated. Check the configured API Key.",
"COMMAND_FAIL": "Failed to execute {} command.\n Error: {}",
"SERVER_ERROR": "The server encountered an internal error for GreyNoise and was unable to complete your request.",
"CONNECTION_TIMEOUT": "Connection timed out. Check your network connectivity.",
"PROXY": "Proxy Error - cannot connect to proxy. Either try clearing the "
"'Use system proxy' check-box or check the host, "
"authentication details and connection details for the proxy.",
"INVALID_RESPONSE": "Invalid response from GreyNoise. Response: {}",
"QUERY_STATS_RESPONSE": "GreyNoise request failed. Reason: {}",
}
""" CLIENT CLASS """
class Client(GreyNoise):
    """Client class to interact with the service API"""

    def authenticate(self):
        """
        Used to authenticate GreyNoise credentials.

        :return: "ok" when the API key belongs to a paid offering and is unexpired.
        :raises DemistoException: on auth, quota, server, or connectivity failures.
        """
        current_date = datetime.now()
        try:
            response = self.test_connection()
            expiration_date = datetime.strptime(response["expiration"], "%Y-%m-%d")
            # The key must be a paid offering and still inside its validity window.
            if current_date < expiration_date and response["offering"] != "community":
                return "ok"
            else:
                # Fix: the original message was missing the space between ")" and "or".
                raise DemistoException(
                    f"Invalid API Offering ({response['offering']}) or Expiration Date ({expiration_date})"
                )
        except RateLimitError:
            raise DemistoException(EXCEPTION_MESSAGES["API_RATE_LIMIT"])
        except RequestFailure as err:
            # The SDK raises RequestFailure as (status_code, body).
            status_code = err.args[0]
            body = str(err.args[1])
            if status_code == 401:
                raise DemistoException(EXCEPTION_MESSAGES["UNAUTHENTICATED"])
            elif status_code == 429:
                raise DemistoException(EXCEPTION_MESSAGES["API_RATE_LIMIT"])
            elif 400 <= status_code < 500:
                raise DemistoException(EXCEPTION_MESSAGES["COMMAND_FAIL"].format(demisto.command(), body))
            elif status_code >= 500:
                raise DemistoException(EXCEPTION_MESSAGES["SERVER_ERROR"])
            else:
                raise DemistoException(str(err))
        except requests.exceptions.ConnectTimeout:
            raise DemistoException(EXCEPTION_MESSAGES["CONNECTION_TIMEOUT"])
        except requests.exceptions.ProxyError:
            raise DemistoException(EXCEPTION_MESSAGES["PROXY"])
""" HELPER FUNCTIONS """
def exception_handler(func: Any) -> Any:
    """
    Decorator to handle all type of errors possible with GreyNoise SDK.

    Converts SDK/requests exceptions into DemistoException with user-friendly
    messages from EXCEPTION_MESSAGES.
    """

    # functools.wraps preserves the wrapped command's __name__/__doc__,
    # which the original decorator silently discarded.
    @functools.wraps(func)
    def inner_func(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except exceptions.RateLimitError:
            raise DemistoException(EXCEPTION_MESSAGES["API_RATE_LIMIT"])
        except exceptions.RequestFailure as err:
            status_code, body = parse_code_and_body(str(err))
            if status_code == 401 and "forbidden" in body:
                raise DemistoException(EXCEPTION_MESSAGES["UNAUTHENTICATED"])
            elif 400 <= status_code < 500:
                raise DemistoException(EXCEPTION_MESSAGES["COMMAND_FAIL"].format(demisto.command(), body))
            elif status_code >= 500:
                raise DemistoException(EXCEPTION_MESSAGES["SERVER_ERROR"])
            else:
                raise DemistoException(str(err))
        except requests.exceptions.ConnectTimeout:
            raise DemistoException(EXCEPTION_MESSAGES["CONNECTION_TIMEOUT"])
        except requests.exceptions.ProxyError:
            raise DemistoException(EXCEPTION_MESSAGES["PROXY"])

    return inner_func
def parse_code_and_body(message: str) -> Tuple[int, str]:
    """Parse the status code and body out of a GreyNoise SDK exception message.

    The SDK formats failures as "(<code>, <body>)"; anything else is returned
    unchanged with a code of 0.

    :type message: ``str``
    :param message: Exception message.

    :return: response code and response body.
    :rtype: ``tuple``
    """
    matched = re.search(r"\(([0-9]+), (.*)\)", message)  # NOSONAR
    if not matched:
        return 0, message
    code, body = matched.groups()
    # The body is usually a repr'd string; drop its surrounding quotes.
    return int(code), body.strip("'")
def get_ip_context_data(responses: list) -> list:
    """Convert raw GreyNoise ip/query records into pretty-keyed table rows.

    For every record: metadata entries are flattened into a "MetaData" list of
    "Label: value" strings (with "tor" also lifted to the top level so it gets
    its own column), the remaining non-empty fields are renamed via PRETTY_KEY,
    and the IP is wrapped in a GreyNoise visualizer markdown link.

    :type responses: ``list``
    :param responses: list of values of ip-context or ip-query.

    :return: list of ips context data.
    :rtype: ``list``
    """
    formatted = []
    for record in remove_empty_elements(responses):
        row: dict = {}
        meta_lines: list = []
        for meta_key, meta_val in record.get("metadata", {}).items():
            if meta_val == "":
                continue
            meta_lines.append(f"{PRETTY_KEY.get(meta_key, meta_key)}: {meta_val}")
            # Surface TOR at the top level so it shows up as a table column.
            if meta_key == "tor":
                row[PRETTY_KEY.get(meta_key, meta_key)] = meta_val
        row["MetaData"] = meta_lines
        for key, value in record.items():
            if value != "" and key not in ("metadata", "raw_data"):
                row[PRETTY_KEY.get(key, key)] = value
        addr = row["IP"]
        row["IP"] = f"[{addr}](https://www.greynoise.io/viz/ip/{addr})"
        formatted.append(row)
    return formatted
def get_ip_reputation_score(classification: str) -> Tuple[int, str]:
    """Map a GreyNoise classification onto a DBot score and its readable name.

    :type classification: ``str``
    :param classification: classification of ip provided from GreyNoise.

    :return: tuple of dbot score and it's readable form.
    :rtype: ``tuple``
    """
    score_map = {
        "unknown": (Common.DBotScore.SUSPICIOUS, "Suspicious"),
        "benign": (Common.DBotScore.GOOD, "Good"),
        "malicious": (Common.DBotScore.BAD, "Bad"),
    }
    # Anything unrecognized (including None) falls back to "no score".
    return score_map.get(classification, (Common.DBotScore.NONE, "Unknown"))
def generate_advanced_query(args: dict) -> str:
    """Build the GNQL advanced query string from command arguments.

    Normalizes spacing around ':' in any user-supplied advanced query, then
    appends each providence filter (actor, classification, cve, last_seen,
    organization, spoofable) that is set and not already mentioned in the
    query. Falls back to "spoofable:false" when nothing is supplied.

    :type args: ``dict``
    :param args: All command arguments, usually passed from ``demisto.args()``.

    :return: advanced query.
    :rtype: ``str``
    """
    query = args.get("advanced_query", "")
    filters: dict = {
        "actor": args.get("actor"),
        "classification": args.get("classification"),
        "spoofable": args.get("spoofable"),
        "last_seen": args.get("last_seen"),
        "organization": args.get("organization"),
        "cve": args.get("cve"),
    }
    if query:
        # Tolerate "key: value" / "key :value" as typed by users.
        query = query.replace(": ", ":").replace(" :", ":")
    for field in sorted(filters):
        value = filters[field]
        if value and f"{field}:" not in query:
            query += f" {field}:{value}"
    query = query.strip(" ")
    return query if query else "spoofable:false"
""" COMMAND FUNCTIONS """
@exception_handler
@logger
def test_module(client: Client) -> str:
    """Tests API connectivity and authentication.

    Returning 'ok' indicates that the integration works like it is supposed to.
    Connection to the service is successful.
    Raises exceptions if something goes wrong.

    :type client: ``Client``
    :param client: Client object for interaction with GreyNoise.

    :return: 'ok' if test passed, anything else will fail the test.
    :rtype: ``str``
    """
    # Delegates to Client.authenticate, which validates the API key,
    # its offering, and its expiration date.
    return client.authenticate()
@exception_handler
@logger
def ip_quick_check_command(client: Client, args: Dict[str, Any]) -> CommandResults:
    """Check whether given IP addresses are Internet Background Noise,
    or have been observed scanning or attacking devices across the internet.

    :type client: ``Client``
    :param client: Client object for interaction with GreyNoise.

    :type args: ``Dict``
    :param args: All command arguments, usually passed from ``demisto.args()``.

    :return: A ``CommandResults`` object that is then passed to ``return_results``,
        that contains the IP information.
    :rtype: ``CommandResults``
    """
    ip_addresses = argToList(args.get("ip"), ",")
    response = client.quick(ip_addresses)
    if not isinstance(response, list):
        raise DemistoException(EXCEPTION_MESSAGES["INVALID_RESPONSE"].format(response))

    # Keep an untouched copy for raw_response before the records are renamed below.
    original_response = copy.deepcopy(response)

    hr_rows = []
    for record in response:
        addr = record.get("ip") or record.get("address")
        hr_rows.append(
            {
                "IP": f"[{addr}](https://www.greynoise.io/viz/ip/{addr})",
                "Noise": record.get("noise"),
                "RIOT": record.get("riot"),
                "Code": record.get("code"),
                "Code Description": record.get("code_message"),
            }
        )
    hr = tableToMarkdown(name="IP Quick Check Details", t=hr_rows, headers=IP_QUICK_CHECK_HEADERS, removeNull=True)

    # Normalize keys for the XSOAR context: ip -> address, code_message -> code_value.
    for resp in response:
        if "ip" in resp:
            resp["address"] = resp.pop("ip")
        resp["code_value"] = resp.pop("code_message")

    return CommandResults(
        outputs_prefix="GreyNoise.IP",
        outputs_key_field="address",
        outputs=remove_empty_elements(response),
        readable_output=hr,
        raw_response=original_response,
    )
@exception_handler
@logger
def ip_reputation_command(client: Client, args: dict) -> List[CommandResults]:
    """Get information about a given IP address. Returns classification (benign, malicious or unknown),
    IP metadata (network owner, ASN, reverse DNS pointer, country), associated actors, activity tags,
    and raw port scan and web request information.

    :type client: ``Client``
    :param client: Client object for interaction with GreyNoise.

    :type args: ``dict``
    :param args: All command arguments, usually passed from ``demisto.args()``.

    :return: A list of ``CommandResults`` object that is then passed to ``return_results``,
        that contains the IP information.
    :rtype: ``List[CommandResults]``
    """
    ips = argToList(args.get("ip"), ",")
    command_results = []
    for ip in ips:
        # Each IP is looked up in both datasets: noise (context) and RIOT.
        response = client.ip(ip)
        riot_response = client.riot(ip)
        if not isinstance(response, dict) or not isinstance(riot_response, dict):
            raise DemistoException(EXCEPTION_MESSAGES["INVALID_RESPONSE"].format(response))
        original_response = copy.deepcopy(response)
        tmp_response = get_ip_context_data([response])
        response = remove_empty_elements(response)
        # Rename "ip" to "address" for the XSOAR context key field.
        response["address"] = response["ip"]
        del response["ip"]
        riot_original_response = copy.deepcopy(riot_response)
        riot_response = remove_empty_elements(riot_response)
        riot_response["address"] = riot_response["ip"]
        del riot_response["ip"]
        if riot_response["riot"]:
            # Translate RIOT trust levels into classifications and display strings.
            if riot_response["trust_level"] == "1":
                riot_response["classification"] = "benign"
                riot_response["trust_level"] = "1 - Reasonably Ignore"
            elif riot_response["trust_level"] == "2":
                riot_response["classification"] = "unknown"
                riot_response["trust_level"] = "2 - Commonly Seen"
            if riot_response.get("logo_url", "") != "":
                del riot_response["logo_url"]
        # Best-effort quick check to supply the DBot malicious description.
        try:
            response_quick: Any = ip_quick_check_command(client, {"ip": ip})
            malicious_description = response_quick.outputs[0].get("code_value")
        except Exception:
            malicious_description = ""
        # Case 1: seen as scanning noise, not a known business service.
        if response["seen"] and not riot_response["riot"]:
            dbot_score_int, dbot_score_string = get_ip_reputation_score(response.get("classification"))
            human_readable = f"### IP: {ip} found with Noise Reputation: {dbot_score_string}\n"
            human_readable += tableToMarkdown(
                name="GreyNoise Context IP Lookup", t=tmp_response, headers=IP_CONTEXT_HEADERS, removeNull=True
            )
            riot_tmp_response = {"IP": riot_response.get("address"), "RIOT": riot_response.get("riot")}
            human_readable += f"### IP: {ip} Not Associated with Common Business Service\n"
            human_readable += tableToMarkdown(
                name="GreyNoise RIOT IP Lookup", t=riot_tmp_response, headers=["IP", "RIOT"], removeNull=False
            )
            # Record the negative RIOT result in the noise output.
            response["riot"] = False
            dbot_score = Common.DBotScore(
                indicator=response.get("address"),
                indicator_type=DBotScoreType.IP,
                score=dbot_score_int,
                integration_name="GreyNoise",
                malicious_description=malicious_description,
            )
            city = response.get("metadata", {}).get("city", "")
            region = response.get("metadata", {}).get("region", "")
            country_code = response.get("metadata", {}).get("country_code", "")
            geo_description = (
                f"City: {city}, Region: {region}, Country Code: {country_code}"
                if (city or region or country_code)
                else ""
            )
            ip_standard_context = Common.IP(
                ip=response.get("address"),
                asn=response.get("metadata", {}).get("asn"),
                hostname=response.get("actor"),
                geo_country=response.get("metadata", {}).get("country"),
                geo_description=geo_description,
                dbot_score=dbot_score,
            )
            command_results.append(
                CommandResults(
                    readable_output=human_readable,
                    outputs_prefix="GreyNoise.IP",
                    outputs_key_field="address",
                    outputs=response,
                    indicator=ip_standard_context,
                    raw_response=original_response,
                )
            )
        # Case 2: known business service (RIOT) with no scanning noise.
        if riot_response["riot"] and not response["seen"]:
            riot_tmp_response = {
                "IP": f"[{riot_response.get('address')}](https://www.greynoise.io/viz/riot/{riot_response.get('address')})",
                "Name": riot_response.get("name"),
                "Category": riot_response.get("category"),
                "Trust Level": riot_response.get("trust_level"),
                "Description": riot_response.get("description"),
                "Last Updated": riot_response.get("last_updated"),
            }
            dbot_score_int, dbot_score_string = get_ip_reputation_score(riot_response.get("classification"))
            human_readable = f"### IP: {ip} found with RIOT Reputation: {dbot_score_string}\n"
            human_readable += f'Belongs to Common Business Service: {riot_response["name"]}\n'
            human_readable += tableToMarkdown(
                name="GreyNoise RIOT IP Lookup", t=riot_tmp_response, headers=RIOT_HEADERS, removeNull=False
            )
            tmp_response = [{"IP": response.get("address"), "Seen": response.get("seen")}]
            human_readable += f"### IP: {ip} No Mass-Internet Scanning Noise Found\n"
            human_readable += tableToMarkdown(
                name="GreyNoise Context IP Lookup", t=tmp_response, headers=["IP", "Seen"], removeNull=False
            )
            # Record the negative noise result in the RIOT output.
            riot_response["seen"] = False
            dbot_score = Common.DBotScore(
                indicator=response.get("address"),
                indicator_type=DBotScoreType.IP,
                score=dbot_score_int,
                integration_name="GreyNoise",
                malicious_description=malicious_description,
            )
            ip_standard_context = Common.IP(ip=response.get("address"), dbot_score=dbot_score)
            command_results.append(
                CommandResults(
                    readable_output=human_readable,
                    outputs_prefix="GreyNoise.Riot",
                    outputs_key_field="address",
                    outputs=riot_response,
                    indicator=ip_standard_context,
                    raw_response=riot_original_response,
                )
            )
        # Case 3: both scanning noise and a known business service.
        if response["seen"] and riot_response["riot"]:
            # Merge both result sets; RIOT keys win on collision.
            combo_response = response.copy()
            combo_response.update(riot_response)
            dbot_score_int, dbot_score_string = get_ip_reputation_score(response.get("classification"))
            human_readable = f"### IP: {ip} found with Noise Reputation: {dbot_score_string}\n"
            human_readable += tableToMarkdown(
                name="GreyNoise Context IP Lookup", t=tmp_response, headers=IP_CONTEXT_HEADERS, removeNull=True
            )
            dbot_score = Common.DBotScore(
                indicator=response.get("address"),
                indicator_type=DBotScoreType.IP,
                score=dbot_score_int,
                integration_name="GreyNoise",
                malicious_description=malicious_description,
            )
            city = response.get("metadata", {}).get("city", "")
            region = response.get("metadata", {}).get("region", "")
            country_code = response.get("metadata", {}).get("country_code", "")
            geo_description = (
                f"City: {city}, Region: {region}, Country Code: {country_code}"
                if (city or region or country_code)
                else ""
            )
            ip_standard_context = Common.IP(
                ip=response.get("address"),
                asn=response.get("metadata", {}).get("asn"),
                hostname=response.get("actor"),
                geo_country=response.get("metadata", {}).get("country"),
                geo_description=geo_description,
                dbot_score=dbot_score,
            )
            riot_tmp_response = {
                "IP": f"[{riot_response.get('address')}](https://www.greynoise.io/viz/riot/{riot_response.get('address')})",
                "Name": riot_response.get("name"),
                "Category": riot_response.get("category"),
                "Trust Level": riot_response.get("trust_level"),
                "Description": riot_response.get("description"),
                "Last Updated": riot_response.get("last_updated"),
            }
            human_readable += f"### IP: {ip} found with RIOT Reputation: {dbot_score_string}\n"
            human_readable += f'Belongs to Common Business Service: {riot_response["name"]}\n'
            human_readable += tableToMarkdown(
                name="GreyNoise RIOT IP Lookup", t=riot_tmp_response, headers=RIOT_HEADERS, removeNull=False
            )
            command_results.append(
                CommandResults(
                    readable_output=human_readable,
                    outputs_prefix="GreyNoise.IP",
                    outputs_key_field="address",
                    outputs=combo_response,
                    indicator=ip_standard_context,
                    raw_response=combo_response,
                )
            )
        # Case 4: no noise and no RIOT entry at all.
        if not response["seen"] and not riot_response["riot"]:
            combo_response = response.copy()
            combo_response.update(riot_response)
            combo_tmp_response = {
                "IP": combo_response.get("address"),
                "RIOT": combo_response.get("riot"),
                "Seen": combo_response.get("seen"),
            }
            dbot_score_int, dbot_score_string = get_ip_reputation_score(combo_response.get("classification"))
            dbot_score = Common.DBotScore(
                indicator=combo_response.get("address"),
                indicator_type=DBotScoreType.IP,
                score=dbot_score_int,
                integration_name="GreyNoise",
                malicious_description=malicious_description,
            )
            ip_standard_context = Common.IP(ip=response.get("address"), dbot_score=dbot_score)
            human_readable = f"### IP: {ip} No Mass-Internet Scanning Noise Found\n"
            human_readable += tableToMarkdown(
                name="GreyNoise Context IP Lookup", t=combo_tmp_response, headers=["IP", "Seen"], removeNull=True
            )
            human_readable += f"### IP: {ip} Not Associated with Common Business Service\n"
            human_readable += tableToMarkdown(
                name="GreyNoise RIOT IP Lookup", t=combo_tmp_response, headers=["IP", "RIOT"], removeNull=True
            )
            command_results.append(
                CommandResults(
                    readable_output=human_readable,
                    outputs_prefix="GreyNoise.IP",
                    outputs_key_field="address",
                    indicator=ip_standard_context,
                    outputs=combo_response,
                    raw_response=combo_response,
                )
            )
    return command_results
@exception_handler
@logger
def query_command(client: Client, args: dict) -> CommandResults:
    """Get the information of IPs matching a GNQL providence-filter query.

    :type client: ``Client``
    :param client: Client object for interaction with GreyNoise.

    :type args: ``dict``
    :param args: All command arguments, usually passed from ``demisto.args()``.

    :return: ``CommandResults`` object, that contains the IP information.
    :rtype: ``CommandResults``
    """
    advanced_query = generate_advanced_query(args)
    query_response = client.query(query=advanced_query, size=args.get("size", "10"), scroll=args.get("next_token"))
    if not isinstance(query_response, dict):
        raise DemistoException(EXCEPTION_MESSAGES["INVALID_RESPONSE"].format(query_response))
    if query_response.get("message") not in ["ok", "no results"]:
        raise DemistoException(EXCEPTION_MESSAGES["QUERY_STATS_RESPONSE"].format(query_response.get("message")))

    original_response = copy.deepcopy(query_response)
    if query_response["message"] == "ok":
        tmp_response = []
        for each in query_response.get("data", []):
            tmp_response += get_ip_context_data([each])
            # Rename "ip" -> "address" to match the GreyNoise.IP context schema.
            each["address"] = each["ip"]
            del each["ip"]
        human_readable = f'### Total findings: {query_response.get("count")}\n'
        human_readable += tableToMarkdown(
            name="IP Context", t=tmp_response, headers=IP_CONTEXT_HEADERS, removeNull=True
        )
        if not query_response.get("complete"):
            human_readable += f'\n### Next Page Token: \n{query_response.get("scroll")}'
        query = query_response.get("query", "").replace(" ", "+")
        query_link = f"https://www.greynoise.io/viz/query/?gnql={query}"
        # HTML-escape characters that would otherwise break the markdown link.
        # NOTE(review): the dumped source had these entities decoded (making the
        # second line a syntax error); restored to the escaped form.
        query_link = query_link.replace("*", "*")
        query_link = query_link.replace('"', """)
        human_readable += f"\n*To view the detailed query result please click [here]({query_link}).*"
        outputs = {
            QUERY_OUTPUT_PREFIX["IP"]: query_response.get("data", []),
            QUERY_OUTPUT_PREFIX["QUERY"]: {
                "complete": query_response.get("complete"),
                "count": query_response.get("count"),
                "message": query_response.get("message"),
                "query": query_response.get("query"),
                "scroll": query_response.get("scroll"),
            },
        }
    elif query_response["message"] == "no results":
        outputs = {}
        human_readable = "### GreyNoise Query returned No Results."
        query = query_response.get("query", "").replace(" ", "+")
        query_link = f"https://www.greynoise.io/viz/query/?gnql={query}"
        query_link = query_link.replace("*", "*")
        query_link = query_link.replace('"', """)
        human_readable += f"\n*To view the detailed query result please click [here]({query_link}).*"

    return CommandResults(
        readable_output=human_readable, outputs=remove_empty_elements(outputs), raw_response=original_response
    )
@exception_handler
@logger
def stats_command(client: Client, args: dict) -> Any:
    """Get aggregate statistics for the top organizations, actors, tags, ASNs, countries,
    classifications, and operating systems of all the results of a given GNQL query.

    :type client: ``Client``
    :param client: Client object for interaction with GreyNoise.

    :type args: ``dict``
    :param args: All command arguments, usually passed from ``demisto.args()``.

    :return: A ``CommandResults`` object that is then passed to ``return_results``,
        that contains the IP information.
    :rtype: ``CommandResults``
    """
    advance_query = generate_advanced_query(args)
    response = client.stats(query=advance_query, count=args.get("size", "10"))
    if not isinstance(response, dict):
        raise DemistoException(EXCEPTION_MESSAGES["INVALID_RESPONSE"].format(response))

    if response["count"] > 0:
        human_readable = f'### Stats\n### Query: {advance_query} Count: {response.get("count", "0")}\n'
        for key, value in response.get("stats", {}).items():
            hr_list: list = []
            # Initialized here (not only inside the record loop) so an empty
            # stats list cannot leave `header` undefined at tableToMarkdown.
            header: list = []
            if value is None:
                continue
            for rec in value:
                hr_rec: dict = {}
                header = []
                for k, v in rec.items():
                    # Map raw API keys to human-readable column names.
                    hr_rec.update({f"{STATS_H_KEY.get(k)}": f"{v}"})
                    header.append(STATS_H_KEY.get(k))
                hr_list.append(hr_rec)
            human_readable += tableToMarkdown(
                name=f"{STATS_KEY.get(key, key)}", t=hr_list, headers=header, removeNull=True
            )
    elif response.get("count") == 0:
        human_readable = "### GreyNoise Stats Query returned No Results."

    return CommandResults(
        outputs_prefix="GreyNoise.Stats",
        outputs_key_field="query",
        outputs=remove_empty_elements(response),
        readable_output=human_readable,
    )
@exception_handler
@logger
def riot_command(client: Client, args: Dict) -> CommandResults:
    """
    Returns information about IP whether it is harmful or not. RIOT (Rule It Out) means to inform the analyst about
    the harmfulness of the IP. For the harmless IP, the value of Riot is "True" which in turn returns DNS and other
    information about the IP. For the harmful IP, the value of Riot is "False".

    :type client: ``Client``
    :param client: client object

    :type args: ``dict``
    :param args: All command arguments, usually passed from ``demisto.args()``.

    :return: A ``CommandResults`` object that is then passed to ``return_results``,
        that contains the IP information.
    :rtype: ``CommandResults``
    """
    ip = args.get("ip", "")
    response = client.riot(ip)
    original_response = copy.deepcopy(response)
    response = remove_empty_elements(response)
    name = ""
    # Defensive defaults: an unexpected/missing "riot" value would otherwise
    # leave hr/headers undefined and raise NameError below.
    hr: dict = {}
    headers: list = []
    if response.get("riot") is False or response.get("riot") == "false":
        name = "GreyNoise: IP Not Found in RIOT"
        hr = {
            "IP": response.get("ip"),
            "RIOT": response.get("riot"),
        }
        headers = ["IP", "RIOT"]
    elif response.get("riot") is True or response.get("riot") == "true":
        if response.get("logo_url", "") != "":
            del response["logo_url"]
        # Expand numeric trust levels into their descriptive labels.
        if response.get("trust_level") == "1":
            response["trust_level"] = "1 - Reasonably Ignore"
        elif response.get("trust_level") == "2":
            response["trust_level"] = "2 - Commonly Seen"
        name = "GreyNoise: IP Belongs to Common Business Service"
        hr = {
            "IP": f"[{response.get('ip')}](https://www.greynoise.io/viz/riot/{response.get('ip')})",
            "Name": response.get("name"),
            "Category": response.get("category"),
            "Trust Level": response.get("trust_level"),
            "Description": response.get("description"),
            "Last Updated": response.get("last_updated"),
        }
        headers = RIOT_HEADERS
    human_readable = tableToMarkdown(name=name, t=hr, headers=headers, removeNull=True)
    return CommandResults(
        outputs_prefix="GreyNoise.Riot",
        outputs_key_field="ip",
        outputs=response,
        readable_output=human_readable,
        raw_response=original_response,
    )
@exception_handler
@logger
def context_command(client: Client, args: Dict) -> CommandResults:
    """
    Returns full GreyNoise context information for an IP address: noise
    classification, reputation score, and geographical/ASN metadata.
    (Docstring fixed: the original text was copy-pasted from riot_command.)

    :type client: ``Client``
    :param client: client object

    :type args: ``dict``
    :param args: All command arguments, usually passed from ``demisto.args()``.

    :return: A ``CommandResults`` object that is then passed to ``return_results``,
        that contains the IP information.
    :rtype: ``CommandResults``
    """
    ip = args.get("ip", "")
    response = client.ip(ip)
    if not isinstance(response, dict):
        raise DemistoException(EXCEPTION_MESSAGES["INVALID_RESPONSE"].format(response))
    original_response = copy.deepcopy(response)
    tmp_response = get_ip_context_data([response])
    response = remove_empty_elements(response)
    # Rename "ip" -> "address" to match the GreyNoise.IP context schema.
    response["address"] = response["ip"]
    del response["ip"]
    dbot_score_int, dbot_score_string = get_ip_reputation_score(response.get("classification"))
    if response["seen"]:
        human_readable = f"### IP: {ip} found with Noise Reputation: {dbot_score_string}\n"
        headers = IP_CONTEXT_HEADERS
    else:
        human_readable = f"### IP: {ip} No Mass-Internet Scanning Noise Found\n"
        tmp_response = [{"IP": response.get("address"), "Seen": response.get("seen")}]
        headers = ["IP", "Seen"]
    human_readable += tableToMarkdown(
        name="GreyNoise Context IP Lookup", t=tmp_response, headers=headers, removeNull=True
    )
    # Best-effort enrichment of the malicious description; failures are
    # deliberately swallowed so context lookup still succeeds.
    try:
        response_quick: Any = ip_quick_check_command(client, {"ip": ip})
        malicious_description = response_quick.outputs[0].get("code_value")
    except Exception:
        malicious_description = ""
    dbot_score = Common.DBotScore(
        indicator=response.get("address"),
        indicator_type=DBotScoreType.IP,
        score=dbot_score_int,
        integration_name="GreyNoise",
        malicious_description=malicious_description,
    )
    city = response.get("metadata", {}).get("city", "")
    region = response.get("metadata", {}).get("region", "")
    country_code = response.get("metadata", {}).get("country_code", "")
    geo_description = (
        f"City: {city}, Region: {region}, Country Code: {country_code}" if (city or region or country_code) else ""
    )
    ip_standard_context = Common.IP(
        ip=response.get("address"),
        asn=response.get("metadata", {}).get("asn"),
        hostname=response.get("actor"),
        geo_country=response.get("metadata", {}).get("country"),
        geo_description=geo_description,
        dbot_score=dbot_score,
    )
    return CommandResults(
        readable_output=human_readable,
        outputs_prefix="GreyNoise.IP",
        outputs_key_field="address",
        outputs=response,
        indicator=ip_standard_context,
        raw_response=original_response,
    )
""" MAIN FUNCTION """
def main() -> None:
"""main function, parses params and runs command functions
:return:
:rtype:
"""
# get pack version
if is_demisto_version_ge("6.1.0"):
response = demisto.internalHttpRequest("GET", "/contentpacks/metadata/installed")
packs = json.loads(response["body"])
else:
packs = []
pack_version = "1.1.2"
for pack in packs:
if pack["name"] == "GreyNoise":
pack_version = pack["currentVersion"]
api_key = demisto.params().get("apikey")
proxy = demisto.params().get("proxy", False)
demisto.debug(f"Command being called is {demisto.command()}")
try:
client = Client(
api_key=api_key,
api_server=API_SERVER,
timeout=TIMEOUT,
proxy=handle_proxy("proxy", proxy).get("https", ""),
use_cache=False,
integration_name=f"xsoar-integration-v{pack_version}",
)
if demisto.command() == "test-module":
# This is the call made when pressing the integration Test button.
result: Any = test_module(client)
return_results(result)
elif demisto.command() == "greynoise-ip-quick-check":
result = ip_quick_check_command(client, demisto.args())
return_results(result)
elif demisto.command() == "ip":
result = ip_reputation_command(client, demisto.args())
return_results(result)
elif demisto.command() == "greynoise-stats":
result = stats_command(client, demisto.args())
return_results(result)
elif demisto.command() == "greynoise-query":
result = query_command(client, demisto.args())
return_results(result)
elif demisto.command() == "greynoise-riot":
result = riot_command(client, demisto.args())
return_results(result)
elif demisto.command() == "greynoise-context":
result = context_command(client, demisto.args())
return_results(result)
# Log exceptions and return errors
except DemistoException as err:
return_error(str(err))
except Exception as err:
demisto.error(traceback.format_exc()) # print the traceback
return_error(
EXCEPTION_MESSAGES["COMMAND_FAIL"].format(demisto.command(), str(err))
)
""" ENTRY POINT """
if __name__ in ("__main__", "__builtin__", "builtins"):
main()
| [
"noreply@github.com"
] | adambaumeister.noreply@github.com |
c361fd4c9c62df1c52bbd066ad265b76e4f5d3bd | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/exp-big-1450.py | 4af975f9d49d0f7c1af22e4f2dc895b6dd87fb17 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,180 | py | # Compute x**y
def exp(x: int, y: int) -> int:
a: int = 0
a2: int = 0
a3: int = 0
a4: int = 0
a5: int = 0
def f(i: int) -> int:
nonlocal a
nonlocal a2
nonlocal a3
nonlocal a4
nonlocal a5
def geta() -> int:
return a
if i <= 0:
return geta()
else:
a = a * x
a2 = a * x
a3 = a * x
a4 = a * x
a5 = a * x
return f(i-1)
a = 1
a2 = 1
a3 = 1
a4 = 1
a5 = 1
return f(y)
def exp2(x: int, y: int, x2: int, y2: int) -> int:
    """Return x**y; x2 and y2 are accepted but unused (synthetic case)."""
    a: int = 0
    a2: int = 0
    a3: int = 0
    a4: int = 0
    a5: int = 0

    def f(i: int) -> int:
        nonlocal a, a2, a3, a4, a5

        def geta() -> int:
            return a

        if i <= 0:
            return geta()
        a = a * x
        a2 = a * x
        a3 = a * x
        a4 = a * x
        a5 = a * x
        return f(i - 1)

    a = a2 = a3 = a4 = a5 = 1
    return f(y)
def exp3(x: int, y: int, x2: int, y2: int, x3: int, y3: int) -> int:
    """Return x**y; the extra parameter pairs are accepted but unused."""
    a: int = 0
    a2: int = 0
    a3: int = 0
    a4: int = 0
    a5: int = 0

    def f(i: int) -> int:
        nonlocal a, a2, a3, a4, a5

        def geta() -> int:
            return a

        if i <= 0:
            return geta()
        a = a * x
        a2 = a * x
        a3 = a * x
        a4 = a * x
        a5 = a * x
        return f(i - 1)

    a = a2 = a3 = a4 = a5 = 1
    return f(y)
def exp4(x: int, y: int, x2: int, y2: int, x3: int, y3: int, x4: int, y4: int) -> int:
    """Return x**y; the extra parameter pairs are accepted but unused."""
    a: int = 0
    a2: int = 0
    a3: int = 0
    a4: int = 0
    a5: int = 0

    def f(i: int) -> int:
        nonlocal a, a2, a3, a4, a5

        def geta() -> int:
            return a

        if i <= 0:
            return geta()
        a = a * x
        a2 = a * x
        a3 = a * x
        a4 = a * x
        a5 = a * x
        return f(i - 1)

    a = a2 = a3 = a4 = a5 = 1
    return f(y)
def exp5(x: int, y: int, x2: int, y2: int, x3: int, y3: int, x4: int, y4: int, x5: int, y5: int) -> int:
    """Return x**y; the extra parameter pairs are accepted but unused."""
    a: int = 0
    a2: int = 0
    a3: int = 0
    a4: int = 0
    a5: int = 0

    def f(i: int) -> int:
        nonlocal a, a2, a3, a4, a5

        def geta() -> int:
            return a

        if i <= 0:
            return geta()
        a = a * x
        a2 = a * x
        a3 = a * x
        a4 = a * x
        a5 = a * x
        return f(i - 1)

    a = a2 = a3 = a4 = a5 = 1
    return f(y)
# Input parameter
n: int = 42
n2: int = 42
n3: int = 42
n4: int = 42
n5: int = 42

# Run [0, n]
i: int = 0
i2: int = 0
i3: int = 0
i4: int = 0
i5: int = 0

# Crunch: print 2**(i % 31) for i in [0, n]
while i <= n:
    print(exp(2, i % 31))
    # NOTE(review): the original line read `$Var = i + 1`, which is not valid
    # syntax — likely an intentionally injected parse error in this synthetic
    # test case. Restored to the increment the loop requires; confirm against
    # the test harness before keeping this change.
    i = i + 1
"647530+Virtlink@users.noreply.github.com"
] | 647530+Virtlink@users.noreply.github.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.