blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
090d7e24217c1a131ee19c720ec5adff441bb282 | 2b8f1b067a6602a6520e9846a2df8b83a359623a | /BOJ/BaaarkingDog/0x1D_다익스트라 알고리즘/17835.py | 048443f9ddf4f5ef97defa6cf7789b1c7040321c | [] | no_license | ymink716/PS | 3f9df821a1d4db110cd9d56b09b4c1d756951dd8 | e997ecf5a3bec1d840486b8d90b934ae1cbafe94 | refs/heads/master | 2023-08-18T18:21:45.416083 | 2023-08-16T07:26:18 | 2023-08-16T07:26:18 | 218,685,650 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,378 | py | # 면접보는 승범이네
# https://www.acmicpc.net/problem/17835
import heapq
import sys
input = sys.stdin.readline  # fast input for large edge lists
# n cities, m one-way roads, k interview venues
n, m, k = map(int, input().split())
graph = [[] for _ in range(n + 1)]
# Store the road info reversed: an edge u->v of cost c is recorded as v->(u, c),
# so a single multi-source Dijkstra from the venues gives city->venue distances.
for _ in range(m):
    u, v, c = map(int, input().split())
    graph[v].append((u, c))
# List of interview venues
targets = list(map(int, input().split()))
def dijkstra():
    """Multi-source Dijkstra over the reversed road graph.

    Seeds the heap with every interview venue at distance 0, so after the
    relaxation loop distance[v] holds the shortest travel distance from
    city v to its nearest venue (uses module-level targets/graph/distance).
    """
    heap = [(0, venue) for venue in targets]
    for _, venue in heap:
        distance[venue] = 0
    heapq.heapify(heap)
    while heap:
        cur_dist, city = heapq.heappop(heap)
        # Stale heap entry: a shorter path to this city was already settled.
        if cur_dist > distance[city]:
            continue
        # Relax every edge leaving the current city.
        for neighbor, weight in graph[city]:
            candidate = cur_dist + weight
            if candidate < distance[neighbor]:
                distance[neighbor] = candidate
                heapq.heappush(heap, (candidate, neighbor))
distance = [int(1e11)] * (n + 1)  # effectively-infinite initial distances
city, dist = 0, 0
dijkstra()
# Answer: the city whose nearest interview venue is farthest away.
# The strict '<' keeps the smallest city index on ties.
for i in range(1, n + 1):
    if dist < distance[i]:
        city, dist = i, distance[i]
print(city)
print(dist)
| [
"ymink716@gmail.com"
] | ymink716@gmail.com |
3a3288f366b89fd77fc0f90506a4ed22a73e68bf | 9d0dcacf6878a3451a01083ae0b87521e3bd436d | /fMRI/fx/multiconds/SST/multiconds_posterror_with_pss.py | 41c471fbc33322f58b1075ad7a8037cf6044d7ad | [] | no_license | UOSAN/DEV_scripts | 244a87fb3dfc230c3fad19b2e4db5a0a2fab1ef4 | 513121b4fb84aaed45099415d7f9cf5e876b5d57 | refs/heads/master | 2023-08-31T11:15:54.357529 | 2023-08-28T16:44:56 | 2023-08-28T16:44:56 | 144,056,482 | 0 | 2 | null | 2019-11-26T23:48:57 | 2018-08-08T19:04:35 | MATLAB | UTF-8 | Python | false | false | 12,558 | py | import argparse
import json
import re
from os import PathLike
from pathlib import Path
from typing import Union, List
import numpy as np
from numpy.core.records import fromarrays # https://stackoverflow.com/questions/33212855/how-can-i-create-a-matlab-struct-array-from-scipy-io
import scipy.io
from zmq import PROTOCOL_ERROR_ZMTP_MALFORMED_COMMAND_MESSAGE
from multiconds import *
def create_pss_parametric_modulator_struct(pss_list: List, posterror_masks_dict: dict, posterror_conditions: dict):
    """Assemble the SPM 'pmod' parametric-modulator struct from post-stop-signal
    (PSS) values for the two post-stop go conditions.

    Parameters
    ----------
    pss_list : mapping from condition name to a 1-D array of PSS values
        (NaN where a trial has no value), or None when the condition has no
        usable trials for this subject/run.
    posterror_masks_dict : unused here; kept for interface compatibility.
    posterror_conditions : dict with a 'names' list; used to verify that the
        two modulated conditions lead the condition list, which the positional
        pmod ordering below relies on.

    Returns
    -------
    dict with 'pmod' (structured array of (name, param, poly) records) and
    'orth' entries, or {} when no condition yields a modulator.
    """
    posterror_names = posterror_conditions['names']

    def _build_pmod_entry(mod_values, condition_name):
        """Return one (name, param, poly) pmod tuple, or None when mod_values is missing."""
        if mod_values is None:
            # NOTE(review): downstream (next-level) analysis must tolerate a
            # missing regressor for some subjects -- still to be verified.
            warnings.warn(
                "need to verify the next level is prepped to deal with some subjects having a missing regressor for condition " + condition_name + ".")
            return None
        # Mean-center within the condition, ignoring NaNs, then zero out the
        # trials without a PSS value so they contribute nothing to the fit.
        centered = mod_values - np.nanmean(mod_values)
        centered[np.isnan(centered)] = 0
        # MATLAB cell-array interop: wrap the vector in a 1-element object
        # array (see https://stackoverflow.com/questions/19797822).
        param_cell = np.empty(1, dtype='O')
        param_cell[0] = centered
        # Abbreviate e.g. 'CorrectGoFollowingCorrectStop' -> 'CgfcsRT'.
        capitals = re.findall("[A-Z]", condition_name)
        abbreviation = "".join(capitals).lower()
        abbreviation = abbreviation[0].upper() + abbreviation[1:]
        return (abbreviation + "RT", param_cell, [1.0])

    pmod_list = [
        _build_pmod_entry(pss_list['CorrectGoFollowingCorrectStop'], 'CorrectGoFollowingCorrectStop'),
        _build_pmod_entry(pss_list['CorrectGoFollowingFailedStop'], 'CorrectGoFollowingFailedStop'),
    ]

    # The pmod array is positional: it only lines up with the conditions if
    # these two conditions sit at the start of the condition-name list.
    if 'CorrectGoFollowingCorrectStop' in posterror_names and posterror_names[0] != 'CorrectGoFollowingCorrectStop':
        raise Exception('CorrectGoFollowingCorrectStop not where expected.')
    if 'CorrectGoFollowingFailedStop' in posterror_names and 'CorrectGoFollowingFailedStop' not in posterror_names[0:2]:
        raise Exception('CorrectGoFollowingFailedStop not where expected.')

    # BUG FIX: the original filtered on `pmod_list is not None` (always True),
    # so None placeholders for missing regressors were never removed and would
    # poison the structured-array construction below.
    pmod_list = [pmod for pmod in pmod_list if pmod is not None]
    if len(pmod_list) == 0:
        return {}  # no parameters to pass

    pmod_array = np.array(
        pmod_list,
        dtype=([('name', 'object', (1,)), ('param', 'O', (1,)), ('poly', 'object', (1,))])
    )
    # 'orth' of zeros disables SPM's automatic orthogonalisation per modulator.
    return {
        'pmod': pmod_array,
        'orth': np.array([0] * len(pmod_list), dtype='O')
    }
##NEXT TO DO: HOOK THIS UP AND RUN IT; IF IT RUNS, WRITE OUTPUT TO MAT FILES; IF IT DOESN'T, WORK OUT HOW TO DEAL WITH THE MISSING REGRESSOR ISSUE.
def main(input_dir: str, bids_dir: str = None, file_limit=None,
         use_rt_for_go_success_trials=True,
         include_rt_pmod=True,
         folder_id='posterror_conditions_w_pss',
         output_folder=""):
    """Build post-error condition files (optionally with PSS parametric
    modulators) for every DEV*.mat SST behavioral file in input_dir.

    Parameters
    ----------
    input_dir : directory containing DEV<subject>_run<wave>_*.mat files.
    bids_dir : when set, write BIDS-format events instead of condition files.
    file_limit : optional cap on the number of files processed (testing aid).
    use_rt_for_go_success_trials : forwarded to read_data().
    include_rt_pmod : when True, attach the PSS parametric-modulator struct
        to the post-error conditions.
    folder_id : output subfolder name for the written condition files.
    output_folder : root of the output tree.
    """
    print(input_dir)
    files = list(Path(input_dir).glob('DEV*.mat'))
    files.sort()
    pattern = 'DEV(\\d{3})_run(\\d{1})_.*.mat'
    # pattern = 'DEV(\\d{3})_(\\d{1})_SST1\\.mat'
    # for testing
    if file_limit is not None:
        files = files[0:file_limit]
    file_condition_index = {}
    file_condition_index['base'] = {}
    file_condition_index['posterror'] = {}
    multicond_df_list = []
    for f in files:
        match = re.search(pattern, str(f.name))
        if match:
            subject_id, wave_number = match.groups()
            print(f.name)
            # Read data out of the .mat file
            trial_number, go_no_go_condition, subject_response, reaction_time, trial_duration, trial_start_time, arrow_presented = \
                read_data(f, use_rt_for_go_success_trials=use_rt_for_go_success_trials)
            # Create masks for the various conditions
            masks = create_masks(go_no_go_condition, subject_response)
            print_mask_signature(masks)
            # create masks for the post-error slowing
            posterror_masks_dict = create_posterror_masks_from_masks(masks)
            posterror_masks = list(posterror_masks_dict.values())
            print_mask_signature(posterror_masks)
            # Perform some quality checking on the numbers of responses (should be 256),
            # the number of null trials (should be 128),
            # go trials (should be 96), and no-go trials (should be 32)
            if subject_response.size != COUNT_RESPONSE:
                print(f'Wrong number of responses : (subject, expected, actual) '
                      f'({subject_id}, {COUNT_RESPONSE}, {subject_response.size})')
            if np.count_nonzero(masks[0] + masks[-1]) != COUNT_GO:
                print(f'Wrong number of go trials : (subject, run, expected, actual) '
                      f'({subject_id}, {wave_number}, {COUNT_GO}, {np.count_nonzero(masks[0] + masks[-1])})')
            if np.count_nonzero(masks[1] + masks[2]) != COUNT_NO_GO:
                print(f'Wrong number of no-go trials: (subject, expected, actual) '
                      f'({subject_id}, {COUNT_NO_GO}, {np.count_nonzero(masks[1] + masks[2])})')
            if np.count_nonzero(masks[3]) != COUNT_NULL:
                print(f'Wrong number of null trials : (subject, expected, actual) '
                      f'({subject_id}, {COUNT_NULL}, {np.count_nonzero(masks[3])})')
            # preprocess subject responses for attention check
            cleaned_subject_response = clean_response_data(subject_response, arrow_presented)
            if bids_dir:  # create MAT files storing behavioral information in bids format
                print("creating bids events")
                # FIX: np.object was removed in NumPy 1.24; the builtin
                # `object` is the documented, equivalent replacement.
                trial_type = np.empty_like(trial_number, dtype=object)
                trial_type_names = ['correct-go', 'correct-stop', 'failed-stop', 'null', 'failed-go']
                for mask, name in zip(masks, trial_type_names):
                    np.putmask(trial_type, mask, name)
                write_bids_events(bids_dir, subject_id, wave_number,
                                  np.stack(
                                      (trial_start_time, trial_duration, trial_type, arrow_presented,
                                       cleaned_subject_response), axis=1))
            else:
                print("creating betaseries and conditions")
                # create onset files for SPM first-level analysis
                # trials = create_trials(trial_number, trial_start_time, trial_duration, subject_response)
                # Create paths and file names
                # write_betaseries(output_folder, subject_id, wave_number, trials)
                pss_set = get_pss(masks, posterror_masks_dict, reaction_time)
                #identify each error event
                #look up the previous go trial
                #look up the next
                # conditions = create_conditions(trial_start_time, trial_duration, masks)
                # write_beta_data(output_folder, 'conditions', subject_id, wave_number, conditions)
                trial_df_row = pd.DataFrame({
                    'subject_id': subject_id,
                    'wave_number': wave_number,
                    'trial_number': trial_number,
                    'go_no_go_condition': go_no_go_condition,
                    'subject_response': subject_response,
                    'reaction_time': reaction_time,
                    'trial_duration': trial_duration,
                    'trial_start_time': trial_start_time,
                    'arrow_presented': arrow_presented,
                    'pss': pss_set['by_trial']})
                multicond_df_list = multicond_df_list + [trial_df_row]
                posterror_conditions = create_posterror_conditions(
                    trial_start_time, trial_duration, posterror_masks)
                if include_rt_pmod:
                    posterror_reaction_times = create_pss_parametric_modulator_struct(
                        pss_set['by_poststop_trial_type'], posterror_masks_dict, posterror_conditions)
                    posterror_conditions.update(posterror_reaction_times)
                write_beta_data(output_folder, folder_id, subject_id, wave_number, posterror_conditions)
                file_condition_index['posterror'][(subject_id, wave_number)] = posterror_conditions['names']
                print("written data for subject " + str(subject_id))
        else:
            print("match not found for " + str(f.name))
    print("creating a complete list of the data with durations and reaction times...")
    multicond_df = pd.concat(multicond_df_list)
    multicond_df.to_csv(output_folder + folder_id + "_multicond_out.csv")
    print("creating a list of each file with the set of conditions within that file...")
    save_varying_condition_list(output_folder=output_folder,
                                subfolder=folder_id,
                                file_condition_dict=file_condition_index['posterror'],
                                target_conditions=['CorrectGoFollowingCorrectStop', 'CorrectGoFollowingFailedStop'])
if __name__ == "__main__":
    description = 'Create multi-condition files for SST task in DEV study'
    print(description)
    arg_parser = argparse.ArgumentParser(description=description,
                                         add_help=True,
                                         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    arg_parser.add_argument('-i', '--input', metavar='Input directory', action='store',
                            type=str, required=True,
                            help='absolute path to directory containing behavioral output from the SST task.',
                            dest='input_dir'
                            )
    arg_parser.add_argument('-o', '--output', metavar='directory for output', action='store',
                            type=str, required=False, default=None,
                            help='absolute or relative path for output',
                            dest='output_dir'
                            )
    cli_args = arg_parser.parse_args()
    #we don't do BIDs here because BIDs, by convention, uses RT for duration.
    # Run twice: once without the PSS parametric modulator, once with it.
    for with_pss_pmod, subfolder in ((False, 'posterror_conditions'),
                                     (True, 'posterror_conditions_w_pss')):
        main(cli_args.input_dir, bids_dir=None, use_rt_for_go_success_trials=False,
             include_rt_pmod=with_pss_pmod, folder_id=subfolder,
             output_folder=cli_args.output_dir)
| [
"benjsmith@gmail.com"
] | benjsmith@gmail.com |
f4a46ee857aeca963d206c48f92c2d37fd973b73 | 2c33ab38df0a0ffd617513640fb91fcc360d3bc3 | /Graph-Algorithm/HoughLine/hough_line.py | 13c1f5b2e48cf818ee420912512ed21d2f043fa8 | [
"Apache-2.0"
] | permissive | FDU-VTS/CVCode | 71a0dc82cd973aca55e0d97ea015d8dc66d2cc94 | e9576dfbdd1ae2ff986dadde3183eb6bc0380f76 | refs/heads/master | 2021-07-22T10:42:34.384289 | 2020-04-24T09:48:43 | 2020-04-24T09:48:43 | 149,054,139 | 33 | 10 | null | null | null | null | UTF-8 | Python | false | false | 728 | py | import matplotlib.pyplot as plt
import skimage.io
import skimage.color
import skimage.morphology
import skimage.transform
import skimage.feature
import copy
# Detect straight line segments in cars.png via a probabilistic Hough
# transform, after isolating the near-white regions of the image.
original_img = skimage.io.imread("cars.png")
# Work on a copy so the untouched original can be displayed at the end.
img = copy.deepcopy(original_img)
# Blacken every pixel that is not near-white (any of R/G/B below 180).
# PERF FIX: a vectorized boolean mask replaces the original per-pixel
# Python double loop (one interpreter iteration per pixel) with a single
# array operation; the selected pixels and replacement value are identical.
near_white_mask = (img[:, :, :3] < 180).any(axis=2)
img[near_white_mask] = (0, 0, 0, 255)
# RGBA -> RGB -> grayscale, then thicken features and extract edges.
img = skimage.color.rgb2gray(skimage.color.rgba2rgb(img))
img = skimage.morphology.dilation(img)
img = skimage.feature.canny(img)
lines = skimage.transform.probabilistic_hough_line(img)
# Overlay each detected segment on the original image.
for p0, p1 in lines:
    plt.plot((p0[0], p1[0]), (p0[1], p1[1]), color='red')
plt.imshow(original_img)
plt.show()
| [
"529768926@qq.com"
] | 529768926@qq.com |
4e12442854c79ceaa4258e2f2b486a52b1219285 | 2acd5555380f6dffd7a25c1ffcceacdb83ad1b41 | /chapter_2/loadbalancer/scrap/main.py | 2a47814565f2fb9ddd00687b2a47ecdd353f3cf7 | [
"Apache-2.0"
] | permissive | minsung8/monitering_exercise | 1d2f37ac4c70591195640b1f79625686e66b71d6 | ecfaba49d7406a282d4db94b02febd83dfc03e51 | refs/heads/main | 2023-09-01T17:18:30.230581 | 2021-10-27T13:35:56 | 2021-10-27T13:35:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,253 | py | from typing import Optional
from fastapi import FastAPI, Request, Response
from fastapi.responses import JSONResponse
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from datetime import datetime
import sys
import httpx
from bs4 import BeautifulSoup
import urllib.parse
from exceptions import UnicornException
from settings import Settings
from log import init_log
from cors import init_cors
from instrumentator import init_instrumentator
from config import Config
# App-wide wiring executed once at import time: FastAPI app object,
# settings/config load, then logging, CORS and metrics instrumentation.
app = FastAPI()
my_settings = Settings()
conf = Config(my_settings.CONFIG_PATH)
init_log(app, conf.section("log")["path"])
init_cors(app)
init_instrumentator(app)
@app.exception_handler(UnicornException)
async def unicorn_exception_handler(request: Request, exc: UnicornException):
    """Translate an application UnicornException into a JSON error response,
    dumping the traceback to stderr for diagnostics."""
    import traceback
    traceback.print_exc(file=sys.stderr)
    error_body = {"code": exc.code, "message": exc.message}
    return JSONResponse(status_code=exc.status, content=error_body)
async def call_api(url: str):
    """Fetch *url* with a throwaway async HTTP client and return the body text."""
    async with httpx.AsyncClient() as http_client:
        response = await http_client.get(url)
        return response.text
def parse_opengraph(body: str):
    """Extract Open Graph metadata from an HTML document.

    Returns {'scrap': {...}} where each field maps to the matching
    <meta property="..."> content attribute, or None when the tag is absent.
    """
    soup = BeautifulSoup(body, 'html.parser')

    def _og_content(*property_names):
        # Return the content of the first matching <meta property=...> tag.
        for prop in property_names:
            tag = soup.find("meta", {"property": prop})
            if tag:
                return tag["content"]
        return None

    scrap = {
        "title": _og_content("og:title"),
        "url": _og_content("og:url"),
        "type": _og_content("og:type"),
        "image": _og_content("og:image"),
        "description": _og_content("og:description"),
        # FIX: the Open Graph property for an article's author is
        # 'article:author' (per the ogp.me specification); the original
        # 'og:article:author' lookup is kept as a fallback for pages that
        # used the non-standard name, so existing behavior is preserved.
        "author": _og_content("article:author", "og:article:author"),
    }
    return {"scrap": scrap}
@app.get("/api/v1/scrap")
async def scrap(url: str):
try:
url = urllib.parse.unquote(url)
body = await call_api(url)
return parse_opengraph(body)
except Exception as e:
raise UnicornException(status=400, code=-20000, message=str(e))
| [
"charsyam@naver.com"
] | charsyam@naver.com |
bb6f6b77013fccba9b14917b054ff3287e723d46 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03139/s876271037.py | c33057c3fdc22c46568b4749567bddc0ebeaf856 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 118 | py | n,a,b=map(int,input().split())
s=min(a,b)
t=max(a,b)
if s+t<=n:
u=0
else:
u=s+t-n
print(str(s)+' '+str(u)) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
e6e49fa0abbb83bea3b06e6e58562c59d83cd3e7 | 3097d8735287c8e574c56831d0d49eeb4c624ad7 | /luxon/core/session/__init__.py | 6fa20d027bdd77b3a585aca7eb9142c649fc066a | [
"BSD-3-Clause"
] | permissive | TachyonicProject/luxon | 897b91e8d24bb41b66dd09c7df8933e8f8135753 | 5cfdc41a9983821b27f832c87e6424d90c0a8098 | refs/heads/development | 2020-03-12T16:49:09.961915 | 2020-01-08T11:58:12 | 2020-01-08T11:58:12 | 130,724,301 | 5 | 7 | NOASSERTION | 2020-01-08T11:58:40 | 2018-04-23T16:15:40 | Python | UTF-8 | Python | false | false | 356 | py | from luxon.core.session.session import Session
from luxon.core.session.sessionauth import TrackToken
from luxon.core.session.sessioncookie import SessionCookie as Cookie
from luxon.core.session.sessioncookie import TrackCookie
from luxon.core.session.sessionredis import SessionRedis as Redis
from luxon.core.session.sessionfile import SessionFile as File
| [
"christiaan.rademan@gmail.com"
] | christiaan.rademan@gmail.com |
bfbae7f8b7bc700d28b3310a6c1074b340bb4859 | 22b30b51d6eabbcc9c317c3825f6bcb00f947b56 | /model/mysite1/app1/migrations/0001_initial.py | 7d7ae241763fd77cf102d24472427517100b9689 | [] | no_license | SanjayPJ/doc-exercise-django | eeb04cdcdee2618e65d46c188cb7ffe7bce11704 | 71857c82709600479299b726560d526a74050695 | refs/heads/master | 2020-03-28T02:04:28.097576 | 2018-09-16T06:50:08 | 2018-09-16T06:50:08 | 147,545,655 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 741 | py | # Generated by Django 2.1 on 2018-09-16 05:27
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the app1 ``Group`` model.

    Django migrations are replayed verbatim on every database, so the
    operation list below must stay byte-stable once applied anywhere.
    """

    initial = True

    dependencies = [
        # Group.members references the project's configured user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Group',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                # NOTE(review): default=True on a TextField looks unintended
                # (a boolean default for text) -- confirm before changing.
                ('description', models.TextField(blank=True, default=True)),
                ('members', models.ManyToManyField(to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"sanjaypjayan2000@gmail.com"
] | sanjaypjayan2000@gmail.com |
7cbfb9269c7e422b591c35e10f80e558cdc00437 | 4e353bf7035eec30e5ad861e119b03c5cafc762d | /QtGui/QLCDNumber.py | ff1ede9cdeaafbfea7abf476b9b7fc51262978ba | [] | no_license | daym/PyQt4-Stubs | fb79f54d5c9a7fdb42e5f2506d11aa1181f3b7d5 | 57d880c0d453641e31e1e846be4087865fe793a9 | refs/heads/master | 2022-02-11T16:47:31.128023 | 2017-10-06T15:32:21 | 2017-10-06T15:32:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,196 | py | # encoding: utf-8
# module PyQt4.QtGui
# from C:\Python27\lib\site-packages\PyQt4\QtGui.pyd
# by generator 1.145
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
from QFrame import QFrame
class QLCDNumber(QFrame):
    """
    QLCDNumber(QWidget parent=None)
    QLCDNumber(int, QWidget parent=None)
    """
    # Auto-generated API stub for PyQt4's QLCDNumber (seven-segment LCD
    # display widget).  All bodies are placeholders: the real behaviour lives
    # in the compiled QtGui extension module.  Methods tagged "real signature
    # unknown" could not be introspected; docstrings were restored from the
    # extension's __doc__ where available.
    def actionEvent(self, *args, **kwargs): # real signature unknown
        pass

    def changeEvent(self, *args, **kwargs): # real signature unknown
        pass

    def checkOverflow(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
        """
        QLCDNumber.checkOverflow(float) -> bool
        QLCDNumber.checkOverflow(int) -> bool
        """
        return False

    def childEvent(self, *args, **kwargs): # real signature unknown
        pass

    def closeEvent(self, *args, **kwargs): # real signature unknown
        pass

    def connectNotify(self, *args, **kwargs): # real signature unknown
        pass

    def contextMenuEvent(self, *args, **kwargs): # real signature unknown
        pass

    def create(self, *args, **kwargs): # real signature unknown
        pass

    def customEvent(self, *args, **kwargs): # real signature unknown
        pass

    def destroy(self, *args, **kwargs): # real signature unknown
        pass

    def digitCount(self): # real signature unknown; restored from __doc__
        """ QLCDNumber.digitCount() -> int """
        return 0

    def disconnectNotify(self, *args, **kwargs): # real signature unknown
        pass

    def display(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
        """
        QLCDNumber.display(QString)
        QLCDNumber.display(float)
        QLCDNumber.display(int)
        """
        pass

    def dragEnterEvent(self, *args, **kwargs): # real signature unknown
        pass

    def dragLeaveEvent(self, *args, **kwargs): # real signature unknown
        pass

    def dragMoveEvent(self, *args, **kwargs): # real signature unknown
        pass

    def drawFrame(self, *args, **kwargs): # real signature unknown
        pass

    def dropEvent(self, *args, **kwargs): # real signature unknown
        pass

    def enabledChange(self, *args, **kwargs): # real signature unknown
        pass

    def enterEvent(self, *args, **kwargs): # real signature unknown
        pass

    def event(self, QEvent): # real signature unknown; restored from __doc__
        """ QLCDNumber.event(QEvent) -> bool """
        return False

    def focusInEvent(self, *args, **kwargs): # real signature unknown
        pass

    def focusNextChild(self, *args, **kwargs): # real signature unknown
        pass

    def focusNextPrevChild(self, *args, **kwargs): # real signature unknown
        pass

    def focusOutEvent(self, *args, **kwargs): # real signature unknown
        pass

    def focusPreviousChild(self, *args, **kwargs): # real signature unknown
        pass

    def fontChange(self, *args, **kwargs): # real signature unknown
        pass

    def hideEvent(self, *args, **kwargs): # real signature unknown
        pass

    def inputMethodEvent(self, *args, **kwargs): # real signature unknown
        pass

    def intValue(self): # real signature unknown; restored from __doc__
        """ QLCDNumber.intValue() -> int """
        return 0

    def keyPressEvent(self, *args, **kwargs): # real signature unknown
        pass

    def keyReleaseEvent(self, *args, **kwargs): # real signature unknown
        pass

    def languageChange(self, *args, **kwargs): # real signature unknown
        pass

    def leaveEvent(self, *args, **kwargs): # real signature unknown
        pass

    def metric(self, *args, **kwargs): # real signature unknown
        pass

    def mode(self): # real signature unknown; restored from __doc__
        """ QLCDNumber.mode() -> QLCDNumber.Mode """
        pass

    def mouseDoubleClickEvent(self, *args, **kwargs): # real signature unknown
        pass

    def mouseMoveEvent(self, *args, **kwargs): # real signature unknown
        pass

    def mousePressEvent(self, *args, **kwargs): # real signature unknown
        pass

    def mouseReleaseEvent(self, *args, **kwargs): # real signature unknown
        pass

    def moveEvent(self, *args, **kwargs): # real signature unknown
        pass

    def numDigits(self): # real signature unknown; restored from __doc__
        """ QLCDNumber.numDigits() -> int """
        return 0

    # Qt signal stub: emitted when a displayed value would not fit.
    def overflow(self, *args, **kwargs): # real signature unknown
        """ QLCDNumber.overflow [signal] """
        pass

    def paintEvent(self, QPaintEvent): # real signature unknown; restored from __doc__
        """ QLCDNumber.paintEvent(QPaintEvent) """
        pass

    def paletteChange(self, *args, **kwargs): # real signature unknown
        pass

    def receivers(self, *args, **kwargs): # real signature unknown
        pass

    def resetInputContext(self, *args, **kwargs): # real signature unknown
        pass

    def resizeEvent(self, *args, **kwargs): # real signature unknown
        pass

    def segmentStyle(self): # real signature unknown; restored from __doc__
        """ QLCDNumber.segmentStyle() -> QLCDNumber.SegmentStyle """
        pass

    def sender(self, *args, **kwargs): # real signature unknown
        pass

    def senderSignalIndex(self, *args, **kwargs): # real signature unknown
        pass

    def setBinMode(self): # real signature unknown; restored from __doc__
        """ QLCDNumber.setBinMode() """
        pass

    def setDecMode(self): # real signature unknown; restored from __doc__
        """ QLCDNumber.setDecMode() """
        pass

    def setDigitCount(self, p_int): # real signature unknown; restored from __doc__
        """ QLCDNumber.setDigitCount(int) """
        pass

    def setHexMode(self): # real signature unknown; restored from __doc__
        """ QLCDNumber.setHexMode() """
        pass

    def setMode(self, QLCDNumber_Mode): # real signature unknown; restored from __doc__
        """ QLCDNumber.setMode(QLCDNumber.Mode) """
        pass

    def setNumDigits(self, p_int): # real signature unknown; restored from __doc__
        """ QLCDNumber.setNumDigits(int) """
        pass

    def setOctMode(self): # real signature unknown; restored from __doc__
        """ QLCDNumber.setOctMode() """
        pass

    def setSegmentStyle(self, QLCDNumber_SegmentStyle): # real signature unknown; restored from __doc__
        """ QLCDNumber.setSegmentStyle(QLCDNumber.SegmentStyle) """
        pass

    def setSmallDecimalPoint(self, bool): # real signature unknown; restored from __doc__
        """ QLCDNumber.setSmallDecimalPoint(bool) """
        pass

    def showEvent(self, *args, **kwargs): # real signature unknown
        pass

    def sizeHint(self): # real signature unknown; restored from __doc__
        """ QLCDNumber.sizeHint() -> QSize """
        pass

    def smallDecimalPoint(self): # real signature unknown; restored from __doc__
        """ QLCDNumber.smallDecimalPoint() -> bool """
        return False

    def tabletEvent(self, *args, **kwargs): # real signature unknown
        pass

    def timerEvent(self, *args, **kwargs): # real signature unknown
        pass

    def updateMicroFocus(self, *args, **kwargs): # real signature unknown
        pass

    def value(self): # real signature unknown; restored from __doc__
        """ QLCDNumber.value() -> float """
        return 0.0

    def wheelEvent(self, *args, **kwargs): # real signature unknown
        pass

    def windowActivationChange(self, *args, **kwargs): # real signature unknown
        pass

    def winEvent(self, *args, **kwargs): # real signature unknown
        pass

    def __init__(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
        pass

    # Enum values flattened onto the class by the stub generator:
    # Mode (Hex/Dec/Oct/Bin) and SegmentStyle (Outline/Filled/Flat).
    Bin = 3
    Dec = 1
    Filled = 1
    Flat = 2
    Hex = 0
    Oct = 2
    Outline = 0
| [
"thekewlstore@gmail.com"
] | thekewlstore@gmail.com |
8b8922533a39f13fe8ef9f9ed2fa1b7a6213aa3e | b51f277dfe339ea30dce10040eca40c20bd8a4dd | /src/weixin_v3/order_pay.py | d2095ef0ffd84e24cb351f2f2f67050bdb148f7e | [
"BSD-3-Clause"
] | permissive | jack139/fair | e08b3b48391d0cb8e72bbc47e7592c030f587f48 | fe0ff64f8edbd794c3fb951ab6af420054e9e585 | refs/heads/master | 2021-06-30T15:17:15.590764 | 2020-09-23T07:14:20 | 2020-09-23T07:14:20 | 160,322,019 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,685 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import web, json, time
from bson.objectid import ObjectId
from config import setting
import app_helper
db = setting.db_web  # shared MongoDB database handle for this handler module
url = ('/wx/order_pay')  # web.py route pattern this handler is mounted on
# Payment completion handler
class handler:
    def POST(self):
        """Record a payment submission for an app order (Python 2 / web.py).

        Authenticates via either openid or session_id, then reconciles the
        order state with the payment gateway: for ALIPAY/WXPAY the gateway's
        asynchronous notification may arrive before or after this call, so
        both orderings are handled.  Responses are JSON with a 'ret' code
        (0 on success, negative on error).
        """
        web.header('Content-Type', 'application/json')
        param = web.input(openid='', session_id='', order_id='', pay_type='', data='')
        print param
        if '' in (param.order_id, param.pay_type):
            return json.dumps({'ret' : -2, 'msg' : '参数错误'})
        if param.openid=='' and param.session_id=='':
            return json.dumps({'ret' : -2, 'msg' : '参数错误1'})
        # Accept either openid or session_id as the credential
        if param.openid!='':
            uname = app_helper.check_openid(param.openid)
        else:
            uname = app_helper.wx_logged(param.session_id)
        if uname:
            db_user = db.app_user.find_one({'openid':uname['openid']},{'coupon':1, 'credit':1})
            if db_user==None: # should not happen
                return json.dumps({'ret' : -5, 'msg' : '未找到用户信息'})
            # Payment steps: 1. record the payment, 2. update order status,
            # 3. adjust the displayed stock !!!!!!
            # Fetch the order
            db_order = db.order_app.find_one(
                {'order_id' : param.order_id},
                #{'status':1, 'cart':1, 'due':1, 'shop':1}
                {'_id':0}
            )
            if db_order==None:
                return json.dumps({'ret' : -3, 'msg' : '未找到订单!'})
            # For Alipay/WeChat orders, status PAID means the gateway's async
            # notification arrived before this call.
            if db_order['status']=='PAID' and param.pay_type in ('ALIPAY','WXPAY'):
                # Still record this submission against the order
                db.order_app.update_one(
                    {
                        'order_id' : param.order_id,
                    },
                    {
                        '$set' : {
                            'pay_type' : param.pay_type,
                            'pay' : db_order['due'],
                            'paid2_time' : app_helper.time_str(),
                            'paid2_tick' : int(time.time()),
                        },
                        '$push' : { 'history' : (app_helper.time_str(), uname['openid'], '提交付款')},
                    }
                )
                return json.dumps({'ret' : 0, 'data' : {
                    'order_id' : param.order_id,
                    'due' : db_order['due'],
                    'paid' : db_order['due'],
                    'status' : '已支付'
                }})
            # Only unpaid (DUE) orders can be processed from here on
            if db_order['status']!='DUE':
                return json.dumps({'ret' : -3, 'msg' : '不是待付款订单!'})
            # Alipay/WeChat payment not yet confirmed by the gateway
            if param.pay_type in ('ALIPAY', 'WXPAY'):
                # Update the sales order record; the DUE filter makes this a
                # compare-and-set so a racing async notification cannot be
                # overwritten.
                r = db.order_app.find_one_and_update(
                    {
                        'order_id' : param.order_id,
                        'status' : 'DUE'
                    },
                    {
                        '$set' : {
                            'status' : 'PREPAID',
                            'pay_type' : param.pay_type,
                            'pay' : db_order['due'],
                            'paid2_time' : app_helper.time_str(),
                            'paid2_tick' : int(time.time()),
                            'pay_data' : param.data,
                        },
                        '$push' : { 'history' : (app_helper.time_str(), uname['openid'], '提交付款')},
                    },
                    {'status':1}
                )
                # If the order was no longer DUE, the async notification
                # already arrived; just record this submission.
                if r==None:
                    db.order_app.update_one(
                        {
                            'order_id' : param.order_id,
                        },
                        {
                            '$set' : {
                                'pay_type' : param.pay_type,
                                'pay' : db_order['due'],
                                'paid2_time' : app_helper.time_str(),
                                'paid2_tick' : int(time.time()),
                            },
                            '$push' : { 'history' : (app_helper.time_str(), uname['openid'], '提交付款')},
                        }
                    )
                # Response
                return json.dumps({'ret' : 0, 'data' : {
                    'order_id' : param.order_id,
                    'due' : db_order['due'],
                    'paid' : db_order['due'],
                    'status' : '已支付',
                    'alert' : False,
                    'message' : '测试信息,还未收到异步通知',
                    'url' : 'http://app-test.urfresh.cn'
                }})
        else:
            return json.dumps({'ret' : -4, 'msg' : '无效的openid'})
| [
"gt@f8geek.com"
] | gt@f8geek.com |
9bc56a8bfcd997a780c81f0a1f8e6b3cae3daf90 | 8cfee59143ecd307fe7d7a27986c3346aa8ce60c | /Analysis/Excel_Chap03/6excel_value_matches_pattern.py | 9e2b0eb303f8c3f83bb8fb7b8c5fcb805d8cd35f | [] | no_license | kiminhan/Python | daafc1fde804f172ebfb1385ab9d6205c7a45970 | dc6af486aaf7d25dbe13bcee4e115207f37d4696 | refs/heads/master | 2020-03-08T19:18:10.173346 | 2018-09-06T06:11:40 | 2018-09-06T06:11:40 | 128,288,713 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,485 | py | #!/usr/bin/env python3
import re
import sys
from datetime import date
from xlrd import open_workbook, xldate_as_tuple
from xlwt import Workbook
input_file = sys.argv[1]
output_file = sys.argv[2]
output_workbook = Workbook()
output_worksheet = output_workbook.add_sheet('jan_2013_output')
pattern = re.compile(r'(?P<my_pattern>^J.*)')
customer_name_column_index = 1
with open_workbook(input_file) as workbook:
worksheet = workbook.sheet_by_name('january_2013')
data = []
header = worksheet.row_values(0)
data.append(header)
for row_index in range(1,worksheet.nrows):
row_list = []
if pattern.search(worksheet.cell_value(row_index, customer_name_column_index)):
for column_index in range(worksheet.ncols):
cell_value = worksheet.cell_value(row_index, column_index)
cell_type = worksheet.cell_type(row_index, column_index)
if cell_type == 3:
date_cell = xldate_as_tuple(cell_value, workbook.datemode)
date_cell = date(*date_cell[0:3]).strftime('%m/%d/%Y')
row_list.append(date_cell)
else:
row_list.append(cell_value)
if row_list:
data.append(row_list)
for list_index, output_list in enumerate(data):
for element_index, element in enumerate(output_list):
output_worksheet.write(list_index, element_index, element)
output_workbook.save(output_file) | [
"rladlsgks4@naver.com"
] | rladlsgks4@naver.com |
6f5861a2b4e71d6f756f74ebd5b666e58c675b68 | f0987e17aea6668158cd334c1fbacfe6286d3c77 | /NITA/lib/jnpr/toby/tmp/RLI/RLI-27K/RLI-27608/Apps/cat_MS_DST_MAC.py | 709fe11253a24b5d97cd1f7de074bcb335c3b0c8 | [] | no_license | fengyun4623/file | 00bf21f952ea3f95ffc9fe18448b244b26b7fadb | 3966c63d48557b0b94303896eed7a767593a4832 | refs/heads/master | 2023-04-02T05:01:25.066052 | 2020-07-29T16:15:31 | 2020-07-29T16:15:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 61,474 | py | import os
import sys
import logging
import time
import argparse
sys.path.append("/var/tmp")
# Importing API libraries
from grpc.beta import implementations
import authentication_service_pb2
from authentication_service_pb2 import *
import jnx_addr_pb2
from jnx_addr_pb2 import *
import firewall_service_pb2
from firewall_service_pb2 import *
import authentication_service_pb2
from authentication_service_pb2 import *
# Per-run log file under /var/tmp, named cat_MS_DST_MAC_<timestamp>.log
timestr = time.strftime("%Y%m%d_%H%M%S")
start_time = time.time()  # wall-clock start of the run
log_file_name = 'cat_' + 'MS_DST_MAC' + '_' + timestr + '.log'
log_file = os.path.join('/var/tmp', log_file_name)
# Logging
fmt_str = "%(asctime)s:%(levelname)s:%(message)s" # check for more in logging module
logging.basicConfig(filename=log_file, filemode='w+', level=logging.INFO, format=fmt_str,datefmt="%b-%d-%Y %H:%M:%S")
logger = logging.getLogger(__name__)
# Command-line arguments: DUT host name/IP and the logical interface (IFL)
# that the ACL bind/unbind calls below are applied to.
parser = argparse.ArgumentParser()
parser.add_argument('-d','--device', help='Input host name',required=True)
parser.add_argument('-ifl','--iflname', help='Input interface name',required=True)
args = parser.parse_args()
# APP Config
device = args.device # DUT Name/IP here
APP_USER = 'regress' # username
APP_PASSWORD = 'MaRtInI' # Password
port = 9999 # GRPC port number
client_id = 'MS_DST_MAC' # App Id (Will be replaced by test name)
login_timeout = 100 # Timeout for Logincheck API
api_timeout = 10 # Timeout for all the API calls
# Pass/Fail flags and counters
calls_count = 0  # total API calls validated
pass_count = 0  # calls whose status matched expectation
fail_count = 0  # calls whose status did not match expectation
pass_flag = 0  # sticky: set to 1 once any call fails
api_pass_flag = 0
api_pass_count = 0
api_fail_count = 0
def pause():
    """Block until the operator presses Enter (manual step-through of the test).

    The prompt's return value is intentionally discarded; the original
    bound it to an unused local, removed here.
    """
    raw_input("Enter to continue...")
# Announce start and wait for operator confirmation before touching the DUT.
logger.info("Executing Python app")
pause()
def validate(result, api_name, api_request, count, category):
    """Log an API request/response pair and update the global pass/fail counters.

    A call PASSes when its ``status`` field matches the expectation for
    ``category``: for 'negative' test cases a non-zero status is expected,
    for anything else (conventionally 'valid') status 0 is expected.
    Streaming responses (iterables) are checked message by message.

    Args:
        result: response message, or an iterator of messages for a
            streaming API.  Each message exposes a ``status`` field.
        api_name: API name used in the log output.
        api_request: the request object that was sent (logged verbatim).
        count: sequence number of this invocation of ``api_name``.
        category: 'negative' for negative test cases, anything else for
            positive ones.
    """
    api_info = "--------------------------------------------\n\nExecuted API - {0}-({1})".format(api_name, count)
    logger.info("{0}".format(api_info))
    logger.info("\nREQUEST ===>\n{0}".format(api_request))
    if not hasattr(result, '__iter__'):
        # Unary response: unescape the protobuf text dump for readability.
        response = str(result).replace('\\n', '\n')
        response = str(response).replace('\\"', '\"')
        logger.info("\nRESPONSE ===>\n{0}".format(response))
    global pass_count, fail_count, pass_flag, calls_count
    calls_count += 1
    log_str = "API - {0}-({1}) ".format(api_name, count)
    status = 0  # status code signalling success
    if hasattr(result, '__iter__'):
        logger.info('\nRESPONSE (Streaming API) ===>\n')
        fail_flag = 0
        for i in result:
            if i:
                response = str(i).replace('\\n', '\n')
                response = str(response).replace('\\"', '\"')
                logger.info("\n{0}".format(response))
            # For non-negative tests every streamed message must succeed.
            # FIX: was "i.status is not status" - identity comparison on
            # ints only works via CPython small-int caching; use != .
            if i.status != status and category != 'negative':
                fail_flag = 1
        if fail_flag == 0:
            pass_count += 1
            logger.info("\nRESULT ===>\n{0} -> PASS\n".format(log_str))
        else:
            fail_count += 1
            logger.info("\nRESULT ===>\n{0} -> FAIL\n".format(log_str))
            pass_flag = 1
    else:
        # FIX: the original built a condition string and ran it through
        # eval(); compare the status directly instead (same outcome,
        # no dynamic code execution, no 'is' on integers).
        if category == 'negative':
            passed = result.status != status
        else:
            passed = result.status == status
        if passed:
            pass_count += 1
            logger.info("\nRESULT ===>\n{0} -> PASS\n".format(log_str))
        else:
            fail_count += 1
            logger.info("\nRESULT ===>\n{0} -> FAIL\n".format(log_str))
            pass_flag = 1
try:
    # Channel Creation and Authentication
    # Open an insecure gRPC channel to the DUT and authenticate through the
    # Login service before issuing any firewall/ACL RPCs.
    channel = implementations.insecure_channel(host=device, port=port)
    stub = authentication_service_pb2.beta_create_Login_stub(channel)
    login_response = stub.LoginCheck(authentication_service_pb2.LoginRequest(user_name=APP_USER, password=APP_PASSWORD, client_id=client_id), login_timeout)
    # Service stub creation
    AclService_stub = beta_create_AclService_stub(channel)
    # All the valid combinations
    logger.info('All the valid combinations')
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=0, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListAdd(api_request, api_timeout)
print result
validate(result, 'AccessListAdd', api_request, 1, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=0, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindAdd(api_request, api_timeout)
print result
validate(result, 'AccessListBindAdd', api_request, 1, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=0, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindDelete(api_request, api_timeout)
print result
validate(result, 'AccessListBindDelete', api_request, 1, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=0, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListDelete(api_request, api_timeout)
print result
validate(result, 'AccessListDelete', api_request, 1, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=0, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListAdd(api_request, api_timeout)
print result
validate(result, 'AccessListAdd', api_request, 2, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=0, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindAdd(api_request, api_timeout)
print result
validate(result, 'AccessListBindAdd', api_request, 2, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=0, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindDelete(api_request, api_timeout)
print result
validate(result, 'AccessListBindDelete', api_request, 2, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=0, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListDelete(api_request, api_timeout)
print result
validate(result, 'AccessListDelete', api_request, 2, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=24, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListAdd(api_request, api_timeout)
print result
validate(result, 'AccessListAdd', api_request, 3, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=24, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindAdd(api_request, api_timeout)
print result
validate(result, 'AccessListBindAdd', api_request, 3, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=24, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindDelete(api_request, api_timeout)
print result
validate(result, 'AccessListBindDelete', api_request, 3, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=24, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListDelete(api_request, api_timeout)
print result
validate(result, 'AccessListDelete', api_request, 3, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=24, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListAdd(api_request, api_timeout)
print result
validate(result, 'AccessListAdd', api_request, 4, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=24, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindAdd(api_request, api_timeout)
print result
validate(result, 'AccessListBindAdd', api_request, 4, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=24, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindDelete(api_request, api_timeout)
print result
validate(result, 'AccessListBindDelete', api_request, 4, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=24, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListDelete(api_request, api_timeout)
print result
validate(result, 'AccessListDelete', api_request, 4, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=32, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListAdd(api_request, api_timeout)
print result
validate(result, 'AccessListAdd', api_request, 5, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=32, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindAdd(api_request, api_timeout)
print result
validate(result, 'AccessListBindAdd', api_request, 5, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=32, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindDelete(api_request, api_timeout)
print result
validate(result, 'AccessListBindDelete', api_request, 5, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=32, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListDelete(api_request, api_timeout)
print result
validate(result, 'AccessListDelete', api_request, 5, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=32, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListAdd(api_request, api_timeout)
print result
validate(result, 'AccessListAdd', api_request, 6, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=32, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindAdd(api_request, api_timeout)
print result
validate(result, 'AccessListBindAdd', api_request, 6, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=32, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindDelete(api_request, api_timeout)
print result
validate(result, 'AccessListBindDelete', api_request, 6, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=32, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListDelete(api_request, api_timeout)
print result
validate(result, 'AccessListDelete', api_request, 6, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=48, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListAdd(api_request, api_timeout)
print result
validate(result, 'AccessListAdd', api_request, 7, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=48, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindAdd(api_request, api_timeout)
print result
validate(result, 'AccessListBindAdd', api_request, 7, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=48, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindDelete(api_request, api_timeout)
print result
validate(result, 'AccessListBindDelete', api_request, 7, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=48, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListDelete(api_request, api_timeout)
print result
validate(result, 'AccessListDelete', api_request, 7, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=48, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListAdd(api_request, api_timeout)
print result
validate(result, 'AccessListAdd', api_request, 8, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=48, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindAdd(api_request, api_timeout)
print result
validate(result, 'AccessListBindAdd', api_request, 8, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=48, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindDelete(api_request, api_timeout)
print result
validate(result, 'AccessListBindDelete', api_request, 8, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=48, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_accept=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListDelete(api_request, api_timeout)
print result
validate(result, 'AccessListDelete', api_request, 8, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=0, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListAdd(api_request, api_timeout)
print result
validate(result, 'AccessListAdd', api_request, 9, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=0, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindAdd(api_request, api_timeout)
print result
validate(result, 'AccessListBindAdd', api_request, 9, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=0, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindDelete(api_request, api_timeout)
print result
validate(result, 'AccessListBindDelete', api_request, 9, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=0, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListDelete(api_request, api_timeout)
print result
validate(result, 'AccessListDelete', api_request, 9, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=0, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListAdd(api_request, api_timeout)
print result
validate(result, 'AccessListAdd', api_request, 10, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=0, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindAdd(api_request, api_timeout)
print result
validate(result, 'AccessListBindAdd', api_request, 10, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=0, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindDelete(api_request, api_timeout)
print result
validate(result, 'AccessListBindDelete', api_request, 10, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=0, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListDelete(api_request, api_timeout)
print result
validate(result, 'AccessListDelete', api_request, 10, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=24, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListAdd(api_request, api_timeout)
print result
validate(result, 'AccessListAdd', api_request, 11, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=24, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindAdd(api_request, api_timeout)
print result
validate(result, 'AccessListBindAdd', api_request, 11, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=24, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindDelete(api_request, api_timeout)
print result
validate(result, 'AccessListBindDelete', api_request, 11, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=24, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListDelete(api_request, api_timeout)
print result
validate(result, 'AccessListDelete', api_request, 11, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=24, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListAdd(api_request, api_timeout)
print result
validate(result, 'AccessListAdd', api_request, 12, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=24, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindAdd(api_request, api_timeout)
print result
validate(result, 'AccessListBindAdd', api_request, 12, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=24, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindDelete(api_request, api_timeout)
print result
validate(result, 'AccessListBindDelete', api_request, 12, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=24, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListDelete(api_request, api_timeout)
print result
validate(result, 'AccessListDelete', api_request, 12, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=32, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListAdd(api_request, api_timeout)
print result
validate(result, 'AccessListAdd', api_request, 13, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=32, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindAdd(api_request, api_timeout)
print result
validate(result, 'AccessListBindAdd', api_request, 13, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=32, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindDelete(api_request, api_timeout)
print result
validate(result, 'AccessListBindDelete', api_request, 13, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=32, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListDelete(api_request, api_timeout)
print result
validate(result, 'AccessListDelete', api_request, 13, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=32, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListAdd(api_request, api_timeout)
print result
validate(result, 'AccessListAdd', api_request, 14, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=32, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindAdd(api_request, api_timeout)
print result
validate(result, 'AccessListBindAdd', api_request, 14, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=32, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindDelete(api_request, api_timeout)
print result
validate(result, 'AccessListBindDelete', api_request, 14, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=32, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListDelete(api_request, api_timeout)
print result
validate(result, 'AccessListDelete', api_request, 14, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=48, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListAdd(api_request, api_timeout)
print result
validate(result, 'AccessListAdd', api_request, 15, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=48, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindAdd(api_request, api_timeout)
print result
validate(result, 'AccessListBindAdd', api_request, 15, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=48, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindDelete(api_request, api_timeout)
print result
validate(result, 'AccessListBindDelete', api_request, 15, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=48, match_op=1, addr=MacAddress(addr_string="aa:00:00:00:00:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListDelete(api_request, api_timeout)
print result
validate(result, 'AccessListDelete', api_request, 15, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=48, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListAdd(api_request, api_timeout)
print result
validate(result, 'AccessListAdd', api_request, 16, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=48, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindAdd(api_request, api_timeout)
print result
validate(result, 'AccessListBindAdd', api_request, 16, 'valid')
pause()
api_request = AccessListObjBind(bind_direction=1, bind_family=5, obj_type=1, bind_object=AccessListBindObjPoint(intf=args.iflname + ".0"), acl=AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=48, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1))
print api_request
result = AclService_stub.AccessListBindDelete(api_request, api_timeout)
print result
validate(result, 'AccessListBindDelete', api_request, 16, 'valid')
pause()
api_request = AccessList(ace_list=[AclEntry(mservice_entry=AclMultiServiceEntry(matches=AclEntryMatchMultiService(match_dst_macs=[AclMatchMacAddress(addr_len=48, match_op=1, addr=MacAddress(addr_string="a:0:0:0:0:11"))]), ace_name="t1", adjacency=AclAdjacency(type=1), actions=AclEntryMultiServiceAction(actions_nt=AclEntryMultiServiceNonTerminatingAction(action_next_term=1, action_syslog=1, action_sample=1, action_count=AclActionCounter(counter_name="C1"), action_log=1), action_t=AclEntryMultiServiceTerminatingAction(action_discard=1)), ace_op=1))], acl_family=5, acl_flag=0, acl_name="MS1", acl_type=1)
print api_request
result = AclService_stub.AccessListDelete(api_request, api_timeout)
print result
validate(result, 'AccessListDelete', api_request, 16, 'valid')
pause()
logger.info("--------------------------------------------\nAPI TEST SUMMARY : \n")
logger.info("TOTAL NUMBER OF TESTCASES EXECUTED : {0}\n".format(calls_count))
logger.info("TOTAL NUMBER OF TESTCASES PASSED : {0}\n".format(pass_count))
logger.info("TOTAL NUMBER OF TESTCASES FAILED : {0}\n".format(fail_count))
if pass_flag is 0:
logger.info("API TESTING ------PASS\n")
else:
logger.info("API TESTING ------FAIL\n")
duration = time.time() - start_time
logger.info("Exeution Duration(in seconds) = {}".format(duration))
logger.info("\nAPI TESTING COMPLETED\n")
except Exception as tx:
logger.info("Caught Exception {0}\nPlease check the API calls".format(tx))
pass
while True:
import signal
os.kill(os.getpid(), signal.SIGTERM)
| [
"srigupta@juniper.net"
] | srigupta@juniper.net |
6ff2697e438b56f55cf9199bf14522a507e9629e | 5bca2d5a7615d2783fae7a7569d57a9a9eb3d604 | /migrations/versions/73964d6faffe_tables.py | 56f6959a2501085e0b2d1356b2a6a39945db123f | [] | no_license | eduardolujan/product_hub | 9ff3fbf11b4703993c1efb2a6202ed3b1c446cda | 0bfe0059ab0d59243794b03f70ceffe3a1a263be | refs/heads/main | 2023-03-01T15:58:05.014636 | 2021-02-03T03:04:31 | 2021-02-03T03:04:31 | 330,279,522 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,325 | py | """Tables
Revision ID: 73964d6faffe
Revises: 61f765248e88
Create Date: 2021-01-24 14:52:29.409679
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '73964d6faffe'
down_revision = '61f765248e88'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('store',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_table('address',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('street', sa.String(), nullable=False),
sa.Column('external_number', sa.String(), nullable=False),
sa.Column('internal_number', sa.String(), nullable=False),
sa.Column('city', sa.String(), nullable=False),
sa.Column('state', sa.String(), nullable=False),
sa.Column('country', sa.String(), nullable=False),
sa.Column('zipcode', sa.String(), nullable=False),
sa.Column('store_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.ForeignKeyConstraint(['store_id'], ['store.id'], initially='DEFERRED', deferrable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_address_store_id'), 'address', ['store_id'], unique=False)
op.create_table('product',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.Column('price', sa.Float(), nullable=False),
sa.Column('sku', sa.String(), nullable=False),
sa.Column('store_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.ForeignKeyConstraint(['store_id'], ['store.id'], initially='DEFERRED', deferrable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_product_store_id'), 'product', ['store_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_product_store_id'), table_name='product')
op.drop_table('product')
op.drop_index(op.f('ix_address_store_id'), table_name='address')
op.drop_table('address')
op.drop_table('store')
# ### end Alembic commands ###
| [
"eduardo.lujan.p@gmail.com"
] | eduardo.lujan.p@gmail.com |
eba70dfb05f89a0f5c308fa5ba16f7afa78b5a5a | 3b547e9f54a6391eee26dacdfb8c182db51861fa | /eval_model.py | bc76e12a74d1a2e93cf3f2f9573fa1ffe782a59b | [
"MIT"
] | permissive | LevinJ/CNN_LSTM_CTC_Tensorflow | faedcdd2574725ddf507e68b3584c693f7b6f470 | b6ee12032757136bdf0bcc2b21ad1605f7296413 | refs/heads/master | 2021-05-06T05:23:39.857506 | 2018-03-06T05:32:37 | 2018-03-06T05:32:37 | 115,079,266 | 3 | 3 | null | 2017-12-22T05:17:41 | 2017-12-22T05:17:40 | null | UTF-8 | Python | false | false | 2,868 | py | import datetime
import logging
import os
import time
import cv2
import numpy as np
import tensorflow as tf
import cnn_lstm_otc_ocr
import utils
import helper
from preparedata import PrepareData
FLAGS = utils.FLAGS
import math
import argparse
log_dir = './log/evals'
class EvaluateModel(PrepareData):
def __init__(self):
PrepareData.__init__(self)
return
def parse_param(self):
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--split_name', help='which split of dataset to use', default="eval")
parser.add_argument('-c', '--checkpoint_path', help='which checkpoint to use', default= "./checkpoint/")
args = parser.parse_args()
self.checkpoint_path = args.checkpoint_path
self.split_name = args.split_name
return
def eval_model(self):
model = cnn_lstm_otc_ocr.LSTMOCR('eval')
model.build_graph()
val_feeder, num_samples = self.input_batch_generator(self.split_name, is_training=False, batch_size = FLAGS.batch_size)
num_batches_per_epoch = int(math.ceil(num_samples / float(FLAGS.batch_size)))
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
sess.run(tf.local_variables_initializer())
saver = tf.train.Saver(tf.global_variables(), max_to_keep=100)
eval_writer = tf.summary.FileWriter("{}/{}".format(log_dir, self.split_name), sess.graph)
if tf.gfile.IsDirectory(self.checkpoint_path):
checkpoint_file = tf.train.latest_checkpoint(self.checkpoint_path)
else:
checkpoint_file = self.checkpoint_path
print('Evaluating checkpoint_path={}, split={}, num_samples={}'.format(checkpoint_file, self.split_name, num_samples))
saver.restore(sess, checkpoint_file)
for i in range(num_batches_per_epoch):
inputs, labels, _ = next(val_feeder)
feed = {model.inputs: inputs,
model.labels: labels}
start = time.time()
_ = sess.run(model.names_to_updates, feed)
elapsed = time.time()
elapsed = elapsed - start
# print('{}/{}, {:.5f} seconds.'.format(i, num_batches_per_epoch, elapsed))
# print the decode result
summary_str, step = sess.run([model.merged_summay, model.global_step])
eval_writer.add_summary(summary_str, step)
return
def run(self):
self.parse_param()
self.eval_model()
return
if __name__ == "__main__":
obj= EvaluateModel()
obj.run()
| [
"jianzhirong@gmail.com"
] | jianzhirong@gmail.com |
2b62f5e2598681862e85828948f048642f3a7e82 | 1eaa6c2500868d0c60b5b2cd552cd671b635de32 | /Algorithm/sword of offer/46.求1+2+3+...+n.py | 08943c3680730a65a85553b7c90f42b642214139 | [] | no_license | jiangyuwei666/my-study-demo | f85f14a599c328addb5af09078d404f1139e0a82 | 9e2baef2f36f071f8903768adb8d5a5a8c1123f6 | refs/heads/master | 2022-04-30T16:47:24.715570 | 2022-03-24T09:08:43 | 2022-03-24T09:08:43 | 152,565,041 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 126 | py | """
使用逻辑与(&&) 的短路特性,及必须满足前面,后面才会执行,但是python中好像实现不了
"""
| [
"739843128@qq.com"
] | 739843128@qq.com |
91c4c6728c8b549597426dee3069b2ca120916d3 | 25427cf7ac5ae9f8e5d421e953750a46fb2d1ebc | /ZSY/ZSY_BOM_MAN/View/migrations/0001_initial.py | 2f7a8e07d649c6ff8ba914dfe1d8fbe63115f633 | [] | no_license | povillechan/Python | d48e2e25c9961acef45162ca882b547e5b9d0b77 | 67e88d6d7bdbe49b0c5165d9b35f37dccf638877 | refs/heads/master | 2020-03-22T08:43:44.606336 | 2019-09-01T15:25:57 | 2019-09-01T15:25:57 | 139,786,618 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,055 | py | # Generated by Django 2.0.5 on 2018-06-04 09:14
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the View app: creates the Bom, Paper and
    # Product2Bom tables plus their uniqueness constraints.
    # NOTE(review): the verbose_name strings appear mojibake-encoded in this
    # copy; the original was presumably a CJK encoding — left byte-identical.

    initial = True

    # First migration: nothing to depend on.
    dependencies = [
    ]

    operations = [
        # Bom: a bill-of-materials record (name / version / free-text content).
        migrations.CreateModel(
            name='Bom',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('bomName', models.CharField(max_length=128, verbose_name='Bom����')),
                ('bomVersion', models.CharField(max_length=20, verbose_name='Bom�汾')),
                ('bomContext', models.TextField(max_length=128, verbose_name='Bom����')),
            ],
        ),
        # Paper: a drawing/document with name, version, description and path.
        migrations.CreateModel(
            name='Paper',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('paperName', models.CharField(max_length=128, verbose_name='ͼֽ����')),
                ('paperVersion', models.CharField(max_length=20, verbose_name='ͼֽ�汾')),
                ('paperDiscrib', models.CharField(max_length=128, verbose_name='ͼֽ����')),
                ('paperAddr', models.TextField(max_length=256, verbose_name='ͼֽ��ַ')),
            ],
        ),
        # Product2Bom: links a product name to a Bom row (FK, cascade delete).
        migrations.CreateModel(
            name='Product2Bom',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('productName', models.CharField(max_length=128, verbose_name='��Ʒ����')),
                ('bomName', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='View.Bom')),
            ],
        ),
        # A name may exist in several versions, but each (name, version)
        # pair must be unique.
        migrations.AlterUniqueTogether(
            name='paper',
            unique_together={('paperName', 'paperVersion')},
        ),
        migrations.AlterUniqueTogether(
            name='bom',
            unique_together={('bomName', 'bomVersion')},
        ),
    ]
| [
"poville@yeah.net"
] | poville@yeah.net |
def libras_para_kg(libras):
    """Convert a weight in pounds (libras) to kilograms.

    Bug fix: the original divided by 0.00220462 (pounds per *gram*), so it
    returned grams — 1000x the kilograms the function name promises.
    """
    kg = libras / 2.20462  # 1 kg == 2.20462 lb
    return kg
"you@example.com"
] | you@example.com |
283035063cf583338fab7543c71cb3ddd00b28aa | 38d34dcc8b14b05063cef875734eb4866591d133 | /medicifyapp/migrations/0004_auto_20210109_2156.py | d391b88ff8708bfb59a0c699cbb75fad7565470d | [] | no_license | AbdurRahman111/Awsome_E-Commerce_Website | 02a4061a3842559d14dd38f7a00a61b403a04822 | 743b702bb3e87e38aaf63470b67398ee33a10358 | refs/heads/master | 2023-02-25T09:38:51.146166 | 2021-02-05T16:29:00 | 2021-02-05T16:29:00 | 336,326,420 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,044 | py | # Generated by Django 3.1.4 on 2021-01-09 15:56
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the 'bennar' (banner) model and refreshes the auto-captured
    # datetime defaults on two existing date fields.

    dependencies = [
        ('medicifyapp', '0003_auto_20210108_1902'),
    ]

    operations = [
        # bennar: a named banner image stored under uploads/product_image.
        migrations.CreateModel(
            name='bennar',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=10000)),
                ('image', models.ImageField(upload_to='uploads/product_image')),
            ],
        ),
        # NOTE(review): these defaults are fixed timestamps captured when
        # makemigrations ran, not "now()" evaluated at insert time.
        migrations.AlterField(
            model_name='order',
            name='order_date',
            field=models.DateField(blank=True, default=datetime.datetime(2021, 1, 9, 21, 56, 8, 455385)),
        ),
        migrations.AlterField(
            model_name='posted_jobs',
            name='post_date',
            field=models.DateField(blank=True, default=datetime.datetime(2021, 1, 9, 21, 56, 8, 454387)),
        ),
    ]
| [
"mdabdurrahmanchowdhury1122@gmail.com"
] | mdabdurrahmanchowdhury1122@gmail.com |
0ce59ea95422f9ffdfe5d58e6884c9b3aea3c0f9 | 9eb6528606cf9dd011a3ce0c1605b111c9d50955 | /Tony_study/turtle-study/turtle-circle1.py | f1a99704add630dfe7d3e4578772855dcc292681 | [] | no_license | arch123A/luoye | 0ca9f787c7d5e9ba89d2ae602528e68d7d31a636 | ba8e902cefba2c3ccc58bc266cdf9a7eff03a458 | refs/heads/master | 2023-02-02T22:47:00.256065 | 2020-12-13T09:30:33 | 2020-12-13T09:30:33 | 104,022,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 161 | py | import turtle as t
import time
# Create a pen (turtle) and lift it so moving leaves no trace.
pen=t.Pen()
pen.up()
# print(help(a.circle))
# Draw a filled circle of radius 10 at the current position.
pen.begin_fill()
# pen.fillcolor" ")
pen.circle(10)
pen.end_fill()
time.sleep(100) | [
"arch@msn.cn"
] | arch@msn.cn |
f0fc3acb68bf9ae1fa73fd72e246b5b8f3401ad3 | f58fe5f505361c7b531ca9fde0f0cb3aa48b5652 | /012/main.py | e90f2216ee821ca20f546a62836094c8f107e9e9 | [] | no_license | elitan/euler | 2ac1891555c133cc788d22d8c040778936849cb3 | 0caa13f74f3b9fb574158f10973cc9dc2586293f | refs/heads/master | 2020-12-25T15:29:46.421623 | 2018-01-17T17:04:32 | 2018-01-17T17:04:32 | 14,663,106 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 801 | py | #!/bin/py
"""
The sequence of triangle numbers is generated by adding the natural numbers. So the 7th triangle number would be 1 + 2 + 3 + 4 + 5 + 6 + 7 = 28. The first ten terms would be:
1, 3, 6, 10, 15, 21, 28, 36, 45, 55, ...
Let us list the factors of the first seven triangle numbers:
1: 1
3: 1,3
6: 1,2,3,6
10: 1,2,5,10
15: 1,3,5,15
21: 1,3,7,21
28: 1,2,4,7,14,28
We can see that 28 is the first triangle number to have over five divisors.
What is the value of the first triangle number to have over five hundred divisors?
"""
import math as m
def divisors(n):
    """Return the number of positive divisors of n (0 for n < 1).

    Counts each divisor pair (i, n // i) for i up to sqrt(n). The original
    version miscounted perfect squares (its own comment admitted it) and
    wrongly reported 2 divisors for n = 0 and n = 1.
    """
    if n < 1:
        return 0
    count = 0
    root = int(m.sqrt(n))
    for i in range(1, root + 1):
        if n % i == 0:
            count += 2  # i and n // i
    # A perfect square's root pairs with itself and was counted twice above.
    if root * root == n:
        count -= 1
    return count
# Search for the first triangle number with more than 500 divisors:
# n accumulates 1 + 2 + ... + i, i.e. n is always the current triangle number.
i = 1
n = 0
while divisors(n) < 500:
    n += i
    i += 1
print("Winner: %d" % (n)) | [
"johan@eliasson.me"
] | johan@eliasson.me |
8e5579fe07aaf1de1c48be9ae4db8fd0159327d7 | 5e538961d3b5889c30f81ccbc6d315e0c9c8312e | /apps/xsl/src/xsl/MiscSettingsTab.py | d95fbdabadb7e06b9f60427eb9838574910b2df2 | [] | no_license | WhiteSymmetry/lightlang | 3df11a8c3b6b73bebf9076d0cb70827b685d9af6 | 7510d5dd87fc988fe1b14718bb546daae5baebe6 | refs/heads/master | 2021-01-12T10:42:57.621749 | 2014-04-10T14:14:26 | 2014-04-10T14:14:26 | 72,652,022 | 1 | 0 | null | 2016-11-02T15:13:00 | 2016-11-02T15:12:59 | null | UTF-8 | Python | false | false | 4,635 | py | # -*- coding: utf8 -*-
#
# XSL - graphical interface for SL
# Copyright (C) 2007-2016 Devaev Maxim
#
# This file is part of XSL.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import Qt
import Utils
import Locale
import Settings
import LangsList
import IconsLoader
##### Public classes #####
class MiscSettingsTab(Qt.QWidget) :
    """Settings-dialog tab with miscellaneous application options:
    tray icon, splash screen, debug logging and a forced UI language."""

    def __init__(self, parent = None) :
        Qt.QWidget.__init__(self, parent)

        self.__main_layout = Qt.QGridLayout()
        self.setLayout(self.__main_layout)

        #####

        self.__settings = Settings.Settings(self)

        #####

        # Rows 0-2: simple on/off options, each spanning both columns.
        self.__show_tray_icon_checkbox = Qt.QCheckBox(self)
        self.__main_layout.addWidget(self.__show_tray_icon_checkbox, 0, 0, 1, 2)

        self.__show_splash_checkbox = Qt.QCheckBox(self)
        self.__main_layout.addWidget(self.__show_splash_checkbox, 1, 0, 1, 2)

        self.__debug_mode_checkbox = Qt.QCheckBox(self)
        self.__main_layout.addWidget(self.__debug_mode_checkbox, 2, 0, 1, 2)

        # Row 3 stretches so the language picker stays at the bottom.
        self.__main_layout.setRowStretch(3, 1)

        self.__force_main_lang_label = Qt.QLabel(self)
        self.__main_layout.addWidget(self.__force_main_lang_label, 4, 0)

        self.__force_main_lang_combobox = Qt.QComboBox(self)
        self.__main_layout.addWidget(self.__force_main_lang_combobox, 4, 1)

        #####

        self.translateUi()


    ### Public ###

    def requisites(self) :
        # Icon and title used by the settings dialog to present this tab.
        return {
            "icon" : IconsLoader.icon("configure"),
            "title" : Qt.QT_TR_NOOP("Misc"),
        }

    ###

    def saveSettings(self) :
        # Persist widget state into the application settings store.
        self.__settings.setValue("application/misc/show_tray_icon_flag", Qt.QVariant(self.__show_tray_icon_checkbox.isChecked()))
        self.__settings.setValue("application/misc/show_splash_flag", Qt.QVariant(self.__show_splash_checkbox.isChecked()))
        self.__settings.setValue("application/logger/debug_mode_flag", Qt.QVariant(self.__debug_mode_checkbox.isChecked()))
        self.__settings.setValue("application/locale/force_main_lang",
            self.__force_main_lang_combobox.itemData(self.__force_main_lang_combobox.currentIndex()).toString())

    def loadSettings(self) :
        # Restore widget state from settings (tray/splash default to enabled).
        self.__show_tray_icon_checkbox.setChecked(self.__settings.value("application/misc/show_tray_icon_flag", Qt.QVariant(True)).toBool())
        self.__show_splash_checkbox.setChecked(self.__settings.value("application/misc/show_splash_flag", Qt.QVariant(True)).toBool())
        self.__debug_mode_checkbox.setChecked(self.__settings.value("application/logger/debug_mode_flag").toBool())

        ###

        # Select the saved forced language in the combobox, if it is present
        # (separator rows have empty text and are skipped).
        force_main_lang = self.__settings.value("application/locale/force_main_lang").toString()
        for count in xrange(self.__force_main_lang_combobox.count()) :
            if ( self.__force_main_lang_combobox.itemData(count).toString() == force_main_lang and
                not self.__force_main_lang_combobox.itemText(count).isEmpty() ) :
                self.__force_main_lang_combobox.setCurrentIndex(count)


    ### Private ###

    def translateUi(self) :
        self.__show_tray_icon_checkbox.setText(tr("Show tray icon"))
        self.__show_splash_checkbox.setText(tr("Show splash screen on startup"))
        self.__force_main_lang_label.setText(tr("Force language:"))
        self.__debug_mode_checkbox.setText(tr("Debug mode (write info to stderr)"))

        ###

        # Rebuild the language list (default entry + separator + all valid
        # languages), keeping the previously selected index.
        last_index = self.__force_main_lang_combobox.currentIndex()
        self.__force_main_lang_combobox.clear()
        lang_codes_dict = LangsList.langCodes()
        system_lang = Locale.Locale.systemLang()
        self.__force_main_lang_combobox.addItem(IconsLoader.icon(Utils.joinPath("flags", system_lang)),
            tr("By default (%1)").arg(LangsList.langName(system_lang, lang_codes_dict)), Qt.QVariant(""))
        self.__force_main_lang_combobox.insertSeparator(1)
        for langs_list_item in Locale.Locale.validLangs() :
            self.__force_main_lang_combobox.addItem(IconsLoader.icon(Utils.joinPath("flags", langs_list_item)),
                LangsList.langName(langs_list_item, lang_codes_dict), Qt.QVariant(langs_list_item))
        self.__force_main_lang_combobox.setCurrentIndex(last_index)


    ### Handlers ###

    def changeEvent(self, event) :
        # Re-translate the UI when the application language changes.
        if event.type() == Qt.QEvent.LanguageChange :
            self.translateUi()
        else :
            Qt.QWidget.changeEvent(self, event)
| [
"mdevaev@gmail.com"
] | mdevaev@gmail.com |
b3f1a24691f753afaad91eb52fb038da4b7fd51a | e5333b2e54f1adf2e5bc88a9a242234c5f15851a | /misoclib/com/liteeth/test/udp_tb.py | c7e04c8fd30ae94ea85af0dbf0ce195cda73726c | [
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | hoangt/misoc | 1aaf850c18bab5b18db1fcc788feb96afbbc464e | 6c13879fb605a1ee2bd5a3b35669e093f9a4267b | refs/heads/master | 2021-01-21T02:55:59.398987 | 2015-07-13T15:00:03 | 2015-07-13T15:25:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,834 | py | from migen.fhdl.std import *
from migen.bus import wishbone
from migen.bus.transactions import *
from migen.sim.generic import run_simulation
from misoclib.com.liteeth.common import *
from misoclib.com.liteeth.core import LiteEthUDPIPCore
from misoclib.com.liteeth.test.common import *
from misoclib.com.liteeth.test.model import phy, mac, arp, ip, udp
ip_address = 0x12345678
mac_address = 0x12345678abcd
class TB(Module):
    """Simulation testbench for the LiteEth UDP/IP core.

    Drives the hardware core against software models of the remote peer
    (PHY/MAC/ARP/IP/UDP); the UDP model is in loopback, so every packet the
    streamer sends should come back to the logger for comparison.
    """

    def __init__(self, dw=8):
        # dw: user datapath width in bits of the UDP port under test.
        self.dw = dw
        # Software models of the remote peer, layered bottom-up.
        self.submodules.phy_model = phy.PHY(8, debug=False)
        self.submodules.mac_model = mac.MAC(self.phy_model, debug=False, loopback=False)
        self.submodules.arp_model = arp.ARP(self.mac_model, mac_address, ip_address, debug=False)
        self.submodules.ip_model = ip.IP(self.mac_model, mac_address, ip_address, debug=False, loopback=False)
        self.submodules.udp_model = udp.UDP(self.ip_model, ip_address, debug=False, loopback=True)

        # Hardware core under test, with one user port on UDP port 0x5678.
        self.submodules.core = LiteEthUDPIPCore(self.phy_model, mac_address, ip_address, 100000)
        udp_port = self.core.udp.crossbar.get_port(0x5678, dw)
        self.submodules.streamer = PacketStreamer(eth_udp_user_description(dw))
        self.submodules.logger = PacketLogger(eth_udp_user_description(dw))
        # Streamer feeds the port sink; the port source feeds the logger.
        # length is 64 bytes expressed in dw-bit words.
        self.comb += [
            Record.connect(self.streamer.source, udp_port.sink),
            udp_port.sink.ip_address.eq(0x12345678),
            udp_port.sink.src_port.eq(0x1234),
            udp_port.sink.dst_port.eq(0x5678),
            udp_port.sink.length.eq(64//(dw//8)),
            Record.connect(udp_port.source, self.logger.sink)
        ]

        # use sys_clk for each clock_domain
        self.clock_domains.cd_eth_rx = ClockDomain()
        self.clock_domains.cd_eth_tx = ClockDomain()
        self.comb += [
            self.cd_eth_rx.clk.eq(ClockSignal()),
            self.cd_eth_rx.rst.eq(ResetSignal()),
            self.cd_eth_tx.clk.eq(ClockSignal()),
            self.cd_eth_tx.rst.eq(ResetSignal()),
        ]

    def gen_simulation(self, selfp):
        # Hold the ethernet clock domains in reset for one cycle, release,
        # then wait 100 cycles for the core to settle before sending traffic.
        selfp.cd_eth_rx.rst = 1
        selfp.cd_eth_tx.rst = 1
        yield
        selfp.cd_eth_rx.rst = 0
        selfp.cd_eth_tx.rst = 0
        for i in range(100):
            yield
        # Endlessly send an incrementing-payload packet and compare the echo.
        while True:
            packet = Packet([i for i in range(64//(self.dw//8))])
            yield from self.streamer.send(packet)
            yield from self.logger.receive()
            # check results
            s, l, e = check(packet, self.logger.packet)
            print("shift " + str(s) + " / length " + str(l) + " / errors " + str(e))
if __name__ == "__main__":
    # Run the loopback test for 8-, 16- and 32-bit user datapath widths.
    run_simulation(TB(8), ncycles=2048, vcd_name="my.vcd", keep_files=True)
    run_simulation(TB(16), ncycles=2048, vcd_name="my.vcd", keep_files=True)
    run_simulation(TB(32), ncycles=2048, vcd_name="my.vcd", keep_files=True)
| [
"florent@enjoy-digital.fr"
] | florent@enjoy-digital.fr |
0789c8472b6c78a39a30f2bada458771bf9b1f90 | 38af786946e955269eba63342f6d74c569619a19 | /backend/manage.py | 36a3f5239240d87766bdc9a321928982b0885406 | [] | no_license | crowdbotics-apps/nfc-21501 | e3b005abe6450cb7243118b98fd3392c5557454d | 87e2410d6022a0fee907193cd75f50e1a960a29b | refs/heads/master | 2022-12-28T16:07:47.361438 | 2020-10-14T20:33:46 | 2020-10-14T20:33:46 | 304,111,713 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 629 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'nfc_21501.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
ad01f8f72fabfb58743d2fd35c9720572beb238a | b4328ab3a6ece7d8f8568948781640c12f48053c | /accounts/admin.py | 79a78aa8dbe8463c995cc587989d4a0fd52c140b | [] | no_license | ochui/rockyshelf | 4de74bb2ecbe235371cafabc594c95d2a6169192 | eaa25f5867e53629e46629ca58c2815c0295a4a5 | refs/heads/master | 2020-08-01T03:25:33.250146 | 2019-09-26T17:55:42 | 2019-09-26T17:55:42 | 210,844,943 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 922 | py | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from .forms import CustomUserCreationForm, CustomUserChangeForm
from accounts.models import CustomUser
@admin.register(CustomUser)
class CustomUserAdmin(UserAdmin):
    """Django admin configuration for the project's custom user model."""

    # Use the project's custom forms so the extra fields are handled on the
    # create and change pages.
    add_form = CustomUserCreationForm
    form = CustomUserChangeForm
    # Changelist: visible columns, sidebar filters, and searchable fields.
    list_display = ['username', 'email', 'phone_number']
    list_filter = ['date_joined', 'last_login', 'is_active', 'is_staff']
    search_fields = ['username', 'email', 'phone_number']
    # Extend the stock UserAdmin layouts with KYC and academic sections.
    fieldsets = UserAdmin.fieldsets + (
        ('KYC', {'fields': ('phone_number', 'gender', 'date_of_birth')}),
        ('Academic', {'fields': ('school', 'faculty', 'department')})
    )
    add_fieldsets = UserAdmin.add_fieldsets + (
        ('KYC', {'fields': ('phone_number', 'gender', 'date_of_birth')}),
        ('Academic', {'fields': ('school', 'faculty', 'department')})
    )
    model = CustomUser
| [
"ochuiprincewill411@gmail.com"
] | ochuiprincewill411@gmail.com |
85da7ef93e50c892890f2eb8a31704305672719a | 5dc77586e3e0f9de1f032fd2ca68494d8e58928f | /contrib/great_expectations_semantic_types_expectations/great_expectations_semantic_types_expectations/expectations/expect_column_values_to_be_valid_country.py | 3a598c9eeb4f921bde123e0ff136c16049bccb8e | [
"Apache-2.0"
] | permissive | great-expectations/great_expectations | dd7c22e6277d6b08bee3ff38a015e6e8cd434df6 | b0290e2fd2aa05aec6d7d8871b91cb4478e9501d | refs/heads/develop | 2023-09-04T09:30:26.395518 | 2023-09-02T00:00:13 | 2023-09-02T00:00:13 | 103,071,520 | 8,931 | 1,535 | Apache-2.0 | 2023-09-14T19:57:16 | 2017-09-11T00:18:46 | Python | UTF-8 | Python | false | false | 5,763 | py | from typing import Optional
import geonamescache
from great_expectations.core.expectation_configuration import ExpectationConfiguration
from great_expectations.execution_engine import PandasExecutionEngine
from great_expectations.expectations.expectation import ColumnMapExpectation
from great_expectations.expectations.metrics import (
ColumnMapMetricProvider,
column_condition_partial,
)
def is_valid_country(country: str) -> bool:
    """Return True if *country* exactly matches a geonamescache country name.

    Only ``str`` input is accepted; anything else, or a string longer than
    252 characters, is rejected without consulting the cache.
    """
    # The type check must come first: the original checked len() before the
    # type, so inputs without a length (e.g. ints) raised TypeError instead
    # of returning False as the (then unreachable) type branch intended.
    if not isinstance(country, str):
        return False
    if len(country) > 252:
        return False
    geocache = geonamescache.GeonamesCache()
    country_names = {info["name"] for info in geocache.get_countries().values()}
    return country in country_names
# This class defines a Metric to support your Expectation.
# For most ColumnMapExpectations, the main business logic for calculation will live in this class.
class ColumnValuesToBeValidCountry(ColumnMapMetricProvider):
    """Map metric: flags, per row, whether the value is a valid country name
    (as decided by ``is_valid_country``)."""

    # This is the id string that will be used to reference your metric.
    condition_metric_name = "column_values.valid_country"

    # This method implements the core logic for the PandasExecutionEngine
    @column_condition_partial(engine=PandasExecutionEngine)
    def _pandas(cls, column, **kwargs):
        # Element-wise boolean series: True where the cell is a country name.
        return column.apply(lambda x: is_valid_country(x))

    # This method defines the business logic for evaluating your metric when using a SqlAlchemyExecutionEngine
    # @column_condition_partial(engine=SqlAlchemyExecutionEngine)
    # def _sqlalchemy(cls, column, _dialect, **kwargs):
    #     raise NotImplementedError

    # This method defines the business logic for evaluating your metric when using a SparkDFExecutionEngine
    # @column_condition_partial(engine=SparkDFExecutionEngine)
    # def _spark(cls, column, **kwargs):
    #     raise NotImplementedError
# This class defines the Expectation itself
class ExpectColumnValuesToBeValidCountry(ColumnMapExpectation):
    """Expect values in this column to be valid country names.

    Backed by the ``column_values.valid_country`` map metric above.
    See https://github.com/yaph/geonamescache for more information.
    """

    # These examples will be shown in the public gallery.
    # They will also be executed as unit tests for your Expectation.
    examples = [
        {
            "data": {
                "valid_countries": [
                    "Syria",
                    "Venezuela",
                    "Sao Tome and Principe",
                    "Tanzania",
                    "Uganda",
                ],
                "invalid_countries": [
                    "",
                    "1234",
                    "anarchy",
                    "Turkey men I stan",
                    "Frenc",
                ],
            },
            "tests": [
                {
                    "title": "basic_positive_test",
                    "exact_match_out": False,
                    "include_in_gallery": True,
                    "in": {"column": "valid_countries"},
                    "out": {"success": True},
                },
                {
                    "title": "basic_negative_test",
                    "exact_match_out": False,
                    "include_in_gallery": True,
                    "in": {"column": "invalid_countries"},
                    "out": {"success": False},
                },
            ],
        }
    ]

    # This is the id string of the Metric used by this Expectation.
    # For most Expectations, it will be the same as the `condition_metric_name` defined in your Metric class above.
    map_metric = "column_values.valid_country"

    # This is a list of parameter names that can affect whether the Expectation evaluates to True or False
    success_keys = ("mostly",)

    # This dictionary contains default values for any parameters that should have default values
    default_kwarg_values = {}

    def validate_configuration(
        self, configuration: Optional[ExpectationConfiguration]
    ) -> None:
        """
        Validates that a configuration has been set, and sets a configuration if it has yet to be set. Ensures that
        necessary configuration arguments have been provided for the validation of the expectation.

        Args:
            configuration (OPTIONAL[ExpectationConfiguration]): \
                An optional Expectation Configuration entry that will be used to configure the expectation
        Returns:
            None. Raises InvalidExpectationConfigurationError if the config is not validated successfully
        """

        # Delegate the standard checks to the base class; fall back to the
        # instance's own configuration when none is passed.
        super().validate_configuration(configuration)
        configuration = configuration or self.configuration

        # # Check other things in configuration.kwargs and raise Exceptions if needed
        # try:
        #     assert (
        #         ...
        #     ), "message"
        #     assert (
        #         ...
        #     ), "message"
        # except AssertionError as e:
        #     raise InvalidExpectationConfigurationError(str(e))

    # This object contains metadata for display in the public Gallery
    library_metadata = {
        "maturity": "experimental",  # "experimental", "beta", or "production"
        "tags": [
            "hackathon",
            "typed-entities",
        ],  # Tags for this Expectation in the Gallery
        "contributors": [  # Github handles for all contributors to this Expectation.
            "@luismdiaz01",
            "@derekma73",  # Don't forget to add your github handle here!
        ],
        "requirements": ["geonamescache"],
    }
if __name__ == "__main__":
    # Run Great Expectations' self-diagnostic checklist for this Expectation.
    ExpectColumnValuesToBeValidCountry().print_diagnostic_checklist()
| [
"noreply@github.com"
] | great-expectations.noreply@github.com |
724c1d2105517d9d8760bd6be9c4e99ea6501d3c | 51a2d52711300adabde54dd57901d873b3308401 | /py/specter/test/specter_test_suite.py | 668477ba21e0d53ac6dc6e92852b713f98efa15c | [
"BSD-3-Clause"
] | permissive | desihub/specter | 571933df1923dcc8cbc6c67a118023357a6c7b4f | d9b24e363db03841a24d2727a3775fd7b459f6a2 | refs/heads/main | 2023-01-24T11:12:37.203688 | 2023-01-13T04:06:30 | 2023-01-13T04:06:30 | 6,079,897 | 6 | 8 | NOASSERTION | 2023-01-11T18:44:34 | 2012-10-04T18:35:52 | Jupyter Notebook | UTF-8 | Python | false | false | 1,001 | py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
specter.test.specter_test_suite
===============================
Used to initialize the unit test framework via ``python setup.py test``.
"""
#
from __future__ import absolute_import, division, print_function, unicode_literals
#
import unittest
#
#- This is factored out separately from runtests() so that it can be used by
#- python setup.py test
def specter_test_suite():
"""Returns unittest.TestSuite of specter tests"""
from os.path import dirname
specter_dir = dirname(dirname(__file__))
# print(specter_dir)
return unittest.defaultTestLoader.discover(specter_dir,
top_level_dir=dirname(specter_dir))
def runtests():
"""Run all tests in specter.test.test_*.
"""
#- Load all TestCase classes from specter/test/test_*.py
tests = specter_test_suite()
#- Run them
unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == "__main__":
runtests()
| [
"benjamin.weaver@nyu.edu"
] | benjamin.weaver@nyu.edu |
1bb7ceb49897f7f73b38182c93e5cf9ae4dfbe56 | 54f352a242a8ad6ff5516703e91da61e08d9a9e6 | /Source Codes/AtCoder/abc095/B/4914893.py | 9f289d2cd87d2d83f00a6f0ec932caf15edcfcbf | [] | no_license | Kawser-nerd/CLCDSA | 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb | aee32551795763b54acb26856ab239370cac4e75 | refs/heads/master | 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 223 | py | N,X = map(int,input().split())
# Read the N doughnut costs (N and X are read above), buy one of each,
# then spend whatever money is left on the cheapest kind.
costs = [int(input()) for _ in range(N)]
leftover = X - sum(costs)
extra = leftover // min(costs) if leftover > 0 else 0
print(N + extra)
"kwnafi@yahoo.com"
] | kwnafi@yahoo.com |
cd4368d4e14f7ad7bd1ce40c5f6178a7aeae984f | 1c10937afbba2fd4a6c1d306603bc3b7f4900be6 | /aid1901/day3/demo7_surface.py | 9c281b372893452df740c322242eb08f741f85b2 | [] | no_license | zh-en520/- | 5366628ce9d819ed1d29e41e35c0996090df1085 | 1e4697b091a3b321adc3fa2c13192de8fc3686f7 | refs/heads/master | 2020-06-28T21:25:59.080990 | 2019-08-03T07:21:42 | 2019-08-03T07:21:42 | 200,345,642 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 476 | py | import numpy as np
import matplotlib.pyplot as mp
from mpl_toolkits.mplot3d import axes3d
# Grid resolution: n x n sample points over [-3, 3] x [-3, 3].
n = 1000
x,y = np.meshgrid(np.linspace(-3,3,n),np.linspace(-3,3,n))
# Surface height: a polynomial damped by a Gaussian envelope.
z = (1-x/2+x**5+y**3) * np.exp(-x**2-y**2)
mp.figure('3D Surface', facecolor='lightgray')
mp.tick_params(labelsize=10)
# Current axes with a 3-D projection.
ax3d = mp.gca(projection='3d')
ax3d.set_xlabel('x',fontsize=12)
ax3d.set_ylabel('y',fontsize=12)
ax3d.set_zlabel('z',fontsize=12)
# rstride/cstride downsample the mesh; 'jet' maps height to color.
ax3d.plot_surface(x,y,z,rstride=30,cstride=30,cmap='jet')
mp.show() | [
"zh_en520@163.com"
] | zh_en520@163.com |
97b3e190da056fe8d5955fb3adf6837e5831c392 | 9d0195aa83cc594a8c61f334b90375961e62d4fe | /JTTest/SL7/CMSSW_10_2_15/src/dataRunA/nano2933.py | 0bcf83215ea92e8d552ea23ae15ce5a482981041 | [] | no_license | rsk146/CMS | 4e49592fc64f6438051544c5de18598db36ed985 | 5f8dab8c59ae556598b9747b52b88205fffc4dbe | refs/heads/master | 2022-12-01T03:57:12.126113 | 2020-08-04T03:29:27 | 2020-08-04T03:29:27 | 284,863,383 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,292 | py | # Auto generated configuration file
# using:
# Revision: 1.19
# Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v
# with command line options: nanoAOD_jetToolbox_cff -s NANO --data --eventcontent NANOAOD --datatier NANOAOD --no_exec --conditions 102X_dataRun2_Sep2018Rereco_v1 --era Run2_2018,run2_nanoAOD_102Xv1 --customise_commands=process.add_(cms.Service('InitRootHandlers', EnableIMT = cms.untracked.bool(False))) --customise JMEAnalysis/JetToolbox/nanoAOD_jetToolbox_cff.nanoJTB_customizeMC --filein /users/h2/rsk146/JTTest/SL7/CMSSW_10_6_12/src/ttbarCutTest/dataReprocessing/0004A5E9-9F18-6B42-B31D-4206406CE423.root --fileout file:jetToolbox_nano_datatest.root
import FWCore.ParameterSet.Config as cms
from Configuration.StandardSequences.Eras import eras
process = cms.Process('NANO',eras.Run2_2018,eras.run2_nanoAOD_102Xv1)
# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
process.load('Configuration.StandardSequences.MagneticField_AutoFromDBCurrent_cff')
process.load('PhysicsTools.NanoAOD.nano_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(-1)
)
# Input source
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring('file:root://cms-xrd-global.cern.ch//store/data/Run2018A/EGamma/MINIAOD/17Sep2018-v2/60000/3883546D-5329-1549-9EA3-D864B127F166.root'),
secondaryFileNames = cms.untracked.vstring()
)
process.options = cms.untracked.PSet(
)
# Production Info
process.configurationMetadata = cms.untracked.PSet(
annotation = cms.untracked.string('nanoAOD_jetToolbox_cff nevts:1'),
name = cms.untracked.string('Applications'),
version = cms.untracked.string('$Revision: 1.19 $')
)
# Output definition
process.NANOAODoutput = cms.OutputModule("NanoAODOutputModule",
compressionAlgorithm = cms.untracked.string('LZMA'),
compressionLevel = cms.untracked.int32(9),
dataset = cms.untracked.PSet(
dataTier = cms.untracked.string('NANOAOD'),
filterName = cms.untracked.string('')
),
fileName = cms.untracked.string('file:jetToolbox_nano_datatest2933.root'),
outputCommands = process.NANOAODEventContent.outputCommands
)
# Additional output definition
# Other statements
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, '102X_dataRun2_Sep2018Rereco_v1', '')
# Path and EndPath definitions
process.nanoAOD_step = cms.Path(process.nanoSequence)
process.endjob_step = cms.EndPath(process.endOfProcess)
process.NANOAODoutput_step = cms.EndPath(process.NANOAODoutput)
# Schedule definition
process.schedule = cms.Schedule(process.nanoAOD_step,process.endjob_step,process.NANOAODoutput_step)
from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
associatePatAlgosToolsTask(process)
# customisation of the process.
# Automatic addition of the customisation function from PhysicsTools.NanoAOD.nano_cff
from PhysicsTools.NanoAOD.nano_cff import nanoAOD_customizeData
#call to customisation function nanoAOD_customizeData imported from PhysicsTools.NanoAOD.nano_cff
process = nanoAOD_customizeData(process)
# Automatic addition of the customisation function from JMEAnalysis.JetToolbox.nanoAOD_jetToolbox_cff
from JMEAnalysis.JetToolbox.nanoAOD_jetToolbox_cff import nanoJTB_customizeMC
#call to customisation function nanoJTB_customizeMC imported from JMEAnalysis.JetToolbox.nanoAOD_jetToolbox_cff
process = nanoJTB_customizeMC(process)
# End of customisation functions
# Customisation from command line
process.add_(cms.Service('InitRootHandlers', EnableIMT = cms.untracked.bool(False)))
# Add early deletion of temporary data products to reduce peak memory need
from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
process = customiseEarlyDelete(process)
# End adding early deletion | [
"rsk146@scarletmail.rutgers.edu"
] | rsk146@scarletmail.rutgers.edu |
748d1d3408f65254fb393d5846e48d2f13a89830 | 7e86a9bd9ec1f82838d114bf71ad0f6d0f12152c | /venv/Lib/site-packages/stellar_sdk/xdr/manage_sell_offer_op.py | eeb133891af91ad48163ea97b2db429074ad9c17 | [
"MIT"
] | permissive | yunoUNo/fini | b39688e7203d61f031f2ae9686845b0beccd9b2a | a833bc64a3aaf94f7268ec6eac690aa68327dd96 | refs/heads/master | 2023-08-05T17:42:48.726825 | 2021-09-29T13:30:32 | 2021-09-29T13:30:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,002 | py | # This is an automatically generated file.
# DO NOT EDIT or your changes may be overwritten
import base64
from xdrlib import Packer, Unpacker
from .asset import Asset
from .int64 import Int64
from .price import Price
__all__ = ["ManageSellOfferOp"]
class ManageSellOfferOp:
    """
    XDR Source Code
    ----------------------------------------------------------------
    struct ManageSellOfferOp
    {
        Asset selling;
        Asset buying;
        int64 amount; // amount being sold. if set to 0, delete the offer
        Price price;  // price of thing being sold in terms of what you are buying
        // 0=create a new offer, otherwise edit an existing offer
        int64 offerID;
    };
    ----------------------------------------------------------------
    """

    def __init__(
        self,
        selling: Asset,
        buying: Asset,
        amount: Int64,
        price: Price,
        offer_id: Int64,
    ) -> None:
        self.selling = selling
        self.buying = buying
        self.amount = amount
        self.price = price
        self.offer_id = offer_id

    def pack(self, packer: Packer) -> None:
        """Pack all fields, in XDR struct order, into *packer*."""
        self.selling.pack(packer)
        self.buying.pack(packer)
        self.amount.pack(packer)
        self.price.pack(packer)
        self.offer_id.pack(packer)

    @classmethod
    def unpack(cls, unpacker: Unpacker) -> "ManageSellOfferOp":
        """Read the fields back from *unpacker* in the same order `pack` wrote them."""
        selling = Asset.unpack(unpacker)
        buying = Asset.unpack(unpacker)
        amount = Int64.unpack(unpacker)
        price = Price.unpack(unpacker)
        offer_id = Int64.unpack(unpacker)
        return cls(
            selling=selling,
            buying=buying,
            amount=amount,
            price=price,
            offer_id=offer_id,
        )

    def to_xdr_bytes(self) -> bytes:
        """Serialize this operation to raw XDR bytes."""
        packer = Packer()
        self.pack(packer)
        return packer.get_buffer()

    @classmethod
    def from_xdr_bytes(cls, xdr: bytes) -> "ManageSellOfferOp":
        """Deserialize an operation from raw XDR bytes."""
        unpacker = Unpacker(xdr)
        return cls.unpack(unpacker)

    def to_xdr(self) -> str:
        """Serialize this operation to a base64-encoded XDR string."""
        xdr_bytes = self.to_xdr_bytes()
        return base64.b64encode(xdr_bytes).decode()

    @classmethod
    def from_xdr(cls, xdr: str) -> "ManageSellOfferOp":
        """Deserialize an operation from a base64-encoded XDR string."""
        xdr_bytes = base64.b64decode(xdr.encode())
        return cls.from_xdr_bytes(xdr_bytes)

    def __eq__(self, other: object):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return (
            self.selling == other.selling
            and self.buying == other.buying
            and self.amount == other.amount
            and self.price == other.price
            and self.offer_id == other.offer_id
        )

    def __str__(self):
        out = [
            f"selling={self.selling}",
            f"buying={self.buying}",
            f"amount={self.amount}",
            f"price={self.price}",
            f"offer_id={self.offer_id}",
        ]
        # BUG FIX: the fields were previously wrapped in a list *inside* the
        # f-string ("{[', '.join(out)]}"), which rendered the repr of a
        # one-element Python list (with quotes) instead of the intended
        # "<ManageSellOfferOp [field=..., ...]>" format.
        return f"<ManageSellOfferOp [{', '.join(out)}]>"
| [
"quit5123@gmail.com"
] | quit5123@gmail.com |
539d185255ee3adc8d504415b0c7c41c3d7bb57e | 0fcf15789a28415d274d313e0e00ce122e03f19e | /vdirsyncer/storage/base.py | 1a96ecef4a400915a355629c001e6332c83af9ad | [
"MIT"
] | permissive | eckhart/vdirsyncer | 1b08d7a21056d887a97c3a1925bcd319e723d5e4 | 9d0d174afee16af87bea17d252d95537f11a0554 | refs/heads/master | 2021-01-18T06:21:26.873466 | 2014-07-31T08:54:12 | 2014-07-31T08:54:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,681 | py | # -*- coding: utf-8 -*-
'''
vdirsyncer.storage.base
~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2014 Markus Unterwaditzer & contributors
:license: MIT, see LICENSE for more details.
'''
from .. import exceptions
from vdirsyncer.utils.vobject import Item # noqa
class Storage(object):
    '''Common base class of all storages; it mostly documents the interface
    that concrete backends are expected to implement.

    Terminology:

    - ITEM: Instance of the Item class, representing a calendar event,
      task or contact.
    - HREF: String; per-storage identifier of an item, which might be the
      UID. Items aren't simply referenced by their UID because the CalDAV
      and CardDAV specifications would make that imperformant to implement.
    - ETAG: String; checksum of an item, or something similar that changes
      when the item does.

    Strings can be either unicode strings or bytestrings. If bytestrings,
    an ASCII encoding is assumed.

    Subclass convention: if ``collection`` is None, the given URL or path
    directly refers to a collection; otherwise it is treated as a basepath
    to many collections (e.g. a vdir) in which the named collection is
    looked up.
    '''
    # Extension appended to an item's ident to build its href.
    fileext = '.txt'
    # The name used in the config file.
    storage_name = None
    # True means the storage never supports write-methods (upload, update,
    # delete). False means writes are supported, but the instance may still
    # be put into read-only mode via the constructor.
    read_only = False
    # Attribute names whose values are shown by __repr__.
    _repr_attributes = ()

    def __init__(self, read_only=None):
        # A None argument means "use the class-level default".
        effective = self.read_only if read_only is None else read_only
        if self.read_only and not effective:
            raise ValueError('This storage is read-only.')
        self.read_only = bool(effective)

    @classmethod
    def discover(cls, **kwargs):
        '''Discover collections given a basepath or -URL to many collections.

        :param **kwargs: Keyword arguments to additionally pass to the
            storage instances returned. You shouldn't pass ``collection``
            here, otherwise TypeError will be raised.
        :returns: Iterable of storages which represent the discovered
            collections, all of which are passed kwargs during
            initialization.
        '''
        raise NotImplementedError()

    def _get_href(self, item):
        '''Build the default href for *item* from its ident.'''
        return item.ident + self.fileext

    def __repr__(self):
        shown = {name: getattr(self, name) for name in self._repr_attributes}
        return '<{}(**{})>'.format(type(self).__name__, shown)

    def list(self):
        '''
        :returns: list of (href, etag)
        '''
        raise NotImplementedError()

    def get(self, href):
        '''Fetch a single item.

        :param href: href to fetch
        :returns: (item, etag)
        :raises: :exc:`vdirsyncer.exceptions.PreconditionFailed` if the
            item can't be found.
        '''
        raise NotImplementedError()

    def get_multi(self, hrefs):
        '''Fetch multiple items.

        Functionally similar to :py:meth:`get`, but might bring performance
        benefits on some storages when used cleverly.

        :param hrefs: list of hrefs to fetch
        :raises: :exc:`vdirsyncer.exceptions.PreconditionFailed` if one of
            the items couldn't be found.
        :returns: iterable of (href, item, etag)
        '''
        for href in hrefs:
            item, etag = self.get(href)
            yield href, item, etag

    def has(self, href):
        '''Check if an item exists by its href.

        :returns: True or False
        '''
        try:
            self.get(href)
            return True
        except exceptions.PreconditionFailed:
            return False

    def upload(self, item):
        '''Upload a new item.

        :raises: :exc:`vdirsyncer.exceptions.PreconditionFailed` if there
            is already an item with that href.
        :returns: (href, etag)
        '''
        raise NotImplementedError()

    def update(self, href, item, etag):
        '''Update an item.

        :raises: :exc:`vdirsyncer.exceptions.PreconditionFailed` if the
            etag on the server doesn't match the given etag or if the item
            doesn't exist.
        :returns: etag
        '''
        raise NotImplementedError()

    def delete(self, href, etag):
        '''Delete an item by href.

        :raises: :exc:`vdirsyncer.exceptions.PreconditionFailed` when the
            item has a different etag or doesn't exist.
        '''
        raise NotImplementedError()
| [
"markus@unterwaditzer.net"
] | markus@unterwaditzer.net |
ef4f09f43a2211561839ab35971f15db0dc93e6f | ac16a937f32602cf16114463f8e875a972f64c27 | /docs/dolfin/1.4.0/python/source/demo/undocumented/multistage-solver/python/demo_multi-stage-solver.py | 878ab4cdcf016ecb806852e645c82fae2e342902 | [] | no_license | mparno/fenics-web | 2073248da6f9918ffedbe9be8a3433bc1cbb7ffb | 7202752da876b1f9ab02c1d5a5f28ff5da526528 | refs/heads/master | 2021-05-05T04:45:46.436236 | 2016-12-06T20:25:44 | 2016-12-06T20:25:44 | 118,628,385 | 2 | 0 | null | 2018-01-23T15:21:47 | 2018-01-23T15:21:46 | null | UTF-8 | Python | false | false | 2,852 | py | # Copyright (C) 2007 Kristian B. Oelgaard
#
# This file is part of DOLFIN.
#
# DOLFIN is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DOLFIN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DOLFIN. If not, see <http://www.gnu.org/licenses/>.
#
# Modified by Anders Logg, 2008
# Modified by Johan Hake, 2008
# Modified by Garth N. Wells, 2009
# Modified by Johan Hake, 2013
#
# First added: 2007-11-14
# Last changed: 2013-04-05
#
# This demo solves the time-dependent convection-diffusion equation by
# a SUPG stabilized method. The velocity field used
# in the simulation is the output from the Stokes (Taylor-Hood) demo.
# The sub domains for the different boundary conditions are computed
# by the demo program in src/demo/subdomains.
#
# FIXME: Add shock capturing term and then revert back to the Stokes
# velocity
# FIXME: This demo showcase experimental features of a RKSolver (time integrator)
# FIXME: using a MultiStageScheme. It could be removed or changed anytime.
# NOTE: this demo is Python 2 code (the `print` statement below).
from dolfin import *
print "RKSolver is temporarily unavailable"
# Early exit: RKSolver/MultiStageScheme is experimental, so everything
# below this line is currently unreachable until the exit is removed.
exit(0)
# Load mesh and subdomains
mesh = Mesh("../dolfin_fine.xml.gz")
sub_domains = MeshFunction("size_t", mesh, "../dolfin_fine_subdomains.xml.gz");
h = CellSize(mesh)
# Create FunctionSpaces (Q: scalar temperature, V: quadratic velocity)
Q = FunctionSpace(mesh, "CG", 1)
V = VectorFunctionSpace(mesh, "CG", 2)
# Create velocity Function from file (output of the Stokes demo)
velocity = Function(V);
File("../dolfin_fine_velocity.xml.gz") >> velocity
# Initialise source function and previous solution function
f = Constant(0.0)
u0 = Function(Q)
# Parameters: end time, time step, current time, diffusivity
T = 5.0
dt = 0.1
t = Constant(0.0)
c = 0.00005
# Test and trial functions
u, v = Function(Q), TestFunction(Q)
# Residual
r = dot(velocity, grad(u)) - c*div(grad(u)) - f
# Galerkin variational problem (rhs)
F = -(v*dot(velocity, grad(u)) + c*dot(grad(v), grad(u)))*dx
# Add SUPG stabilisation terms
vnorm = sqrt(dot(velocity, velocity))
F -= h/(2.0*vnorm)*dot(velocity, grad(v))*r*dx
# Set up boundary condition (value ramps from 0 to 1 over the first second)
g = Expression("(t<=ramp_stop) ? t : 1.0", t=t, ramp_stop=1.0)
bc = DirichletBC(Q, g, sub_domains, 1)
# Output file
out_file = File("results/temperature.pvd")
scheme = BDF1(F, u, t, [bc])
solver = RKSolver(scheme)
# Time-stepping
while float(scheme.t()) < T:
    solver.step(dt)
    # Plot solution
    plot(u)
    # Save the solution to file
    out_file << (u, float(scheme.t()))
# Hold plot
#interactive()
| [
"johannr@simula.no"
] | johannr@simula.no |
2f3a8f2892355a2e5223f8516cf17c81f3f6edf3 | c9697437c292df7fefd68559fdd9636066bdb2f1 | /science/comparisons/comparing_fsw_hyd_ide__pulse.py | c6c3ad48b11a1b2ed6d56b85e2e087163bc7b111 | [] | no_license | JoshKarpel/ionization | ebdb387483a9bc3fdb52818ab8e897e562ffcc67 | 3056df523ee90147d262b0e8bfaaef6f2678ea11 | refs/heads/master | 2021-03-24T13:03:57.469388 | 2020-04-06T03:37:04 | 2020-04-06T03:37:04 | 62,348,115 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,848 | py | import logging
import os
import itertools
import numpy as np
import simulacra as si
from simulacra.units import *
import ionization as ion
import ide as ide
# Output locations derived from this script's own filename.
FILE_NAME = os.path.splitext(os.path.basename(__file__))[0]
OUT_DIR = os.path.join(os.getcwd(), "out", FILE_NAME)
SIM_LIB = os.path.join(OUT_DIR, "simlib")  # cache for saved/checkpointed simulations
logman = si.utils.LogManager(
    "simulacra", "ionization", stdout_logs=True, stdout_level=logging.DEBUG
)
# Keyword arguments shared by every plot call in this script.
PLOT_KWARGS = dict(target_dir=OUT_DIR, img_format="png", fig_dpi_scale=3)
def run(spec):
    """Load (or create) the simulation for *spec*, run it to completion if
    it has not finished yet, plot its wavefunction history, and return it.
    """
    with logman as logger:
        simulation = si.utils.find_or_init_sim(spec, search_dir=SIM_LIB)
        simulation.spec.hydrogen_zero_angular_momentum_correction = True
        simulation.info().log()

        if not simulation.status == si.Status.FINISHED:
            simulation.run_simulation()
            simulation.save(target_dir=SIM_LIB)

        simulation.info().log()
        simulation.plot_wavefunction_vs_time(**PLOT_KWARGS)
        return simulation
if __name__ == "__main__":
    # Parameter scan: for every (pulse width, fluence, CEP phase) combination,
    # run the same physical problem with several numerical methods (finite
    # square well on a line, hydrogen-like spherical harmonics, and two IDE
    # models) and compare the surviving initial-state population.
    with logman as logger:
        pulse_widths = np.array([50, 100, 200, 400, 800]) * asec
        fluences = np.array([0.01, 0.1, 1, 10, 20]) * Jcm2
        # fluences = np.array([.1, 1, 10, 20]) * Jcm2
        phases = [0]
        # phases = [0, pi / 2]
        for pw, flu, phase in itertools.product(pulse_widths, fluences, phases):
            # Simulation time window extends t_bound pulse widths on each side.
            t_bound = 32
            efield = ion.potentials.SincPulse(
                pulse_width=pw,
                fluence=flu,
                phase=phase,
                window=ion.potentials.LogisticWindow(
                    window_time=(t_bound - 2) * pw, window_width=0.2 * pw
                ),
            )
            test_width = 1 * bohr_radius
            test_charge = 1 * electron_charge
            test_mass = 1 * electron_mass
            potential_depth = 36.831335 * eV
            internal_potential = ion.FiniteSquareWell(
                potential_depth=potential_depth, width=test_width
            )
            # Keyword arguments shared by every specification below.
            shared_kwargs = dict(
                test_width=test_width,
                test_charge=test_charge,
                test_mass=test_mass,
                potential_depth=potential_depth,
                electric_potential=efield,
                time_initial=-t_bound * pw,
                time_final=t_bound * pw,
                time_step=1 * asec,
                electric_potential_dc_correction=True,
                x_bound=200 * bohr_radius,
                x_points=2 ** 14,
                r_bound=200 * bohr_radius,
                r_points=200 * 10,
                l_bound=500,
                mask=ion.RadialCosineMask(
                    inner_radius=175 * bohr_radius, outer_radius=200 * bohr_radius
                ),
                use_numeric_eigenstates=True,
                numeric_eigenstate_max_energy=10 * eV,
                numeric_eigenstate_max_angular_momentum=10,
                time_step_minimum=1 * asec,
                time_step_maximum=10 * asec,
                error_on="da/dt",
                epsilon=1e-6,
                analytic_eigenstate_type=ion.FiniteSquareWellState,
                checkpoints=True,
                checkpoint_dir=SIM_LIB,
                store_data_every=1,
            )
            prefix = (
                f"pw={pw / asec:2f}as_flu={flu / Jcm2:4f}jcm2_phase={phase / pi:3f}pi"
            )
            fsw_initial_state = ion.FiniteSquareWellState.from_potential(
                internal_potential, mass=test_mass
            )
            specs = [
                ion.LineSpecification(
                    prefix + "__fsw_len",
                    internal_potential=internal_potential,
                    initial_state=fsw_initial_state,
                    evolution_gauge="LEN",
                    **shared_kwargs,
                ),
                # ion.LineSpecification(
                #     prefix + '__line_vel',
                #     internal_potential = internal_potential,
                #     initial_state = ion.FiniteSquareWellState.from_potential(internal_potential, mass = test_mass),
                #     evolution_gauge = 'VEL',
                #     **shared_kwargs,
                # ),
                ion.SphericalHarmonicSpecification(
                    prefix + "__hyd_len", evolution_gauge="LEN", **shared_kwargs
                ),
                # ion.SphericalHarmonicSpecification(
                #     prefix + '__hyd_vel',
                #     evolution_gauge = 'VEL',
                #     **shared_kwargs,
                # ),
                ide.IntegroDifferentialEquationSpecification(
                    prefix + "__ide_len",
                    integral_prefactor=ide.gaussian_prefactor_LEN(
                        test_width, test_charge
                    ),
                    kernel=ide.gaussian_kernel_LEN,
                    kernel_kwargs={
                        "tau_alpha": ide.gaussian_tau_alpha_LEN(test_width, test_mass)
                    },
                    test_energy=fsw_initial_state.energy,
                    evolution_gauge="LEN",
                    evolution_method="ARK4",
                    **shared_kwargs,
                ),
                ide.IntegroDifferentialEquationSpecification(
                    prefix + "__ide_vel",
                    integral_prefactor=ide.gaussian_prefactor_VEL(
                        test_width, test_charge, test_mass
                    ),
                    kernel=ide.gaussian_kernel_VEL,
                    kernel_kwargs={
                        "tau_alpha": ide.gaussian_tau_alpha_VEL(test_width, test_mass),
                        "width": test_width,
                    },
                    test_energy=fsw_initial_state.energy,
                    evolution_gauge="VEL",
                    evolution_method="ARK4",
                    **shared_kwargs,
                ),
            ]
            results = si.utils.multi_map(run, specs, processes=4)
            # Write the final initial-state overlap of every simulation, then
            # each simulation's full info dump, to a per-combination text file.
            final_initial_state_overlaps = []
            with open(
                os.path.join(OUT_DIR, f"results__{prefix}.txt"), mode="w"
            ) as file:
                for r in results:
                    try:
                        final_initial_state_overlap = r.state_overlaps_vs_time[
                            r.spec.initial_state
                        ][-1]
                        file.write(f"{final_initial_state_overlap} : {r.name}\n")
                    except AttributeError:  # ide simulations store overlap as b2
                        final_initial_state_overlap = r.b2[-1]
                        file.write(f"{final_initial_state_overlap} : {r.name}\n")
                    final_initial_state_overlaps.append(final_initial_state_overlap)
                file.write("\n\n\n")
                for r in results:
                    file.write(str(r.info()) + "\n")
            # Line style encodes the gauge, color encodes the method.
            styles = {"len": "-", "vel": "--"}
            colors = {"hyd": "C0", "fsw": "C1", "ide": "C2"}
            def get_style_and_color_keys(name):
                # NOTE(review): raises UnboundLocalError if a name matches no
                # key — every sim name built above contains one key from each
                # dict, so this does not trigger here.
                for key in styles:
                    if key in name:
                        key_style = key
                for key in colors:
                    if key in name:
                        color_style = key
                return key_style, color_style
            y_data = []
            y_kwargs = []
            for r in results:
                key_style, key_color = get_style_and_color_keys(r.name)
                y_kwargs.append(
                    {"linestyle": styles[key_style], "color": colors[key_color]}
                )
                try:
                    y_data.append(r.state_overlaps_vs_time[r.spec.initial_state])
                except AttributeError:  # ide simulations store overlap as b2
                    y_data.append(r.b2)
            # Linear-scale comparison plot.
            si.vis.xxyy_plot(
                f"comparison__{prefix}",
                list(r.data_times for r in results),
                y_data,
                line_labels=(
                    r.name[-7:] if "line" not in r.name else r.name[-8:]
                    for r in results
                ),
                line_kwargs=y_kwargs,
                x_label=r"$t$",
                x_unit="asec",
                y_label="Initial State Population",
                title=prefix,
                **PLOT_KWARGS,
            )
            # Log-scale version of the same plot, with axis limits padded
            # around the final overlap values.
            y_lower_limit, y_upper_limit = si.vis.get_axis_limits(
                [y[-1] for y in y_data], log=True, log_pad=2
            )
            si.vis.xxyy_plot(
                f"comparison__{prefix}__log",
                list(r.data_times for r in results),
                y_data,
                line_labels=(
                    r.name[-7:] if "line" not in r.name else r.name[-8:]
                    for r in results
                ),
                line_kwargs=y_kwargs,
                x_label=r"$t$",
                x_unit="asec",
                y_label="Initial State Population",
                y_log_axis=True,
                y_lower_limit=y_lower_limit,
                y_upper_limit=y_upper_limit,
                y_log_pad=1,
                title=prefix,
                **PLOT_KWARGS,
            )
            # y_lower_limit, y_upper_limit = si.vis.get_axis_limits([1 - x for x in final_initial_state_overlaps], log = True, log_pad = 2)
            # si.vis.xxyy_plot(
            #     f'comparison__{prefix}__log',
            #     list(r.data_times for r in results),
            #     [1 - y for y in y_data],
            #     line_labels = (r.name[-7:] if 'line' not in r.name else r.name[-8:] for r in results),
            #     line_kwargs = y_kwargs,
            #     x_label = r'$t$', x_unit = 'asec',
            #     y_label = '1 - Initial State Population',
            #     y_log_axis = True,
            #     y_lower_limit = y_lower_limit, y_upper_limit = y_upper_limit, y_log_pad = 1,
            #     **PLOT_KWARGS,
            # )
| [
"josh.karpel@gmail.com"
] | josh.karpel@gmail.com |
966234d2cd9a29a5a0dd663e2c463d3f3a40d314 | d0be690dfc259c004746237c77521ec4fd30fb52 | /lbl_lontest.py | 1bfcfb2dd28af772c434e77b44346d24d6ae1e8d | [
"Apache-2.0"
] | permissive | rbeyer/scriptorium | 3301741ab6c8ece86dc545b1306bfe7450caf21d | 47e2341c132dc92a33032e5147eba0c26d8ef7fb | refs/heads/master | 2023-07-19T20:16:46.649746 | 2023-07-11T16:11:32 | 2023-07-11T16:11:32 | 91,381,585 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,363 | py | #!/usr/bin/env python
"""Reads a PDS3 INDEX or CUMINDEX, and helps determine what longitude system
it might be in."""
# Copyright 2021, Ross A. Beyer (rbeyer@rossbeyer.net)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import csv
import sys
from pathlib import Path
import pvl
from lbl2sql import get_columns
def arg_parser():
    """Build and return the command-line parser for this script."""
    p = argparse.ArgumentParser(description=__doc__)
    p.add_argument(
        "-a", "--all",
        action="store_true",
        help="Completely check the file instead of failing fast."
    )
    p.add_argument(
        "-l", "--label",
        type=Path,
        help="PDS3 Label file. If not given, this program will look in the "
             "directory with the index file, and see if it can find an "
             "appropriate .LBL file."
    )
    p.add_argument(
        "index",
        type=Path,
        help="A PDS index.tab or a cumindex.tab file."
    )
    return p
def main():
    """Scan CENTER_LONGITUDE values in a PDS3 index/cumindex and report
    which longitude system (0..360 vs -180..180) the file appears to use.

    Returns 0 on success, -1 if the label defines no CENTER_LONGITUDE
    column; exits with status 1 if no label file can be located.
    """
    args = arg_parser().parse_args()

    if args.label is None:
        # Try both common capitalizations of the label extension next to
        # the index file.
        for suffix in (".LBL", ".lbl"):
            # BUG FIX: use the loop's suffix instead of the hard-coded
            # ".LBL", so lowercase .lbl labels are actually found.
            p = args.index.with_suffix(suffix)
            if p.exists():
                args.label = p
                break
        else:
            print(
                "Could not guess an appropriate LBL file, please "
                "use -l explicitly."
            )
            sys.exit(1)

    label = pvl.load(args.label)
    columns = get_columns(label)

    if "CENTER_LONGITUDE" not in columns:
        print("CENTER_LONGITUDE not in columns. Quitting.")
        return -1

    with open(args.index, newline='') as csvfile:
        reader = csv.DictReader(csvfile, fieldnames=columns)
        if args.all:
            # Exhaustive mode: classify based on the entire file.
            lon360 = None
            lon180 = None
            for row in reader:
                lon = float(row["CENTER_LONGITUDE"])
                if lon > 180:
                    lon360 = row["CENTER_LONGITUDE"]
                elif lon < 0:
                    lon180 = row["CENTER_LONGITUDE"]
            if lon360 and lon180 is None:
                print("Found longitudes greater than 180. Probably Lon360.")
            elif lon180 and lon360 is None:
                print("Found longitudes less than 0. Probably Lon180.")
            elif lon180 is not None and lon360 is not None:
                print(
                    "Found longitudes less than 0 and greater than 180, "
                    "which is messed up."
                )
            else:
                print("All longitudes were between 0 and 180, weird.")
        else:
            # Fail-fast mode: report the first decisive longitude and stop.
            for row in reader:
                lon = float(row["CENTER_LONGITUDE"])
                if lon > 180 or lon < 0:
                    print(f'Found CENTER_LONGITUDE of {row["CENTER_LONGITUDE"]}')
    return 0
# Script entry point: the process exit status comes from main().
if __name__ == "__main__":
    sys.exit(main())
| [
"rbeyer@rossbeyer.net"
] | rbeyer@rossbeyer.net |
17a36b19dd6ab80b882c1f4b8536eecb5b143d26 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5670465267826688_1/Python/Renelvon/solve.py | b550930afd0bdd40a51ce994a3275d3db77fcd60 | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 2,649 | py | #!/usr/bin/env python3
import functools
import sys
# Cayley table for the quaternion units under Hamilton's rules
# (i*i = j*j = k*k = -1, i*j = k, j*k = i, k*i = j). Keys are pairs of
# unsigned unit symbols; values may carry a leading '-' sign.
_QUATERNION_MULT_TABLE = {
    ("1", "1"): "1",
    ("1", "i"): "i",
    ("1", "j"): "j",
    ("1", "k"): "k",
    ("i", "1"): "i",
    ("i", "i"): "-1",
    ("i", "j"): "k",
    ("i", "k"): "-j",
    ("j", "1"): "j",
    ("j", "i"): "-k",
    ("j", "j"): "-1",
    ("j", "k"): "i",
    ("k", "1"): "k",
    ("k", "i"): "j",
    ("k", "j"): "-i",
    ("k", "k"): "-1"
}
def main():
    """Entry point: the input file path is the first CLI argument."""
    solve(sys.argv[1])
def solve(input_file):
    """Read the Code Jam input file and print one verdict line per case."""
    with open(input_file) as f_in:
        testcases = int(next(f_in))
        for case in range(1, testcases + 1):
            # Each case is two lines: "L X" and then the spelling itself.
            l, x = (int(num) for num in next(f_in).split())
            spelling = next(f_in).rstrip()
            verdict = "YES" if can_be_written(spelling, l, x) else "NO"
            print("Case #%d: %s" % (case, verdict))
def can_be_written(spelling, l, x):
    """True iff the spelling repeated x times can multiply out to "ijk".

    Dispatches to a direct scan for small repetition counts and to the
    periodicity-based shortcut for large ones.
    """
    if l * x < 3:
        # Fewer than three unit factors can never spell i*j*k.
        return False
    checker = large_can_be_written if x > 12 else small_can_be_written
    return checker(spelling, l, x)
def small_can_be_written(spelling, l, x):
    """Direct check for small x: expand the word fully and test whether it
    factors as (minimal prefix = i) * (middle = j) * (minimal suffix = k).
    """
    entire_spelling = spelling * x
    iidx = find_minimal_i_prefix(entire_spelling)  # position one past the end
    kidx = find_minimal_k_suffix(entire_spelling)  # position at start
    # Prefix and suffix must be non-empty and must not overlap.
    if 0 < iidx < kidx < l * x:
        return "j" == functools.reduce(
            mult_quarts, entire_spelling[iidx:kidx], "1"
        )
    return False
def large_can_be_written(spelling, l, x):
    """Check large x without expanding the whole word.

    Only four copies of the spelling are materialised; the prefix/suffix
    search is confined to them, and the middle keeps just (x - 8) % 4
    extra copies — relying on the period-4 behaviour of repeated
    quaternion products (full blocks of four copies contribute at most a
    sign that cancels in groups).
    """
    quad_spelling = spelling * 4
    iidx = find_minimal_i_prefix(quad_spelling)  # position one past the end
    kidx = find_minimal_k_suffix(quad_spelling)  # position at start
    if 0 < iidx < l * 4 and 0 < kidx < l * 4:
        # Keep only the remainder of the middle copies modulo 4.
        _, q = divmod(x - 8, 4)
        residual = "".join((
            quad_spelling[iidx:],
            spelling * q,
            quad_spelling[:kidx]
        ))
        return "j" == functools.reduce(mult_quarts, residual, "1")
    return False
def find_minimal_i_prefix(qtext):
    """Return the length of the shortest prefix of *qtext* whose quaternion
    product equals "i". Returns len(qtext) when no proper prefix works.
    """
    product = "1"
    for consumed, unit in enumerate(qtext):
        if product == "i":
            return consumed
        product = mult_quarts(product, unit)
    return len(qtext)
def find_minimal_k_suffix(qtext):
    """Return the start index of the shortest suffix of *qtext* whose
    quaternion product equals "k". Returns 0 when no proper suffix works.
    """
    product = "1"
    for start in range(len(qtext) - 1, -1, -1):
        if product == "k":
            # The suffix beginning one character to the right already
            # multiplied out to "k".
            return start + 1
        product = mult_quarts(qtext[start], product)
    return 0
def mult_quarts(q1, q2):
    """Multiply two signed quaternion units.

    Units are "1", "i", "j", "k", optionally prefixed with "-" (only "-"
    is used; "+" is always omitted).
    """
    unsigned = _QUATERNION_MULT_TABLE[(q1[-1], q2[-1])]
    # A factor is negative exactly when its string has length 2, so the
    # product of the two signs is negative when the combined length is odd.
    if (len(q1) + len(q2)) % 2 == 0:
        return unsigned
    # Flip the sign of the table entry.
    return unsigned[1:] if unsigned.startswith("-") else "-" + unsigned
# Script entry point.
if __name__ == '__main__':
    main()
| [
"eewestman@gmail.com"
] | eewestman@gmail.com |
601b0f0c6249de30d39f9c6bcca5bccafa39d142 | 6b20a0d3fc814cff0adafcffcaa193eb91677897 | /data/phys/fill_6297/config_common.py | 72dfc824d5f1784597b9bf16698c4ea5e8b83fbc | [] | no_license | jan-kaspar/analysis_ctpps_alignment_2017_postTS2 | 3a942f62ec8ea8f2e124e09831dfe87f54980f26 | 66b1e0cd77643c143797cee53fc36b8652a6becf | refs/heads/master | 2021-05-04T12:55:09.753837 | 2018-08-21T11:31:19 | 2018-08-21T11:31:19 | 120,303,385 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 282 | py | import sys
import os
import FWCore.ParameterSet.Config as cms
# Make the repository root importable so config_base can be found.
sys.path.append(os.path.relpath("../../../../../"))
from config_base import config
# Fill-6297-specific horizontal/vertical cut values for the two sectors.
# (Units are not stated here — presumably detector coordinates; TODO confirm
# against config_base.)
config.sector_45.cut_h_c = -38.57
config.sector_45.cut_v_c = 1.63
config.sector_56.cut_h_c = -39.34
config.sector_56.cut_v_c = 1.41
| [
"jan.kaspar@cern.ch"
] | jan.kaspar@cern.ch |
26c39c3f10cf4e8e3c1a1f83e5764c9d78805969 | 21b131564f9708d4667dc6dc0043ce6173dfa83c | /leetcode/Symmetric Tree.py | b6335a454916b0a33c3576aab48d96da4023aaa9 | [] | no_license | hermitbaby/leetcode | 5f1b6854c181adabb00951a56dd5235316ab8a45 | 5b76e2ff61a42cd366447d4a7cf1f4f9c624369b | refs/heads/master | 2020-12-24T08:37:25.846329 | 2017-04-28T23:27:57 | 2017-04-28T23:27:57 | 34,924,256 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,655 | py | # Given a binary tree, check whether it is a mirror of itself (ie, symmetric around its center).
#
# For example, this binary tree is symmetric:
#
# 1
# / \
# 2 2
# / \ / \
# 3 4 4 3
# But the following is not:
# 1
# / \
# 2 2
# \ \
# 3 3
# Note:
# Bonus points if you could solve it both recursively and iteratively.
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    """Check whether a binary tree is a mirror of itself (LeetCode 101).

    Nodes are duck-typed: anything with ``val``, ``left`` and ``right``.
    """

    # @param {TreeNode} root
    # @return {boolean}
    def isSymmetric0(self, root):
        """Recursive variant."""
        return root is None or self.isSym(root.left, root.right)

    def isSym(self, left, right):
        """True iff the subtrees *left* and *right* mirror each other."""
        if left is None or right is None:
            # Mirror only if both sides are absent.
            return left is right
        if left.val != right.val:
            return False
        return (self.isSym(left.left, right.right)
                and self.isSym(left.right, right.left))

    def isSymmetric(self, root):
        """Iterative variant using an explicit stack of mirrored pairs."""
        if root is None:
            return True
        pending = [(root.left, root.right)]
        while pending:
            left, right = pending.pop()
            if left is None and right is None:
                continue
            if left is None or right is None or left.val != right.val:
                return False
            # Outer pair and inner pair must each mirror.
            pending.append((left.left, right.right))
            pending.append((left.right, right.left))
        return True
"lchen@choicestream.com"
] | lchen@choicestream.com |
f9effb28c711e475e08298c71f5a21a49d7acde2 | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/yangydeng_tc_koubei_newBird/tc_koubei_newBird-master/dyy/MLcodes/MLcode_0.py | a7977400f7727ac2ae6150328cbb99d500a9d311 | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 1,026 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Feb 09 10:21:49 2017
@author: Administrator
"""
import pandas as pd
from sklearn.ensemble import RandomForestRegressor
from sklearn.ensemble import ExtraTreesRegressor
from sklearn.ensemble import GradientBoostingRegressor
import sys
sys.path.append('../tools')
from tools import get_result
# Suffix identifying which prepared feature snapshot to load.
day_time = '_02_16_3'
train_x = pd.read_csv('../train_0/train_x'+day_time+'.csv')
train_y = pd.read_csv('../train_0/train_y'+day_time+'.csv')
test_x = pd.read_csv('../test_0/test_x'+day_time+'.csv')
# Earlier random-forest experiment, kept for reference:
#RF = RandomForestRegressor(n_estimators=1200,random_state=1,n_jobs=-1,min_samples_split=2,min_samples_leaf=2,max_depth=25)
#RF.fit(train_x,train_y)
#pre = (RF.predict(test_x)).round()
# Extra-trees regressor; predictions are rounded to integer counts.
ET = ExtraTreesRegressor(n_estimators=1200,random_state=1,n_jobs=-1,min_samples_split=2,min_samples_leaf=2,max_depth=25,max_features=270)
ET.fit(train_x,train_y)
pre = (ET.predict(test_x)).round()
# Format predictions into the submission layout and write without header/index.
result = get_result(pre)
result.to_csv('../results/result'+day_time+'.csv',index=False,header=False)
| [
"659338505@qq.com"
] | 659338505@qq.com |
615663416663253f049b531beb1c929695c6c61f | 6fd26735b9dfd1d3487c1edfebf9e1e595196168 | /2016/day08a_tiny_code_displaying_screen.py | b29fc623f52b71abd419ce76166fd185ffdc2a7b | [
"BSD-3-Clause"
] | permissive | Kwpolska/adventofcode | bc3b1224b5272aa8f3a5c4bef1d8aebe04dcc677 | 8e55ef7b31a63a39cc2f08b3f28e15c2e4720303 | refs/heads/master | 2021-01-10T16:48:38.816447 | 2019-12-03T20:46:07 | 2019-12-03T20:46:07 | 47,507,587 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,149 | py | #!/usr/bin/env python3
import numpy
with open("input/08.txt") as fh:
file_data = fh.read()
def solve(data, width, height):
array = numpy.zeros((height, width), numpy.bool)
lines = [i for i in data.split('\n') if i]
l = len(lines)
# kwpbar.pbar(0, l)
for n, line in enumerate(lines):
if line.startswith('rect'):
# OPERATION = rect
a, b = (int(i) for i in line[4:].split('x'))
for x in range(a):
for y in range(b):
array[y][x] = True
else:
# OPERATION = rotate
_, t, d, _, b = line.split()
a = int(d[2:])
b = int(b)
if t == 'column':
array[:,a] = numpy.roll(array[:,a], b)
else:
array[a] = numpy.roll(array[a], b)
return numpy.count_nonzero(array)
test_data = "rect 3x2\nrotate column x=1 by 1\nrotate row y=0 by 4\nrotate column x=1 by 1"
test_output = solve(test_data, 7, 3)
test_expected = 6 # ".#..#.#\n#.#....\n.#....."
print(test_output, test_expected)
assert test_output == test_expected
print(solve(file_data, 50, 6))
| [
"kwpolska@gmail.com"
] | kwpolska@gmail.com |
fad8744022c9adaad6c09d096ffac46929675f6e | 0a65d42f4f0e491cb2aada408401b94909f821c2 | /mhiap/mhiap_landing/urls.py | f01523ec5db9b36db84dafc291186a45715832a5 | [] | no_license | jmadlansacay/_Office | 3acde7655784e91c7dcecfc853d4f36cdfeef028 | 7f46449b9f7e8e892e2e0025ba493259197fa592 | refs/heads/main | 2023-07-28T10:23:54.680822 | 2021-09-11T02:28:07 | 2021-09-11T02:28:07 | 379,155,026 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | py | from django.urls import path
from . import views
# Route table for the landing app: the empty path serves the landing page.
urlpatterns = [
    path('', views.landing_index, name ='landing_index'),
]
"Q034800@mhi.co.jp"
] | Q034800@mhi.co.jp |
c0a91143554164dc10207cb1baec7850f8e5b7c4 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /benchmark/startQiskit_Class3164.py | 75bd4fadf578af609572e00a72c5b264519b5bbc | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,211 | py | # qubit number=4
# total number=39
import cirq
import qiskit
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
    """XOR two equal-length bit strings character-wise.

    NOTE: the result is returned reversed (last XOR bit first), matching
    the generated benchmark harness that consumes it.
    """
    forward = [str(int(s[i]) ^ int(t[i])) for i in range(len(s))]
    return ''.join(reversed(forward))
def bitwise_dot(s: str, t: str) -> str:
    """Dot product of two bit strings modulo 2, returned as "0" or "1"."""
    total = sum(int(s[i]) * int(t[i]) for i in range(len(s)))
    return str(total % 2)
def build_oracle(n: int, f) -> QuantumCircuit:
    """Build the oracle circuit O_f on *n* control qubits plus one target:
    for every n-bit pattern `rep` with f(rep) == "1", the target qubit is
    flipped by a multi-controlled Toffoli.
    """
    # implement the oracle O_f
    # NOTE: use multi_control_toffoli_gate ('noancilla' mode)
    # https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
    # https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
    # https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
    controls = QuantumRegister(n, "ofc")
    target = QuantumRegister(1, "oft")
    oracle = QuantumCircuit(controls, target, name="Of")
    for i in range(2 ** n):
        rep = np.binary_repr(i, n)
        if f(rep) == "1":
            # X-conjugate the zero-bits so the Toffoli fires exactly on `rep`.
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
            oracle.mct(controls, target[0], None, mode='noancilla')
            # Undo the conjugating X gates.
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
    # oracle.barrier()
    return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
    """Assemble the benchmark circuit: a fixed, auto-generated gate sequence
    on *n* qubits, with the oracle for *f* appended over the first n-1
    qubits plus the last qubit. The trailing "# number=..." tags are
    bookkeeping from the circuit generator.
    """
    # circuit begin
    input_qubit = QuantumRegister(n,"qc")
    classical = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classical)
    prog.cx(input_qubit[0],input_qubit[3]) # number=12
    prog.x(input_qubit[3]) # number=13
    prog.h(input_qubit[3]) # number=28
    prog.cz(input_qubit[0],input_qubit[3]) # number=29
    prog.h(input_qubit[3]) # number=30
    prog.z(input_qubit[3]) # number=10
    prog.h(input_qubit[1]) # number=2
    prog.h(input_qubit[2]) # number=3
    prog.rx(2.708052867394402,input_qubit[1]) # number=11
    prog.h(input_qubit[3]) # number=4
    prog.h(input_qubit[0]) # number=5
    oracle = build_oracle(n-1, f)
    prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
    prog.h(input_qubit[1]) # number=6
    prog.y(input_qubit[2]) # number=16
    prog.h(input_qubit[0]) # number=36
    prog.cz(input_qubit[1],input_qubit[0]) # number=37
    prog.h(input_qubit[0]) # number=38
    prog.h(input_qubit[3]) # number=25
    prog.z(input_qubit[1]) # number=20
    prog.z(input_qubit[3]) # number=31
    prog.h(input_qubit[0]) # number=22
    prog.cz(input_qubit[1],input_qubit[0]) # number=23
    prog.h(input_qubit[0]) # number=24
    prog.z(input_qubit[2]) # number=15
    prog.h(input_qubit[2]) # number=7
    prog.h(input_qubit[3]) # number=8
    prog.y(input_qubit[2]) # number=18
    prog.h(input_qubit[0]) # number=9
    prog.h(input_qubit[0]) # number=32
    prog.cz(input_qubit[1],input_qubit[0]) # number=33
    prog.h(input_qubit[0]) # number=34
    prog.x(input_qubit[2]) # number=35
    prog.cx(input_qubit[1],input_qubit[0]) # number=27
    # circuit end
    return prog
if __name__ == '__main__':
    # Hidden-string parameters for the Bernstein-Vazirani-style function
    # f(rep) = (a . rep) XOR b.
    a = "111"
    b = "0"
    f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
    prog = make_circuit(4,f)
    # Exact statevector simulation (no sampling).
    backend = BasicAer.get_backend('statevector_simulator')
    sample_shot =8000
    info = execute(prog, backend=backend).result().get_statevector()
    qubits = round(log2(len(info)))
    # Convert amplitudes to per-basis-state probabilities, rounded to 3 places.
    info = {
        np.binary_repr(i, qubits): round((info[i]*(info[i].conjugate())).real,3)
        for i in range(2 ** qubits)
    }
    # Transpile against a mocked device to record the compiled circuit size.
    backend = FakeVigo()
    circuit1 = transpile(prog,backend,optimization_level=2)
    writefile = open("../data/startQiskit_Class3164.csv","w")
    print(info,file=writefile)
    print("results end", file=writefile)
    print(circuit1.__len__(),file=writefile)
    print(circuit1,file=writefile)
    writefile.close()
| [
"wangjiyuan123@yeah.net"
] | wangjiyuan123@yeah.net |
bcfd03c9f8baeb8050157a15878fa97165389a4e | 440f868dd3d3bfe9bbeb8350eeac19741550537c | /migrations/versions/0a48b22fada9_modify_comments_table_migration.py | 690ff91979ef90c03e948d56b715d042030420b8 | [
"MIT"
] | permissive | Isaacg94/personal-blog | 47cc9036a85d0928d816a523188636be1a1c6ed5 | be4bc49655c5dd17664e7532ae9ceef31161157a | refs/heads/master | 2022-12-14T00:26:46.175694 | 2021-04-30T23:43:43 | 2021-04-30T23:43:43 | 217,496,372 | 0 | 0 | null | 2022-12-08T06:47:24 | 2019-10-25T09:11:21 | Python | UTF-8 | Python | false | false | 1,098 | py | """modify comments table Migration
Revision ID: 0a48b22fada9
Revises: 8eb8a629f46e
Create Date: 2019-10-28 20:39:15.260269
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0a48b22fada9'        # this migration's id
down_revision = '8eb8a629f46e'   # parent migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Apply the migration: create the ``comments`` table.

    Each comment row references a post (``post_id`` -> posts.id) and the
    commenting user (``user_id`` -> users.id).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('comments',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('comment', sa.String(), nullable=True),
    sa.Column('post_id', sa.Integer(), nullable=True),
    sa.Column('posted', sa.DateTime(), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.Column('comment_by', sa.String(), nullable=True),
    sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop the ``comments`` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('comments')
    # ### end Alembic commands ###
| [
"7248zack@gmail.com"
] | 7248zack@gmail.com |
e6fecc5dfd263374da30baa5ca08dcfc36cf843e | e9b2a3af17a82ec4bebf3b0ca24e670885de951e | /neo/VM/RandomAccessStack.py | 7cd230c31d8ae5c134ab231ddc132e70785f6ac5 | [
"LicenseRef-scancode-free-unknown",
"MIT"
] | permissive | imusify/crowdfunding-blockchain-middleware | 379efb5654f36ee3028fdd50b7e4ae649973ca6e | 35d967b005741208a7947b2edface5158d177413 | refs/heads/imu2 | 2022-06-02T06:39:05.971622 | 2018-03-03T09:59:38 | 2018-03-03T09:59:38 | 120,617,579 | 3 | 2 | MIT | 2021-06-01T21:55:40 | 2018-02-07T13:18:57 | Python | UTF-8 | Python | false | false | 1,682 | py | from neo.VM.InteropService import StackItem
class RandomAccessStack():
    """Stack with random access by depth.

    For Peek/Remove/Set, index 0 addresses the top of the stack (the most
    recently pushed item) and larger indices address deeper items.
    """

    _list = []
    _name = 'Stack'

    def __init__(self, name='Stack'):
        self._list = []
        self._name = name

    @property
    def Count(self):
        # Number of items currently on the stack.
        return len(self._list)

    @property
    def Items(self):
        # The raw backing list, bottom of the stack first.
        return self._list

    def Clear(self):
        # Discard every item.
        self._list = []

    def GetEnumerator(self):
        # Yield (position, item) pairs, bottom of the stack first.
        return enumerate(self._list)

    def Insert(self, index, item):
        # NOTE: unlike Peek/Remove/Set, this index is a raw list position
        # (counted from the bottom), preserved from the original code.
        index = int(index)
        if not 0 <= index <= self.Count:
            raise Exception("Invalid list operation")
        self._list.insert(index, item)

    def Peek(self, index=0):
        # Return the item `index` positions below the top without removing it.
        index = int(index)
        if index >= self.Count:
            raise Exception("Invalid list operation")
        return self._list[self.Count - 1 - index]

    def Pop(self):
        # Remove and return the top item.
        return self.Remove(0)

    def PushT(self, item):
        # Push, coercing plain values into StackItem wrappers first.
        if not isinstance(item, StackItem):
            item = StackItem.New(item)
        self._list.append(item)

    def Remove(self, index):
        # Remove and return the item `index` positions below the top.
        index = int(index)
        if not 0 <= index < self.Count:
            raise Exception("Invalid list operation")
        return self._list.pop(self.Count - 1 - index)

    def Set(self, index, item):
        # Overwrite the item `index` positions below the top, coercing
        # plain values into StackItem wrappers first.
        index = int(index)
        if not 0 <= index <= self.Count:
            raise Exception("Invalid list operation")
        if not isinstance(item, StackItem):
            item = StackItem.New(item)
        self._list[self.Count - index - 1] = item
| [
"tasaunders@gmail.com"
] | tasaunders@gmail.com |
28a9f6ea1e2c3d5c96b687baee604ce54d312130 | d7ccb4225f623139995a7039f0981e89bf6365a4 | /.history/accounts/models_20211010000215.py | 35292b7aa656d3cd1f02443e0c8db84e9d132c67 | [] | no_license | tonnymuchui/django-mall | 64fd4abc3725c1bd0a3dcf20b93b490fe9307b37 | 55c083d8433be3c77adc61939cd197902de4ce76 | refs/heads/master | 2023-08-23T04:59:20.418732 | 2021-10-13T15:59:37 | 2021-10-13T15:59:37 | 415,668,388 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 936 | py | from django.db import models
from django.contrib.auth.models import AbstractBaseUser, BaseUserManager
# Create your models here.
class Account(AbstractBaseUser):
first_name = models.CharField(max_length=50)
last_name = models.CharField(max_length=50)
username = models.CharField(max_length=50, unique=True)
email = models.EmailField(max_length=100, unique=True)
phone_number = models.CharField(max_length=50)
# required
date_joined = models.DateTimeField(auto_now_add=True)
last_login = models.DateTimeField(auto_now_add=True)
is_admin = models.BooleanField(default=False)
is_staff = models.BooleanField(default=False)
is_active = models.BooleanField(default=False)
is_superadmin = models.BooleanField(default=False)
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['username', 'first_name', 'last_name']
de | [
"tonykanyingah@gmail.com"
] | tonykanyingah@gmail.com |
d4fe72b1336081d99ef0dabd6ff3b10c865f77a2 | af327b6738acf8ee9383dc4fd9bc82bf522a9afb | /第6章 结构化数据分析工具Pandas/code_6.4.5.py | 83223ac8d27510a3b5a381e3be239d285164f8b3 | [] | no_license | xufive/ways2grow | c76b03d56645ae1731bcb450ab30e0cd171f0570 | 3abf8b942bfe997d73b73fe1fb61ff81ad74d093 | refs/heads/master | 2022-12-28T12:08:01.984345 | 2020-10-16T10:01:53 | 2020-10-16T10:01:53 | 265,993,044 | 16 | 9 | null | null | null | null | UTF-8 | Python | false | false | 768 | py | # -*- encoding: utf-8 -*-
"""
6.4.5 日期时间索引对象
"""
import pandas as pd
# Build DatetimeIndex objects directly from lists of ISO date strings.
print(pd.DatetimeIndex(['2020-03-10', '2020-03-11', '2020-03-12']))
print(pd.DatetimeIndex(pd.Index(['2020-03-10', '2020-03-11', '2020-03-12'])))
# The same date strings held as a plain Index and as a Series.
idx = pd.Index(['2020-03-10', '2020-03-11', '2020-03-12'])
sdt = pd.Series(['2020-03-10', '2020-03-11', '2020-03-12'])
print(idx)
print(sdt)
# A DatetimeIndex can be constructed from either container.
print(pd.DatetimeIndex(idx))
print(pd.DatetimeIndex(sdt))
# pd.to_datetime converts lists, Index or Series to datetime values.
print(pd.to_datetime(['2020-03-10', '2020-03-11', '2020-03-12', '2020-03-13']))
print(pd.to_datetime(idx))
print(pd.to_datetime(sdt))
# pd.date_range generates regular sequences: daily by default, or with an
# explicit frequency ('3H' = 3 hours, '15T' = 15 minutes).
print(pd.date_range(start='2020-05-12', end='2020-05-18'))
print(pd.date_range(start='2020-05-12 08:00:00', periods=6, freq='3H'))
print(pd.date_range(start='08:00:00', end='9:00:00', freq='15T'))
| [
"xufive@sdysit.com"
] | xufive@sdysit.com |
e15a973ef84fabaca83fcc2b58c755850c5f1177 | 877bd731bc97f220c363914d1e66970e2d9e599e | /python_stack/_django/django_intro/books_authors_proj/books_authors_app/urls.py | 2cbca658d4748c9158dd939713ddc91444e95267 | [] | no_license | mpresto/dojo | eaccc08465298d35ae5a8e0d60e547a90bc24e05 | aec14ee041950eea7c35003fa03b0728b4606754 | refs/heads/master | 2021-05-26T00:15:16.551562 | 2020-10-04T00:09:48 | 2020-10-04T00:09:48 | 253,975,631 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | from django.urls import path
from . import views
# URL routes for the books/authors app.  Each path maps to a view in
# views.py; the names suggest list/detail/create pages -- verify there.
urlpatterns = [
    path('books', views.book),
    path('create_book', views.process_book),
    path('books/<int:id>', views.detail_book),
    path('add_author', views.add_author),
    path('authors', views.author),
    path('create_author', views.process_author),
    path('authors/<int:id>', views.detail_author),
    path('add_book', views.add_book),
]
"monty.preston5@gmail.com"
] | monty.preston5@gmail.com |
29089798558c29e3df1bc3539daeac08a378841d | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_cockier.py | 5759063c1290757ca3ab8527642ce07e621e48af | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 220 | py |
#calss header
class _COCKIER():
def __init__(self,):
self.name = "COCKIER"
self.definitions = cocky
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['cocky']
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
da5fb1954750247d10046085ec61041faa23735d | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_103/ch39_2020_04_01_14_56_02_997196.py | a35791d49c98122f6542b004e3643a0780ffd8b9 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 645 | py | def collatz(primeiro_termo):
novo_termo=0
if primeiro_termo<1000:
if primeiro_termo%2==0:
novo_termo=primeiro_termo/2
else:
novo_termo=(primeiro_termo*3)+1
n=2
lista=[0]*n
lista[0]=primeiro_termo
lista[1]=novo_termo
while novo_termo>1:
if novo_termo%2==0:
novo_termo=novo_termo/2
else:
novo_termo=(novo_termo*3)+1
n+=1
lista.append(novo_termo)
return len(lista)
tamanho=0
primeiro=0
i=1
while i<1000:
m=collatz(i)
if m> tamanho:
tamanho=m
primeiro=i
i+=1
return primeiro
| [
"you@example.com"
] | you@example.com |
372438639e65e16bba52319154723a2d65319fc8 | d543c03d5f6993a260049ae645228e5e00f7238e | /python27/win32/Lib/zipfile.py | 7619cfee78d3de83ee873a655191daad41073f18 | [
"bzip2-1.0.6",
"LicenseRef-scancode-openssl",
"LicenseRef-scancode-ssleay-windows",
"LicenseRef-scancode-newlib-historical",
"BSD-3-Clause",
"OpenSSL",
"Sleepycat",
"LicenseRef-scancode-proprietary-license",
"GPL-1.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-pyth... | permissive | hehahovip/oss-ftp | 8ea4a5b7a0f5b0a156bbd9c9586e71b89c39f021 | 400b934b2fa625e31da41b3c6af98fc14f4f63ab | refs/heads/master | 2020-04-04T12:47:59.959182 | 2018-11-16T06:49:27 | 2018-11-16T06:49:27 | 155,938,177 | 1 | 0 | MIT | 2018-11-03T01:18:23 | 2018-11-03T01:18:23 | null | UTF-8 | Python | false | false | 59,383 | py | """
Read and write ZIP files.
"""
import struct, os, time, sys, shutil
import binascii, cStringIO, stat
import io
import re
import string
try:
import zlib # We may need its compression method
crc32 = zlib.crc32
except ImportError:
zlib = None
crc32 = binascii.crc32
__all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile",
"ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile" ]
class BadZipfile(Exception):
    """Raised when a file cannot be parsed as a valid ZIP archive
    (bad magic number, truncated records, CRC mismatch, ...)."""
    pass
class LargeZipFile(Exception):
    """
    Raised when writing a zipfile, the zipfile requires ZIP64 extensions
    and those extensions are disabled (allowZip64=False).
    """
error = BadZipfile # The exception raised by this module

# Sizes beyond these limits require the ZIP64 extensions.
ZIP64_LIMIT = (1 << 31) - 1
ZIP_FILECOUNT_LIMIT = (1 << 16) - 1
ZIP_MAX_COMMENT = (1 << 16) - 1

# constants for Zip file compression methods
ZIP_STORED = 0
ZIP_DEFLATED = 8
# Other ZIP compression methods not supported

# Below are some formats and associated data for reading/writing headers using
# the struct module.  The names and structures of headers/records are those used
# in the PKWARE description of the ZIP file format:
#     http://www.pkware.com/documents/casestudies/APPNOTE.TXT
# (URL valid as of January 2008)

# The "end of central directory" structure, magic number, size, and indices
# (section V.I in the format document)
structEndArchive = "<4s4H2LH"
stringEndArchive = "PK\005\006"
sizeEndCentDir = struct.calcsize(structEndArchive)

# Indexes into the endrec list built by _EndRecData().
_ECD_SIGNATURE = 0
_ECD_DISK_NUMBER = 1
_ECD_DISK_START = 2
_ECD_ENTRIES_THIS_DISK = 3
_ECD_ENTRIES_TOTAL = 4
_ECD_SIZE = 5
_ECD_OFFSET = 6
_ECD_COMMENT_SIZE = 7
# These last two indices are not part of the structure as defined in the
# spec, but they are used internally by this module as a convenience
_ECD_COMMENT = 8
_ECD_LOCATION = 9

# The "central directory" structure, magic number, size, and indices
# of entries in the structure (section V.F in the format document)
structCentralDir = "<4s4B4HL2L5H2L"
stringCentralDir = "PK\001\002"
sizeCentralDir = struct.calcsize(structCentralDir)

# indexes of entries in the central directory structure
_CD_SIGNATURE = 0
_CD_CREATE_VERSION = 1
_CD_CREATE_SYSTEM = 2
_CD_EXTRACT_VERSION = 3
_CD_EXTRACT_SYSTEM = 4
_CD_FLAG_BITS = 5
_CD_COMPRESS_TYPE = 6
_CD_TIME = 7
_CD_DATE = 8
_CD_CRC = 9
_CD_COMPRESSED_SIZE = 10
_CD_UNCOMPRESSED_SIZE = 11
_CD_FILENAME_LENGTH = 12
_CD_EXTRA_FIELD_LENGTH = 13
_CD_COMMENT_LENGTH = 14
_CD_DISK_NUMBER_START = 15
_CD_INTERNAL_FILE_ATTRIBUTES = 16
_CD_EXTERNAL_FILE_ATTRIBUTES = 17
_CD_LOCAL_HEADER_OFFSET = 18

# The "local file header" structure, magic number, size, and indices
# (section V.A in the format document)
structFileHeader = "<4s2B4HL2L2H"
stringFileHeader = "PK\003\004"
sizeFileHeader = struct.calcsize(structFileHeader)

_FH_SIGNATURE = 0
_FH_EXTRACT_VERSION = 1
_FH_EXTRACT_SYSTEM = 2
_FH_GENERAL_PURPOSE_FLAG_BITS = 3
_FH_COMPRESSION_METHOD = 4
_FH_LAST_MOD_TIME = 5
_FH_LAST_MOD_DATE = 6
_FH_CRC = 7
_FH_COMPRESSED_SIZE = 8
_FH_UNCOMPRESSED_SIZE = 9
_FH_FILENAME_LENGTH = 10
_FH_EXTRA_FIELD_LENGTH = 11

# The "Zip64 end of central directory locator" structure, magic number, and size
structEndArchive64Locator = "<4sLQL"
stringEndArchive64Locator = "PK\x06\x07"
sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator)

# The "Zip64 end of central directory" record, magic number, size, and indices
# (section V.G in the format document)
structEndArchive64 = "<4sQ2H2L4Q"
stringEndArchive64 = "PK\x06\x06"
sizeEndCentDir64 = struct.calcsize(structEndArchive64)

_CD64_SIGNATURE = 0
_CD64_DIRECTORY_RECSIZE = 1
_CD64_CREATE_VERSION = 2
_CD64_EXTRACT_VERSION = 3
_CD64_DISK_NUMBER = 4
_CD64_DISK_NUMBER_START = 5
_CD64_NUMBER_ENTRIES_THIS_DISK = 6
_CD64_NUMBER_ENTRIES_TOTAL = 7
_CD64_DIRECTORY_SIZE = 8
_CD64_OFFSET_START_CENTDIR = 9
def _check_zipfile(fp):
    """Return True if fp has a valid end-of-archive record (ZIP magic)."""
    try:
        endrec = _EndRecData(fp)
    except IOError:
        return False
    return True if endrec else False
def is_zipfile(filename):
    """Quickly see if a file is a ZIP file by checking the magic number.

    `filename` may be either a path or an already-open file-like object.
    """
    try:
        if hasattr(filename, "read"):
            # Already file-like: probe it in place.
            return _check_zipfile(fp=filename)
        with open(filename, "rb") as fp:
            return _check_zipfile(fp)
    except IOError:
        # Unreadable/unopenable files simply aren't zip files.
        return False
def _EndRecData64(fpin, offset, endrec):
    """
    Read the ZIP64 end-of-archive records and use that to update endrec.

    `offset` is negative, relative to the end of the file; `endrec` is the
    list built from the classic end-of-central-directory record.  Returns
    `endrec` (updated in place when ZIP64 records are found).
    """
    try:
        # Seek relative to EOF (whence=2) to where the locator should sit.
        fpin.seek(offset - sizeEndCentDir64Locator, 2)
    except IOError:
        # If the seek fails, the file is not large enough to contain a ZIP64
        # end-of-archive record, so just return the end record we were given.
        return endrec

    data = fpin.read(sizeEndCentDir64Locator)
    if len(data) != sizeEndCentDir64Locator:
        return endrec
    sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data)
    if sig != stringEndArchive64Locator:
        return endrec

    if diskno != 0 or disks != 1:
        raise BadZipfile("zipfiles that span multiple disks are not supported")

    # Assume no 'zip64 extensible data'
    fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2)
    data = fpin.read(sizeEndCentDir64)
    if len(data) != sizeEndCentDir64:
        return endrec
    sig, sz, create_version, read_version, disk_num, disk_dir, \
            dircount, dircount2, dirsize, diroffset = \
            struct.unpack(structEndArchive64, data)
    if sig != stringEndArchive64:
        return endrec

    # Update the original endrec using data from the ZIP64 record
    endrec[_ECD_SIGNATURE] = sig
    endrec[_ECD_DISK_NUMBER] = disk_num
    endrec[_ECD_DISK_START] = disk_dir
    endrec[_ECD_ENTRIES_THIS_DISK] = dircount
    endrec[_ECD_ENTRIES_TOTAL] = dircount2
    endrec[_ECD_SIZE] = dirsize
    endrec[_ECD_OFFSET] = diroffset
    return endrec
def _EndRecData(fpin):
    """Return data from the "End of Central Directory" record, or None.

    The data is a list of the nine items in the ZIP "End of central dir"
    record followed by a tenth item, the file seek offset of this record."""

    # Determine file size
    fpin.seek(0, 2)
    filesize = fpin.tell()

    # Check to see if this is ZIP file with no archive comment (the
    # "end of central directory" structure should be the last item in the
    # file if this is the case).
    try:
        fpin.seek(-sizeEndCentDir, 2)
    except IOError:
        # File too small to hold even the fixed-size record.
        return None
    data = fpin.read()
    if (len(data) == sizeEndCentDir and
        data[0:4] == stringEndArchive and
        data[-2:] == b"\000\000"):
        # the signature is correct and there's no comment, unpack structure
        endrec = struct.unpack(structEndArchive, data)
        endrec=list(endrec)

        # Append a blank comment and record start offset
        endrec.append("")
        endrec.append(filesize - sizeEndCentDir)

        # Try to read the "Zip64 end of central directory" structure
        return _EndRecData64(fpin, -sizeEndCentDir, endrec)

    # Either this is not a ZIP file, or it is a ZIP file with an archive
    # comment.  Search the end of the file for the "end of central directory"
    # record signature. The comment is the last item in the ZIP file and may be
    # up to 64K long.  It is assumed that the "end of central directory" magic
    # number does not appear in the comment.
    maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0)
    fpin.seek(maxCommentStart, 0)
    data = fpin.read()
    start = data.rfind(stringEndArchive)
    if start >= 0:
        # found the magic number; attempt to unpack and interpret
        recData = data[start:start+sizeEndCentDir]
        if len(recData) != sizeEndCentDir:
            # Zip file is corrupted.
            return None
        endrec = list(struct.unpack(structEndArchive, recData))
        commentSize = endrec[_ECD_COMMENT_SIZE] #as claimed by the zip file
        comment = data[start+sizeEndCentDir:start+sizeEndCentDir+commentSize]
        endrec.append(comment)
        endrec.append(maxCommentStart + start)

        # Try to read the "Zip64 end of central directory" structure
        return _EndRecData64(fpin, maxCommentStart + start - filesize,
                             endrec)

    # Unable to find a valid end of central directory structure
    return None
class ZipInfo (object):
    """Class with attributes describing each file in the ZIP archive."""

    __slots__ = (
        'orig_filename',
        'filename',
        'date_time',
        'compress_type',
        'comment',
        'extra',
        'create_system',
        'create_version',
        'extract_version',
        'reserved',
        'flag_bits',
        'volume',
        'internal_attr',
        'external_attr',
        'header_offset',
        'CRC',
        'compress_size',
        'file_size',
        '_raw_time',
    )

    def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)):
        self.orig_filename = filename   # Original file name in archive

        # Terminate the file name at the first null byte.  Null bytes in file
        # names are used as tricks by viruses in archives.
        null_byte = filename.find(chr(0))
        if null_byte >= 0:
            filename = filename[0:null_byte]
        # This is used to ensure paths in generated ZIP files always use
        # forward slashes as the directory separator, as required by the
        # ZIP format specification.
        if os.sep != "/" and os.sep in filename:
            filename = filename.replace(os.sep, "/")

        self.filename = filename        # Normalized file name
        self.date_time = date_time      # year, month, day, hour, min, sec

        if date_time[0] < 1980:
            raise ValueError('ZIP does not support timestamps before 1980')

        # Standard values:
        self.compress_type = ZIP_STORED # Type of compression for the file
        self.comment = ""               # Comment for each file
        self.extra = ""                 # ZIP extra data
        if sys.platform == 'win32':
            self.create_system = 0      # System which created ZIP archive
        else:
            # Assume everything else is unix-y
            self.create_system = 3      # System which created ZIP archive
        self.create_version = 20        # Version which created ZIP archive
        self.extract_version = 20       # Version needed to extract archive
        self.reserved = 0               # Must be zero
        self.flag_bits = 0              # ZIP flag bits
        self.volume = 0                 # Volume number of file header
        self.internal_attr = 0          # Internal attributes
        self.external_attr = 0          # External file attributes
        # Other attributes are set by class ZipFile:
        # header_offset         Byte offset to the file header
        # CRC                   CRC-32 of the uncompressed file
        # compress_size         Size of the compressed file
        # file_size             Size of the uncompressed file

    def FileHeader(self, zip64=None):
        """Return the per-file header as a string."""
        dt = self.date_time
        # Pack date/time into the MS-DOS two-field format used by ZIP.
        dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
        dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
        if self.flag_bits & 0x08:
            # Set these to zero because we write them after the file data
            CRC = compress_size = file_size = 0
        else:
            CRC = self.CRC
            compress_size = self.compress_size
            file_size = self.file_size

        extra = self.extra

        if zip64 is None:
            zip64 = file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT
        if zip64:
            fmt = '<HHQQ'
            extra = extra + struct.pack(fmt,
                    1, struct.calcsize(fmt)-4, file_size, compress_size)
        if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:
            if not zip64:
                raise LargeZipFile("Filesize would require ZIP64 extensions")
            # File is larger than what fits into a 4 byte integer,
            # fall back to the ZIP64 extension
            file_size = 0xffffffff
            compress_size = 0xffffffff
            self.extract_version = max(45, self.extract_version)
            self.create_version = max(45, self.extract_version)

        filename, flag_bits = self._encodeFilenameFlags()
        header = struct.pack(structFileHeader, stringFileHeader,
                 self.extract_version, self.reserved, flag_bits,
                 self.compress_type, dostime, dosdate, CRC,
                 compress_size, file_size,
                 len(filename), len(extra))
        return header + filename + extra

    def _encodeFilenameFlags(self):
        # Return (encoded_filename, flag_bits); bit 0x800 marks UTF-8 names.
        if isinstance(self.filename, unicode):
            try:
                return self.filename.encode('ascii'), self.flag_bits
            except UnicodeEncodeError:
                return self.filename.encode('utf-8'), self.flag_bits | 0x800
        else:
            return self.filename, self.flag_bits

    def _decodeFilename(self):
        # Decode the stored name as UTF-8 when the 0x800 flag bit is set.
        if self.flag_bits & 0x800:
            return self.filename.decode('utf-8')
        else:
            return self.filename

    def _decodeExtra(self):
        # Try to decode the extra field.
        extra = self.extra
        unpack = struct.unpack
        while len(extra) >= 4:
            tp, ln = unpack('<HH', extra[:4])
            if tp == 1:
                # Header id 1 is the ZIP64 extended information field.
                if ln >= 24:
                    counts = unpack('<QQQ', extra[4:28])
                elif ln == 16:
                    counts = unpack('<QQ', extra[4:20])
                elif ln == 8:
                    counts = unpack('<Q', extra[4:12])
                elif ln == 0:
                    counts = ()
                else:
                    raise RuntimeError, "Corrupt extra field %s"%(ln,)

                idx = 0

                # ZIP64 extension (large files and/or large archives)
                if self.file_size in (0xffffffffffffffffL, 0xffffffffL):
                    self.file_size = counts[idx]
                    idx += 1

                if self.compress_size == 0xFFFFFFFFL:
                    self.compress_size = counts[idx]
                    idx += 1

                if self.header_offset == 0xffffffffL:
                    old = self.header_offset
                    self.header_offset = counts[idx]
                    idx+=1

            extra = extra[ln+4:]
class _ZipDecrypter:
    """Class to handle decryption of files stored within a ZIP archive.

    ZIP supports a password-based form of encryption. Even though known
    plaintext attacks have been found against it, it is still useful
    to be able to get data out of such a file.

    Usage:
        zd = _ZipDecrypter(mypwd)
        plain_char = zd(cypher_char)
        plain_text = map(zd, cypher_text)
    """

    def _GenerateCRCTable():
        """Generate a CRC-32 table.

        ZIP encryption uses the CRC32 one-byte primitive for scrambling some
        internal keys. We noticed that a direct implementation is faster than
        relying on binascii.crc32().
        """
        poly = 0xedb88320
        table = [0] * 256
        for i in range(256):
            crc = i
            for j in range(8):
                if crc & 1:
                    crc = ((crc >> 1) & 0x7FFFFFFF) ^ poly
                else:
                    crc = ((crc >> 1) & 0x7FFFFFFF)
            table[i] = crc
        return table
    # Shared, computed once at class-definition time.
    crctable = _GenerateCRCTable()

    def _crc32(self, ch, crc):
        """Compute the CRC32 primitive on one byte."""
        return ((crc >> 8) & 0xffffff) ^ self.crctable[(crc ^ ord(ch)) & 0xff]

    def __init__(self, pwd):
        # The three rolling keys, seeded per PKWARE's traditional encryption
        # spec, then mixed with every password byte.
        self.key0 = 305419896
        self.key1 = 591751049
        self.key2 = 878082192
        for p in pwd:
            self._UpdateKeys(p)

    def _UpdateKeys(self, c):
        # Mix one plaintext byte into the three keys.
        self.key0 = self._crc32(c, self.key0)
        self.key1 = (self.key1 + (self.key0 & 255)) & 4294967295
        self.key1 = (self.key1 * 134775813 + 1) & 4294967295
        self.key2 = self._crc32(chr((self.key1 >> 24) & 255), self.key2)

    def __call__(self, c):
        """Decrypt a single character."""
        c = ord(c)
        k = self.key2 | 2
        c = c ^ (((k * (k^1)) >> 8) & 255)
        c = chr(c)
        self._UpdateKeys(c)
        return c
# Names for ZIP compression-method codes; used only to build more helpful
# NotImplementedError messages for methods this module cannot decompress.
compressor_names = {
    0: 'store',
    1: 'shrink',
    2: 'reduce',
    3: 'reduce',
    4: 'reduce',
    5: 'reduce',
    6: 'implode',
    7: 'tokenize',
    8: 'deflate',
    9: 'deflate64',
    10: 'implode',
    12: 'bzip2',
    14: 'lzma',
    18: 'terse',
    19: 'lz77',
    97: 'wavpack',
    98: 'ppmd',
}
class _SharedFile:
def __init__(self, file, pos, close):
self._file = file
self._pos = pos
self._close = close
def read(self, n=-1):
self._file.seek(self._pos)
data = self._file.read(n)
self._pos = self._file.tell()
return data
def close(self):
if self._file is not None:
fileobj = self._file
self._file = None
self._close(fileobj)
class ZipExtFile(io.BufferedIOBase):
    """File-like object for reading an archive member.
       Is returned by ZipFile.open().
    """

    # Max size supported by decompressor.
    MAX_N = 1 << 31 - 1

    # Read from compressed files in 4k blocks.
    MIN_READ_SIZE = 4096

    # Search for universal newlines or line chunks.
    PATTERN = re.compile(r'^(?P<chunk>[^\r\n]+)|(?P<newline>\n|\r\n?)')

    def __init__(self, fileobj, mode, zipinfo, decrypter=None,
            close_fileobj=False):
        # fileobj: raw (possibly shared) file positioned at the member data;
        # zipinfo: the member's ZipInfo; decrypter: optional _ZipDecrypter.
        self._fileobj = fileobj
        self._decrypter = decrypter
        self._close_fileobj = close_fileobj

        self._compress_type = zipinfo.compress_type
        self._compress_size = zipinfo.compress_size
        self._compress_left = zipinfo.compress_size

        if self._compress_type == ZIP_DEFLATED:
            # -15: raw deflate stream, no zlib header/trailer.
            self._decompressor = zlib.decompressobj(-15)
        elif self._compress_type != ZIP_STORED:
            descr = compressor_names.get(self._compress_type)
            if descr:
                raise NotImplementedError("compression type %d (%s)" % (self._compress_type, descr))
            else:
                raise NotImplementedError("compression type %d" % (self._compress_type,))
        self._unconsumed = ''

        self._readbuffer = ''
        self._offset = 0

        self._universal = 'U' in mode
        self.newlines = None

        # Adjust read size for encrypted files since the first 12 bytes
        # are for the encryption/password information.
        if self._decrypter is not None:
            self._compress_left -= 12

        self.mode = mode
        self.name = zipinfo.filename

        if hasattr(zipinfo, 'CRC'):
            self._expected_crc = zipinfo.CRC
            self._running_crc = crc32(b'') & 0xffffffff
        else:
            # No reference CRC available; checking is disabled.
            self._expected_crc = None

    def readline(self, limit=-1):
        """Read and return a line from the stream.

        If limit is specified, at most limit bytes will be read.
        """

        if not self._universal and limit < 0:
            # Shortcut common case - newline found in buffer.
            i = self._readbuffer.find('\n', self._offset) + 1
            if i > 0:
                line = self._readbuffer[self._offset: i]
                self._offset = i
                return line

        if not self._universal:
            return io.BufferedIOBase.readline(self, limit)

        line = ''
        while limit < 0 or len(line) < limit:
            readahead = self.peek(2)
            if readahead == '':
                return line

            #
            # Search for universal newlines or line chunks.
            #
            # The pattern returns either a line chunk or a newline, but not
            # both. Combined with peek(2), we are assured that the sequence
            # '\r\n' is always retrieved completely and never split into
            # separate newlines - '\r', '\n' due to coincidental readaheads.
            #
            match = self.PATTERN.search(readahead)
            newline = match.group('newline')
            if newline is not None:
                if self.newlines is None:
                    self.newlines = []
                if newline not in self.newlines:
                    self.newlines.append(newline)
                self._offset += len(newline)
                return line + '\n'

            chunk = match.group('chunk')
            if limit >= 0:
                chunk = chunk[: limit - len(line)]

            self._offset += len(chunk)
            line += chunk

        return line

    def peek(self, n=1):
        """Returns buffered bytes without advancing the position."""
        if n > len(self._readbuffer) - self._offset:
            chunk = self.read(n)
            if len(chunk) > self._offset:
                self._readbuffer = chunk + self._readbuffer[self._offset:]
                self._offset = 0
            else:
                self._offset -= len(chunk)

        # Return up to 512 bytes to reduce allocation overhead for tight loops.
        return self._readbuffer[self._offset: self._offset + 512]

    def readable(self):
        # Always a readable stream (required by io.BufferedIOBase).
        return True

    def read(self, n=-1):
        """Read and return up to n bytes.
        If the argument is omitted, None, or negative, data is read and returned until EOF is reached..
        """
        buf = ''
        if n is None:
            n = -1
        while True:
            if n < 0:
                data = self.read1(n)
            elif n > len(buf):
                data = self.read1(n - len(buf))
            else:
                return buf
            if len(data) == 0:
                return buf
            buf += data

    def _update_crc(self, newdata, eof):
        # Update the CRC using the given data.
        if self._expected_crc is None:
            # No need to compute the CRC if we don't have a reference value
            return
        self._running_crc = crc32(newdata, self._running_crc) & 0xffffffff
        # Check the CRC if we're at the end of the file
        if eof and self._running_crc != self._expected_crc:
            raise BadZipfile("Bad CRC-32 for file %r" % self.name)

    def read1(self, n):
        """Read up to n bytes with at most one read() system call."""

        # Simplify algorithm (branching) by transforming negative n to large n.
        if n < 0 or n is None:
            n = self.MAX_N

        # Bytes available in read buffer.
        len_readbuffer = len(self._readbuffer) - self._offset

        # Read from file.
        if self._compress_left > 0 and n > len_readbuffer + len(self._unconsumed):
            nbytes = n - len_readbuffer - len(self._unconsumed)
            nbytes = max(nbytes, self.MIN_READ_SIZE)
            nbytes = min(nbytes, self._compress_left)

            data = self._fileobj.read(nbytes)
            self._compress_left -= len(data)

            if data and self._decrypter is not None:
                data = ''.join(map(self._decrypter, data))

            if self._compress_type == ZIP_STORED:
                self._update_crc(data, eof=(self._compress_left==0))
                self._readbuffer = self._readbuffer[self._offset:] + data
                self._offset = 0
            else:
                # Prepare deflated bytes for decompression.
                self._unconsumed += data

        # Handle unconsumed data.
        if (len(self._unconsumed) > 0 and n > len_readbuffer and
            self._compress_type == ZIP_DEFLATED):
            data = self._decompressor.decompress(
                self._unconsumed,
                max(n - len_readbuffer, self.MIN_READ_SIZE)
            )

            self._unconsumed = self._decompressor.unconsumed_tail
            eof = len(self._unconsumed) == 0 and self._compress_left == 0
            if eof:
                data += self._decompressor.flush()

            self._update_crc(data, eof=eof)
            self._readbuffer = self._readbuffer[self._offset:] + data
            self._offset = 0

        # Read from buffer.
        data = self._readbuffer[self._offset: self._offset + n]
        self._offset += len(data)
        return data

    def close(self):
        # Close the underlying file only if we own it, then always run the
        # BufferedIOBase close machinery.
        try :
            if self._close_fileobj:
                self._fileobj.close()
        finally:
            super(ZipExtFile, self).close()
class ZipFile(object):
""" Class with methods to open, read, write, close, list zip files.
z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=False)
file: Either the path to the file, or a file-like object.
If it is a path, the file will be opened and closed by ZipFile.
mode: The mode can be either read "r", write "w" or append "a".
compression: ZIP_STORED (no compression) or ZIP_DEFLATED (requires zlib).
allowZip64: if True ZipFile will create files with ZIP64 extensions when
needed, otherwise it will raise an exception when this would
be necessary.
"""
fp = None # Set here since __del__ checks it
    def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=False):
        """Open the ZIP file with mode read "r", write "w" or append "a"."""
        if mode not in ("r", "w", "a"):
            raise RuntimeError('ZipFile() requires mode "r", "w", or "a"')

        if compression == ZIP_STORED:
            pass
        elif compression == ZIP_DEFLATED:
            if not zlib:
                raise RuntimeError,\
                      "Compression requires the (missing) zlib module"
        else:
            raise RuntimeError, "That compression method is not supported"

        self._allowZip64 = allowZip64
        self._didModify = False
        self.debug = 0  # Level of printing: 0 through 3
        self.NameToInfo = {}    # Find file info given name
        self.filelist = []      # List of ZipInfo instances for archive
        self.compression = compression  # Method of compression
        self.mode = mode
        self.pwd = None
        self._comment = ''

        # Check if we were passed a file-like object
        if isinstance(file, basestring):
            self._filePassed = 0
            self.filename = file
            # Fallback chain of open modes: if the preferred mode fails with
            # IOError, try the next weaker one before giving up.
            modeDict = {'r' : 'rb', 'w': 'w+b', 'a' : 'r+b',
                        'r+b': 'w+b', 'w+b': 'wb'}
            filemode = modeDict[mode]
            while True:
                try:
                    self.fp = io.open(file, filemode)
                except IOError:
                    if filemode in modeDict:
                        filemode = modeDict[filemode]
                        continue
                    raise
                break
        else:
            self._filePassed = 1
            self.fp = file
            self.filename = getattr(file, 'name', None)
        self._fileRefCnt = 1

        try:
            if mode == 'r':
                self._RealGetContents()
            elif mode == 'w':
                # set the modified flag so central directory gets written
                # even if no files are added to the archive
                self._didModify = True
                self.start_dir = 0
            elif mode == 'a':
                try:
                    # See if file is a zip file
                    self._RealGetContents()
                    # seek to start of directory and overwrite
                    self.fp.seek(self.start_dir, 0)
                except BadZipfile:
                    # file is not a zip file, just append
                    self.fp.seek(0, 2)

                    # set the modified flag so central directory gets written
                    # even if no files are added to the archive
                    self._didModify = True
                    self.start_dir = self.fp.tell()
            else:
                raise RuntimeError('Mode must be "r", "w" or "a"')
        except:
            # On any setup failure, release the file before re-raising.
            fp = self.fp
            self.fp = None
            self._fpclose(fp)
            raise
    def __enter__(self):
        # Context-manager entry: the archive object itself is the resource.
        return self
    def __exit__(self, type, value, traceback):
        # Context-manager exit: always close, regardless of exceptions.
        self.close()
    def _RealGetContents(self):
        """Read in the table of contents for the ZIP file.

        Locates the end-of-central-directory record, then walks the
        central directory, building a ZipInfo per member and filling
        self.filelist / self.NameToInfo.  Raises BadZipfile when the
        data is not a valid archive.
        """
        fp = self.fp
        try:
            endrec = _EndRecData(fp)
        except IOError:
            raise BadZipfile("File is not a zip file")
        if not endrec:
            raise BadZipfile, "File is not a zip file"
        if self.debug > 1:
            print endrec
        size_cd = endrec[_ECD_SIZE]             # bytes in central directory
        offset_cd = endrec[_ECD_OFFSET]         # offset of central directory
        self._comment = endrec[_ECD_COMMENT]    # archive comment
        # "concat" is zero, unless zip was concatenated to another file
        concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
        if endrec[_ECD_SIGNATURE] == stringEndArchive64:
            # If Zip64 extension structures are present, account for them
            concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)
        if self.debug > 2:
            inferred = concat + offset_cd
            print "given, inferred, offset", offset_cd, inferred, concat
        # self.start_dir:  Position of start of central directory
        self.start_dir = offset_cd + concat
        fp.seek(self.start_dir, 0)
        # Slurp the whole central directory and parse it from memory.
        data = fp.read(size_cd)
        fp = cStringIO.StringIO(data)
        total = 0
        while total < size_cd:
            centdir = fp.read(sizeCentralDir)
            if len(centdir) != sizeCentralDir:
                raise BadZipfile("Truncated central directory")
            centdir = struct.unpack(structCentralDir, centdir)
            if centdir[_CD_SIGNATURE] != stringCentralDir:
                raise BadZipfile("Bad magic number for central directory")
            if self.debug > 2:
                print centdir
            filename = fp.read(centdir[_CD_FILENAME_LENGTH])
            # Create ZipInfo instance to store file information
            x = ZipInfo(filename)
            x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])
            x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
            x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]
            (x.create_version, x.create_system, x.extract_version, x.reserved,
                x.flag_bits, x.compress_type, t, d,
                x.CRC, x.compress_size, x.file_size) = centdir[1:12]
            x.volume, x.internal_attr, x.external_attr = centdir[15:18]
            # Convert date/time code to (year, month, day, hour, min, sec)
            x._raw_time = t
            x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
                            t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )
            x._decodeExtra()
            # Header offsets in the central directory are relative to the
            # true archive start; shift by any leading prefix data.
            x.header_offset = x.header_offset + concat
            x.filename = x._decodeFilename()
            self.filelist.append(x)
            self.NameToInfo[x.filename] = x
            # update total bytes read from central directory
            total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH]
                     + centdir[_CD_EXTRA_FIELD_LENGTH]
                     + centdir[_CD_COMMENT_LENGTH])
        if self.debug > 2:
            print "total", total
def namelist(self):
"""Return a list of file names in the archive."""
l = []
for data in self.filelist:
l.append(data.filename)
return l
    def infolist(self):
        """Return a list of class ZipInfo instances for files in the
        archive."""
        # Returns the internal list itself (not a copy); callers should
        # treat it as read-only.
        return self.filelist
    def printdir(self):
        """Print a table of contents for the zip file."""
        # Columns: file name, modification time, uncompressed size.
        print "%-46s %19s %12s" % ("File Name", "Modified    ", "Size")
        for zinfo in self.filelist:
            date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6]
            print "%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size)
def testzip(self):
"""Read all the files and check the CRC."""
chunk_size = 2 ** 20
for zinfo in self.filelist:
try:
# Read by chunks, to avoid an OverflowError or a
# MemoryError with very large embedded files.
with self.open(zinfo.filename, "r") as f:
while f.read(chunk_size): # Check CRC-32
pass
except BadZipfile:
return zinfo.filename
def getinfo(self, name):
"""Return the instance of ZipInfo given 'name'."""
info = self.NameToInfo.get(name)
if info is None:
raise KeyError(
'There is no item named %r in the archive' % name)
return info
    def setpassword(self, pwd):
        """Set default password for encrypted files."""
        # Used by open()/read()/extract() when no explicit pwd is given.
        self.pwd = pwd
    @property
    def comment(self):
        """The comment text associated with the ZIP file."""
        # Stored verbatim; the corresponding setter truncates over-long
        # values to ZIP_MAX_COMMENT bytes.
        return self._comment
@comment.setter
def comment(self, comment):
# check for valid comment length
if len(comment) > ZIP_MAX_COMMENT:
import warnings
warnings.warn('Archive comment is too long; truncating to %d bytes'
% ZIP_MAX_COMMENT, stacklevel=2)
comment = comment[:ZIP_MAX_COMMENT]
self._comment = comment
self._didModify = True
def read(self, name, pwd=None):
"""Return file bytes (as a string) for name."""
return self.open(name, "r", pwd).read()
    def open(self, name, mode="r", pwd=None):
        """Return file-like object for 'name'.

        name may be a member name or a ZipInfo; pwd overrides the archive
        default password for encrypted members.  The returned object
        shares the archive's underlying file handle via a reference count.
        """
        if mode not in ("r", "U", "rU"):
            raise RuntimeError, 'open() requires mode "r", "U", or "rU"'
        if not self.fp:
            raise RuntimeError, \
                  "Attempt to read ZIP archive that was already closed"
        # Make sure we have an info object
        if isinstance(name, ZipInfo):
            # 'name' is already an info object
            zinfo = name
        else:
            # Get info object for name
            zinfo = self.getinfo(name)
        # Bump the shared-handle refcount; _SharedFile hands it back to
        # self._fpclose when the returned member file is closed.
        self._fileRefCnt += 1
        zef_file = _SharedFile(self.fp, zinfo.header_offset, self._fpclose)
        try:
            # Skip the file header:
            fheader = zef_file.read(sizeFileHeader)
            if len(fheader) != sizeFileHeader:
                raise BadZipfile("Truncated file header")
            fheader = struct.unpack(structFileHeader, fheader)
            if fheader[_FH_SIGNATURE] != stringFileHeader:
                raise BadZipfile("Bad magic number for file header")
            fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
            if fheader[_FH_EXTRA_FIELD_LENGTH]:
                zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])
            # Cross-check the local header's name against the central
            # directory entry; a mismatch indicates a corrupt archive.
            if fname != zinfo.orig_filename:
                raise BadZipfile, \
                      'File name in directory "%s" and header "%s" differ.' % (
                          zinfo.orig_filename, fname)
            # check for encrypted flag & handle password
            is_encrypted = zinfo.flag_bits & 0x1
            zd = None
            if is_encrypted:
                if not pwd:
                    pwd = self.pwd
                if not pwd:
                    raise RuntimeError, "File %s is encrypted, " \
                          "password required for extraction" % name
                zd = _ZipDecrypter(pwd)
                # The first 12 bytes in the cypher stream is an encryption header
                #  used to strengthen the algorithm. The first 11 bytes are
                #  completely random, while the 12th contains the MSB of the CRC,
                #  or the MSB of the file time depending on the header type
                #  and is used to check the correctness of the password.
                bytes = zef_file.read(12)
                h = map(zd, bytes[0:12])
                if zinfo.flag_bits & 0x8:
                    # compare against the file type from extended local headers
                    check_byte = (zinfo._raw_time >> 8) & 0xff
                else:
                    # compare against the CRC otherwise
                    check_byte = (zinfo.CRC >> 24) & 0xff
                if ord(h[11]) != check_byte:
                    raise RuntimeError("Bad password for file", name)
            return ZipExtFile(zef_file, mode, zinfo, zd, True)
        except:
            # On any failure, drop our reference to the shared handle so
            # the archive's refcount stays balanced.
            zef_file.close()
            raise
def extract(self, member, path=None, pwd=None):
"""Extract a member from the archive to the current working directory,
using its full name. Its file information is extracted as accurately
as possible. `member' may be a filename or a ZipInfo object. You can
specify a different directory using `path'.
"""
if not isinstance(member, ZipInfo):
member = self.getinfo(member)
if path is None:
path = os.getcwd()
return self._extract_member(member, path, pwd)
def extractall(self, path=None, members=None, pwd=None):
"""Extract all members from the archive to the current working
directory. `path' specifies a different directory to extract to.
`members' is optional and must be a subset of the list returned
by namelist().
"""
if members is None:
members = self.namelist()
for zipinfo in members:
self.extract(zipinfo, path, pwd)
    def _extract_member(self, member, targetpath, pwd):
        """Extract the ZipInfo object 'member' to a physical
           file on the path targetpath.

        The member's archive name is sanitized (drive letters, absolute
        and ".." components, Windows-illegal characters removed) before
        being joined onto targetpath.  Returns the final path written.
        """
        # build the destination pathname, replacing
        # forward slashes to platform specific separators.
        arcname = member.filename.replace('/', os.path.sep)
        if os.path.altsep:
            arcname = arcname.replace(os.path.altsep, os.path.sep)
        # interpret absolute pathname as relative, remove drive letter or
        # UNC path, redundant separators, "." and ".." components.
        arcname = os.path.splitdrive(arcname)[1]
        arcname = os.path.sep.join(x for x in arcname.split(os.path.sep)
                    if x not in ('', os.path.curdir, os.path.pardir))
        if os.path.sep == '\\':
            # filter illegal characters on Windows
            illegal = ':<>|"?*'
            if isinstance(arcname, unicode):
                table = {ord(c): ord('_') for c in illegal}
            else:
                table = string.maketrans(illegal, '_' * len(illegal))
            arcname = arcname.translate(table)
            # remove trailing dots
            arcname = (x.rstrip('.') for x in arcname.split(os.path.sep))
            arcname = os.path.sep.join(x for x in arcname if x)
        targetpath = os.path.join(targetpath, arcname)
        targetpath = os.path.normpath(targetpath)
        # Create all upper directories if necessary.
        upperdirs = os.path.dirname(targetpath)
        if upperdirs and not os.path.exists(upperdirs):
            os.makedirs(upperdirs)
        if member.filename[-1] == '/':
            # Directory entry: just ensure the directory exists.
            if not os.path.isdir(targetpath):
                os.mkdir(targetpath)
            return targetpath
        # Regular member: stream the decompressed bytes to disk.
        with self.open(member, pwd=pwd) as source, \
             file(targetpath, "wb") as target:
            shutil.copyfileobj(source, target)
        return targetpath
    def _writecheck(self, zinfo):
        """Check for errors before writing a file to the archive.

        Warns on duplicate names and raises when the archive is not
        writable, the compression method is unavailable/unsupported, or
        the entry would need ZIP64 extensions that are disabled.
        """
        if zinfo.filename in self.NameToInfo:
            import warnings
            warnings.warn('Duplicate name: %r' % zinfo.filename, stacklevel=3)
        if self.mode not in ("w", "a"):
            raise RuntimeError, 'write() requires mode "w" or "a"'
        if not self.fp:
            raise RuntimeError, \
                  "Attempt to write ZIP archive that was already closed"
        if zinfo.compress_type == ZIP_DEFLATED and not zlib:
            raise RuntimeError, \
                  "Compression requires the (missing) zlib module"
        if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED):
            raise RuntimeError, \
                  "That compression method is not supported"
        if not self._allowZip64:
            # Without ZIP64 enabled, refuse anything that cannot be
            # represented in the classic 16/32-bit record fields.
            requires_zip64 = None
            if len(self.filelist) >= ZIP_FILECOUNT_LIMIT:
                requires_zip64 = "Files count"
            elif zinfo.file_size > ZIP64_LIMIT:
                requires_zip64 = "Filesize"
            elif zinfo.header_offset > ZIP64_LIMIT:
                requires_zip64 = "Zipfile size"
            if requires_zip64:
                raise LargeZipFile(requires_zip64 +
                                   " would require ZIP64 extensions")
    def write(self, filename, arcname=None, compress_type=None):
        """Put the bytes from filename into the archive under the name
        arcname.

        arcname defaults to filename (normalized, drive letter and leading
        separators stripped).  compress_type overrides the archive default
        for this member only.  Directories are stored as zero-length
        entries with a trailing '/'.
        """
        if not self.fp:
            raise RuntimeError(
                  "Attempt to write to ZIP archive that was already closed")
        st = os.stat(filename)
        isdir = stat.S_ISDIR(st.st_mode)
        mtime = time.localtime(st.st_mtime)
        date_time = mtime[0:6]
        # Create ZipInfo instance to store file information
        if arcname is None:
            arcname = filename
        arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
        while arcname[0] in (os.sep, os.altsep):
            arcname = arcname[1:]
        if isdir:
            arcname += '/'
        zinfo = ZipInfo(arcname, date_time)
        zinfo.external_attr = (st[0] & 0xFFFF) << 16L      # Unix attributes
        if compress_type is None:
            zinfo.compress_type = self.compression
        else:
            zinfo.compress_type = compress_type
        zinfo.file_size = st.st_size
        zinfo.flag_bits = 0x00
        # Append where the central directory used to start; close() will
        # rewrite the directory after the new member data.
        self.fp.seek(self.start_dir, 0)
        zinfo.header_offset = self.fp.tell()    # Start of header bytes
        self._writecheck(zinfo)
        self._didModify = True
        if isdir:
            # Directories carry no data: write just the header and return.
            zinfo.file_size = 0
            zinfo.compress_size = 0
            zinfo.CRC = 0
            zinfo.external_attr |= 0x10  # MS-DOS directory flag
            self.filelist.append(zinfo)
            self.NameToInfo[zinfo.filename] = zinfo
            self.fp.write(zinfo.FileHeader(False))
            self.start_dir = self.fp.tell()
            return
        with open(filename, "rb") as fp:
            # Must overwrite CRC and sizes with correct data later
            zinfo.CRC = CRC = 0
            zinfo.compress_size = compress_size = 0
            # Compressed size can be larger than uncompressed size
            zip64 = self._allowZip64 and \
                    zinfo.file_size * 1.05 > ZIP64_LIMIT
            self.fp.write(zinfo.FileHeader(zip64))
            if zinfo.compress_type == ZIP_DEFLATED:
                cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
                     zlib.DEFLATED, -15)
            else:
                cmpr = None
            file_size = 0
            # Stream the source in 8 KiB chunks, updating CRC and sizes.
            while 1:
                buf = fp.read(1024 * 8)
                if not buf:
                    break
                file_size = file_size + len(buf)
                CRC = crc32(buf, CRC) & 0xffffffff
                if cmpr:
                    buf = cmpr.compress(buf)
                    compress_size = compress_size + len(buf)
                self.fp.write(buf)
        if cmpr:
            buf = cmpr.flush()
            compress_size = compress_size + len(buf)
            self.fp.write(buf)
            zinfo.compress_size = compress_size
        else:
            zinfo.compress_size = file_size
        zinfo.CRC = CRC
        zinfo.file_size = file_size
        if not zip64 and self._allowZip64:
            if file_size > ZIP64_LIMIT:
                raise RuntimeError('File size has increased during compressing')
            if compress_size > ZIP64_LIMIT:
                raise RuntimeError('Compressed size larger than uncompressed size')
        # Seek backwards and write file header (which will now include
        # correct CRC and file sizes)
        self.start_dir = self.fp.tell()       # Preserve current position in file
        self.fp.seek(zinfo.header_offset, 0)
        self.fp.write(zinfo.FileHeader(zip64))
        self.fp.seek(self.start_dir, 0)
        self.filelist.append(zinfo)
        self.NameToInfo[zinfo.filename] = zinfo
    def writestr(self, zinfo_or_arcname, bytes, compress_type=None):
        """Write a file into the archive.  The contents is the string
        'bytes'.  'zinfo_or_arcname' is either a ZipInfo instance or
        the name of the file in the archive."""
        if not isinstance(zinfo_or_arcname, ZipInfo):
            # Plain name: synthesize a ZipInfo timestamped "now", with
            # default permission bits depending on file vs directory.
            zinfo = ZipInfo(filename=zinfo_or_arcname,
                            date_time=time.localtime(time.time())[:6])
            zinfo.compress_type = self.compression
            if zinfo.filename[-1] == '/':
                zinfo.external_attr = 0o40775 << 16   # drwxrwxr-x
                zinfo.external_attr |= 0x10           # MS-DOS directory flag
            else:
                zinfo.external_attr = 0o600 << 16     # ?rw-------
        else:
            zinfo = zinfo_or_arcname
        if not self.fp:
            raise RuntimeError(
                  "Attempt to write to ZIP archive that was already closed")
        zinfo.file_size = len(bytes)            # Uncompressed size
        # Append where the central directory used to start; close()
        # rewrites the directory after the new member data.
        self.fp.seek(self.start_dir, 0)
        zinfo.header_offset = self.fp.tell()    # Start of header bytes
        self._writecheck(zinfo)
        self._didModify = True
        zinfo.CRC = crc32(bytes) & 0xffffffff       # CRC-32 checksum
        if zinfo.compress_type == ZIP_DEFLATED:
            co = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
                 zlib.DEFLATED, -15)
            bytes = co.compress(bytes) + co.flush()
            zinfo.compress_size = len(bytes)    # Compressed size
        else:
            zinfo.compress_size = zinfo.file_size
        zip64 = zinfo.file_size > ZIP64_LIMIT or \
                zinfo.compress_size > ZIP64_LIMIT
        if zip64 and not self._allowZip64:
            raise LargeZipFile("Filesize would require ZIP64 extensions")
        self.fp.write(zinfo.FileHeader(zip64))
        self.fp.write(bytes)
        if zinfo.flag_bits & 0x08:
            # Write CRC and file sizes after the file data
            fmt = '<LQQ' if zip64 else '<LLL'
            self.fp.write(struct.pack(fmt, zinfo.CRC, zinfo.compress_size,
                  zinfo.file_size))
        self.fp.flush()
        self.start_dir = self.fp.tell()
        self.filelist.append(zinfo)
        self.NameToInfo[zinfo.filename] = zinfo
    def __del__(self):
        """Call the "close()" method in case the user forgot."""
        # Destructor-time close is best effort only; prefer an explicit
        # close() or the context-manager protocol.
        self.close()
    def close(self):
        """Close the file, and for mode "w" and "a" write the ending
        records.

        Writes one central-directory entry per member, ZIP64 records when
        counts/sizes/offsets overflow the classic fields, then the
        end-of-central-directory record and archive comment.  The file
        handle is released even if writing fails.
        """
        if self.fp is None:
            return
        try:
            if self.mode in ("w", "a") and self._didModify: # write ending records
                self.fp.seek(self.start_dir, 0)
                for zinfo in self.filelist:         # write central directory
                    dt = zinfo.date_time
                    # Pack date/time into the 16-bit MS-DOS fields.
                    dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
                    dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
                    extra = []
                    if zinfo.file_size > ZIP64_LIMIT \
                            or zinfo.compress_size > ZIP64_LIMIT:
                        # Real sizes move into the ZIP64 extra field; the
                        # fixed fields are set to the 0xffffffff sentinel.
                        extra.append(zinfo.file_size)
                        extra.append(zinfo.compress_size)
                        file_size = 0xffffffff
                        compress_size = 0xffffffff
                    else:
                        file_size = zinfo.file_size
                        compress_size = zinfo.compress_size
                    if zinfo.header_offset > ZIP64_LIMIT:
                        extra.append(zinfo.header_offset)
                        header_offset = 0xffffffffL
                    else:
                        header_offset = zinfo.header_offset
                    extra_data = zinfo.extra
                    if extra:
                        # Append a ZIP64 field to the extra's
                        extra_data = struct.pack(
                                '<HH' + 'Q'*len(extra),
                                1, 8*len(extra), *extra) + extra_data
                        # ZIP64 requires at least version 4.5 to extract.
                        extract_version = max(45, zinfo.extract_version)
                        create_version = max(45, zinfo.create_version)
                    else:
                        extract_version = zinfo.extract_version
                        create_version = zinfo.create_version
                    try:
                        filename, flag_bits = zinfo._encodeFilenameFlags()
                        centdir = struct.pack(structCentralDir,
                            stringCentralDir, create_version,
                            zinfo.create_system, extract_version, zinfo.reserved,
                            flag_bits, zinfo.compress_type, dostime, dosdate,
                            zinfo.CRC, compress_size, file_size,
                            len(filename), len(extra_data), len(zinfo.comment),
                            0, zinfo.internal_attr, zinfo.external_attr,
                            header_offset)
                    except DeprecationWarning:
                        # Dump the offending values to stderr for debugging
                        # before re-raising the warning-as-error.
                        print >>sys.stderr, (structCentralDir,
                            stringCentralDir, create_version,
                            zinfo.create_system, extract_version, zinfo.reserved,
                            zinfo.flag_bits, zinfo.compress_type, dostime, dosdate,
                            zinfo.CRC, compress_size, file_size,
                            len(zinfo.filename), len(extra_data), len(zinfo.comment),
                            0, zinfo.internal_attr, zinfo.external_attr,
                            header_offset)
                        raise
                    self.fp.write(centdir)
                    self.fp.write(filename)
                    self.fp.write(extra_data)
                    self.fp.write(zinfo.comment)
                pos2 = self.fp.tell()
                # Write end-of-zip-archive record
                centDirCount = len(self.filelist)
                centDirSize = pos2 - self.start_dir
                centDirOffset = self.start_dir
                requires_zip64 = None
                if centDirCount > ZIP_FILECOUNT_LIMIT:
                    requires_zip64 = "Files count"
                elif centDirOffset > ZIP64_LIMIT:
                    requires_zip64 = "Central directory offset"
                elif centDirSize > ZIP64_LIMIT:
                    requires_zip64 = "Central directory size"
                if requires_zip64:
                    # Need to write the ZIP64 end-of-archive records
                    if not self._allowZip64:
                        raise LargeZipFile(requires_zip64 +
                                           " would require ZIP64 extensions")
                    zip64endrec = struct.pack(
                            structEndArchive64, stringEndArchive64,
                            44, 45, 45, 0, 0, centDirCount, centDirCount,
                            centDirSize, centDirOffset)
                    self.fp.write(zip64endrec)
                    zip64locrec = struct.pack(
                            structEndArchive64Locator,
                            stringEndArchive64Locator, 0, pos2, 1)
                    self.fp.write(zip64locrec)
                    # Clamp the classic record's fields to their sentinels.
                    centDirCount = min(centDirCount, 0xFFFF)
                    centDirSize = min(centDirSize, 0xFFFFFFFF)
                    centDirOffset = min(centDirOffset, 0xFFFFFFFF)
                endrec = struct.pack(structEndArchive, stringEndArchive,
                                     0, 0, centDirCount, centDirCount,
                                     centDirSize, centDirOffset, len(self._comment))
                self.fp.write(endrec)
                self.fp.write(self._comment)
                self.fp.flush()
        finally:
            # Always release our reference to the underlying file handle.
            fp = self.fp
            self.fp = None
            self._fpclose(fp)
def _fpclose(self, fp):
assert self._fileRefCnt > 0
self._fileRefCnt -= 1
if not self._fileRefCnt and not self._filePassed:
fp.close()
class PyZipFile(ZipFile):
    """Class to create ZIP archives with Python library files and packages."""

    def writepy(self, pathname, basename = ""):
        """Add all files from "pathname" to the ZIP archive.

        If pathname is a package directory, search the directory and
        all package subdirectories recursively for all *.py and enter
        the modules into the archive.  If pathname is a plain
        directory, listdir *.py and enter all modules.  Else, pathname
        must be a Python *.py file and the module will be put into the
        archive.  Added modules are always module.pyo or module.pyc.
        This method will compile the module.py into module.pyc if
        necessary.

        basename is the archive-internal package prefix used during
        recursion; callers normally leave it empty.
        """
        dir, name = os.path.split(pathname)
        if os.path.isdir(pathname):
            initname = os.path.join(pathname, "__init__.py")
            if os.path.isfile(initname):
                # This is a package directory, add it
                if basename:
                    basename = "%s/%s" % (basename, name)
                else:
                    basename = name
                if self.debug:
                    print "Adding package in", pathname, "as", basename
                fname, arcname = self._get_codename(initname[0:-3], basename)
                if self.debug:
                    print "Adding", arcname
                self.write(fname, arcname)
                dirlist = os.listdir(pathname)
                # __init__.py was already written above; skip it below.
                dirlist.remove("__init__.py")
                # Add all *.py files and package subdirectories
                for filename in dirlist:
                    path = os.path.join(pathname, filename)
                    root, ext = os.path.splitext(filename)
                    if os.path.isdir(path):
                        if os.path.isfile(os.path.join(path, "__init__.py")):
                            # This is a package directory, add it
                            self.writepy(path, basename)  # Recursive call
                    elif ext == ".py":
                        fname, arcname = self._get_codename(path[0:-3],
                                         basename)
                        if self.debug:
                            print "Adding", arcname
                        self.write(fname, arcname)
            else:
                # This is NOT a package directory, add its files at top level
                if self.debug:
                    print "Adding files from directory", pathname
                for filename in os.listdir(pathname):
                    path = os.path.join(pathname, filename)
                    root, ext = os.path.splitext(filename)
                    if ext == ".py":
                        fname, arcname = self._get_codename(path[0:-3],
                                         basename)
                        if self.debug:
                            print "Adding", arcname
                        self.write(fname, arcname)
        else:
            if pathname[-3:] != ".py":
                raise RuntimeError, \
                      'Files added with writepy() must end with ".py"'
            fname, arcname = self._get_codename(pathname[0:-3], basename)
            if self.debug:
                print "Adding file", arcname
            self.write(fname, arcname)

    def _get_codename(self, pathname, basename):
        """Return (filename, archivename) for the path.

        Given a module name path, return the correct file path and
        archive name, compiling if necessary.  For example, given
        /python/lib/string, return (/python/lib/string.pyc, string).
        """
        file_py = pathname + ".py"
        file_pyc = pathname + ".pyc"
        file_pyo = pathname + ".pyo"
        # Prefer an up-to-date .pyo, then an up-to-date .pyc; otherwise
        # (re)compile the .py source into a fresh .pyc.
        if os.path.isfile(file_pyo) and \
                            os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime:
            fname = file_pyo    # Use .pyo file
        elif not os.path.isfile(file_pyc) or \
             os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime:
            import py_compile
            if self.debug:
                print "Compiling", file_py
            try:
                py_compile.compile(file_py, file_pyc, None, True)
            except py_compile.PyCompileError,err:
                # Report the compile error but still archive the (stale
                # or missing) .pyc path, matching historical behavior.
                print err.msg
            fname = file_pyc
        else:
            fname = file_pyc
        archivename = os.path.split(fname)[1]
        if basename:
            archivename = "%s/%s" % (basename, archivename)
        return (fname, archivename)
def main(args = None):
    """Simple command-line interface: list, test, extract or create a zip.

    args defaults to sys.argv[1:].  Exits with status 1 on usage errors.
    """
    import textwrap
    USAGE=textwrap.dedent("""\
        Usage:
            zipfile.py -l zipfile.zip        # Show listing of a zipfile
            zipfile.py -t zipfile.zip        # Test if a zipfile is valid
            zipfile.py -e zipfile.zip target # Extract zipfile into target dir
            zipfile.py -c zipfile.zip src ... # Create zipfile from sources
        """)
    if args is None:
        args = sys.argv[1:]
    if not args or args[0] not in ('-l', '-c', '-e', '-t'):
        print USAGE
        sys.exit(1)
    if args[0] == '-l':
        # List the archive's table of contents.
        if len(args) != 2:
            print USAGE
            sys.exit(1)
        with ZipFile(args[1], 'r') as zf:
            zf.printdir()
    elif args[0] == '-t':
        # Verify every member's CRC; report the first corrupt one.
        if len(args) != 2:
            print USAGE
            sys.exit(1)
        with ZipFile(args[1], 'r') as zf:
            badfile = zf.testzip()
        if badfile:
            print("The following enclosed file is corrupted: {!r}".format(badfile))
        print "Done testing"
    elif args[0] == '-e':
        # Extract all members into the given target directory.
        if len(args) != 3:
            print USAGE
            sys.exit(1)
        with ZipFile(args[1], 'r') as zf:
            zf.extractall(args[2])
    elif args[0] == '-c':
        # Create an archive from the listed files/directories.
        if len(args) < 3:
            print USAGE
            sys.exit(1)
        def addToZip(zf, path, zippath):
            # Recursively add files; directories get an entry of their own.
            if os.path.isfile(path):
                zf.write(path, zippath, ZIP_DEFLATED)
            elif os.path.isdir(path):
                if zippath:
                    zf.write(path, zippath)
                for nm in os.listdir(path):
                    addToZip(zf,
                            os.path.join(path, nm), os.path.join(zippath, nm))
            # else: ignore
        with ZipFile(args[1], 'w', allowZip64=True) as zf:
            for path in args[2:]:
                zippath = os.path.basename(path)
                if not zippath:
                    zippath = os.path.basename(os.path.dirname(path))
                if zippath in ('', os.curdir, os.pardir):
                    zippath = ''
                addToZip(zf, path, zippath)
# Allow "python zipfile.py ..." to be used as a simple command-line tool.
if __name__ == "__main__":
    main()
| [
"scorpion@yanghaorandeiMac.local"
] | scorpion@yanghaorandeiMac.local |
32094bfec7d1f73bf53f88c454aa658b21ff5eb7 | 44d70e38ffc6dad4021e8e72363893a290a64b7a | /mwatch/main.py | 94b6b983d7c32dba5025d844f0f747d292a71a21 | [
"Apache-2.0"
] | permissive | rick446/mongo-watch | 55353363174bfea9f22abf23060099f7bf1401a7 | a218a1c397f89dbfc90100fa650fa4b51958e281 | refs/heads/master | 2021-01-12T02:52:08.717843 | 2018-04-23T14:26:40 | 2018-04-23T14:26:40 | 78,122,369 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,690 | py | import logging
from itertools import chain
from pprint import pformat
import pymongo
try:
from mongoquery import Query, QueryError
except ImportError:
Query = QueryError = None
log = logging.getLogger(__name__)
class Watcher(object):
    """Tail the replica-set oplog and dispatch entries to registered watches."""

    def __init__(self, cli, await_=False):
        """cli: pymongo client connected to a replica-set member.

        await_: if true, use a TAILABLE_AWAIT cursor so the server blocks
        briefly waiting for new entries instead of returning immediately.
        """
        self.oplog = cli.local.oplog.rs
        self.watches = {}  # dict[id] = watch (so we can remove watches later)
        if await_:
            self._cursor_type = pymongo.CursorType.TAILABLE_AWAIT
        else:
            self._cursor_type = pymongo.CursorType.TAILABLE
        # Start tailing strictly after the newest existing oplog entry.
        self._last_ts = self._get_last_ts()

    def watch_query(self, collection, qspec=None, check_inserts=False):
        """Register a stateful insert/update/delete watch for a query."""
        res = QueryWatch(self, collection, qspec, check_inserts)
        self.watches[id(res)] = res
        return res

    def watch_inserts(self, collection, qspec=None):
        """Register a watch for inserts, optionally filtered by qspec."""
        res = InsertWatch(self, collection, qspec)
        self.watches[id(res)] = res
        return res

    def watch_updates(self, collection, ids=None):
        """Register a watch for updates, optionally limited to ids."""
        res = UpdateWatch(self, collection, ids)
        self.watches[id(res)] = res
        return res

    def watch_deletes(self, collection, ids=None):
        """Register a watch for deletes, optionally limited to ids."""
        res = DeleteWatch(self, collection, ids)
        self.watches[id(res)] = res
        return res

    def _get_last_ts(self):
        """Return the timestamp of the newest entry currently in the oplog."""
        final_entry = self.oplog.find().sort('$natural', -1).limit(1).next()
        log.debug('final_entry: %s', final_entry)
        return final_entry['ts']

    def _get_cursor(self):
        """Build a tailable oplog cursor covering all registered watches."""
        branches = list(chain(*[
            w.oplog_branches() for w in self.watches.values()]))
        assert branches, 'Nothing to watch'
        if len(branches) == 1:
            spec = branches[0]
        else:
            spec = {'$or': branches}
        spec['ts'] = {'$gt': self._last_ts}
        log.debug('Query oplog with\n%s', pformat(spec))
        return self.oplog.find(
            spec,
            cursor_type=self._cursor_type,
            oplog_replay=True)

    def __iter__(self):
        """Yield matching oplog entries until a watch requests a restart.

        Every stateful watch (one providing process_entry) sees every
        entry; when any of them reports that the oplog query must be
        rebuilt (e.g. a new _id joined a watched set) the generator stops
        so the caller can iterate again with a fresh cursor.
        """
        curs = self._get_cursor()
        stateful_watches = [
            w for w in self.watches.values()
            if hasattr(w, 'process_entry')]
        needs_restart = False
        for doc in curs:
            for w in stateful_watches:
                # Bug fix: call process_entry first.  The original
                # `needs_restart or w.process_entry(doc)` short-circuited
                # once a restart was pending, so remaining stateful
                # watches silently skipped entries and their id sets
                # went stale.
                needs_restart = w.process_entry(doc) or needs_restart
            self._last_ts = doc['ts']
            yield doc
            if needs_restart:
                break
class Watch:
    """Base class for a single registered watch.

    Keeps a back-reference to the owning Watcher so the watch can
    deregister itself.
    """

    def __init__(self, watcher):
        self.watcher = watcher

    def unwatch(self):
        # Deregister from the owning Watcher; no-op if already removed.
        self.watcher.watches.pop(id(self), None)
class QueryWatch(Watch):
    """Insert/update/delete watch for a query (stateful).

    Tracks the set of _ids currently matching qspec so that update and
    delete branches can be limited to those documents; inserts that join
    the result set trigger an oplog-query restart.
    """

    def __init__(
            self, watcher, collection, qspec=None, check_inserts=False):
        super().__init__(watcher)
        self.collection = collection
        self.qspec = qspec
        self.check_inserts = check_inserts
        if check_inserts:
            # Client-side matching of inserted docs needs mongoquery.
            assert Query is not None, 'Cannot check inserts without mongoquery'
            self._mquery = Query(qspec)
        self._ns = '{}.{}'.format(
            collection.database.name,
            collection.name)
        if qspec:
            # Seed the tracked id set from the current query results.
            self._ids = set(
                doc['_id'] for doc in self.collection.find(qspec, {'_id': 1}))
        else:
            self._ids = None

    def __repr__(self):
        return '<QueryWatch {} {}>'.format(self._ns, self.qspec)

    def oplog_branches(self):
        """Yield oplog query branches for this watch."""
        if self.qspec is None:
            # No filter: watch every op in the namespace.
            yield {'ns': self._ns, 'op': {'$in': ['i', 'u', 'd']}}
            return
        ins_watch = InsertWatch(self.watcher, self.collection, self.qspec)
        if self._ids:
            # Limit update/delete branches to ids currently in the set.
            watches = [
                ins_watch,
                UpdateWatch(self.watcher, self.collection, list(self._ids)),
                DeleteWatch(self.watcher, self.collection, list(self._ids))]
        else:
            watches = [ins_watch]
        for w in watches:
            yield from w.oplog_branches()

    def process_entry(self, entry):
        """Return true if the oplog query needs to be restarted."""
        if not self.qspec:
            # no need to track IDs
            return False
        if entry['ns'] != self._ns:
            # not my collection
            return False
        if entry['op'] == 'i':
            if self.check_inserts and not self._mquery.match(entry['o']):
                # I don't watch that doc
                return False
            # New matching doc: track it and rebuild the oplog query so
            # its updates/deletes are covered too.
            self._ids.add(entry['o']['_id'])
            return True
        elif entry['op'] == 'd':
            # Deleted doc leaves the tracked set; no restart requested.
            self._ids.discard(entry['o']['_id'])
        else:
            return False
class InsertWatch(Watch):
    """Watch inserts into one collection, optionally filtered by a query
    spec matched against the inserted document."""

    def __init__(self, watcher, collection, qspec=None):
        super().__init__(watcher)
        self._ns = '{}.{}'.format(
            collection.database.name,
            collection.name)
        self.qspec = qspec

    def __repr__(self):
        return '<InsertWatch {} {}>'.format(self._ns, self.qspec)

    def oplog_branches(self):
        """Yield the oplog query branch matching the watched inserts.

        Bug fix: the field-prefixed spec is only built when a qspec was
        supplied.  Previously `self.qspec.items()` was evaluated before
        the truthiness check, so a watch created with qspec=None (the
        watch_inserts default) raised AttributeError on first iteration.
        """
        if self.qspec:
            # Inserted documents appear under the 'o' field of the entry,
            # so prefix every qspec key accordingly.
            prefixed = {
                'o.{}'.format(k): v
                for k, v in self.qspec.items()}
            yield {'op': 'i', 'ns': self._ns, **prefixed}
        else:
            yield {'op': 'i', 'ns': self._ns}
class UpdateWatch(Watch):
    """Watch update operations on one collection, optionally limited to a
    set of document ids."""

    def __init__(self, watcher, collection, ids=None):
        super().__init__(watcher)
        self._ns = '{}.{}'.format(
            collection.database.name,
            collection.name)
        self._ids = ids

    def __repr__(self):
        return '<UpdateWatch {} {}>'.format(self._ns, self._ids)

    def oplog_branches(self):
        """Yield the oplog query branch for the watched updates.

        Fix: a single-id watch previously yielded both the equality
        branch and a redundant one-element $in branch (the `> 0` test
        also matched the length-1 case); exactly one branch is now
        produced per watch.
        """
        if self._ids is None:
            # No id filter: watch every update in the namespace.
            yield {'op': 'u', 'ns': self._ns}
            return
        ids = list(self._ids)
        if len(ids) == 1:
            # Direct equality is simpler than a one-element $in.
            yield {'op': 'u', 'ns': self._ns, 'o2._id': ids[0]}
        elif ids:
            yield {'op': 'u', 'ns': self._ns, 'o2._id': {'$in': ids}}

    def unwatch(self, id):
        # NOTE(review): shadows Watch.unwatch with a different signature;
        # removes one id from the watched set rather than deregistering
        # the watch itself.
        self._ids.remove(id)
class DeleteWatch(Watch):
    """Watch delete operations on one collection, optionally limited to a
    set of document ids."""

    def __init__(self, watcher, collection, ids=None):
        super().__init__(watcher)
        self._ns = '{}.{}'.format(
            collection.database.name,
            collection.name)
        self._ids = ids

    def __repr__(self):
        return '<DeleteWatch {} {}>'.format(self._ns, self._ids)

    def oplog_branches(self):
        """Yield the oplog query branch for the watched deletes.

        Fix: a single-id watch previously yielded both the equality
        branch and a redundant one-element $in branch (the `> 0` test
        also matched the length-1 case); exactly one branch is now
        produced per watch.
        """
        if self._ids is None:
            # No id filter: watch every delete in the namespace.
            yield {'op': 'd', 'ns': self._ns}
            return
        ids = list(self._ids)
        if len(ids) == 1:
            # Direct equality is simpler than a one-element $in.
            yield {'op': 'd', 'ns': self._ns, 'o._id': ids[0]}
        elif ids:
            yield {'op': 'd', 'ns': self._ns, 'o._id': {'$in': ids}}

    def unwatch(self, id):
        # NOTE(review): shadows Watch.unwatch with a different signature;
        # removes one id from the watched set rather than deregistering
        # the watch itself.
        self._ids.remove(id)
| [
"rick@arborian.com"
] | rick@arborian.com |
0c8e2bf1927d6dfb77056648dbdb1d8d9b79192b | 6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386 | /google/pubsub/v1/pubsub-v1-py/google/pubsub_v1/__init__.py | e4f49d6e9d87d97f720f363967d71bd48ff32afd | [
"Apache-2.0"
] | permissive | oltoco/googleapis-gen | bf40cfad61b4217aca07068bd4922a86e3bbd2d5 | 00ca50bdde80906d6f62314ef4f7630b8cdb6e15 | refs/heads/master | 2023-07-17T22:11:47.848185 | 2021-08-29T20:39:47 | 2021-08-29T20:39:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,857 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .services.publisher import PublisherClient
from .services.publisher import PublisherAsyncClient
from .services.schema_service import SchemaServiceClient
from .services.schema_service import SchemaServiceAsyncClient
from .services.subscriber import SubscriberClient
from .services.subscriber import SubscriberAsyncClient
from .types.pubsub import AcknowledgeRequest
from .types.pubsub import CreateSnapshotRequest
from .types.pubsub import DeadLetterPolicy
from .types.pubsub import DeleteSnapshotRequest
from .types.pubsub import DeleteSubscriptionRequest
from .types.pubsub import DeleteTopicRequest
from .types.pubsub import DetachSubscriptionRequest
from .types.pubsub import DetachSubscriptionResponse
from .types.pubsub import ExpirationPolicy
from .types.pubsub import GetSnapshotRequest
from .types.pubsub import GetSubscriptionRequest
from .types.pubsub import GetTopicRequest
from .types.pubsub import ListSnapshotsRequest
from .types.pubsub import ListSnapshotsResponse
from .types.pubsub import ListSubscriptionsRequest
from .types.pubsub import ListSubscriptionsResponse
from .types.pubsub import ListTopicSnapshotsRequest
from .types.pubsub import ListTopicSnapshotsResponse
from .types.pubsub import ListTopicsRequest
from .types.pubsub import ListTopicsResponse
from .types.pubsub import ListTopicSubscriptionsRequest
from .types.pubsub import ListTopicSubscriptionsResponse
from .types.pubsub import MessageStoragePolicy
from .types.pubsub import ModifyAckDeadlineRequest
from .types.pubsub import ModifyPushConfigRequest
from .types.pubsub import PublishRequest
from .types.pubsub import PublishResponse
from .types.pubsub import PubsubMessage
from .types.pubsub import PullRequest
from .types.pubsub import PullResponse
from .types.pubsub import PushConfig
from .types.pubsub import ReceivedMessage
from .types.pubsub import RetryPolicy
from .types.pubsub import SchemaSettings
from .types.pubsub import SeekRequest
from .types.pubsub import SeekResponse
from .types.pubsub import Snapshot
from .types.pubsub import StreamingPullRequest
from .types.pubsub import StreamingPullResponse
from .types.pubsub import Subscription
from .types.pubsub import Topic
from .types.pubsub import UpdateSnapshotRequest
from .types.pubsub import UpdateSubscriptionRequest
from .types.pubsub import UpdateTopicRequest
from .types.schema import CreateSchemaRequest
from .types.schema import DeleteSchemaRequest
from .types.schema import GetSchemaRequest
from .types.schema import ListSchemasRequest
from .types.schema import ListSchemasResponse
from .types.schema import Schema
from .types.schema import ValidateMessageRequest
from .types.schema import ValidateMessageResponse
from .types.schema import ValidateSchemaRequest
from .types.schema import ValidateSchemaResponse
from .types.schema import Encoding
from .types.schema import SchemaView
# Public API of this (auto-generated) package: the re-exported client classes
# plus every pubsub/schema request, response and resource message type.
__all__ = (
'PublisherAsyncClient',
'SchemaServiceAsyncClient',
'SubscriberAsyncClient',
'AcknowledgeRequest',
'CreateSchemaRequest',
'CreateSnapshotRequest',
'DeadLetterPolicy',
'DeleteSchemaRequest',
'DeleteSnapshotRequest',
'DeleteSubscriptionRequest',
'DeleteTopicRequest',
'DetachSubscriptionRequest',
'DetachSubscriptionResponse',
'Encoding',
'ExpirationPolicy',
'GetSchemaRequest',
'GetSnapshotRequest',
'GetSubscriptionRequest',
'GetTopicRequest',
'ListSchemasRequest',
'ListSchemasResponse',
'ListSnapshotsRequest',
'ListSnapshotsResponse',
'ListSubscriptionsRequest',
'ListSubscriptionsResponse',
'ListTopicSnapshotsRequest',
'ListTopicSnapshotsResponse',
'ListTopicSubscriptionsRequest',
'ListTopicSubscriptionsResponse',
'ListTopicsRequest',
'ListTopicsResponse',
'MessageStoragePolicy',
'ModifyAckDeadlineRequest',
'ModifyPushConfigRequest',
'PublishRequest',
'PublishResponse',
'PublisherClient',
'PubsubMessage',
'PullRequest',
'PullResponse',
'PushConfig',
'ReceivedMessage',
'RetryPolicy',
'Schema',
'SchemaServiceClient',
'SchemaSettings',
'SchemaView',
'SeekRequest',
'SeekResponse',
'Snapshot',
'StreamingPullRequest',
'StreamingPullResponse',
'SubscriberClient',
'Subscription',
'Topic',
'UpdateSnapshotRequest',
'UpdateSubscriptionRequest',
'UpdateTopicRequest',
'ValidateMessageRequest',
'ValidateMessageResponse',
'ValidateSchemaRequest',
'ValidateSchemaResponse',
)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
6e15139c4557ef381a727beb2930c858fa210a14 | dfab6798ece135946aebb08f93f162c37dd51791 | /timber/aokuang.timber/aokuang.timber/actors/formselectorfield/events.py | ac42bdb2d1eb95d7fe1639ac26c31dab2b19fe40 | [] | no_license | yxqd/luban | 405f5f7dcf09015d214079fe7e23d644332be069 | 00f699d15c572c8bf160516d582fa37f84ac2023 | refs/heads/master | 2020-03-20T23:08:45.153471 | 2012-05-18T14:52:43 | 2012-05-18T14:52:43 | 137,831,650 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,700 | py | # -*- Python -*-
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Jiao Lin
# California Institute of Technology
# (C) 2006-2011 All Rights Reserved
#
# {LicenseText}
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
import luban
from ....DemoPanelActor import Actor as base
class Actor(base):
    """Demo actor: a form holding one selector field whose change, focus and
    blur events are all wired to handlers."""

    title = 'A form with one selector field'
    description = [
        'The selector field is attached with event hanlders for ',
        '"change", "focus", and "blur" events',
        ]

    def createDemoPanel(self, **kwds):
        """Assemble and return the demo document."""
        doc = luban.e.document()
        form = doc.form(title='login')
        gender_choices = [
            ('male', 'Male'),
            ('female', 'Female'),
            ]
        gender = form.selector(
            label='gender', entries=gender_choices, selection='female')
        log = doc.document(id='log', Class='log', title='log')
        # The change event is routed back to this actor's 'onchange' routine,
        # carrying both the previous and the new selection.
        gender.onchange = luban.a.load(
            actor=self.name, routine='onchange',
            old=luban.event.old, new=luban.event.new,
        )
        # Focus and blur simply append a note to the log pane.
        focused_note = luban.e.paragraph(text="focused")
        gender.onfocus = luban.a.select(element=log).append(
            newelement=focused_note)
        blurred_note = luban.e.paragraph(text="blured")
        gender.onblur = luban.a.select(element=log).append(
            newelement=blurred_note)
        return doc

    def onchange(self, old=None, new=None, **kwds):
        """Server-side change handler: report the transition in the log pane."""
        note = luban.e.paragraph(
            text="value changed from %r to %r" % (old, new))
        return luban.a.select(id='log').append(newelement=note)
# End of file
| [
"linjiao@caltech.edu"
] | linjiao@caltech.edu |
32fd38b52799830d21b7091490311c7c83f30a60 | 967056372d123ad5a86705156aea928d7352fe6a | /python基础/src/modules/my.py | 0ad230e316beccd863f6dd702a4f103130deffea | [] | no_license | lxy39678/Python | ea179ef929eb9ddddb2460656aad07880ae67f84 | aba0434bc8ca7a2abdaa3ced3c4d84a8de819c61 | refs/heads/master | 2020-04-18T22:05:11.683134 | 2019-01-27T07:49:09 | 2019-01-27T07:49:09 | 167,783,919 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 313 | py | #!/usr/bin/env python
#coding=utf-8
# Python 2 scratch script: probes the filesystem encoding and round-trips
# some non-ASCII text through UTF-8 to check console output.
import time
from datetime import datetime
import sys
# Bare print statement emits an empty line (Python 2 syntax).
print
# reload() restores sys.setdefaultencoding, which site.py removes at startup;
# forcing the default encoding this way is a well-known Python 2 hack.
reload(sys)
sys.setdefaultencoding('UTF-8')
print sys.getfilesystemencoding()
# Decode the timezone name from the filesystem encoding, then re-encode as UTF-8.
print time.strftime("%Z").decode(sys.getfilesystemencoding()).encode('utf-8')
print "中国".encode('utf-8').decode('utf-8')
"895902857@qq.com"
] | 895902857@qq.com |
804693751f7c34dcafb00d5959faed6213b530fc | 5a06576cd3aa58d265c433f10fe19136f25db8a5 | /cgsa/utils/data.py | f44b89169a45995f0d8f4cf7d95bc8f1bd87cbd9 | [
"MIT"
] | permissive | gghidiu/CGSA | 3e9d787ff4150e2b3a0875c74384c1591f261945 | c5af073f064db67d92b22705899c0d0263caec58 | refs/heads/master | 2020-06-17T17:12:55.512455 | 2019-01-05T13:08:56 | 2019-01-05T13:08:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,453 | py | #!/usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
##################################################################
# Documentation
"""Module for reading and outputting data.
Attributes:
"""
##################################################################
# Imports
from __future__ import absolute_import, print_function, unicode_literals
from collections import defaultdict
from future.utils import python_2_unicode_compatible
from six import iteritems
import re
from cgsa.utils.common import LOGGER
from cgsa.constants import KNOWN_LABELS
##################################################################
# Variables and Constants
BAR_RE = re.compile(r'[|]')
COLON_RE = re.compile(r'::+')
EQ_RE = re.compile(r'=')
SLASH_RE = re.compile(r'/')
SPACE_RE = re.compile(r' +')
TAB_RE = re.compile(r'\t')
MMAX = "mmax"
# compatibility with newer mate versions
VAL2KEY = {"1": "person", "2": "person", "3": "person",
"nom": "case", "gen": "case", "dat": "case",
"acc": "case", "sg": "number", "pl": "number",
"pres": "tense"}
##################################################################
# Classes
@python_2_unicode_compatible
class Features(object):
"""Class comprising relevant information about features.
"""
def __init__(self, feats):
"""Parse information about dependency relations.
Args:
feats (str): features to parse
Retuns:
void:
"""
self._feats = {MMAX: defaultdict(
lambda: defaultdict(dict))}
if feats == '_':
return
for feat_i in BAR_RE.split(feats):
if EQ_RE.search(feat_i):
key, value = EQ_RE.split(feat_i)
else:
value = feat_i
key = VAL2KEY.get(value, "UNKFEAT")
if value.lower() == "true" or value.lower() == "false":
value = bool(value)
key_parts = COLON_RE.split(key)
if len(key_parts) == 3:
# parse MMAX feature
markable, markable_id, attr = key_parts
self._feats[MMAX][markable][markable_id][attr] = value
else:
self._feats[key] = value
def get(self, key):
"""Return feaure's value if it is present.
Args:
key (str): feature's name
"""
return self._feats.get(key)
def __contains__(self, key):
"""Check feaure's presence.
Args:
key (str): feature's name
"""
return key in self._feats
def __getitem__(self, key):
"""Return feaure's value.
Args:
key (str): feature's name
"""
return self._feats[key]
def __haskey__(self, key):
"""Check feaure's presence.
Args:
key (str): feature's name
"""
return key in self._feats
def __setitem__(self, key, value):
"""Set feaure's value.
Args:
key (str): feature's name
value (object): new feature's value
"""
self._feats[key] = value
return value
def __str__(self):
"""Return unicode representation of the given word.
Returns:
unicode:
"""
if len(self._feats) == 1 and self._feats[MMAX]:
return '_'
mmax_feats = '|'.join([
mkbl + "::" + markable_id + "::"
+ attr + '=' + str(value)
for mkbl, mkbl_val in iteritems(self._feats[MMAX])
for markable_id in mkbl_val
for attr, value in iteritems(mkbl_val[markable_id])])
feats = '|'.join([k + '-' + str(v)
for k, v in iteritems(self._feats)
if k != MMAX])
if feats:
if mmax_feats:
return feats + '|' + mmax_feats
return feats
elif mmax_feats:
return mmax_feats
else:
return '_'
@python_2_unicode_compatible
class Word(object):
"""Class comprising relevant information about a word.
Attributes:
form (str): word's form
lemma (str): word's lemma
tag (str): part-of-speech tag
feats (dict): word's features
deprel (str): dependency relation connecting the word to its parent
prnt_idx (int or None): index of the parent node
"""
def __init__(self, form, lemma, tag, deprel, feats):
"""Class constructor.
Attributes:
form (str): word's form
lemma (str): word's lemma
tag (str): part-of-speech tag
deprel (str): dependency relation and index of the parent node
feats (str): word's features to be parsed
"""
self._logger = LOGGER
self.form = form
self.lemma = lemma
self.tag = tag
self._parse_deps(deprel)
self.feats = Features(feats)
self.children = []
def __str__(self):
"""Return unicode representation of the given word.
Returns:
unicode:
"""
ret = "form: '{:s}'; lemma: {:s}; tag: {:s}; deprel: {:s}/{:s}; " \
" feats: {:s}".format(
self.form, self.lemma, self.tag,
str(self.prnt_idx + 1), self.deprel,
str(self.feats))
return ret
def _parse_deps(self, deprel):
"""Parse information about dependency relations.
Args:
deprel (str):
"""
fields = SLASH_RE.split(deprel)
if fields[0] == '_' or fields[0] == '0':
self.prnt_idx = -1
self.deprel = '_'
else:
self.prnt_idx = int(fields[0]) - 1
self.deprel = fields[1]
@python_2_unicode_compatible
class Tweet(object):
"""Class comprising relevant information about a tweet.
Attributes:
msg_id (str): id of the message
words (list[str]): message's words with all their attributes
dtree (list[str]): dependency tree
"""
def __init__(self, tweet):
"""Class constructor.
Args:
tweet (str): tweet
"""
self.msg_id = None
self.label = None
self.words = []
self.iwords = iter(self.words)
self._logger = LOGGER
self._parse(tweet)
def __iter__(self):
"""Private method required for iteration.
"""
self.iwords = iter(self.words)
return self.iwords
def __len__(self):
"""Return number of tokens in the tweet.
"""
return len(self.words)
def __getitem__(self, key):
"""Private method required for iteration.
"""
return self.words[key]
def __next__(self):
"""Private method required for iteration.
"""
return next(self.iwords)
def __str__(self):
"""Return unicode representation of the given word.
Returns:
unicode:
"""
# output the same string as the one that has been parsed
ret = "{msg_id:s}\t{label:s}\t{tokens:s}\t{lemmas:s}" \
"\t{tags:s}\t{deps:s}\t{feats:s}".format(
msg_id=self.msg_id, label=self.label,
tokens=' '.join(w.form for w in self.words),
lemmas=' '.join(w.lemma for w in self.words),
tags=' '.join(w.tag for w in self.words),
deps=' '.join(str(w.prnt_idx + 1) + '/' + w.deprel
for w in self.words),
feats=' '.join(str(w.feats) for w in self.words)
)
return ret
def _parse(self, tweet):
"""Parse tweet's string.
Attributes:
tweet (str): tweet to be parse
"""
fields = TAB_RE.split(tweet)
assert len(fields) == 7, \
"Incorrect line format (expected seven tab fields): {!r}".format(
tweet
)
self.msg_id = fields[0]
assert fields[1] in KNOWN_LABELS, \
"Unknown sentiment label: {!r}".format(fields[1])
self.label = fields[1]
toks = SPACE_RE.split(fields[2])
lemmas = SPACE_RE.split(fields[3].lower())
tags = SPACE_RE.split(fields[4])
deps = SPACE_RE.split(fields[5])
feats = SPACE_RE.split(fields[6])
n = len(toks)
if n != len(lemmas) or n != len(tags) \
or n != len(deps) or n != len(feats):
self._logger.error("Tokens: %d", n)
self._logger.error("Lemmas: %d", len(lemmas))
self._logger.error("Tags: %d", len(tags))
self._logger.error("Dependencies: %d", len(deps))
self._logger.error("Features: %r", len(feats))
for tok_i, lemma_i in zip(toks, lemmas):
if tok_i.lower() != lemma_i.lower():
self._logger.error("{!r} <-> {!r}".format(tok_i, lemma_i))
assert False, \
"Unequal number of attributes at line {!r}".format(
tweet)
self.words = [
Word(tok_i, lemma_i, tag_i, dep_i, feats_i)
for tok_i, lemma_i, tag_i, dep_i, feats_i
in zip(toks, lemmas, tags, deps, feats)
]
for i, w_i in enumerate(self.words):
prnt_idx = w_i.prnt_idx
if prnt_idx < 0:
continue
prnt = self.words[prnt_idx]
prnt.children.append(i)
| [
"wlsidorenko@gmail.com"
] | wlsidorenko@gmail.com |
cde49f25158ed056ea012c0cbf5dd02e5061df9b | 2fdf33eff3f22a4f2e0337f065646de8fe6cc01f | /mq/utils.py | ec63e584dd00aa2c4cdd4436bde69e7ac89a83b4 | [
"MIT"
] | permissive | apnarm/python-mq | 007d978fe6a23b0d65555909ad34f2a21df5c5d5 | 14037cf86abc2393c4f8d791fd76bcca7a781607 | refs/heads/master | 2020-04-15T10:30:05.739270 | 2014-06-09T20:58:47 | 2014-06-09T20:58:47 | 20,659,543 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 749 | py | import sys
import time
from contextlib import contextmanager
def chunk_list(sequence, size):
    """Split *sequence* into consecutive chunks of at most *size* items.

    Works on any sliceable sequence (list, str, tuple, ...); each chunk has
    the same type as the input. The last chunk may be shorter than *size*.
    """
    # range() replaces the Python-2-only xrange(): identical behavior here,
    # and the function now also runs on Python 3.
    return [sequence[start: start + size]
            for start in range(0, len(sequence), size)]
@contextmanager
def time_elapsed(name=''):
    """Context manager that reports the wall-clock time of its block on stderr.

    When *name* is given, the report is prefixed with "<name> took ".
    Durations under one millisecond are printed with four decimals,
    longer ones as a whole number of milliseconds.

    From https://gist.github.com/raymondbutcher/5168588
    """
    started = time.time()
    yield
    elapsed_ms = (time.time() - started) * 1000
    prefix = '%s took ' % name if name else ''
    unit_fmt = '%.4f ms\n' if elapsed_ms < 1 else '%d ms\n'
    sys.stderr.write(prefix + unit_fmt % elapsed_ms)
| [
"randomy@gmail.com"
] | randomy@gmail.com |
d828594adc6f7eb9cf70397564962feb6b86a16a | 796f96e8c01db0bb9493c4871be66d92689b73ab | /5097_회전/sol1.py | 6e38169fa762c0448805a506e36bde655123aa0d | [] | no_license | swhan9404/swExpertAcademy | cf82b957b7ea6231d1f4b95f77f74e7717d2de0d | dea4176229121983c6daed80811de20de6da5ff6 | refs/heads/master | 2023-04-19T22:57:37.111757 | 2021-05-02T14:58:44 | 2021-05-02T14:58:44 | 337,338,058 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 347 | py | import sys
sys.stdin = open("input.txt")
T = int(input())
for tc in range(1, T+1):
N, M = map(int, input().split())
# list로 queue 구현
inp_arr= list(map(int, input().split()))
for _ in range(M) :
front = inp_arr.pop(0)
inp_arr.append(front)
result = inp_arr[0]
print("#{} {}".format(tc, result))
| [
"swhan9404@naver.com"
] | swhan9404@naver.com |
db422a8166459af5fa7fb6aefad431847328f65f | fab72028e5aa12c0a1a1cf1b550212c017979f78 | /test_app/models.py | cd0321fab47456961ba3a922b9260af7557b4e50 | [] | no_license | jpic/test_input_relation | 0e43c64e659005472d3e7210ed0a1c8f65a05bd4 | 24a485670b2945430657ee4b998e5aaa756329c3 | refs/heads/master | 2021-01-02T08:19:08.771571 | 2015-05-19T23:43:38 | 2015-05-19T23:43:38 | 35,902,474 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 499 | py | from django.db import models
class TestModel(models.Model):
name = models.CharField(max_length=200, null=True, blank=True)
relation = models.ForeignKey('self', null=True, blank=True)
name0 = models.CharField(max_length=200, null=True, blank=True)
name1 = models.CharField(max_length=200, null=True, blank=True)
name2 = models.CharField(max_length=200, null=True, blank=True)
name3 = models.CharField(max_length=200, null=True, blank=True,
choices=[('a', 'a')])
| [
"jamespic@gmail.com"
] | jamespic@gmail.com |
0e6415dc00588741391be39b0ea44e25232e440a | 747f759311d404af31c0f80029e88098193f6269 | /addons/account_banking_nl_clieop/__terp__.py | 3728f4dce0fd515099f470fe771d6857b3c424b6 | [] | no_license | sgeerish/sirr_production | 9b0d0f7804a928c0c582ddb4ccb7fcc084469a18 | 1081f3a5ff8864a31b2dcd89406fac076a908e78 | refs/heads/master | 2020-05-19T07:21:37.047958 | 2013-09-15T13:03:36 | 2013-09-15T13:03:36 | 9,648,444 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | /home/openerp/production/extra-addons/account_banking_nl_clieop/__terp__.py | [
"geerish@omerp.net"
] | geerish@omerp.net |
c5c3040710b66ad3829cacb41d49fd4ffee9f79b | 5966449d2e29c9b64351895db2932f94f9de42da | /catkin_ws/build/calibration_msgs/catkin_generated/pkg.develspace.context.pc.py | c5aed50da0a8c9acf6a127e9060b081e8582ea96 | [] | no_license | godaeseong/GoHriProject | 8cbce6934485b8ba3253fc7b6c5b5b59397b4518 | 425e70b7c91b6215f5477fc2250d2b0ac96577be | refs/heads/master | 2021-05-11T22:11:56.099580 | 2018-01-15T02:20:43 | 2018-01-15T02:20:43 | 117,484,817 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 772 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
# The ';'-joined CMake lists below are split into Python lists; the inline
# guard against "" keeps an empty CMake list from becoming [''].
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/hri/catkin_ws/devel/.private/calibration_msgs/include;/home/hri/catkin_ws/src/calibration_toolkit/calibration_msgs/include".split(';') if "/home/hri/catkin_ws/devel/.private/calibration_msgs/include;/home/hri/catkin_ws/src/calibration_toolkit/calibration_msgs/include" != "" else []
# Run-time catkin dependencies, space-separated as pkg-config expects.
PROJECT_CATKIN_DEPENDS = "message_runtime;sensor_msgs;actionlib_msgs;calibration_common;eigen_conversions".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lcalibration_msgs".split(';') if "-lcalibration_msgs" != "" else []
PROJECT_NAME = "calibration_msgs"
PROJECT_SPACE_DIR = "/home/hri/catkin_ws/devel/.private/calibration_msgs"
PROJECT_VERSION = "1.0.0"
| [
"bigdream0129@naver.com"
] | bigdream0129@naver.com |
0082adaaf92b0c63edfb3eb67e82c9f23b7aaff1 | c6d852e5842cf6f74123445d20ff03876377ae26 | /lemon/python22/lemon_25_191023_充值接口_用例关联/test_practice_01_25/middler_ware/db_handler.py | 965b6da45635f25ac85180de1363a2f57a60577d | [] | no_license | songyongzhuang/PythonCode_office | 0b3d35ca5d58bc305ae90fea8b1e8c7214619979 | cfadd3132c2c7c518c784589e0dab6510a662a6c | refs/heads/master | 2023-02-13T14:06:10.610935 | 2021-01-14T09:11:32 | 2021-01-14T09:11:32 | 327,183,429 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,069 | py | # --*-- coding :utf-8 --*--
# Project :python22
# Current file :db_handler.py
# Author :Administrator
# Create time :2019/10/24 15:23
# IDE :PyCharm
# TODO 成长很苦, 进步很甜, 加油!
from pymysql.cursors import DictCursor
from lemon_25_191023_充值接口_用例关联.test_practice_01_25.common.db_handler_shujuku \
import DBHandler # 数据库
from lemon_25_191023_充值接口_用例关联.test_practice_01_25.common.config_handler_peizhiwenjian \
import config # 配置文件
class MyDBHandler(DBHandler):
    """DBHandler preconfigured from the [db] section of the project config.

    Rows are returned as dicts (DictCursor) instead of the default tuples.
    Extra keyword arguments are forwarded to :class:`DBHandler`.
    """

    def __init__(self, **kw):
        super().__init__(
            host=config.read('db', 'host'),
            # int() instead of the original eval(): the port is a plain
            # integer, and eval() would execute arbitrary code placed in
            # the config file.
            port=int(config.read('db', 'port')),
            user=config.read('db', 'user'),
            password=config.read('db', 'password'),
            charset=config.read('db', 'charset'),
            database=config.read('db', 'database'),
            cursorclass=DictCursor,  # rows come back as dicts, not tuples
            **kw
        )
| [
"songyongzhuang9@163.com"
] | songyongzhuang9@163.com |
9d85792ee3739689520034f7a9100b57d97a01e6 | 3b56423a34de9b4adae13d0fee4905609a9e2410 | /contrib/linearize/linearize-hashes.py | 99af17d0cc38195bef52e9e3482702549581d36a | [
"MIT"
] | permissive | mulecore/mule | 5302db1cb6596475f9496495dba12b4d1cfd0d2c | 9b2d9bf0ffc47963b5c0ce9b760cfff757961533 | refs/heads/master | 2023-03-27T02:25:49.455321 | 2021-03-26T10:31:13 | 2021-03-26T10:31:13 | 351,247,550 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,667 | py | #!/usr/bin/env python3
#
# linearize-hashes.py: List blocks in a linear, no-fork version of the chain.
#
# Copyright (c) 2013-2016 The Bitcoin Core developers
# Copyright (c) 2017-2020 The Mule Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from __future__ import print_function
try: # Python 3
import http.client as httplib
except ImportError: # Python 2
import httplib
import json
import re
import base64
import sys
import os
import os.path
settings = {}
##### Switch endian-ness #####
def hex_switchEndian(s):
""" Switches the endianness of a hex string (in pairs of hex chars) """
pairList = [s[i:i+2].encode() for i in range(0, len(s), 2)]
return b''.join(pairList[::-1]).decode()
class MuleRPC:
def __init__(self, host, port, username, password):
authpair = "%s:%s" % (username, password)
authpair = authpair.encode('utf-8')
self.authhdr = b"Basic " + base64.b64encode(authpair)
self.conn = httplib.HTTPConnection(host, port=port, timeout=30)
def execute(self, obj):
try:
self.conn.request('POST', '/', json.dumps(obj),
{ 'Authorization' : self.authhdr,
'Content-type' : 'application/json' })
except ConnectionRefusedError:
print('RPC connection refused. Check RPC settings and the server status.',
file=sys.stderr)
return None
resp = self.conn.getresponse()
if resp is None:
print("JSON-RPC: no response", file=sys.stderr)
return None
body = resp.read().decode('utf-8')
resp_obj = json.loads(body)
return resp_obj
@staticmethod
def build_request(idx, method, params):
obj = { 'version' : '1.1',
'method' : method,
'id' : idx }
if params is None:
obj['params'] = []
else:
obj['params'] = params
return obj
@staticmethod
def response_is_error(resp_obj):
return 'error' in resp_obj and resp_obj['error'] is not None
def get_block_hashes(settings, max_blocks_per_call=10000):
rpc = MuleRPC(settings['host'], settings['port'],
settings['rpcuser'], settings['rpcpassword'])
height = settings['min_height']
while height < settings['max_height']+1:
num_blocks = min(settings['max_height']+1-height, max_blocks_per_call)
batch = []
for x in range(num_blocks):
batch.append(rpc.build_request(x, 'getblockhash', [height + x]))
reply = rpc.execute(batch)
if reply is None:
print('Cannot continue. Program will halt.')
return None
for x,resp_obj in enumerate(reply):
if rpc.response_is_error(resp_obj):
print('JSON-RPC: error at height', height+x, ': ', resp_obj['error'], file=sys.stderr)
sys.exit(1)
assert(resp_obj['id'] == x) # assume replies are in-sequence
if settings['rev_hash_bytes'] == 'true':
resp_obj['result'] = hex_switchEndian(resp_obj['result'])
print(resp_obj['result'])
height += num_blocks
def get_rpc_cookie():
# Open the cookie file
with open(os.path.join(os.path.expanduser(settings['datadir']), '.cookie'), 'r', encoding="ascii") as f:
combined = f.readline()
combined_split = combined.split(":")
settings['rpcuser'] = combined_split[0]
settings['rpcpassword'] = combined_split[1]
if __name__ == '__main__':
if len(sys.argv) != 2:
print("Usage: linearize-hashes.py CONFIG-FILE")
sys.exit(1)
f = open(sys.argv[1], encoding="utf8")
for line in f:
# skip comment lines
m = re.search('^\s*#', line)
if m:
continue
# parse key=value lines
m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
if 'host' not in settings:
settings['host'] = '127.0.0.1'
if 'port' not in settings:
settings['port'] = 9652
if 'min_height' not in settings:
settings['min_height'] = 0
if 'max_height' not in settings:
settings['max_height'] = 313000
if 'rev_hash_bytes' not in settings:
settings['rev_hash_bytes'] = 'false'
use_userpass = True
use_datadir = False
if 'rpcuser' not in settings or 'rpcpassword' not in settings:
use_userpass = False
if 'datadir' in settings and not use_userpass:
use_datadir = True
if not use_userpass and not use_datadir:
print("Missing datadir or username and/or password in cfg file", file=sys.stderr)
sys.exit(1)
settings['port'] = int(settings['port'])
settings['min_height'] = int(settings['min_height'])
settings['max_height'] = int(settings['max_height'])
# Force hash byte format setting to be lowercase to make comparisons easier.
settings['rev_hash_bytes'] = settings['rev_hash_bytes'].lower()
# Get the rpc user and pass from the cookie if the datadir is set
if use_datadir:
get_rpc_cookie()
get_block_hashes(settings)
| [
"mule@muleda.com"
] | mule@muleda.com |
2ada5ff18ff071c0266184f2b0db5ad2a56cc1ad | 1287ad54942fd2020a217ab12004a541abb62558 | /pythonexercicios/Ex046.py | f73a36c9de3ccc6a017797df77b9e097cfd3a9b4 | [] | no_license | LuPessoa/exerciciospy- | 637f24581722e547a62380973ca645b55ff65d90 | b5faad818f978bb13a65922edceb17888b73a407 | refs/heads/master | 2023-05-12T04:16:39.847184 | 2021-06-04T03:02:24 | 2021-06-04T03:02:24 | 374,410,212 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 108 | py | from time import sleep
# Count down from 10 to 0, pausing half a second between numbers,
# then announce the explosion.
for remaining in reversed(range(11)):
    print(remaining)
    sleep(0.5)
print('BUM! BUM! BUM!')
"lulenemacedo29@gmail.com"
] | lulenemacedo29@gmail.com |
40fcffde1d8ae8851c4f31733e22a88f8701a626 | ea9f38468c32efc07b090ff65ae0e3369486fb8b | /update/organizations.py | d7ca0b95ea56884cfbf5989ae4130373c1226511 | [] | no_license | KohoVolit/activities | f8942e9480a1d0a20bfa1727d53f49f4df9f0bb8 | 1569e8df19c9deb718e08e17ca17baa6bbedcbb0 | refs/heads/master | 2021-01-10T16:46:46.104425 | 2016-03-30T01:53:07 | 2016-03-30T01:53:07 | 53,628,682 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,753 | py | # insert and update organizations
# chamber and political groups
import json
import api
import authentication
import scrapeutils
# Source data: the PSP (Czech Chamber of Deputies) open-data dump;
# 'organy.unl' holds the organization rows processed below.
url = 'http://www.psp.cz/eknih/cdrom/opendata/poslanci.zip'
unlfile = 'organy.unl'
# Authenticate against the API, then download the archive and unpack
# the organization rows into `organy` (a list of field lists).
api.login(authentication.email,authentication.password)
zfile = scrapeutils.download(url,zipped=True)
organy = scrapeutils.zipfile2rows(zfile,unlfile)
# chamber:
# Upsert the chamber organization (row type '11'). The term number is taken
# from row[3] after its first three characters (presumably 'PSPn' -> n;
# verify against the source data).
for row in organy:
    if row[2] == '11':  # chamber
        term = row[3][3:]
        org = {
            "name": row[4].strip(),
            'classification': 'chamber',
            'id': int(row[0].strip()),
            'founding_date': scrapeutils.cs2iso(row[6].strip()),
            'attributes': {
                "abbreviation": "PSP",
                "term": int(term)
            }
        }
        if row[7].strip() != '':
            org["dissolution_date"] = scrapeutils.cs2iso(row[7].strip())
        params = {'id': "eq.%s" % (org['id'])}
        r = api.get("organizations", params)
        rdata = r.json()
        if len(rdata) == 0:
            # Not present yet -> create.
            r = api.post("organizations", org)
        else:
            # Already present -> merge attributes over the stored ones, then update.
            o = rdata[0]
            try:
                z = o['attributes'].copy()
                z.update(org['attributes'])
                org['attributes'] = z
            except (KeyError, AttributeError, TypeError):
                # Existing record has no usable attributes; keep the new ones.
                pass
            r = api.patch("organizations", params, org)
# political groups:
# Upsert political groups (row type '1'). row[1] references the parent
# organization; its id and term are copied into the group's attributes.
for row in organy:
    if row[2] == '1':  # political group
        params = {
            "id": "eq." + row[1].strip()
        }
        parent = api.get_one("organizations", params)
        org = {
            "name": row[4].strip(),
            'classification': 'political group',
            'id': int(row[0].strip()),
            'founding_date': scrapeutils.cs2iso(row[6].strip()),
            'attributes': {
                "abbreviation": row[3].strip(),
                "parent_id": parent['id'],
                "term": parent['attributes']['term']
            }
        }
        if row[7].strip() != '':
            org["dissolution_date"] = scrapeutils.cs2iso(row[7].strip())
        params = {'id': "eq.%s" % (org['id'])}
        r = api.get("organizations", params)
        rdata = r.json()
        if len(rdata) == 0:
            # Not present yet -> create.
            r = api.post("organizations", org)
        else:
            # Already present -> merge attributes over the stored ones, then update.
            o = rdata[0]
            try:
                z = o['attributes'].copy()
                z.update(org['attributes'])
                org['attributes'] = z
            except (KeyError, AttributeError, TypeError):
                # Existing record has no usable attributes; keep the new ones.
                pass
            r = api.patch("organizations", params, org)
# regions
# Upsert regions: type '75' are current regions, type '8' the old
# (pre-2002) regions, which are force-closed as of 31.05.2002.
# (The original loop also built an unused `params` lookup dict here;
# that dead assignment has been removed.)
for row in organy:
    if row[2] == '75' or row[2] == '8':  # regions
        org = {
            "name": row[4].strip(),
            'classification': 'region',
            'id': int(row[0].strip()),
            'founding_date': scrapeutils.cs2iso(row[6].strip()),
            'attributes': {
                "abbreviation": row[3].strip()
            }
        }
        if row[2] == '8':  # old regions ceased to exist on this date
            row[7] = '31.05.2002'
        if row[7].strip() != '':
            org["dissolution_date"] = scrapeutils.cs2iso(row[7].strip())
        params = {'id': "eq.%s" % (org['id'])}
        r = api.get("organizations", params)
        rdata = r.json()
        if len(rdata) == 0:
            # Not present yet -> create.
            r = api.post("organizations", org)
        else:
            # Already present -> merge attributes over the stored ones, then update.
            o = rdata[0]
            try:
                z = o['attributes'].copy()
                z.update(org['attributes'])
                org['attributes'] = z
            except (KeyError, AttributeError, TypeError):
                # Existing record has no usable attributes; keep the new ones.
                pass
            r = api.patch("organizations", params, org)
# electoral lists
# Upsert electoral lists (row type '6'). The unused `params` lookup dict
# the original built at the top of the branch has been removed.
for row in organy:
    if row[2] == '6':  # electoral list
        org = {
            "name": row[4].strip(),
            'classification': 'electoral list',
            'id': int(row[0].strip()),
            'founding_date': scrapeutils.cs2iso(row[6].strip()),
            'attributes': {
                "abbreviation": row[3].strip()
            }
        }
        if row[7].strip() != '':
            org["dissolution_date"] = scrapeutils.cs2iso(row[7].strip())
        params = {'id': "eq.%s" % (org['id'])}
        r = api.get("organizations", params)
        rdata = r.json()
        if len(rdata) == 0:
            # Not present yet -> create.
            r = api.post("organizations", org)
        else:
            # Already present -> merge attributes over the stored ones, then update.
            o = rdata[0]
            try:
                z = o['attributes'].copy()
                z.update(org['attributes'])
                org['attributes'] = z
            except (KeyError, AttributeError, TypeError):
                # Existing record has no usable attributes; keep the new ones.
                pass
            r = api.patch("organizations", params, org)
# governments
# Upsert governments (row type '5'). Fixes the original's copy-pasted
# "# electoral list" comment and drops its unused `params` lookup dict.
for row in organy:
    if row[2] == '5':  # government
        org = {
            "name": row[4].strip(),
            'classification': 'government',
            'id': int(row[0].strip()),
            'founding_date': scrapeutils.cs2iso(row[6].strip()),
            'attributes': {
                "abbreviation": row[3].strip()
            }
        }
        if row[7].strip() != '':
            org["dissolution_date"] = scrapeutils.cs2iso(row[7].strip())
        params = {'id': "eq.%s" % (org['id'])}
        r = api.get("organizations", params)
        rdata = r.json()
        if len(rdata) == 0:
            # Not present yet -> create.
            r = api.post("organizations", org)
        else:
            # Already present -> merge attributes over the stored ones, then update.
            o = rdata[0]
            try:
                z = o['attributes'].copy()
                z.update(org['attributes'])
                org['attributes'] = z
            except (KeyError, AttributeError, TypeError):
                # Existing record has no usable attributes; keep the new ones.
                pass
            r = api.patch("organizations", params, org)
| [
"michal.skop@kohovolit.eu"
] | michal.skop@kohovolit.eu |
5e660b6bf487e557b8c33745d5b66cea959e7c4e | cbd2eee46663fad5b5375b13c8c21b1b06eb4c6b | /hrjk/engine/kafka_engine.py | b0e5773f5a18a78305aeb9dc4ba4f5cc70279331 | [] | no_license | 1026237416/Python | ef474ee40d7efcd6dabb6fb0ecba81b4dcfc7e14 | ffa8f9ffb8bfec114b0ca46295db05c4213c4c30 | refs/heads/master | 2021-07-05T00:57:00.456886 | 2019-04-26T10:13:46 | 2019-04-26T10:13:46 | 114,510,323 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,099 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@version: 1.0
@author: li
@license: Apache Licence
@contact: 1026237416@qq.com
@site:
@software: PyCharm
@file: kafka_engine.py
@time: 2018/7/17 17:58
"""
import json
from config import KAFKA_HOST, KAFKA_PORT, conductor_recv_topic
from common import case_log
def default_call_back(data, **kwargs):
    """Fallback consumer callback: dump the received payload to stdout.

    Accepts arbitrary keyword arguments (e.g. ``key=``) so it can actually be
    used as the *call_back* argument of ``on_message``, which passes the Kafka
    message key as a keyword — the original signature raised TypeError there.
    Prints use the parenthesized form, valid on both Python 2 and 3.
    """
    print("******************Receive New Data******************")
    print(type(data))
    print(data)
    print("****************************************************")
def on_message(messages, call_back):
    """Drain *messages*, invoking ``call_back(raw_value, key=...)`` per record.

    Each record's value is first parsed as JSON purely to validate it;
    the callback still receives the raw value. Invalid payloads are
    logged as warnings and skipped.
    """
    for record in messages:
        if record is None:
            continue
        case_log.info("Recv new message: %s" % record.value)
        try:
            json.loads(record.value)  # validate the payload before dispatch
            call_back(record.value, key=record.key)
        except ValueError as err:
            case_log.warning(
                "Receive an illegal message: %s" % err.message)
class KafkaEngineClientByPyKafka(object):
def __init__(self):
from pykafka import KafkaClient
kafka_host = "%s:%s" % (KAFKA_HOST, str(KAFKA_PORT))
self.client = KafkaClient(hosts=kafka_host)
self.topic = self.client.topics[conductor_recv_topic]
def get_all_topic(self):
return self.client.topics
def create_topic(self, topic_name):
pass
def recv_msg(self, call_back):
messages = self.topic.get_simple_consumer(consumer_group='case_engine',
auto_commit_enable=True,
consumer_id='case_engine')
on_message(messages=messages, call_back=call_back)
class KafkaEngineClientByKafkaPython(object):
def __init__(self):
self.kafka_host = "%s:%s" % (KAFKA_HOST, str(KAFKA_PORT))
def recv_msg(self, call_back):
from kafka import KafkaConsumer
messages = KafkaConsumer(conductor_recv_topic,
bootstrap_servers=[self.kafka_host])
on_message(messages=messages, call_back=call_back)
def send_msg(self, topic_name, msg, key):
from kafka import KafkaProducer
producer = KafkaProducer(
bootstrap_servers=self.kafka_host,
value_serializer=lambda v: json.dumps(v).encode('utf-8'),
acks="all"
)
producer.send(topic=topic_name,
key=key,
value=msg
)
case_log.info(
"Send message to Kafka, using topic: %s, key: %s, value: %s" % (
topic_name, key, msg
))
producer.flush()
def kafka_engine():
    """Factory: return the kafka-python backed engine client."""
    return KafkaEngineClientByKafkaPython()
if __name__ == '__main__':
    # Manual smoke test: publish one acknowledgement message to the
    # Conductor ack topic, keyed by a fixed workflow/task UUID.
    client = kafka_engine()
    msg = {'requestinfo': '0000000', 'processName': '201807182248',
           'requestway': 'run'}
    client.send_msg(topic_name="EVENT.conductor.ack",
                    key=b"35b71d7d-fdca-4070-8940-85f1f1fd82c1",
                    msg=msg
                    )
| [
"1026237416@qq.com"
] | 1026237416@qq.com |
96efef66f4e59be983e051f8f19e13fff585a4dc | da0ec71df76ea5071a1481ef5b54cafd6d74d720 | /python/minesweeper/minesweeper.py | 84edda519d12bb338bbe9d7f8204a1bbc09c5e6f | [] | no_license | apalala/exercism | 6bd4926c0c78790c366b3a95c1c594669f871410 | 9d295a1ba1f6950c1e03e9de6fa10ccb392ffa22 | refs/heads/master | 2021-01-18T15:56:51.685366 | 2017-03-31T23:40:29 | 2017-03-31T23:40:29 | 86,697,933 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,212 | py | ARROUND = [
(-1,-1), (-1, 0), (-1, 1),
(0, -1), (0, 1),
(1, -1), (1, 0), (1, 1)
]
def board(matrix):
def adjust(value):
if value == ' ':
return '1'
elif value.isdigit():
return str(int(value) + 1)
else:
return value
if not matrix:
raise ValueError('Empty board')
m = len(matrix)
n = len(matrix[0])
horz_border = '+' + '-' * (n-2) + '+'
if matrix[0] != horz_border or matrix[m - 1] != horz_border:
raise ValueError('Corrupt border')
matrix = [list(row) for row in matrix]
for i in range(1, m - 1):
if len(matrix[i]) != n:
raise ValueError('Rows not of same length')
if matrix[i][0] != '|' or matrix[i][n - 1] != '|':
raise ValueError('Corrupt border')
for j in range(1, n - 1):
c = matrix[i][j]
if c not in '* 012345678':
raise ValueError('Unknown symbol in matrix')
elif c != '*':
continue
else:
for x, y in ARROUND:
matrix[i + x][j + y] = adjust(matrix[i + x][j + y])
return [''.join(matrix) for matrix in matrix]
| [
"apalala@gmail.com"
] | apalala@gmail.com |
eafa72b01bb3f0d3be4213af4e7b7bf50022defd | 0d0afd1dce972b4748ce8faccd992c019794ad9e | /integra/construtora/__openerp__.py | 44b6587f79173aa9f793c38484bb52a60caab17f | [] | no_license | danimaribeiro/odoo-erp | e2ca2cfe3629fbedf413e85f7c3c0453fd16941e | d12577bf7f5266b571cbedeb930720d653320e96 | refs/heads/master | 2020-01-23T21:32:16.149716 | 2016-11-05T15:35:40 | 2016-11-05T15:35:40 | 67,892,809 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,212 | py | # -*- coding: utf-8 -*-
#from __future__ import division, print_function, unicode_literals
# Odoo/OpenERP module manifest for the "Construcao" (construction) addon:
# a plain dict literal read by the addon loader, not executable code.
{
    'name': 'Construcao',
    'version': '1.0',
    'category': 'Integra',
    'description': u'Construção civil',
    'author': 'Integra',
    'maintainer': 'Integra',
    'website': 'http://www.ERPIntegra.com.br',
    # Addons that must be installed before this one.
    'depends': [
        'base',
        'sale',
        'sped_base',
        'sped',
        'project',
        'account',
        'project_long_term',
        'finan',
        'finan_modelo_lancamento',
        'finan_contrato',
        'finan_contrato_checklist',
        'finan_cheque',
        'purchase',
        'compras',
        'sped_purchase',
        'integra_libreoffice',
        'share',
    ],
    # XML data files (security, views, wizards, dashboards) loaded in order
    # on install/update.
    'update_xml': [
        'security/groups.xml',
        'views/mail_compose_message_negociacao_view.xml',
        'views/project_task.xml',
        'views/project_etapa.xml',
        'views/project_orcamento.xml',
        'views/project_orcamento_planejamento.xml',
        'views/purchase_orcamento_planejamento.xml',
        'views/purchase_orcamento_item.xml',
        'views/project_orcamento_medicao.xml',
        'views/res_partner_address_view.xml',
        'views/res_cep_view.xml',
        'views/partner_view.xml',
        'views/res_partner_corretor_view.xml',
        'views/res_partner_contato_view.xml',
        'views/crm_lead_prospecto_view.xml',
        'views/imovel_view.xml',
        'views/imovel_casa.xml',
        'views/imovel_apartamento.xml',
        'views/imovel_terreno.xml',
        'views/imovel_chacara.xml',
        'views/imovel_fazenda.xml',
        'views/imovel_area.xml',
        'views/imovel_galpao.xml',
        'views/imovel_predio.xml',
        'views/imovel_sala_comercial.xml',
        'views/imovel_loja.xml',
        'views/imovel_outros.xml',
        'views/hr_employee_view.xml',
        'views/product_view.xml',
        'views/p_view.xml',
        'views/purchase_view.xml',
        'views/purchase_cotacao.xml',
        'views/cotacao_supplier_info.xml',
        'views/purchase_solicitacao_cotacao.xml',
        'views/purchase_solicitacao_item.xml',
        'views/purchase_order.xml',
        'views/purchase_order_line_view.xml',
        #'views/finan_tabela_venda.xml',
        'views/imovel_crm.xml',
        'views/finan_contrato_etapa.xml',
        'views/finan_contrato_condicao_pagamento_view.xml',
        'views/finan_contrato_condicao_pagamento_renegociacao_view.xml',
        'views/finan_contrato_proposta.xml',
        'views/finan_contrato_analise_financeiro.xml',
        'views/finan_contrato_analise_juridico.xml',
        'views/finan_contrato_receber_view.xml',
        'views/sale_dashboard_view.xml',
        'views/crm_lead_prospecto_dashboard.xml',
        'views/finan_contrato_proposta_dashboard.xml',
        'views/finan_centrocusto_view.xml',
        'views/finan_pagar_view.xml',
        'views/finan_receber_view.xml',
        'views/finan_documento_view.xml',
        'views/project_view.xml',
        'views/const_zoneamento.xml',
        'views/finan_comissao.xml',
        'views/sped_documento_nfe_recebida.xml',
        'views/sped_documento_nfe_recebida_manual.xml',
        'views/sped_documento_nfse_recebida.xml',
        'views/sped_documento_ecf_recebido.xml',
        'views/lo_modelo.xml',
        'wizard/relatorio_projeto_tarefa.xml',
        'wizard/relatorio_projeto_orcamento.xml',
        'wizard/relatorio_projeto_orcamento_compras.xml',
        'wizard/relatorio_projeto_orcamento_medicao.xml',
        'wizard/relatorio_orcamento_prefeitura.xml',
        'wizard/relatorio_cronograma_desembolso.xml',
        'wizard/relatorio_cronograma_fisico_gantt.xml',
        'wizard/finan_relatorio_pagar.xml',
        'wizard/finan_relatorio_diario_fornecedor_rateio.xml',
        'wizard/finan_relatorio_diario_cliente_rateio.xml',
        'wizard/finan_pagar_sintetico_rateio_view.xml',
        'wizard/finan_relatorio_movimentacao.xml',
        'wizard/res_users_imovel_wizard.xml',
        'views/bi_view.xml',
        'views/finan_contrato_dashboard_view.xml',
    ],
    'init_xml': [],
    'installable': True,
    'application': True,
    'auto_install': False,
}
| [
"danimaribeiro@gmail.com"
] | danimaribeiro@gmail.com |
620b2eb456cd0bacb38145e96666ec13851dbbd2 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03393/s159130267.py | fab972600880229cd01c0c174bf16684d93f78dd | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 373 | py | S = input()
# AtCoder p03393 ("Diverse Word", per the file path): given a string S with
# pairwise-distinct letters, print the lexicographically smallest diverse
# string strictly larger than S.  S is read via input() above this block.
alp = 'abcdefghijklmnopqrstuvwxyz'
if len(S) < 26:
    # Fewer than 26 letters: appending the smallest unused letter yields the
    # immediate successor among diverse strings.
    for char in alp:
        if not char in S:
            print(S+char)
            exit()
else:
    # S is a permutation of all 26 letters, so no longer diverse string
    # exists.  Classic next-permutation step: find the rightmost ascent
    # S[i-1] < S[i]; the suffix S[i:] is then strictly decreasing.
    for i in range(1,26)[::-1]:
        if S[i-1] < S[i]:
            # Scanning the (decreasing) suffix from the right finds the
            # smallest letter greater than S[i-1]; the rest is truncated
            # because shorter strings sort first among diverse words.
            for j in range(i,26)[::-1]:
                if S[j] > S[i-1]:
                    print(S[:i-1]+S[j])
                    exit()
# (The print(-1) following this block covers the "no successor" case,
# reached only when S is the last permutation "zyx...a".)
print(-1) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
e982cd970fcc733ffec8bc5132aa59a74bc15347 | e3fd35a8443aaf2f293ae03a5f6c819046a4dd21 | /leetcode-python/medium/_875_koko_eating_bananas/solution.py | 8386f90d8a84f00fa8f75457e852eec954a432c3 | [] | no_license | hieutran106/leetcode-ht | 2223ea6bcd459c2cdbc33344c0ff69df7f8a3c7f | 8332eb20e613f82cda2e326218154c7803a32403 | refs/heads/main | 2023-08-09T02:52:41.360360 | 2023-07-27T10:12:28 | 2023-07-27T10:12:28 | 234,890,448 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 949 | py | from typing import List
import math
class Solution:
    def minEatingSpeed(self, piles: List[int], h: int) -> int:
        """Smallest integer eating speed that finishes every pile within h hours.

        Feasibility is monotone in the speed, so binary-search the range
        [1, max(piles)] instead of trying each candidate speed in turn.
        """
        lo, hi = 1, max(piles)
        while lo <= hi:
            candidate = (lo + hi) // 2
            if self.can_finish_banana_before_guard(candidate, piles, h):
                # Feasible: look to the left for a possibly slower speed.
                hi = candidate - 1
            else:
                # Too slow: Koko must eat faster.
                lo = candidate + 1
        return lo
    def can_finish_banana_before_guard(self, speed, piles, returning_hour):
        """True when all piles can be eaten at *speed* within *returning_hour* hours."""
        hours_needed = sum(math.ceil(pile / speed) for pile in piles)
        return hours_needed <= returning_hour
| [
"hieutran106@gmail.com"
] | hieutran106@gmail.com |
a868b212e954ecde9a6e2ddbb97f02b3e2926de7 | 7229c0e24c02839963a3ac744b8b03dffc1a1463 | /setup.py | ee1a52ace29003f083e5dda5f8a8ec9ae1bc4141 | [
"MIT"
] | permissive | bio2bel/hippie | a4589bc912df305852fa472ef29aebad22dd2a98 | 7b21ae56a372e9972153811be9e869f6614472ad | refs/heads/master | 2020-03-29T18:47:32.021576 | 2019-08-26T13:35:17 | 2019-08-26T13:35:17 | 150,231,675 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 134 | py | # -*- coding: utf-8 -*-
"""Setup module for Bio2BEL HIPPIE."""
import setuptools
if __name__ == '__main__':
setuptools.setup()
| [
"cthoyt@gmail.com"
] | cthoyt@gmail.com |
12f03eb4d7c67b7384a949715d2a876250691b6e | e3bdb7844f634efd89109079d22cade713c4899d | /openapi_client/models/soft_descriptor.py | 6e955d44a5031c57cec38929bb14019ee69a59d9 | [] | no_license | pc-coholic/Python | 5170c27da09b066c353e09539e404961f7ad50b7 | b7251c31339b579f71fb7ee9db05be51e9e43361 | refs/heads/master | 2023-04-19T02:42:02.914726 | 2021-04-26T16:07:37 | 2021-04-26T16:07:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,796 | py | # coding: utf-8
"""
Payment Gateway API Specification.
The documentation here is designed to provide all of the technical guidance required to consume and integrate with our APIs for payment processing. To learn more about our APIs please visit https://docs.firstdata.com/org/gateway. # noqa: E501
The version of the OpenAPI document: 21.2.0.20210406.001
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class SoftDescriptor(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Declared OpenAPI type of each model attribute.
    openapi_types = {
        'dynamic_merchant_name': 'str',
        'customer_service_number': 'str',
        'mcc': 'str',
        'dynamic_address': 'Address'
    }
    # Python attribute name -> JSON property name used on the wire.
    attribute_map = {
        'dynamic_merchant_name': 'dynamicMerchantName',
        'customer_service_number': 'customerServiceNumber',
        'mcc': 'mcc',
        'dynamic_address': 'dynamicAddress'
    }
    def __init__(self, dynamic_merchant_name=None, customer_service_number=None, mcc=None, dynamic_address=None): # noqa: E501
        """SoftDescriptor - a model defined in OpenAPI""" # noqa: E501
        self._dynamic_merchant_name = None
        self._customer_service_number = None
        self._mcc = None
        self._dynamic_address = None
        self.discriminator = None
        # dynamic_merchant_name is required: the property setter below raises
        # ValueError when it is None or does not match the expected pattern.
        self.dynamic_merchant_name = dynamic_merchant_name
        if customer_service_number is not None:
            self.customer_service_number = customer_service_number
        if mcc is not None:
            self.mcc = mcc
        if dynamic_address is not None:
            self.dynamic_address = dynamic_address
    @property
    def dynamic_merchant_name(self):
        """Gets the dynamic_merchant_name of this SoftDescriptor. # noqa: E501
        Store \"doing-business-as\" name. # noqa: E501
        :return: The dynamic_merchant_name of this SoftDescriptor. # noqa: E501
        :rtype: str
        """
        return self._dynamic_merchant_name
    @dynamic_merchant_name.setter
    def dynamic_merchant_name(self, dynamic_merchant_name):
        """Sets the dynamic_merchant_name of this SoftDescriptor.
        Store \"doing-business-as\" name. # noqa: E501
        :param dynamic_merchant_name: The dynamic_merchant_name of this SoftDescriptor. # noqa: E501
        :type: str
        """
        if dynamic_merchant_name is None:
            raise ValueError("Invalid value for `dynamic_merchant_name`, must not be `None`") # noqa: E501
        # Pattern rejects empty / whitespace-only names.
        if dynamic_merchant_name is not None and not re.search(r'^(?!\s*$).+', dynamic_merchant_name): # noqa: E501
            raise ValueError(r"Invalid value for `dynamic_merchant_name`, must be a follow pattern or equal to `/^(?!\s*$).+/`") # noqa: E501
        self._dynamic_merchant_name = dynamic_merchant_name
    @property
    def customer_service_number(self):
        """Gets the customer_service_number of this SoftDescriptor. # noqa: E501
        Customer service phone number information that is passed to the issuer (it may appear on the cardholder’s statement) or if merchant wants to pass information that differs from the information stored on our master File. # noqa: E501
        :return: The customer_service_number of this SoftDescriptor. # noqa: E501
        :rtype: str
        """
        return self._customer_service_number
    @customer_service_number.setter
    def customer_service_number(self, customer_service_number):
        """Sets the customer_service_number of this SoftDescriptor.
        Customer service phone number information that is passed to the issuer (it may appear on the cardholder’s statement) or if merchant wants to pass information that differs from the information stored on our master File. # noqa: E501
        :param customer_service_number: The customer_service_number of this SoftDescriptor. # noqa: E501
        :type: str
        """
        # At most 10 characters, digits only.
        if customer_service_number is not None and len(customer_service_number) > 10:
            raise ValueError("Invalid value for `customer_service_number`, length must be less than or equal to `10`") # noqa: E501
        if customer_service_number is not None and not re.search(r'^[0-9]+$', customer_service_number): # noqa: E501
            raise ValueError(r"Invalid value for `customer_service_number`, must be a follow pattern or equal to `/^[0-9]+$/`") # noqa: E501
        self._customer_service_number = customer_service_number
    @property
    def mcc(self):
        """Gets the mcc of this SoftDescriptor. # noqa: E501
        The 4-digit merchant category code (MCC). The merchant might be associated with multiple MCCs. In that case the MCC provided here will be the one that better describes the current transaction. # noqa: E501
        :return: The mcc of this SoftDescriptor. # noqa: E501
        :rtype: str
        """
        return self._mcc
    @mcc.setter
    def mcc(self, mcc):
        """Sets the mcc of this SoftDescriptor.
        The 4-digit merchant category code (MCC). The merchant might be associated with multiple MCCs. In that case the MCC provided here will be the one that better describes the current transaction. # noqa: E501
        :param mcc: The mcc of this SoftDescriptor. # noqa: E501
        :type: str
        """
        if mcc is not None and len(mcc) > 4:
            raise ValueError("Invalid value for `mcc`, length must be less than or equal to `4`") # noqa: E501
        self._mcc = mcc
    @property
    def dynamic_address(self):
        """Gets the dynamic_address of this SoftDescriptor. # noqa: E501
        :return: The dynamic_address of this SoftDescriptor. # noqa: E501
        :rtype: Address
        """
        return self._dynamic_address
    @dynamic_address.setter
    def dynamic_address(self, dynamic_address):
        """Sets the dynamic_address of this SoftDescriptor.
        :param dynamic_address: The dynamic_address of this SoftDescriptor. # noqa: E501
        :type: Address
        """
        self._dynamic_address = dynamic_address
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, SoftDescriptor):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"emargules@bluepay.com"
] | emargules@bluepay.com |
f7fcfd90159532e1747d9597ff8f8ac95b02b0b6 | 149df7dee4b00a65fd7edd143e6d5a8791b0f05f | /plotlib.py | 1589c324f8266ce95d327f744e20f95db8530943 | [] | no_license | PiotrDabkowski/Py3D | 4a65c4344884255996ea4c02cda1af8b25bc8f54 | cd383f47d22de28171be59690defe66e838d08cb | refs/heads/master | 2020-04-06T06:46:41.424810 | 2014-11-09T20:20:39 | 2014-11-09T20:20:39 | 26,407,756 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,531 | py | from PIL import Image
from PIL import ImageDraw
import guif
import math
class Plot:
    """2-D plotting surface drawn on an off-screen PIL image and displayed
    through a guif.Video widget.

    Drawing calls render onto the image; update() pushes the image to the
    widget.  Logical coordinates are mapped to pixels by __point_transform
    using `pos` (origin offset) and per-axis scales, with the y axis flipped
    so y grows upwards.
    """

    def __init__(self, size=(600,400), pos=(0,0), scale_x=1, scale_y=1, centre=True):
        self.__vid = guif.Video(size)
        self.__size = size
        self.__im = Image.new('RGBA', self.__size, 'white')
        self.__draw = ImageDraw.Draw(self.__im)
        self.d = self.__draw  # public handle for raw ImageDraw access
        self.__pos = pos
        if centre:
            # Put the logical origin at the middle of the canvas.
            self.__pos = -size[0]/2, -size[1]/2
        self.__scale_x = float(scale_x)
        self.__scale_y = float(scale_y)
        self.update()

    def update(self):
        """Push the current off-screen image to the video widget."""
        self.__vid.change_frame(self.__im)

    def clear(self):
        """Reset the canvas to blank white (call update() to display it)."""
        self.__im = Image.new('RGBA', self.__size, 'white')
        self.__draw = ImageDraw.Draw(self.__im)

    def line(self, p1, p2, color='black', thickness=1):
        """Draw a straight segment between logical points p1 and p2."""
        self.__draw.line(list(self.__point_transform(p1))+list(self.__point_transform(p2)), color, thickness)

    def polygon(self, points, color='black', fill=False, thickness=1):
        """Draw a closed polygon; filled when `fill`, outlined otherwise."""
        if fill: # Fill inside
            points = [self.__point_transform(p) for p in points]
            self.__draw.polygon(points, color)
        else:
            # Outline by drawing edges, closing the loop from the last point.
            last = points[-1]
            for point in points:
                self.line(last, point, color, thickness)
                last = point

    def circle(self, centre, radius, color='black', full=False, thickness=1):
        """Draw a circle; filled disc when `full`, a 36-segment outline otherwise."""
        (x, y), r = self.__point_transform(centre), radius
        if full:
            # NOTE(review): radius is used in pixel units — the axis scales are
            # not applied here (original code carried a "Fix scale!" remark).
            self.__draw.ellipse((x-r, y-r, x+r, y+r), color)
        else:
            n = 36
            step = 2*math.pi/n
            # range() instead of the Python-2-only xrange(); for n=36 the
            # behavior is identical on both Python 2 and 3.
            points = [(round(centre[0]+radius*math.cos(s*step)), round(centre[1]+radius*math.sin(s*step))) for s in range(n)]
            self.polygon(points, color, False, thickness)

    def graph(self, func, x_domain, y_domain):
        # Not implemented yet.
        pass

    def text(self, text, pos, color='black'):
        """Render `text` at logical position `pos` and refresh the display."""
        self.__draw.text(self.__point_transform(pos), text, color)
        # Bug fix: the original called self.__update(), which name-mangles to
        # _Plot__update and does not exist (AttributeError at runtime); the
        # refresh method is update().
        self.update()

    def change_view(self, left_bottom_corner, right_top_corner):
        """Fit the view so the given logical corners span the whole canvas."""
        sx, sy = self.__size
        self.__pos = left_bottom_corner
        self.__scale_x = abs(left_bottom_corner[0]-right_top_corner[0])/float(sx)
        self.__scale_y = abs(left_bottom_corner[1]-right_top_corner[1])/float(sy)

    def __point_transform(self, point):
        # Map a logical point to pixel coordinates (y axis flipped).
        #return point[0], self.__size[1]-point[1]
        return (point[0]-self.__pos[0])/self.__scale_x, (self.__size[1]-point[1]+self.__pos[1])/self.__scale_y
| [
"piodrus@gmail.com"
] | piodrus@gmail.com |
3d2e1e76aeff9fb853b71d131c8b95d2b0916654 | ef54d37f8a3303013ca7469871a320d303957ed7 | /robo4.2/4.2/lib/python2.7/site-packages/RoboGalaxyLibrary/keywords/native.py | 276bc2b15f011103f08073a5398ba2f918fd3560 | [] | no_license | richa92/Jenkin_Regression_Testing | d18badfcf16bda682dfe7bcbbd66f54a9a27a58d | 24a74926170cbdfafa47e972644e2fe5b627d8ff | refs/heads/master | 2020-07-12T10:01:59.099137 | 2019-08-27T12:14:53 | 2019-08-27T12:14:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 144 | py | import os
# Select the platform-specific implementation and expose it under the
# common name NativeOsKeywords.
if os.name == 'nt':
    # Windows.
    from ntnative import NativeOsKeywords
elif os.name == 'posix':
    # Linux / macOS / other Unix.
    from posixnative import NativeOsKeywords
# NOTE(review): on any other os.name nothing is imported and later references
# to NativeOsKeywords will raise NameError — confirm that is acceptable.
| [
"akul@SAC0MKUVCQ.asiapacific.hpqcorp.net"
] | akul@SAC0MKUVCQ.asiapacific.hpqcorp.net |
a6149cc5912615ead7a405dd66d41c846a8e853d | 38d1fda1533b1ee545e4f507a77e405114ca2a51 | /tests/__init__.py | c1af8ee0dad556b017986378cedbfeb13c12ca02 | [
"WTFPL"
] | permissive | ActivKonnect/castor | 6d5f13715a78fac0b503688da95afb0130cf3929 | a6837389c23eaba9c581a194689c30587820c9e8 | refs/heads/develop | 2020-12-24T15:22:29.845970 | 2016-03-04T14:12:41 | 2016-03-04T14:12:41 | 38,937,755 | 6 | 1 | null | 2016-03-04T14:14:30 | 2015-07-11T19:26:26 | Python | UTF-8 | Python | false | false | 159 | py | # vim: fileencoding=utf-8 tw=100 expandtab ts=4 sw=4 :
#
# Castor
# (c) 2015 ActivKonnect
# Rémy Sanchez <remy.sanchez@activkonnect.com>
from .repo import *
| [
"remy.sanchez@hyperthese.net"
] | remy.sanchez@hyperthese.net |
97bfe0b12d6ac454218f101c5824d8b48fc8d518 | 4331b28f22a2efb12d462ae2a8270a9f666b0df1 | /.history/dvdstore/webapp/urls_20190914174439.py | 3a5fcfde3376715beebb709d88261bd656548217 | [] | no_license | ZiyaadLakay/csc312.group.project | ba772a905e0841b17478eae7e14e43d8b078a95d | 9cdd9068b5e24980c59a53595a5d513c2e738a5e | refs/heads/master | 2020-07-26T23:30:22.542450 | 2019-09-16T11:46:41 | 2019-09-16T11:46:41 | 200,703,160 | 0 | 0 | null | 2019-08-05T17:52:37 | 2019-08-05T17:52:37 | null | UTF-8 | Python | false | false | 1,490 | py | from django.urls import path
from . import views
# URL routes for the DVD-store web app; all views live in webapp.views.
urlpatterns = [
    path('', views.home, name='home'),
    # Clerk dashboard and movie/customer registration forms.
    path('clerk/', views.clerk, name='clerk'),
    path('clerk/register2',views.register2, name='register2'),
    path('clerk/register3',views.register3, name='register3'),
    path('clerk/model_form_upload',views.model_form_upload, name='model_form_upload'),
    # NOTE(review): these three reuse the names register2/register3/
    # model_form_upload under a different prefix; reverse() resolves only the
    # last registration of each name — confirm this is intended.
    path('transactions/register2',views.register2, name='register2'),
    path('transactions/register3',views.register3, name='register3'),
    path('transactions/model_form_upload',views.model_form_upload, name='model_form_upload'),
    path('booking',views.booking, name='booking'),
    # DVD check-out / check-in handled from the clerk screen.
    path('clerk/checkout',views.checkout, name='checkout'),
    path('clerk/checkin',views.checkin, name='checkin'),
    path('transactions/', views.transactions, name='transactions'),
    path('userstbl/', views.userstbl, name='userstbl'),
    # Deletion endpoints.
    path('clerk/deleteMovie',views.deleteMovie, name='deleteMovie'),
    path('transactions/deleteTransaction',views.deleteTransaction, name='deleteTransaction'),
    path('userstbl/deleteUser',views.deleteUser, name='deleteUser'),
    path('user_detail/', views.user_detail, name='user_detail'),
    # Customer/user registration and profile updates.
    path('accounts/registerCustomer',views.registerCustomer, name='registerCustomer'),
    path('user_detail/updateCustomer',views.updateCustomer, name='updateCustomer'),
    path('user_detail/updateUser',views.updateUser, name='updateUser'),
]
| [
"uzairjoneswolf@gmail.com"
] | uzairjoneswolf@gmail.com |
2734fe61fbccae3ea2f76c179e664252b595fd4f | f1344fb85607e6371dcf67b5c22addf535059b00 | /tests/test_score.py | 98118abf550c85939defefad2e2c92a3eb017fcc | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | shunsunsun/aizynthfinder | d18bea55db48b6e29b61d167e5b5bafcc0e0b6db | 51864ef8a1dc3501d60d21ddb4d71d392f47b045 | refs/heads/master | 2023-04-10T07:28:11.204379 | 2021-04-21T12:10:42 | 2021-04-21T12:10:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,185 | py | import pytest
import numpy as np
from aizynthfinder.context.scoring import (
StateScorer,
NumberOfReactionsScorer,
AverageTemplateOccurenceScorer,
NumberOfPrecursorsScorer,
NumberOfPrecursorsInStockScorer,
PriceSumScorer,
RouteCostScorer,
ScorerCollection,
ScorerException,
)
from aizynthfinder.chem import Molecule, UniqueMolecule
from aizynthfinder.mcts.mcts import SearchTree
from aizynthfinder.analysis import ReactionTree
# --- StateScorer applied to single MCTS nodes and to reaction trees.
# Expected numeric values are empirically pinned outputs; treat them as the
# executable specification of the scorers.
def test_state_scorer_node(generate_root, default_config):
    root = generate_root("CCCCOc1ccc(CC(=O)N(C)O)cc1")
    scorer = StateScorer(default_config)
    assert repr(scorer) == "state score"
    assert round(scorer(root), 4) == 0.0491
def test_state_scorer_nodes(generate_root, default_config):
    root = generate_root("CCCCOc1ccc(CC(=O)N(C)O)cc1")
    scorer = StateScorer(default_config)
    scores = scorer([root, root])
    assert repr(scorer) == "state score"
    assert round(scores[0], 4) == 0.0491
    assert round(scores[1], 4) == 0.0491
def test_state_scorer_tree(load_reaction_tree, default_config, mock_stock):
    mock_stock(
        default_config, "N#Cc1cccc(N)c1F", "O=C(Cl)c1ccc(F)cc1", "CN1CCC(Cl)CC1", "O"
    )
    tree = ReactionTree.from_dict(load_reaction_tree("sample_reaction.json"))
    scorer = StateScorer(default_config)
    assert round(scorer(tree), 4) == 0.994
def test_state_scorer_trees(load_reaction_tree, default_config, mock_stock):
    mock_stock(
        default_config, "N#Cc1cccc(N)c1F", "O=C(Cl)c1ccc(F)cc1", "CN1CCC(Cl)CC1", "O"
    )
    tree = ReactionTree.from_dict(load_reaction_tree("sample_reaction.json"))
    scorer = StateScorer(default_config)
    scores = scorer([tree, tree])
    assert round(scores[0], 4) == 0.994
    assert round(scores[1], 4) == 0.994
def test_sort(shared_datadir, default_config, mock_stock):
    # sort() should return nodes ordered by descending score.
    mock_stock(default_config, "CCCO", "CC")
    search_tree = SearchTree.from_json(
        shared_datadir / "tree_without_repetition.json", default_config
    )
    nodes = list(search_tree.graph())
    scorer = StateScorer(default_config)
    sorted_nodes, scores, _ = scorer.sort(nodes)
    assert [np.round(score, 4) for score in scores] == [0.9976, 0.0491]
    assert sorted_nodes == [nodes[1], nodes[0]]
# --- Reaction-count and template-occurrence scorers.
def test_number_of_reaction_scorer_node(shared_datadir, default_config):
    search_tree = SearchTree.from_json(
        shared_datadir / "tree_without_repetition.json", default_config
    )
    nodes = list(search_tree.graph())
    scorer = NumberOfReactionsScorer()
    assert scorer(nodes[1]) == 1
def test_number_of_reaction_scorer_tree(load_reaction_tree):
    tree = ReactionTree.from_dict(load_reaction_tree("sample_reaction.json"))
    scorer = NumberOfReactionsScorer()
    assert scorer(tree) == 2
def test_template_occurence_scorer_no_metadata(shared_datadir, default_config):
    # Without "library_occurence" metadata the score defaults to 0.
    search_tree = SearchTree.from_json(
        shared_datadir / "tree_without_repetition.json", default_config
    )
    nodes = list(search_tree.graph())
    scorer = AverageTemplateOccurenceScorer()
    assert scorer(nodes[1]) == 0
def test_template_occurence_scorer(shared_datadir, default_config):
    search_tree = SearchTree.from_json(
        shared_datadir / "tree_without_repetition.json", default_config
    )
    nodes = list(search_tree.graph())
    nodes[0][nodes[1]]["action"].metadata["library_occurence"] = 5
    scorer = AverageTemplateOccurenceScorer()
    assert scorer(nodes[0]) == 0
    assert scorer(nodes[1]) == 5
def test_template_occurence_scorer_tree(load_reaction_tree):
    tree = ReactionTree.from_dict(load_reaction_tree("sample_reaction.json"))
    scorer = AverageTemplateOccurenceScorer()
    assert scorer(tree) == 0
def test_template_occurence_scorer_tree_one_node():
    # A route consisting of a single molecule has no reactions to average over.
    rt = ReactionTree()
    rt.root = Molecule(smiles="CCCCOc1ccc(CC(=O)N(C)O)cc1")
    rt.graph.add_node(rt.root)
    scorer = AverageTemplateOccurenceScorer()
    assert scorer(rt) == 0.0
# --- All scorers exercised on single nodes, branched MCTS trees and routes,
# both with an empty stock and with all precursors in stock.
def test_scorers_one_mcts_node(default_config):
    tree = SearchTree(default_config, root_smiles="CCCCOc1ccc(CC(=O)N(C)O)cc1")
    node = tree.root
    assert pytest.approx(StateScorer(default_config)(node), abs=1e-3) == 0.0497
    assert NumberOfReactionsScorer(default_config)(node) == 0
    assert NumberOfPrecursorsScorer(default_config)(node) == 1
    assert NumberOfPrecursorsInStockScorer(default_config)(node) == 0
    assert PriceSumScorer(default_config)(node) == 10
    assert RouteCostScorer(default_config)(node) == 10
def test_scoring_branched_mcts_tree(shared_datadir, default_config):
    search_tree = SearchTree.from_json(
        shared_datadir / "tree_with_branching.json", default_config
    )
    nodes = list(search_tree.graph())
    assert pytest.approx(StateScorer(default_config)(nodes[-1]), abs=1e-6) == 0.00012363
    assert NumberOfReactionsScorer()(nodes[-1]) == 14
    assert NumberOfPrecursorsScorer(default_config)(nodes[-1]) == 8
    assert NumberOfPrecursorsInStockScorer(default_config)(nodes[-1]) == 0
    assert PriceSumScorer(default_config)(nodes[-1]) == 80
    cost_score = RouteCostScorer(default_config)(nodes[-1])
    assert pytest.approx(cost_score, abs=1e-3) == 410.6577
def test_scoring_branch_mcts_tree_in_stock(shared_datadir, default_config, mock_stock):
    mock_stock(
        default_config,
        "CC(C)(C)CO",
        "CC(C)(C)OC(=O)N(CCCl)CCCl",
        "N#CCc1cccc(O)c1F",
        "O=[N+]([O-])c1ccccc1F",
        "O=C1CCC(=O)N1Br",
        "O=C=Nc1csc(C(F)(F)F)n1",
        "CCC[Sn](Cl)(CCC)CCC",
        "COc1ccc2ncsc2c1",
    )
    search_tree = SearchTree.from_json(
        shared_datadir / "tree_with_branching.json", default_config
    )
    nodes = list(search_tree.graph())
    assert pytest.approx(StateScorer(default_config)(nodes[-1]), abs=1e-3) == 0.950
    assert NumberOfReactionsScorer()(nodes[-1]) == 14
    assert NumberOfPrecursorsScorer(default_config)(nodes[-1]) == 8
    assert NumberOfPrecursorsInStockScorer(default_config)(nodes[-1]) == 8
    assert PriceSumScorer(default_config)(nodes[-1]) == 8
    cost_score = RouteCostScorer(default_config)(nodes[-1])
    assert pytest.approx(cost_score, abs=1e-3) == 77.4797
def test_scorers_tree_one_node_route(default_config):
    tree = ReactionTree()
    tree.root = UniqueMolecule(smiles="CCCCOc1ccc(CC(=O)N(C)O)cc1")
    tree.graph.add_node(tree.root)
    assert pytest.approx(StateScorer(default_config)(tree), abs=1e-3) == 0.0497
    assert NumberOfReactionsScorer(default_config)(tree) == 0
    assert NumberOfPrecursorsScorer(default_config)(tree) == 1
    assert NumberOfPrecursorsInStockScorer(default_config)(tree) == 0
    assert PriceSumScorer(default_config)(tree) == 10
    assert RouteCostScorer(default_config)(tree) == 10
def test_scoring_branched_route(load_reaction_tree, default_config):
    tree = ReactionTree.from_dict(load_reaction_tree("branched_route.json"))
    assert pytest.approx(StateScorer(default_config)(tree), abs=1e-6) == 0.00012363
    assert NumberOfReactionsScorer(default_config)(tree) == 14
    assert NumberOfPrecursorsScorer(default_config)(tree) == 8
    assert NumberOfPrecursorsInStockScorer(default_config)(tree) == 0
    assert PriceSumScorer(default_config)(tree) == 80
    cost_score = RouteCostScorer(default_config)(tree)
    assert pytest.approx(cost_score, abs=1e-3) == 410.6577
def test_scoring_branched_route_in_stock(
    load_reaction_tree, default_config, mock_stock
):
    mock_stock(
        default_config,
        "CC(C)(C)CO",
        "CC(C)(C)OC(=O)N(CCCl)CCCl",
        "N#CCc1cccc(O)c1F",
        "O=[N+]([O-])c1ccccc1F",
        "O=C1CCC(=O)N1Br",
        "O=C=Nc1csc(C(F)(F)F)n1",
        "CCC[Sn](Cl)(CCC)CCC",
        "COc1ccc2ncsc2c1",
    )
    tree = ReactionTree.from_dict(load_reaction_tree("branched_route.json"))
    assert pytest.approx(StateScorer(default_config)(tree), abs=1e-3) == 0.950
    assert NumberOfReactionsScorer(default_config)(tree) == 14
    assert NumberOfPrecursorsScorer(default_config)(tree) == 8
    assert NumberOfPrecursorsInStockScorer(default_config)(tree) == 8
    assert PriceSumScorer(default_config)(tree) == 8
    cost_score = RouteCostScorer(default_config)(tree)
    assert pytest.approx(cost_score, abs=1e-3) == 77.4797
# --- ScorerCollection management: lookup, add/remove and config-driven loading.
def test_create_scorer_collection(default_config):
    collection = ScorerCollection(default_config)
    assert len(collection) == 5
    assert "state score" in collection.names()
    assert "number of reactions" in collection.names()
    assert isinstance(collection["state score"], StateScorer)
    with pytest.raises(KeyError):
        collection["dummy"]
def test_delete_scorer_to_collection(default_config):
    collection = ScorerCollection(default_config)
    del collection["state score"]
    assert "state score" not in collection.names()
def test_add_scorer_to_collection(default_config):
    collection = ScorerCollection(default_config)
    del collection["state score"]
    collection.load(StateScorer(default_config))
    assert "state score" in collection.names()
def test_add_scorer_to_collection_no_scorer(default_config):
    collection = ScorerCollection(default_config)
    with pytest.raises(ScorerException):
        collection.load(Molecule(smiles="CCC"))
def test_load_scorer_to_collection_only_class(default_config):
    collection = ScorerCollection(default_config)
    del collection["state score"]
    collection.load_from_config(**{"StateScorer": {}})
    assert "state score" in collection.names()
def test_load_scorer_to_collection_full_package(default_config):
    collection = ScorerCollection(default_config)
    del collection["state score"]
    collection.load_from_config(**{"aizynthfinder.context.scoring.StateScorer": {}})
    assert "state score" in collection.names()
def test_load_scorer_to_collection_failures(default_config):
    collection = ScorerCollection(default_config)
    with pytest.raises(ScorerException, match=".*load module.*"):
        collection.load_from_config(**{"mypackage.scoring.StateScorer": {}})
    with pytest.raises(ScorerException, match=".*class.*"):
        collection.load_from_config(**{"aizynthfinder.context.scoring.NoScorer": {}})
| [
"samuel.genheden@gmail.com"
] | samuel.genheden@gmail.com |
c80e313bd12a3aacf520301551236756fa08a96a | 0f2e7d7c4323fb8607d5a1d709cb36f8de13078c | /0x02-python-import_modules/2-args.py | 60fb770e2d178d9b997f88c47f765de76722a66d | [] | no_license | nelsfichera/holbertonschool-higher_level_programming | ab031450f5ebc4401a255187fad82ad8b8fd9c8b | 4f3e2155746ad8e8c4cb56443c6750466d66d346 | refs/heads/main | 2023-08-18T21:31:59.425903 | 2021-09-22T16:52:32 | 2021-09-22T16:52:32 | 361,780,206 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 448 | py | #!/usr/bin/python3
if __name__ == "__main__":
import sys
n_args = len(sys.argv)
if n_args == 2:
print("1 argument:".format(n_args - 1))
print("1: {:s}".format(sys.argv[1]))
elif n_args > 2:
print("{:d} arguments".format(n_args - 1))
count = 1
while count < n_args:
print("{:d}: {:s}".format(count, sys.argv[count]))
count += 1
else:
print("0 arguments.")
| [
"nelsfichera@gmail.com"
] | nelsfichera@gmail.com |
9ee29ecd7bf3848994ee9db3b3493e2ba6053189 | 8b2435044491c4f1887bcce6fdd3989b2f55be88 | /meddet/data/datasets/base.py | 6fdee7f869ce73b45b3d54a5ab17ceb0fc970800 | [] | no_license | JoeeYF/MedDetection | ed169c481ff628a771966ba5e5290f799ac2323b | 8c183d8bf632fe6bf54841ac20db19955331f336 | refs/heads/main | 2023-06-12T00:26:13.537943 | 2021-07-06T02:32:42 | 2021-07-06T02:32:42 | 382,782,483 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,236 | py |
from abc import abstractmethod
from torch.utils.data import Dataset
import json
from ..pipelines import ForwardCompose, LoadPrepare
class BasicDataset(Dataset):
    """Base torch Dataset driven by a JSON manifest containing a 'pairs' list.

    Each entry of ``dataset['pairs']`` describes one sample; subclasses
    declare the expected manifest layout and validate it in
    ``check_dataset_format``.
    """
    # Number of cross-validation folds.
    N_FOLD = 5
    # Human-readable description of the expected manifest layout (subclasses set it).
    dataset_format = ""
    def __init__(self, dataset_file, image_prefix, pipeline, infer_mode=False, task=None, fold=None):
        """Load the JSON manifest and build the processing pipelines.

        :param dataset_file: path to the JSON manifest file
        :param image_prefix: directory prefix for the samples' image paths
        :param pipeline: list of transforms wrapped into a ForwardCompose
        :param infer_mode: True when used for inference rather than training
        :param task: optional task identifier, stored as ``_TASK``
        :param fold: optional fold index, stored as ``_FOLD``
        """
        self._TASK = task
        self._FOLD = fold
        # NOTE(review): the file object from open() is never explicitly closed.
        self.dataset = json.load(open(dataset_file))
        # Return value is ignored; presumably implementations raise on a bad
        # manifest rather than returning False — confirm in subclasses.
        self.check_dataset_format(self.dataset)
        self.dataset_file = dataset_file
        self.image_prefix = image_prefix
        self.infer_mode = infer_mode
        self.pairs = self.dataset['pairs']
        self.pre_pipeline = LoadPrepare()
        self.pipeline = ForwardCompose(pipeline)
    def __len__(self):
        # One sample per manifest pair.
        return len(self.pairs)
    @abstractmethod
    def check_dataset_format(self, dataset) -> bool:
        """Return True when *dataset* matches the expected manifest layout."""
        return False
    @abstractmethod
    def run_training_strategy(self):
        """Hook for subclass-specific training behaviour (e.g. fold handling)."""
        pass
def __repr__(self):
repr_str = self.__class__.__name__
repr_str += '(dataset_file={}, image_prefix={},infer_mode={})'.format(
self.dataset_file, self.image_prefix, self.infer_mode)
repr_str += '\nPipeline: \n{}'.format(self.pipeline)
return repr_str | [
"qiao_yuanfang@163.com"
] | qiao_yuanfang@163.com |
12150465282ffc30111b9f36e79907253eb9a7f4 | 11fa6e6506076faea2c2411143cc53ee3852a676 | /dl/pytorch/tutorial/1.2_data_loading/data_load.py | bf6a515b77d3c1c1c5eb176bf8d95ed5f51232a2 | [] | no_license | ZhangXinNan/LearnPractice | 5f0403ebe589018b7c2dd4f349228dd83ab5c60f | 992679f8697923712e42f8a5e68fbfedbeeda82d | refs/heads/master | 2023-08-04T11:46:51.673750 | 2023-07-22T06:37:50 | 2023-07-22T06:37:50 | 60,957,100 | 18 | 7 | null | 2022-11-02T08:11:56 | 2016-06-12T08:51:41 | Shell | UTF-8 | Python | false | false | 2,795 | py | from __future__ import print_function, division
import os
import torch
import pandas as pd
from skimage import io, transform
import numpy as np
import matplotlib.pyplot as plt
from torch.utils.data import Dataset, DataLoader
from torchvision import transforms, utils
# Ignore warnings
import warnings
warnings.filterwarnings("ignore")
plt.ion() # interactive mode
root_dir = 'd:/data_public/face/faces/'
csv_file = os.path.join(root_dir, 'face_landmarks.csv')
landmarks_frame = pd.read_csv(csv_file)
n = 65
img_name = landmarks_frame.iloc[n, 0]
landmarks = landmarks_frame.iloc[n, 1:].as_matrix()
landmarks = landmarks.astype('float').reshape(-1, 2)
print('Image name: {}'.format(img_name))
print('Landmarks shape: {}'.format(landmarks.shape))
print('First 4 Landmarks: {}'.format(landmarks[:4]))
def show_landmarks(image, landmarks):
"""Show image with landmarks"""
plt.imshow(image)
plt.scatter(landmarks[:, 0], landmarks[:, 1], s=10, marker='.', c='r')
plt.pause(0.001) # pause a bit so that plots are updated
plt.figure()
show_landmarks(io.imread(os.path.join(root_dir, img_name)),
landmarks)
plt.show()
class FaceLandmarksDataset(Dataset):
"""Face Landmarks dataset."""
def __init__(self, csv_file, root_dir, transform=None):
"""
Args:
csv_file (string): Path to the csv file with annotations.
root_dir (string): Directory with all the images.
transform (callable, optional): Optional transform to be applied
on a sample.
"""
self.landmarks_frame = pd.read_csv(csv_file)
self.root_dir = root_dir
self.transform = transform
def __len__(self):
return len(self.landmarks_frame)
def __getitem__(self, idx):
if torch.is_tensor(idx):
idx = idx.tolist()
img_name = os.path.join(self.root_dir,
self.landmarks_frame.iloc[idx, 0])
image = io.imread(img_name)
landmarks = self.landmarks_frame.iloc[idx, 1:]
landmarks = np.array([landmarks])
landmarks = landmarks.astype('float').reshape(-1, 2)
sample = {'image': image, 'landmarks': landmarks}
if self.transform:
sample = self.transform(sample)
return sample
face_dataset = FaceLandmarksDataset(csv_file=csv_file,
root_dir=root_dir)
fig = plt.figure()
for i in range(len(face_dataset)):
sample = face_dataset[i]
print(i, sample['image'].shape, sample['landmarks'].shape)
ax = plt.subplot(1, 4, i + 1)
plt.tight_layout()
ax.set_title('Sample #{}'.format(i))
ax.axis('off')
show_landmarks(**sample)
if i == 3:
plt.show()
plt.savefig('faces.png')
break
| [
"zhangxin19870504@163.com"
] | zhangxin19870504@163.com |
854658cf13f4ecfb539041685c704bf876be67dd | d60c06ce1cf676752c4d2331315a5fa4a18389b0 | /package/3xpath/05xpath解析基础.py | 088ae63f47c39b3bce44a03337c76af68d51a4b9 | [
"MIT"
] | permissive | HuangCongQing/Spider | f9bab6462ba7e2525a3297fceb0a0bc6f2b5e61a | f0204b8bc60ad4f78f1606a9e5c4c157094fea9b | refs/heads/master | 2023-07-05T23:19:00.272380 | 2023-07-04T12:29:27 | 2023-07-04T12:29:27 | 134,965,705 | 14 | 13 | null | null | null | null | UTF-8 | Python | false | false | 1,028 | py | '''
Description:
Author: HCQ
Company(School): UCAS
Email: 1756260160@qq.com
Date: 2021-01-01 13:56:16
LastEditTime: 2021-01-04 10:34:44
FilePath: /Spider/package/3xpath/05xpath解析基础.py
'''
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from lxml import etree
if __name__ == "__main__":
#实例化好了一个etree对象,且将被解析的源码加载到了该对象中
tree = etree.parse('test.html') # 在线网页 用 : etree.HTML(page_text)
r = tree.xpath('/html/body/div') # 3个Element对象 [<Element div at 0x7fcb3819b4c8>, <Element div at 0x7fcb3819b5c8>, <Element div at 0x7fcb3819b608>]
# r = tree.xpath('/html//div') # 等价于上面/html/body/div
# r = tree.xpath('//div') # # 等价于上面
# r = tree.xpath('//div[@class="song"]')
# r = tree.xpath('//div[@class="tang"]//li[5]/a/text()')[0]
r = tree.xpath('//li[7]//text()') # ['度蜜月']
# r = tree.xpath('//div[@class="tang"]//text()')
# r = tree.xpath('//div[@class="song"]/img/@src')
print(r)
| [
"1756260160@qq.com"
] | 1756260160@qq.com |
136345666dd80f0d00f9787130e307f038c2ef90 | eb61d62ca1f6f0123e3771105f5dfbbd6115138d | /.history/leccion_20210910222316.py | dc35f2abc2ad2289ec975569ef538dca51980b08 | [] | no_license | Alopezm5/CORRECTO-2 | e0f14bcc3a88c0e222d10e3261e68532008bc42e | 223613f1fb04dce3fac9f82f243cb2f22fe100f3 | refs/heads/main | 2023-07-29T06:52:48.147424 | 2021-09-12T20:33:27 | 2021-09-12T20:33:27 | 388,995,308 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 265 | py | # frase=input("ingresas frase")
# c=frase.read()
# if c>1 and c<=5:
# print(frase)
# else:
# print("ingrese bien la frase")
from datetime import date
class Calculos:
def ant(self,fecha):
hoy=date.today()
if hoy<fecha:
re | [
"85761855+Alopezm5@users.noreply.github.com"
] | 85761855+Alopezm5@users.noreply.github.com |
4011259abc726756075d513fe7b6d65e19716f62 | 1c27972511fcf83b8050f9412714e8c029296a38 | /timedisplay2/timedisplay2/wsgi.py | ee6e16e7a853335f135755348dd2effe40b2f57d | [] | no_license | enbaba/ninja_1 | 406ab7e8b115f08194b95b76be6aa80f5a49bb09 | 9c63c851da4847eb3e858983f53cbfd7121e81fa | refs/heads/master | 2022-12-05T14:33:38.336436 | 2020-08-29T00:48:09 | 2020-08-29T00:48:09 | 291,173,013 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | """
WSGI config for timedisplay2 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'timedisplay2.settings')
application = get_wsgi_application()
| [
"enbaba10@gmail.com"
] | enbaba10@gmail.com |
92a503fbafa9676d2472155a5bba906a947d0358 | a8b37bd399dd0bad27d3abd386ace85a6b70ef28 | /airbyte-ci/connectors/qa-engine/tests/test_inputs.py | 295dd8a484f90d3ff29746c323413a6e4167bcd2 | [
"LicenseRef-scancode-free-unknown",
"MIT",
"Elastic-2.0"
] | permissive | thomas-vl/airbyte | 5da2ba9d189ba0b202feb952cadfb550c5050871 | 258a8eb683634a9f9b7821c9a92d1b70c5389a10 | refs/heads/master | 2023-09-01T17:49:23.761569 | 2023-08-25T13:13:11 | 2023-08-25T13:13:11 | 327,604,451 | 1 | 0 | MIT | 2021-01-07T12:24:20 | 2021-01-07T12:24:19 | null | UTF-8 | Python | false | false | 6,629 | py | #
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from importlib.resources import files
from unittest.mock import MagicMock, call
import pandas as pd
import pytest
import requests
from qa_engine import constants, inputs
@pytest.mark.parametrize("catalog_url", [constants.OSS_CATALOG_URL, constants.CLOUD_CATALOG_URL])
def test_fetch_remote_catalog(catalog_url):
catalog = inputs.fetch_remote_catalog(catalog_url)
assert isinstance(catalog, pd.DataFrame)
expected_columns = ["connector_type", "connector_definition_id"]
assert all(expected_column in catalog.columns for expected_column in expected_columns)
assert set(catalog.connector_type.unique()) == {"source", "destination"}
def test_fetch_adoption_metrics_per_connector_version(mocker):
fake_bigquery_results = pd.DataFrame(
[
{
"connector_definition_id": "abcdefgh",
"connector_version": "0.0.0",
"number_of_connections": 10,
"number_of_users": 2,
"succeeded_syncs_count": 12,
"failed_syncs_count": 1,
"total_syncs_count": 3,
"sync_success_rate": 0.99,
"unexpected_column": "foobar",
}
]
)
mocker.patch.object(inputs.pd, "read_gbq", mocker.Mock(return_value=fake_bigquery_results))
expected_columns = {
"connector_definition_id",
"connector_version",
"number_of_connections",
"number_of_users",
"succeeded_syncs_count",
"failed_syncs_count",
"total_syncs_count",
"sync_success_rate",
}
expected_sql_query = files("qa_engine").joinpath("connector_adoption.sql").read_text()
expected_project_id = "airbyte-data-prod"
adoption_metrics_per_connector_version = inputs.fetch_adoption_metrics_per_connector_version()
assert isinstance(adoption_metrics_per_connector_version, pd.DataFrame)
assert set(adoption_metrics_per_connector_version.columns) == expected_columns
inputs.pd.read_gbq.assert_called_with(expected_sql_query, project_id=expected_project_id)
@pytest.mark.parametrize(
"connector_name, mocked_json_payload, mocked_status_code, expected_status",
[
(
"connectors/source-pokeapi",
[
{
"connector_version": "0.3.0",
"success": True,
"gha_workflow_run_url": "https://github.com/airbytehq/airbyte/actions/runs/5222619538",
"date": "2023-06-09T06:50:04",
},
{
"connector_version": "0.3.0",
"success": False,
"gha_workflow_run_url": "https://github.com/airbytehq/airbyte/actions/runs/5220000547",
"date": "2023-06-09T01:42:46",
},
],
200,
inputs.BUILD_STATUSES.SUCCESS,
),
(
"connectors/source-pokeapi",
[
{
"connector_version": "0.3.0",
"success": False,
"gha_workflow_run_url": "https://github.com/airbytehq/airbyte/actions/runs/5222619538",
"date": "2023-06-09T06:50:04",
},
{
"connector_version": "0.3.0",
"success": True,
"gha_workflow_run_url": "https://github.com/airbytehq/airbyte/actions/runs/5220000547",
"date": "2023-06-09T01:42:46",
},
],
200,
inputs.BUILD_STATUSES.FAILURE,
),
("connectors/source-pokeapi", None, 404, inputs.BUILD_STATUSES.NOT_FOUND),
(
"connectors/source-pokeapi",
[
{
"connector_version": "0.3.0",
"success": None,
"gha_workflow_run_url": "https://github.com/airbytehq/airbyte/actions/runs/5222619538",
"date": "2023-06-09T06:50:04",
}
],
200,
inputs.BUILD_STATUSES.NOT_FOUND,
),
("connectors/source-pokeapi", None, 404, inputs.BUILD_STATUSES.NOT_FOUND),
],
)
def test_fetch_latest_build_status_for_connector(mocker, connector_name, mocked_json_payload, mocked_status_code, expected_status):
# Mock the api call to get the latest build status for a connector version
mock_response = MagicMock()
mock_response.json.return_value = mocked_json_payload
mock_response.status_code = mocked_status_code
mock_get = mocker.patch.object(requests, "get", return_value=mock_response)
connector_name = connector_name.replace("connectors/", "")
assert inputs.fetch_latest_build_status_for_connector(connector_name) == expected_status
assert mock_get.call_args == call(f"{constants.CONNECTOR_TEST_SUMMARY_URL}/{connector_name}/index.json")
def test_fetch_latest_build_status_for_connector_invalid_status(mocker, caplog):
connector_name = "connectors/source-pokeapi"
mocked_json_payload = [
{
"connector_version": "0.3.0",
"success": "unknown_outcome_123",
"gha_workflow_run_url": "https://github.com/airbytehq/airbyte/actions/runs/5222619538",
"date": "2023-06-09T06:50:04",
},
{
"connector_version": "0.3.0",
"success": False,
"gha_workflow_run_url": "https://github.com/airbytehq/airbyte/actions/runs/5220000547",
"date": "2023-06-09T01:42:46",
},
{
"connector_version": "0.3.0",
"success": True,
"gha_workflow_run_url": "https://github.com/airbytehq/airbyte/actions/runs/5212578854",
"date": "2023-06-08T07:46:37",
},
{
"connector_version": "0.3.0",
"success": True,
"gha_workflow_run_url": "https://github.com/airbytehq/airbyte/actions/runs/5198665885",
"date": "2023-06-07T03:05:40",
},
]
# Mock the api call to get the latest build status for a connector version
mock_response = MagicMock()
mock_response.json.return_value = mocked_json_payload
mock_response.status_code = 200
mocker.patch.object(requests, "get", return_value=mock_response)
assert inputs.fetch_latest_build_status_for_connector(connector_name) == inputs.BUILD_STATUSES.NOT_FOUND
assert "Error: Unexpected build status value: unknown_outcome_123 for connector connectors/source-pokeapi" in caplog.text
| [
"noreply@github.com"
] | thomas-vl.noreply@github.com |
9cf14aedda368785b0835c31f60b9e5131cf77a1 | 1936f515b46c93face431709dcf485c8b7987d08 | /python/venv/bin/easy_install-3.6 | b45fd6eb73baf83cee4a4b7aae7ffa11e1ca9d23 | [] | no_license | NicolasLagaillardie/TremplinConnecte | 216be2c7078ff3b0ba5ea081da4aeaada5ef780c | dbd46445a13d48a6bc806d8e5e685279d5551b17 | refs/heads/master | 2020-04-27T01:08:38.293591 | 2019-03-06T15:26:48 | 2019-03-06T15:26:48 | 173,956,849 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 433 | 6 | #!/home/lag/.aws/python/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install-3.6'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install-3.6')()
)
| [
"lagaillardie.nicolas@live.fr"
] | lagaillardie.nicolas@live.fr |
c35f4a18d503e913aacbcd66ca915a96c3509d83 | 2a1b8a671aceda6bc446f8ce26400aa84fa444a6 | /Packs/RecordedFuture/Integrations/RecordedFuture/RecordedFuture_test.py | 8902dfb1bc7e36f52b8dfe68c7fbcf2e8e29073b | [
"MIT"
] | permissive | demisto/content | 6d4722d46f0ff0beea2748e9f7de585bf91a78b4 | 890def5a0e0ae8d6eaa538148249ddbc851dbb6b | refs/heads/master | 2023-09-04T00:02:25.618032 | 2023-09-03T21:56:22 | 2023-09-03T21:56:22 | 60,525,392 | 1,023 | 1,921 | MIT | 2023-09-14T20:55:24 | 2016-06-06T12:17:02 | Python | UTF-8 | Python | false | false | 53,631 | py | def create_client():
import os
from RecordedFuture import Client
base_url = 'https://api.recordedfuture.com/gw/xsoar/'
verify_ssl = True
token = os.environ.get('RF_TOKEN')
headers = {
'X-RFToken': token,
'X-RF-User-Agent': "RecordedFuture.py/2.4 (Linux-5.13.0-1031-aws-x86_64-with) "
"XSOAR/2.4 RFClient/2.4 (Cortex_XSOAR_6.5.0)",
}
return Client(base_url=base_url, verify=verify_ssl, headers=headers, proxy=False)
class TestHelpers:
def test_translate_score(self):
from RecordedFuture import translate_score
from CommonServerPython import Common
assert translate_score(score=10, threshold=0) == Common.DBotScore.BAD
assert translate_score(score=10, threshold=10) == Common.DBotScore.BAD
assert translate_score(score=10, threshold=11) == Common.DBotScore.NONE
assert translate_score(score=24, threshold=40) == Common.DBotScore.NONE
assert translate_score(score=25, threshold=40) == Common.DBotScore.SUSPICIOUS
assert translate_score(score=26, threshold=40) == Common.DBotScore.SUSPICIOUS
assert translate_score(score=40, threshold=40) == Common.DBotScore.BAD
assert translate_score(score=45, threshold=40) == Common.DBotScore.BAD
assert translate_score(score=10, threshold=-1) == Common.DBotScore.BAD
assert translate_score(score=10, threshold=0) == Common.DBotScore.BAD
assert translate_score(score=25, threshold=-1) == Common.DBotScore.BAD
assert translate_score(score=25, threshold=0) == Common.DBotScore.BAD
assert translate_score(score=26, threshold=-1) == Common.DBotScore.BAD
assert translate_score(score=26, threshold=0) == Common.DBotScore.BAD
def test_determine_hash(self):
from RecordedFuture import determine_hash
assert determine_hash(hash_value='s' * 128) == 'SHA512'
assert determine_hash(hash_value='s' * 64) == 'SHA256'
assert determine_hash(hash_value='s' * 40) == 'SHA1'
assert determine_hash(hash_value='s' * 32) == 'MD5'
assert determine_hash(hash_value='s' * 8) == 'CRC32'
assert determine_hash(hash_value='s' * 50) == 'CTPH'
assert determine_hash(hash_value='s' * 10) == 'CTPH'
assert determine_hash(hash_value='s') == 'CTPH'
def test_create_indicator_ip(self, mocker):
from RecordedFuture import create_indicator
from CommonServerPython import Common, DBotScoreType
mock_return_value = mocker.Mock()
mocker.patch('CommonServerPython.Common.IP', return_value=mock_return_value)
dbot_score_spy = mocker.spy(Common, 'DBotScore')
entity = '8.8.8.8'
entity_type = 'ip'
score = 45
description = 'test_description'
location = {'asn': 'test_asn', 'location': {'country': 'test_country'}}
result = create_indicator(
entity=entity,
entity_type=entity_type,
score=score,
description=description,
location=location,
)
assert result == mock_return_value
dbot_score_spy.assert_called_once_with(
entity,
DBotScoreType.IP,
'Recorded Future v2',
Common.DBotScore.SUSPICIOUS,
'',
# reliability=DBotScoreReliability.B
reliability=None,
)
mock_call = Common.IP.mock_calls[0]
assert mock_call.args[0] == entity
assert mock_call.args[1].indicator == entity
# mock_call.args[1] - is Common.IP, and we verify it by dbot_score_spy.
# We can't assert it with `==` as the Common.IP does not implement `__eq__` method.
assert mock_call.kwargs == {
'asn': 'test_asn',
'geo_country': 'test_country',
}
def test_create_indicator_domain(self, mocker):
from RecordedFuture import create_indicator
from CommonServerPython import Common, DBotScoreType
mock_return_value = mocker.Mock()
mocker.patch('CommonServerPython.Common.Domain', return_value=mock_return_value)
dbot_score_spy = mocker.spy(Common, 'DBotScore')
entity = 'google.com'
entity_type = 'domain'
score = 45
description = 'test_description'
result = create_indicator(
entity=entity,
entity_type=entity_type,
score=score,
description=description,
)
assert result == mock_return_value
dbot_score_spy.assert_called_once_with(
entity,
DBotScoreType.DOMAIN,
'Recorded Future v2',
Common.DBotScore.SUSPICIOUS,
'',
# reliability=DBotScoreReliability.B
reliability=None,
)
mock_call = Common.Domain.mock_calls[0]
assert mock_call.args[0] == entity
assert mock_call.args[1].indicator == entity
def test_create_indicator_url(self, mocker):
from RecordedFuture import create_indicator
from CommonServerPython import Common, DBotScoreType
mock_return_value = mocker.Mock()
mocker.patch('CommonServerPython.Common.URL', return_value=mock_return_value)
dbot_score_spy = mocker.spy(Common, 'DBotScore')
entity = 'https://google.com'
entity_type = 'url'
score = 45
description = 'test_description'
result = create_indicator(
entity=entity,
entity_type=entity_type,
score=score,
description=description,
)
assert result == mock_return_value
dbot_score_spy.assert_called_once_with(
entity,
DBotScoreType.URL,
'Recorded Future v2',
Common.DBotScore.SUSPICIOUS,
'',
# reliability=DBotScoreReliability.B
reliability=None,
)
mock_call = Common.URL.mock_calls[0]
assert mock_call.args[0] == entity
assert mock_call.args[1].indicator == entity
def test_create_indicator_cve(self, mocker):
from RecordedFuture import create_indicator
from CommonServerPython import Common
mock_return_value = mocker.Mock()
mocker.patch('CommonServerPython.Common.CVE', return_value=mock_return_value)
entity = 'CVE-123'
entity_type = 'cve'
score = 45
description = 'test_description'
result = create_indicator(
entity=entity,
entity_type=entity_type,
score=score,
description=description,
)
assert result == mock_return_value
mock_call = Common.CVE.mock_calls[0]
assert mock_call.args[0] == entity
assert mock_call.args[1] == ''
assert mock_call.args[2] == ''
assert mock_call.args[3] == ''
assert mock_call.args[4] == description
def test_create_indicator_file(self, mocker):
from RecordedFuture import create_indicator
from CommonServerPython import Common, DBotScoreType
mock_return_value = mocker.Mock()
mocker.patch('CommonServerPython.Common.File', return_value=mock_return_value)
dbot_score_spy = mocker.spy(Common, 'DBotScore')
entity_type = 'file'
score = 45
description = 'test_description'
# MD5.
entity = 's' * 32
result = create_indicator(
entity=entity,
entity_type=entity_type,
score=score,
description=description,
)
assert result == mock_return_value
dbot_score_spy.assert_called_once_with(
entity,
DBotScoreType.FILE,
'Recorded Future v2',
Common.DBotScore.SUSPICIOUS,
'',
# reliability=DBotScoreReliability.B
reliability=None,
)
mock_call = Common.File.mock_calls[0]
assert mock_call.args[0].indicator == entity
assert mock_call.kwargs.get('md5') == entity
# SHA1.
entity = 's' * 40
result = create_indicator(
entity=entity,
entity_type=entity_type,
score=score,
description=description,
)
assert result == mock_return_value
dbot_score_spy.assert_called_with(
entity,
DBotScoreType.FILE,
'Recorded Future v2',
Common.DBotScore.SUSPICIOUS,
'',
# reliability=DBotScoreReliability.B
reliability=None,
)
mock_call = Common.File.mock_calls[-1]
assert mock_call.args[0].indicator == entity
assert mock_call.kwargs.get('sha1') == entity
# SHA256.
entity = 's' * 64
result = create_indicator(
entity=entity,
entity_type=entity_type,
score=score,
description=description,
)
assert result == mock_return_value
dbot_score_spy.assert_called_with(
entity,
DBotScoreType.FILE,
'Recorded Future v2',
Common.DBotScore.SUSPICIOUS,
'',
# reliability=DBotScoreReliability.B
reliability=None,
)
mock_call = Common.File.mock_calls[-1]
assert mock_call.args[0].indicator == entity
assert mock_call.kwargs.get('sha256') == entity
# SHA512.
entity = 's' * 128
result = create_indicator(
entity=entity,
entity_type=entity_type,
score=score,
description=description,
)
assert result == mock_return_value
dbot_score_spy.assert_called_with(
entity,
DBotScoreType.FILE,
'Recorded Future v2',
Common.DBotScore.SUSPICIOUS,
'',
# reliability=DBotScoreReliability.B
reliability=None,
)
mock_call = Common.File.mock_calls[-1]
assert mock_call.args[0].indicator == entity
assert mock_call.kwargs.get('sha512') == entity
# CRC32.
entity = 's' * 20 # Length different from any previous hashes.
result = create_indicator(
entity=entity,
entity_type=entity_type,
score=score,
description=description,
)
assert result == mock_return_value
dbot_score_spy.assert_called_with(
entity,
DBotScoreType.FILE,
'Recorded Future v2',
Common.DBotScore.SUSPICIOUS,
'',
# reliability=DBotScoreReliability.B
reliability=None,
)
mock_call = Common.File.mock_calls[-1]
assert mock_call.args[0].indicator == entity
assert mock_call.kwargs == {}
class TestRFClient:
def test_whoami(self, mocker):
client = create_client()
mock_http_request = mocker.patch.object(client, '_http_request')
client.whoami()
mock_http_request.assert_called_once_with(
method='get',
url_suffix='info/whoami',
timeout=60,
)
def test_get_writeback_data_writeback_off(self, mocker):
"""
Test _get_writeback_data with writeback turned OFF.
"""
import demistomock as demisto
client = create_client()
mocker.patch.object(demisto, 'params', return_value={'writeback': False})
assert client._get_writeback_data() is None
def test_get_writeback_data_writeback_on(self, mocker):
"""
Test _get_writeback_data with writeback turned ON.
"""
import demistomock as demisto
client = create_client()
mocker.patch.object(
demisto, 'params', return_value={'collective_insights': 'On'}
)
demisto.callingContext = {
'context': {'ExecutionContext': 'to be removed', 'Incidents': []}
}
assert client._get_writeback_data() == {
'context': {'Incidents': []}
}
#
def test_call_writeback_on(self, mocker):
"""
Test _call() with writeback turned ON.
"""
import os
import demistomock as demisto
STATUS_TO_RETRY = [500, 501, 502, 503, 504]
# This is needed for CommonServerPython module to not add demisto.params() into callingContext.
os.environ['COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS'] = 'True'
# Mock demisto command and args.
mock_command_name = 'command_name'
mock_command_args = {'arg1': 'arg1_value', 'arg2': 'arg2_value'}
mocker.patch.object(demisto, 'command', return_value=mock_command_name)
mocker.patch.object(demisto, 'args', return_value=mock_command_args)
# Mock data for writeback.
mocker.patch.object(
demisto,
'params',
return_value={
'collective_insights': "On",
},
)
mock_calling_context = {
'context': {'ExecutionContext': 'to be removed', 'Incidents': []},
'other': 'data',
}
demisto.callingContext = mock_calling_context
client = create_client()
mock_http_request = mocker.patch.object(client, '_http_request')
mock_url_suffix = 'mock_url_suffix'
client._call(url_suffix=mock_url_suffix)
json_data = {
'demisto_command': mock_command_name,
'demisto_args': mock_command_args,
'callingContext': {
'context': {'Incidents': []},
},
}
mock_http_request.assert_called_once_with(
method='post',
url_suffix=mock_url_suffix,
json_data=json_data,
timeout=90,
retries=3,
status_list_to_retry=STATUS_TO_RETRY,
)
def test_call_writeback_off(self, mocker):
"""
Test _call() with writeback turned OFF.
"""
import os
import demistomock as demisto
STATUS_TO_RETRY = [500, 501, 502, 503, 504]
# This is needed for CommonServerPython module to not add demisto.params() into callingContext.
os.environ['COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS'] = 'True'
# Mock demisto command and args.
mock_command_name = 'command_name'
mock_command_args = {'arg1': 'arg1_value', 'arg2': 'arg2_value'}
mocker.patch.object(demisto, 'command', return_value=mock_command_name)
mocker.patch.object(demisto, 'args', return_value=mock_command_args)
# Mock data for writeback.
mocker.patch.object(
demisto,
'params',
return_value={
'collective_insights': "Off",
},
)
mock_calling_context = {
'context': {'ExecutionContext': 'to be removed', 'other': 'data'},
'other': 'data',
}
demisto.callingContext = mock_calling_context
client = create_client()
mock_http_request = mocker.patch.object(client, '_http_request')
mock_url_suffix = 'mock_url_suffix'
client._call(url_suffix=mock_url_suffix)
json_data = {
'demisto_command': mock_command_name,
'demisto_args': mock_command_args,
}
mock_http_request.assert_called_once_with(
method='post',
url_suffix=mock_url_suffix,
json_data=json_data,
timeout=90,
retries=3,
status_list_to_retry=STATUS_TO_RETRY,
)
def test_call_with_kwargs(self, mocker):
"""
Test _call() with kwargs.
"""
import os
import demistomock as demisto
STATUS_TO_RETRY = [500, 501, 502, 503, 504]
# This is needed for CommonServerPython module to not add demisto.params() into callingContext.
os.environ['COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS'] = 'True'
# Mock demisto command and args.
mock_command_name = 'command_name'
mock_command_args = {'arg1': 'arg1_value', 'arg2': 'arg2_value'}
mocker.patch.object(demisto, 'command', return_value=mock_command_name)
mocker.patch.object(demisto, 'args', return_value=mock_command_args)
client = create_client()
mock_http_request = mocker.patch.object(client, '_http_request')
mock_url_suffix = 'mock_url_suffix'
client._call(url_suffix=mock_url_suffix, timeout=120, any_other_kwarg=True)
json_data = {
'demisto_command': mock_command_name,
'demisto_args': mock_command_args,
}
mock_http_request.assert_called_once_with(
method='post',
url_suffix=mock_url_suffix,
json_data=json_data,
timeout=120,
retries=3,
status_list_to_retry=STATUS_TO_RETRY,
any_other_kwarg=True,
)
def test_call_returns_response(self, mocker):
"""
Test _call() returns response.
"""
import os
import demistomock as demisto
# This is needed for CommonServerPython module to not add demisto.params() into callingContext.
os.environ['COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS'] = 'True'
# Mock demisto command and args.
mock_command_name = 'command_name'
mock_command_args = {'arg1': 'arg1_value', 'arg2': 'arg2_value'}
mocker.patch.object(demisto, 'command', return_value=mock_command_name)
mocker.patch.object(demisto, 'args', return_value=mock_command_args)
client = create_client()
mock_response = {'response': {'data': 'mock data'}}
mocker.patch.object(client, '_http_request', return_value=mock_response)
mock_url_suffix = 'mock_url_suffix'
response = client._call(url_suffix=mock_url_suffix)
assert response == mock_response
def test_call_response_processing_return_error(self, mocker):
"""
Test _call() return_error response processing.
"""
import os
import demistomock as demisto
STATUS_TO_RETRY = [500, 501, 502, 503, 504]
# This is needed for CommonServerPython module to not add demisto.params() into callingContext.
os.environ['COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS'] = 'True'
# Mock demisto command and args.
mock_command_name = 'command_name'
mock_command_args = {'arg1': 'arg1_value', 'arg2': 'arg2_value'}
mocker.patch.object(demisto, 'command', return_value=mock_command_name)
mocker.patch.object(demisto, 'args', return_value=mock_command_args)
mock_return_error = mocker.patch('RecordedFuture.return_error')
client = create_client()
mock_http_request = mocker.patch.object(
client,
'_http_request',
return_value={'return_error': {'message': 'mock error'}},
)
mock_url_suffix = 'mock_url_suffix'
client._call(url_suffix=mock_url_suffix)
json_data = {
'demisto_command': mock_command_name,
'demisto_args': mock_command_args,
}
mock_http_request.assert_called_once_with(
method='post',
url_suffix=mock_url_suffix,
json_data=json_data,
timeout=90,
retries=3,
status_list_to_retry=STATUS_TO_RETRY,
)
mock_return_error.assert_called_once_with(message='mock error')
def test_call_response_processing_404(self, mocker):
"""
Test _call() response processing.
"""
import os
import demistomock as demisto
from CommonServerPython import DemistoException, CommandResults
STATUS_TO_RETRY = [500, 501, 502, 503, 504]
# This is needed for CommonServerPython module to not add demisto.params() into callingContext.
os.environ['COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS'] = 'True'
# Mock demisto command and args.
mock_command_name = 'command_name'
mock_command_args = {'arg1': 'arg1_value', 'arg2': 'arg2_value'}
mocker.patch.object(demisto, 'command', return_value=mock_command_name)
mocker.patch.object(demisto, 'args', return_value=mock_command_args)
mocker.patch('RecordedFuture.return_error')
client = create_client()
def mock_http_request_method(*args, **kwargs):
# Imitate how CommonServerPython handles bad responses (when status code not in ok_codes,
# or if ok_codes=None - it uses requests.Response.ok to check whether response is good).
raise DemistoException('404')
mocker.patch.object(client, '_http_request', mock_http_request_method)
spy_http_request = mocker.spy(client, '_http_request')
mock_url_suffix = 'mock_url_suffix'
result = client._call(url_suffix=mock_url_suffix)
json_data = {
'demisto_command': mock_command_name,
'demisto_args': mock_command_args,
}
spy_http_request.assert_called_once_with(
method='post',
url_suffix=mock_url_suffix,
json_data=json_data,
timeout=90,
retries=3,
status_list_to_retry=STATUS_TO_RETRY,
)
assert isinstance(result, CommandResults)
assert result.outputs_prefix == ''
assert result.outputs_key_field == ''
assert result.outputs == {}
assert result.raw_response == {}
assert result.readable_output == 'No results found.'
def test_fetch_incidents(self, mocker):
import os
import demistomock as demisto
# This is needed for CommonServerPython module to not add demisto.params() into callingContext.
os.environ['COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS'] = 'True'
# Mock demisto command and args.
mock_command_name = 'command_name'
mock_command_args = {'arg1': 'arg1_value', 'arg2': 'arg2_value'}
mock_params = {'param1': 'param1 value'}
mocker.patch.object(demisto, 'command', return_value=mock_command_name)
mocker.patch.object(demisto, 'args', return_value=mock_command_args)
mocker.patch.object(demisto, 'params', return_value=mock_params)
mock_last_run_dict = {"lastRun": "2022-08-31T12:12:20+00:00"}
mocker.patch.object(demisto, 'getLastRun', return_value=mock_last_run_dict)
client = create_client()
mock_call_response = {'response': {'data': 'mock response'}}
mock_call = mocker.patch.object(
client, '_call', return_value=mock_call_response
)
response = client.fetch_incidents()
mock_call.assert_called_once_with(
json_data={
'demisto_command': mock_command_name,
'demisto_args': mock_command_args,
'demisto_last_run': mock_last_run_dict,
'demisto_params': mock_params,
},
timeout=120,
url_suffix='/v2/alert/fetch_incidents',
)
assert response == mock_call_response
def test_entity_search(self, mocker):
import os
import demistomock as demisto
# This is needed for CommonServerPython module to not add demisto.params() into callingContext.
os.environ['COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS'] = 'True'
# Mock demisto command and args.
mock_command_name = 'command_name'
mock_command_args = {'arg1': 'arg1_value', 'arg2': 'arg2_value'}
mocker.patch.object(demisto, 'command', return_value=mock_command_name)
mocker.patch.object(demisto, 'args', return_value=mock_command_args)
client = create_client()
mock_call_response = {'response': {'data': 'mock response'}}
mock_call = mocker.patch.object(
client, '_call', return_value=mock_call_response
)
response = client.entity_search()
mock_call.assert_called_once_with(url_suffix='/v2/search')
assert response == mock_call_response
def test_get_intelligence(self, mocker):
import os
import demistomock as demisto
# This is needed for CommonServerPython module to not add demisto.params() into callingContext.
os.environ['COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS'] = 'True'
# Mock demisto command and args.
mock_command_name = 'command_name'
mock_command_args = {'arg1': 'arg1_value', 'arg2': 'arg2_value'}
mocker.patch.object(demisto, 'command', return_value=mock_command_name)
mocker.patch.object(demisto, 'args', return_value=mock_command_args)
client = create_client()
mock_call_response = {'response': {'data': 'mock response'}}
mock_call = mocker.patch.object(
client, '_call', return_value=mock_call_response
)
response = client.get_intelligence()
mock_call.assert_called_once_with(url_suffix='/v2/lookup/intelligence')
assert response == mock_call_response
def test_get_links(self, mocker):
import os
import demistomock as demisto
# This is needed for CommonServerPython module to not add demisto.params() into callingContext.
os.environ['COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS'] = 'True'
# Mock demisto command and args.
mock_command_name = 'command_name'
mock_command_args = {'arg1': 'arg1_value', 'arg2': 'arg2_value'}
mocker.patch.object(demisto, 'command', return_value=mock_command_name)
mocker.patch.object(demisto, 'args', return_value=mock_command_args)
client = create_client()
mock_call_response = {'response': {'data': 'mock response'}}
mock_call = mocker.patch.object(
client, '_call', return_value=mock_call_response
)
response = client.get_links()
mock_call.assert_called_once_with(url_suffix='/v2/lookup/links')
assert response == mock_call_response
def test_get_single_alert(self, mocker):
import os
import demistomock as demisto
# This is needed for CommonServerPython module to not add demisto.params() into callingContext.
os.environ['COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS'] = 'True'
# Mock demisto command and args.
mock_command_name = 'command_name'
mock_command_args = {'arg1': 'arg1_value', 'arg2': 'arg2_value'}
mocker.patch.object(demisto, 'command', return_value=mock_command_name)
mocker.patch.object(demisto, 'args', return_value=mock_command_args)
client = create_client()
mock_call_response = {'response': {'data': 'mock response'}}
mock_call = mocker.patch.object(
client, '_call', return_value=mock_call_response
)
response = client.get_single_alert()
mock_call.assert_called_once_with(url_suffix='/v2/alert/lookup')
assert response == mock_call_response
def test_get_alerts(self, mocker):
import os
import demistomock as demisto
# This is needed for CommonServerPython module to not add demisto.params() into callingContext.
os.environ['COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS'] = 'True'
# Mock demisto command and args.
mock_command_name = 'command_name'
mock_command_args = {'arg1': 'arg1_value', 'arg2': 'arg2_value'}
mocker.patch.object(demisto, 'command', return_value=mock_command_name)
mocker.patch.object(demisto, 'args', return_value=mock_command_args)
client = create_client()
mock_call_response = {'response': {'data': 'mock response'}}
mock_call = mocker.patch.object(
client, '_call', return_value=mock_call_response
)
response = client.get_alerts()
mock_call.assert_called_once_with(url_suffix='/v2/alert/search')
assert response == mock_call_response
def test_get_alert_rules(self, mocker):
import os
import demistomock as demisto
# This is needed for CommonServerPython module to not add demisto.params() into callingContext.
os.environ['COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS'] = 'True'
# Mock demisto command and args.
mock_command_name = 'command_name'
mock_command_args = {'arg1': 'arg1_value', 'arg2': 'arg2_value'}
mocker.patch.object(demisto, 'command', return_value=mock_command_name)
mocker.patch.object(demisto, 'args', return_value=mock_command_args)
client = create_client()
mock_call_response = {'response': {'data': 'mock response'}}
mock_call = mocker.patch.object(
client, '_call', return_value=mock_call_response
)
response = client.get_alert_rules()
mock_call.assert_called_once_with(url_suffix='/v2/alert/rule')
assert response == mock_call_response
def test_alert_set_status(self, mocker):
import os
import demistomock as demisto
# This is needed for CommonServerPython module to not add demisto.params() into callingContext.
os.environ['COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS'] = 'True'
# Mock demisto command and args.
mock_command_name = 'command_name'
mock_command_args = {'arg1': 'arg1_value', 'arg2': 'arg2_value'}
mocker.patch.object(demisto, 'command', return_value=mock_command_name)
mocker.patch.object(demisto, 'args', return_value=mock_command_args)
client = create_client()
mock_call_response = {'response': {'data': 'mock response'}}
mock_call = mocker.patch.object(
client, '_call', return_value=mock_call_response
)
alert_data = {'mock': 'data'}
response = client.alert_set_status(alert_data)
mock_call.assert_called_once_with(
url_suffix='/v2/alert/set_status',
json_data={
'demisto_command': mock_command_name,
'demisto_args': mock_command_args,
'alerts_update_data': alert_data,
},
)
assert response == mock_call_response
response = client.alert_set_status()
mock_call.assert_called_with(
url_suffix='/v2/alert/set_status',
json_data={
'demisto_command': mock_command_name,
'demisto_args': mock_command_args,
'alerts_update_data': None,
},
)
assert response == mock_call_response
def test_alert_set_note(self, mocker):
import os
import demistomock as demisto
# This is needed for CommonServerPython module to not add demisto.params() into callingContext.
os.environ['COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS'] = 'True'
# Mock demisto command and args.
mock_command_name = 'command_name'
mock_command_args = {'arg1': 'arg1_value', 'arg2': 'arg2_value'}
mocker.patch.object(demisto, 'command', return_value=mock_command_name)
mocker.patch.object(demisto, 'args', return_value=mock_command_args)
client = create_client()
mock_call_response = {'response': {'data': 'mock response'}}
mock_call = mocker.patch.object(
client, '_call', return_value=mock_call_response
)
alert_data = {'mock': 'data'}
response = client.alert_set_note(alert_data)
mock_call.assert_called_once_with(
url_suffix='/v2/alert/set_note',
json_data={
'demisto_command': mock_command_name,
'demisto_args': mock_command_args,
'alerts_update_data': alert_data,
},
)
assert response == mock_call_response
response = client.alert_set_note()
mock_call.assert_called_with(
url_suffix='/v2/alert/set_note',
json_data={
'demisto_command': mock_command_name,
'demisto_args': mock_command_args,
'alerts_update_data': None,
},
)
assert response == mock_call_response
def test_get_triage(self, mocker):
import os
import demistomock as demisto
# This is needed for CommonServerPython module to not add demisto.params() into callingContext.
os.environ['COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS'] = 'True'
# Mock demisto command and args.
mock_command_name = 'command_name'
mock_command_args = {'arg1': 'arg1_value', 'arg2': 'arg2_value'}
mocker.patch.object(demisto, 'command', return_value=mock_command_name)
mocker.patch.object(demisto, 'args', return_value=mock_command_args)
client = create_client()
mock_call_response = {'response': {'data': 'mock response'}}
mock_call = mocker.patch.object(
client, '_call', return_value=mock_call_response
)
response = client.get_triage()
mock_call.assert_called_once_with(url_suffix='/v2/lookup/triage')
assert response == mock_call_response
class TestActions:
def test_init(self, mocker):
from RecordedFuture import Actions
mock_client = mocker.Mock()
actions = Actions(mock_client)
assert actions.client == mock_client
def test_process_result_actions_404(self, mocker):
from RecordedFuture import Actions
from CommonServerPython import CommandResults
mock_client = mocker.Mock()
actions = Actions(mock_client)
# Test if response is CommandResults
# (case when we got 404 on response, and it was processed in self.client._call() method).
response = CommandResults(readable_output='Mock')
result_actions = actions._process_result_actions(response=response)
assert result_actions == [response]
def test_process_result_actions_response_is_not_dict(self, mocker):
from RecordedFuture import Actions
mock_client = mocker.Mock()
actions = Actions(mock_client)
# Test if response is not CommandResults and not Dict.
response = 'Mock string - not CommandResults and not dict'
result_actions = actions._process_result_actions(response=response) # type: ignore
assert result_actions is None
def test_process_result_actions_no_or_empty_result_actions_in_response(
self, mocker
):
from RecordedFuture import Actions
mock_client = mocker.Mock()
actions = Actions(mock_client)
# Test no results_actions in response.
response = {'data': 'mock'}
result_actions = actions._process_result_actions(response=response)
assert result_actions is None
# Test case when bool(results_actions) in response is False.
response = {'data': 'mock', 'result_actions': None}
result_actions = actions._process_result_actions(response=response)
assert result_actions is None
response = {'data': 'mock', 'result_actions': []}
result_actions = actions._process_result_actions(response=response)
assert result_actions is None
response = {'data': 'mock', 'result_actions': {}}
result_actions = actions._process_result_actions(response=response)
assert result_actions is None
def test_process_result_actions_command_results_only(self, mocker):
from RecordedFuture import Actions, CommandResults
mock_client = mocker.Mock()
actions = Actions(mock_client)
response = {
'data': 'mock',
'result_actions': [
{
'CommandResults': {
'outputs_prefix': 'mock_outputs_prefix',
'outputs': 'mock_outputs',
'raw_response': 'mock_raw_response',
'readable_output': 'mock_readable_output',
'outputs_key_field': 'mock_outputs_key_field',
},
}
],
}
result_actions = actions._process_result_actions(response=response)
assert len(result_actions) == 1
r_a = result_actions[0]
assert isinstance(r_a, CommandResults)
assert r_a.outputs_prefix == 'mock_outputs_prefix'
assert r_a.outputs == 'mock_outputs'
assert r_a.raw_response == 'mock_raw_response'
assert r_a.readable_output == 'mock_readable_output'
assert r_a.outputs_key_field == 'mock_outputs_key_field'
def test_process_result_actions_create_indicator_and_default_command_results(
self, mocker
):
import RecordedFuture
spy_create_indicator = mocker.spy(
RecordedFuture,
'create_indicator',
)
mock_client = mocker.Mock()
actions = RecordedFuture.Actions(mock_client)
response = {
'data': 'mock',
'result_actions': [
{
'create_indicator': {
'entity': 'mock_entity',
'entity_type': 'ip',
'score': 15,
'description': 'mock_description',
'location': {'country': 'mock_country', 'ans': 'mock_asn'},
},
}
],
}
result_actions = actions._process_result_actions(response=response)
spy_create_indicator.assert_called_once_with(
entity='mock_entity',
entity_type='ip',
score=15,
description='mock_description',
location={'country': 'mock_country', 'ans': 'mock_asn'},
)
assert len(result_actions) == 1
r_a = result_actions[0]
assert isinstance(r_a, RecordedFuture.CommandResults)
assert r_a.readable_output == (
'### New indicator was created.\n'
'|DBotScore(val.Indicator && val.Indicator == obj.Indicator && val.Vendor == '
'obj.Vendor && val.Type == obj.Type)|IP(val.Address && val.Address == '
'obj.Address)|\n'
'|---|---|\n'
'| Indicator: mock_entity<br>Type: ip<br>Vendor: Recorded Future v2<br>Score: '
'0 | Address: mock_entity |\n'
)
def test_process_result_actions_create_indicator_and_command_results(self, mocker):
import RecordedFuture
spy_create_indicator = mocker.spy(
RecordedFuture,
'create_indicator',
)
mock_client = mocker.Mock()
actions = RecordedFuture.Actions(mock_client)
response = {
'data': 'mock',
'result_actions': [
{
'create_indicator': {
'entity': 'mock_entity',
'entity_type': 'ip',
'score': 15,
'description': 'mock_indicator_description',
},
'CommandResults': {
'outputs_prefix': 'mock_outputs_prefix',
'outputs': 'mock_outputs',
'raw_response': 'mock_raw_response',
'readable_output': 'mock_readable_output',
'outputs_key_field': 'mock_outputs_key_field',
'indicator': 'indicator',
},
}
],
}
result_actions = actions._process_result_actions(response=response)
spy_create_indicator.assert_called_once_with(
entity='mock_entity',
entity_type='ip',
score=15,
description='mock_indicator_description',
)
assert len(result_actions) == 1
r_a = result_actions[0]
assert isinstance(r_a, RecordedFuture.CommandResults)
assert r_a.outputs_prefix == 'mock_outputs_prefix'
assert r_a.outputs == 'mock_outputs'
assert r_a.raw_response == 'mock_raw_response'
assert r_a.readable_output == 'mock_readable_output'
assert r_a.outputs_key_field == 'mock_outputs_key_field'
assert r_a.indicator.to_context() == {
'DBotScore(val.Indicator && val.Indicator == obj.Indicator && val.Vendor == obj.Vendor && val.Type == obj.Type)': {
'Indicator': 'mock_entity',
'Score': 0,
'Type': 'ip',
'Vendor': 'Recorded Future v2',
},
'IP(val.Address && val.Address == obj.Address)': {'Address': 'mock_entity'},
}
def test_fetch_incidents_with_incidents_present(self, mocker):
from RecordedFuture import Actions
import demistomock as demisto
client = create_client()
mock_incidents_value = [
{'mock_incident_key1': 'mock_incident_value1'},
{'mock_incident_key2': 'mock_incident_value2'},
]
mock_demisto_last_run_value = 'mock_demisto_last_run'
mock_alerts_update_data_value = 'mock_alerts_update_data_value'
mock_client_fetch_incidents_response = {
'incidents': mock_incidents_value,
'demisto_last_run': mock_demisto_last_run_value,
'data': 'mock',
'alerts_update_data': mock_alerts_update_data_value,
}
mock_client_fetch_incidents = mocker.patch.object(
client, 'fetch_incidents', return_value=mock_client_fetch_incidents_response
)
mock_client_alert_set_status = mocker.patch.object(
client,
'alert_set_status',
)
mock_demisto_incidents = mocker.patch.object(demisto, 'incidents')
mock_demisto_set_last_run = mocker.patch.object(demisto, 'setLastRun')
actions = Actions(client)
actions.fetch_incidents()
mock_client_fetch_incidents.assert_called_once_with()
mock_demisto_incidents.assert_called_once_with(mock_incidents_value)
mock_demisto_set_last_run.assert_called_once_with(mock_demisto_last_run_value)
# Verify that we update alert status.
mock_client_alert_set_status.assert_called_once_with(
mock_alerts_update_data_value
)
def test_malware_search_command(self, mocker):
from RecordedFuture import Actions
client = create_client()
mock_response = 'mock_response'
mock_client_entity_search = mocker.patch.object(
client, 'entity_search', return_value=mock_response
)
actions = Actions(client)
mock_process_result_actions_return_value = (
'mock_process_result_actions_return_value'
)
mock_process_result_actions = mocker.patch.object(
actions,
'_process_result_actions',
return_value=mock_process_result_actions_return_value,
)
result = actions.malware_search_command()
mock_client_entity_search.assert_called_once_with()
mock_process_result_actions.assert_called_once_with(response=mock_response)
assert result == mock_process_result_actions_return_value
def test_lookup_command(self, mocker):
from RecordedFuture import Actions
client = create_client()
mock_response = 'mock_response'
mock_client_entity_lookup = mocker.patch.object(
client, 'entity_lookup', return_value=mock_response
)
actions = Actions(client)
mock_process_result_actions_return_value = (
'mock_process_result_actions_return_value'
)
mock_process_result_actions = mocker.patch.object(
actions,
'_process_result_actions',
return_value=mock_process_result_actions_return_value,
)
result = actions.lookup_command()
mock_client_entity_lookup.assert_called_once_with()
mock_process_result_actions.assert_called_once_with(response=mock_response)
assert result == mock_process_result_actions_return_value
def test_intelligence_command(self, mocker):
from RecordedFuture import Actions
client = create_client()
mock_response = 'mock_response'
mock_client_get_intelligence = mocker.patch.object(
client, 'get_intelligence', return_value=mock_response
)
actions = Actions(client)
mock_process_result_actions_return_value = (
'mock_process_result_actions_return_value'
)
mock_process_result_actions = mocker.patch.object(
actions,
'_process_result_actions',
return_value=mock_process_result_actions_return_value,
)
result = actions.intelligence_command()
mock_client_get_intelligence.assert_called_once_with()
mock_process_result_actions.assert_called_once_with(response=mock_response)
assert result == mock_process_result_actions_return_value
def test_get_links_command(self, mocker):
from RecordedFuture import Actions
client = create_client()
mock_response = 'mock_response'
mock_client_get_links = mocker.patch.object(
client, 'get_links', return_value=mock_response
)
actions = Actions(client)
mock_process_result_actions_return_value = (
'mock_process_result_actions_return_value'
)
mock_process_result_actions = mocker.patch.object(
actions,
'_process_result_actions',
return_value=mock_process_result_actions_return_value,
)
result = actions.get_links_command()
mock_client_get_links.assert_called_once_with()
mock_process_result_actions.assert_called_once_with(response=mock_response)
assert result == mock_process_result_actions_return_value
def test_get_single_alert_command_with_result_actions(self, mocker):
from RecordedFuture import Actions
client = create_client()
mock_response = 'mock_response'
mock_client_get_single_alert = mocker.patch.object(
client, 'get_single_alert', return_value=mock_response
)
actions = Actions(client)
mock_process_result_actions_return_value = (
'mock_process_result_actions_return_value'
)
mock_process_result_actions = mocker.patch.object(
actions,
'_process_result_actions',
return_value=mock_process_result_actions_return_value,
)
result = actions.get_single_alert_command()
mock_client_get_single_alert.assert_called_once_with()
mock_process_result_actions.assert_called_once_with(response=mock_response)
# As there are some result actions - return those result actions.
assert result == mock_process_result_actions_return_value
def test_get_single_alert_command_without_result_actions(self, mocker):
from RecordedFuture import Actions
client = create_client()
mock_response = 'mock_response'
mock_client_get_single_alert = mocker.patch.object(
client, 'get_single_alert', return_value=mock_response
)
actions = Actions(client)
mock_process_result_actions_return_value = None
mock_process_result_actions = mocker.patch.object(
actions,
'_process_result_actions',
return_value=mock_process_result_actions_return_value,
)
result = actions.get_single_alert_command()
mock_client_get_single_alert.assert_called_once_with()
mock_process_result_actions.assert_called_once_with(response=mock_response)
# As there is no result actions - just return response.
assert result == mock_response
def test_get_alerts_command(self, mocker):
from RecordedFuture import Actions
client = create_client()
mock_response = 'mock_response'
mock_client_get_alerts = mocker.patch.object(
client, 'get_alerts', return_value=mock_response
)
actions = Actions(client)
result = actions.get_alerts_command()
mock_client_get_alerts.assert_called_once_with()
assert result == mock_response
def test_get_alert_rules_command(self, mocker):
from RecordedFuture import Actions
client = create_client()
mock_response = 'mock_response'
mock_client_get_alert_rules = mocker.patch.object(
client, 'get_alert_rules', return_value=mock_response
)
actions = Actions(client)
result = actions.get_alert_rules_command()
mock_client_get_alert_rules.assert_called_once_with()
assert result == mock_response
def test_alert_set_status_command(self, mocker):
from RecordedFuture import Actions
client = create_client()
mock_response = 'mock_response'
mock_client_alert_set_status = mocker.patch.object(
client, 'alert_set_status', return_value=mock_response
)
actions = Actions(client)
mock_process_result_actions_return_value = (
'mock_process_result_actions_return_value'
)
mock_process_result_actions = mocker.patch.object(
actions,
'_process_result_actions',
return_value=mock_process_result_actions_return_value,
)
result = actions.alert_set_status_command()
mock_client_alert_set_status.assert_called_once_with()
mock_process_result_actions.assert_called_once_with(response=mock_response)
assert result == mock_process_result_actions_return_value
def test_alert_set_note_command(self, mocker):
from RecordedFuture import Actions
client = create_client()
mock_response = 'mock_response'
mock_client_alert_set_note = mocker.patch.object(
client, 'alert_set_note', return_value=mock_response
)
actions = Actions(client)
mock_process_result_actions_return_value = (
'mock_process_result_actions_return_value'
)
mock_process_result_actions = mocker.patch.object(
actions,
'_process_result_actions',
return_value=mock_process_result_actions_return_value,
)
result = actions.alert_set_note_command()
mock_client_alert_set_note.assert_called_once_with()
mock_process_result_actions.assert_called_once_with(response=mock_response)
assert result == mock_process_result_actions_return_value
def test_triage_command(self, mocker):
from RecordedFuture import Actions
client = create_client()
mock_response = 'mock_response'
mock_client_get_triage = mocker.patch.object(
client, 'get_triage', return_value=mock_response
)
actions = Actions(client)
mock_process_result_actions_return_value = (
'mock_process_result_actions_return_value'
)
mock_process_result_actions = mocker.patch.object(
actions,
'_process_result_actions',
return_value=mock_process_result_actions_return_value,
)
result = actions.triage_command()
mock_client_get_triage.assert_called_once_with()
mock_process_result_actions.assert_called_once_with(response=mock_response)
assert result == mock_process_result_actions_return_value
def test_test_module(self, mocker):
import RecordedFuture
import demistomock as demisto
import platform
mocker.patch.object(demisto, "command", return_value="test-module")
mocker.patch.object(
demisto, "demistoVersion", return_value={"version": "mock_version"}
)
mocker.patch.object(
demisto, "params", return_value={"token": {"password": "mocktoken"}}
)
mocker.patch.object(platform, "platform", return_value="mock_platform")
mocker.patch.object(RecordedFuture.Client, "whoami")
mocked_return_res = mocker.patch.object(RecordedFuture, "return_results")
RecordedFuture.main()
mocked_return_res.assert_called_with('ok')
def test_test_module_with_boom(self, mocker):
import RecordedFuture
import demistomock as demisto
import platform
mocker.patch.object(demisto, "command", return_value="test-module")
mocker.patch.object(
demisto, "demistoVersion", return_value={"version": "mock_version"}
)
mocker.patch.object(
demisto, "params", return_value={"token": {"password": "mocktoken"}}
)
mocker.patch.object(platform, "platform", return_value="mock_platform")
mock_whoami = mocker.patch.object(RecordedFuture.Client, "whoami")
mock_whoami.side_effect = Exception("Side effect triggered")
mocked_return_err = mocker.patch.object(RecordedFuture, "return_error")
RecordedFuture.main()
mocked_return_err.assert_called_with(
message=(
f'Failed to execute {demisto.command()} command: Failed due to - '
'Unknown error. Please verify that the API URL and Token are correctly configured. '
'RAW Error: Side effect triggered'
)
)
| [
"noreply@github.com"
] | demisto.noreply@github.com |
55f96f25d8159f07b2d0ec344980cf26cb305796 | e9e3f7c7a8c5e3029232327fb129967226e70c7c | /configatron/nodes/comment.py | 6ea96bafdea326f6eca5ceedda529b51283128f1 | [] | no_license | vtemian/configatron | 6ae8804d485597c732f6e8dbbb0b49156352677d | 863e73e983157bcf54e7fc2331831496ce5ba8d3 | refs/heads/master | 2023-02-02T09:05:58.361463 | 2020-12-09T18:05:44 | 2020-12-09T18:05:44 | 319,440,715 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | import re
from .base import Node
class Comment(Node):
"""
starts with
new line
or
any number spaces
followed by `;` and any character
end
"""
REGEX = re.compile("^(\n)|(\s*(;.*)?)$")
| [
"vladtemian@gmail.com"
] | vladtemian@gmail.com |
09cfc90b2f7570592ff9034e0952e3030cd5ae53 | 9e66d474c0bf3be77fe5c327d5e501f632154fa4 | /src/marketplace/forms.py | 11b179b2bd0066faa0881b5eef3f50226ee9b2ef | [] | no_license | rcmiskin10/university-social-network | 9064c7e952c18e445f3e32592eeedf0a0215b963 | d73d61e46b96561521a35777be363d2276617fc0 | refs/heads/master | 2021-01-17T13:04:49.069656 | 2017-01-11T03:22:23 | 2017-01-11T03:22:23 | 60,044,659 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,100 | py | from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from .models import Product
class ProductForm(forms.ModelForm):
def __init__(self, data=None, files=None, **kwargs):
super(ProductForm, self).__init__(data, files, kwargs)
self.helper = FormHelper()
self.helper.form_show_labels = True
self.helper.add_input(Submit('submit', 'Add Product', css_class='btn btn-primary'))
title = forms.CharField(label='', required=False,
widget=forms.TextInput( attrs={'placeholder': 'Title: 50 character limit.'} ))
description = forms.CharField(label='', required=False,
widget=forms.TextInput( attrs={'placeholder': 'Description: 200 character limit.'} ))
price = forms.CharField(label='', required=True,
widget=forms.TextInput( attrs={'placeholder': 'Price'} ))
image = forms.ImageField(required=False)
class Meta:
model = Product
fields = ['image', 'category', 'title', 'description', 'price'] | [
"rcmiskin@gmail.com"
] | rcmiskin@gmail.com |
f99da47b22a56aad16ca6fff173571222c2fd023 | 81d2815060bdf51e59f40366df72954ad28b2398 | /3rd_hw/blog/views.py | 10d159a9fa4b366b317ec75d748ca1377c8d86d6 | [] | no_license | ningpop/LikeLion_7th_HW | 6016604427e335250f2e3daeec27f17731612b47 | b2c65a0b7a9a928a45cf07b67cd9ed18fb86d799 | refs/heads/master | 2020-06-30T18:08:54.024617 | 2019-12-30T16:17:03 | 2019-12-30T16:17:03 | 200,902,655 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 937 | py | from django.shortcuts import render, get_object_or_404, redirect
from .models import Blog
from django.utils import timezone
from django.core.paginator import Paginator
# Create your views here.
def home(request):
blogs = Blog.objects
blog_list=Blog.objects.all()
paginator = Paginator(blog_list,3)
page = request.GET.get('page')
posts = paginator.get_page(page)
return render(request,'home.html',{'blogs':blogs,'posts':posts})
def detail(request, blog_id):
blog_detail = get_object_or_404(Blog, pk=blog_id)
return render(request, 'detail.html', {'blog':blog_detail})
def new(request):
return render(request, 'new.html')
def create(request):
blog = Blog()
blog.title = request.GET['title']
blog.body = request.GET['body']
blog.pub_date = timezone.datetime.now()
blog.save()
return redirect('/' + str(blog.id))
def notice(request):
return render(request, 'notice.html') | [
"craft1933@naver.com"
] | craft1933@naver.com |
7afd0e247bdfbc9411cf0e82498b396382fe3f4c | 959b410bf72bef851f9367ae9be42f654d7b0c94 | /setup.py | 52331ce330c06e458f72c28b9dc736b15908e384 | [] | no_license | boberstarosta/Watermark | 742c9e1233e7e2cd33cd6f28adb8a750c5bf9d5c | 56849afe85da11fdd604bcc35392f7a0498f4d26 | refs/heads/master | 2021-07-01T04:30:37.387931 | 2017-09-17T21:23:47 | 2017-09-17T21:23:47 | 103,862,429 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 663 | py | from distutils.core import setup
#This is a list of files to install, and where
#(relative to the 'root' dir, where setup.py is)
#You could be more specific.
files = ["data/*"]
setup(
name = "watermark",
version = "1.0",
description = "Adding watermarks to images",
author = "Bober",
#Name the folder where your packages live:
#(If you have other packages (dirs) or modules (py files) then
#put them into the package directory - they will be found
#recursively.)
packages = ['watermark'],
package_data = {"watermark" : files },
scripts = ["run.py", "run.pyw"],
long_description = """Really long text here."""
)
| [
"boberstarosta@gmail.com"
] | boberstarosta@gmail.com |
97ac98b56d768380cf9b9c1ede253568585d7513 | 67d8173a716da10a7350213d98938aae9f2115ce | /ProgrammingCourses/CS61A/project/scheme/tests/11.py | dbc9107332c77b54238276f874348b76991872ab | [] | no_license | jxie0755/Learning_Python | 94490d41bdf93acf8396f843328e38b6da310b0f | 143422321cbc3715ca08f6c3af8f960a55887ced | refs/heads/master | 2021-11-02T22:47:35.790239 | 2021-09-26T04:26:23 | 2021-09-26T04:26:23 | 101,445,132 | 0 | 2 | null | 2019-02-19T15:48:44 | 2017-08-25T22:00:16 | Python | UTF-8 | Python | false | false | 2,690 | py | test = {
"name": "Problem 11",
"points": 2,
"suites": [
{
"cases": [
{
"code": r"""
>>> frame = global_frame.make_child_frame(Pair("a", Pair("b", Pair("c", nil))), Pair(1, Pair(2, Pair(3, nil))))
>>> global_frame.lookup("a") # Type SchemeError if you think this errors
SchemeError
>>> frame.lookup("a") # Type SchemeError if you think this errors
1
>>> frame.lookup("b") # Type SchemeError if you think this errors
2
>>> frame.lookup("c") # Type SchemeError if you think this errors
3
""",
"hidden": False,
"locked": False
},
{
"code": r"""
>>> frame = global_frame.make_child_frame(nil, nil)
>>> frame.parent is global_frame
True
""",
"hidden": False,
"locked": False
},
{
"code": r"""
>>> first = Frame(global_frame)
>>> second = first.make_child_frame(nil, nil)
>>> second.parent is first
True
""",
"hidden": False,
"locked": False
}
],
"scored": True,
"setup": r"""
>>> from scheme import *
>>> global_frame = create_global_frame()
""",
"teardown": "",
"type": "doctest"
},
{
"cases": [
{
"code": r"""
>>> # More argument values than formal parameters
>>> global_frame.make_child_frame(Pair("a", nil), Pair(1, Pair(2, Pair(3, nil))))
SchemeError
""",
"hidden": False,
"locked": False
},
{
"code": r"""
>>> # More formal parameters than argument values
>>> global_frame.make_child_frame(Pair("a", Pair("b", Pair("c", nil))), Pair(1, nil))
SchemeError
""",
"hidden": False,
"locked": False
},
{
"code": r"""
>>> # Values can be pairs.
>>> frame = global_frame.make_child_frame(Pair("a", Pair("b", nil)), Pair(Pair(1, nil), Pair(Pair(2, nil), nil)))
>>> frame.lookup("a")
Pair(1, nil)
>>> frame.lookup("b")
Pair(2, nil)
>>> frame2 = frame.make_child_frame(nil, nil) # Bind parents correctly
>>> frame2.lookup("a")
Pair(1, nil)
""",
"hidden": False,
"locked": False
}
],
"scored": True,
"setup": r"""
>>> from scheme import *
>>> global_frame = create_global_frame()
""",
"teardown": "",
"type": "doctest"
}
]
}
| [
"30805062+jxie0755@users.noreply.github.com"
] | 30805062+jxie0755@users.noreply.github.com |
9f48c60e618932f8fb6ccf0625f40cbc02f27339 | 990f13d54c09ebc217c5fcc509dab2f01f020d16 | /setup.py | 8ea30fe5ee3a5490078354b68d0ee2059bf02f16 | [] | no_license | jamesblunt/python-tclip | 5ade53b7185907e72257f8e9f6bea9fb1e71b836 | 301ca16adeb56ad791cf3b6827e8f2d4f4eecf9b | refs/heads/master | 2021-01-15T08:35:40.462261 | 2013-12-07T11:06:35 | 2013-12-07T11:06:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 282 | py | from setuptools import setup
setup(
name="python-tclip",
author="Jiangge Zhang",
author_email="tonyseek@gmail.com",
version="0.1.0",
zip_safe=False,
url="https://github.com/tonyseek/python-tclip",
py_modules=["tclip"],
install_requires=["cffi"],
)
| [
"tonyseek@gmail.com"
] | tonyseek@gmail.com |
666d936289216c750dad4e20c1392402bebc9ad9 | 4553d9a87fa134976b1622179dd2077f0f8142a0 | /apps/photos/models.py | cb1f3508bc4041860102d1f87737ee4832bc9f9d | [] | no_license | wd5/mamochkam.com | 27f07cd692ad46049ae5b8c0cd7425448fc62b66 | d554f2890e5dbc42b7124aef108938bb73481898 | refs/heads/master | 2021-01-23T21:03:17.772465 | 2011-06-05T21:56:05 | 2011-06-05T21:56:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,413 | py | # -*- coding: utf-8 -*-
from datetime import datetime
from PIL import Image
from django.contrib.auth.models import User
from django.conf import settings
from django.db import models
from mamochkam.apps.common.models import Entity
from mamochkam.apps.search.models import Tag
from mamochkam.apps.utils import images
#GALLERY MODEL
class Gallery(models.Model):
    """A named album that groups photos (see Photo.gallery below)."""
    title = models.CharField(max_length=50)
    description = models.CharField(max_length=255)
    # NOTE(review): plain CharField rather than SlugField, and nothing visible
    # here populates it — presumably filled in manually via the admin.
    slug = models.CharField(max_length=50)
    #STRING REPRESENTATION
    def __unicode__(self):
        # Python-2-era display name (this codebase predates __str__ usage).
        return self.title
    '''
    #ADMIN
    class Admin:
        prepopulated_fields = {'slug': ('title',)}
    '''
    #META
    class Meta:
        db_table = 'gallery'
        verbose_name = u'Галлерея'
        verbose_name_plural = u'Галлереи'
#COMMENTS MODEL
class PhotoComment(models.Model):
    """A single user comment; attached to photos through Photo.comments (M2M)."""
    user = models.ForeignKey(User)
    # Callable default: evaluated at save time, naive local time.
    pub_date = models.DateTimeField(default=datetime.now)
    text = models.CharField(max_length=255)
    #STRING REPRESENTATION
    def __unicode__(self):
        return self.text
    #META
    class Meta:
        ordering = ['pub_date',]  # oldest comments first
        db_table = 'photo_comment'
        verbose_name = u'Комментарий'
        verbose_name_plural = u'Комментарии'
#MAIN PHOTO MODEL
class Photo(models.Model, Entity):
    """A user-uploaded image belonging to a Gallery, with tags and comments.

    On save the original file is resized in place and a 100x100 thumbnail is
    generated next to it (see apps.utils.images).
    """
    user = models.ForeignKey(User, related_name='photos')
    pub_date = models.DateTimeField(default=datetime.now)
    gallery = models.ForeignKey(Gallery)
    photo = models.ImageField(upload_to='upload/photos')
    title = models.CharField(max_length=50)
    publish = models.BooleanField('Publish on site', default=False)
    comments = models.ManyToManyField(PhotoComment, blank=True)
    tags = models.ManyToManyField(Tag, related_name='photos', db_table='photo_tag', blank=True)
    #STRING REPRESENTATION
    def __unicode__(self):
        return self.title
    # Save the photo, then resize it in place and create the thumbnail.
    def save(self, *args, **kwargs):
        # Fix: accept and forward Django's save() arguments (force_insert,
        # update_fields, using, ...) — the original `def save(self)` signature
        # raised TypeError whenever callers passed any of them.
        super(Photo, self).save(*args, **kwargs)
        images.resize(self.photo.path)
        images.generate_thumb(self.photo.path, (100, 100))
    # URL of the generated thumbnail (original URL + '_thumb' suffix).
    def thumb_url(self):
        try:
            return self.photo.url + '_thumb'
        except KeyError:
            # NOTE(review): modern Django raises ValueError when no file is
            # attached; KeyError matches very old ImageField behaviour — verify.
            return ''
    #META
    class Meta:
        db_table = 'photo'
        verbose_name = u'Изображение'
        verbose_name_plural = u'Изображения'
| [
"nide@inbox.ru"
] | nide@inbox.ru |
e3f1939c28d697c0ca181b82b6da1242428ed978 | 3f4210b6a092c8a7cc43820e8d79f495018d4e68 | /starter_code/config.py | 1942d945533b930b9677779fccd1ec2f9034480b | [] | no_license | yopi1838/fyyur_yopiprabowo | a19e35722b9f2a29b44630fb8dd17972d210c824 | 557c0628417dd7be752f6e28afc8a5f510a4e1b6 | refs/heads/master | 2022-08-03T11:48:27.175178 | 2020-06-02T15:07:19 | 2020-06-02T15:07:19 | 268,832,300 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 343 | py | import os
# NOTE(review): a fresh random key on every boot invalidates signed
# cookies/sessions across restarts — fine for dev, not for production.
SECRET_KEY = os.urandom(32)
# Grabs the folder where the script runs.
basedir = os.path.abspath(os.path.dirname(__file__))
# Enable debug mode.
DEBUG = True  # never leave enabled in production
# Connect to the database
# TODO IMPLEMENT DATABASE URL
SQLALCHEMY_DATABASE_URI = 'postgres://yopiprabowooktiovan@localhost:5432/fyyur'
SQLALCHEMY_TRACK_MODIFICATIONS=False | [
"yopi1838@gmail.com"
] | yopi1838@gmail.com |
5ee277f18cbfac9784e9029f3e68f1925cf426b2 | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/222/users/4079/codes/1668_1396.py | 74195d25358994a69e2ee3eed0ec42b47ccd773c | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 160 | py | conta_restaurante=float(input("valor))
# NOTE(review): this student submission does not parse — annotated as-is.
# The input line above has an unterminated string literal, so nothing below
# ever runs and 'gorjeta' is never assigned.
if (gorjeta<=300):
 print(gorjeta-round(gorjeta*0.10))
else:
 gorjeta(gorjeta-(gorjeta*0.06)
print(conta_restaurante,2) | [
"jvlo@icomp.ufam.edu.br"
] | jvlo@icomp.ufam.edu.br |
cc5f92d4059fe216023b354f347ebbcf61357fe2 | 1b5f653955779f45e78ca6dda925518779d09e8f | /submissions/2165.py | 6419ea658a8b704bd0f3fe499264457aa356c15a | [] | no_license | LeonardoSaid/uri-py-solutions | ad285f552934ead54ad2410e23113e84b0724f72 | 43c10c0e99e99d22b4b5ae2871e5d897f8823b42 | refs/heads/master | 2020-08-11T00:28:48.661578 | 2020-04-23T20:21:39 | 2020-04-23T20:21:39 | 214,453,627 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | s = input()
# Classic Twitter rule: strictly more than 140 characters gets muted,
# anything up to the limit is tweeted ('s' is read on the line above).
if len(s) > 140:
    print("MUTE")
else:
print("TWEET") | [
"noreply@github.com"
] | LeonardoSaid.noreply@github.com |
d4673bf29ad447112ef749f137da86ca1e1456d5 | 25e40b56430fa44538aea8f77258d59899d69ff6 | /bank/context_processors.py | 12feb24860c0afb1c372305f2d10a04b0da5b429 | [] | no_license | Galaxylive/bankcenter | 207e96714fbfddc508596aad911ea85ce779bc32 | e4a2fe0e3e8b4e8b63b055aa61a39fb8cb20c12b | refs/heads/master | 2021-09-12T12:13:51.086481 | 2018-04-16T18:01:14 | 2018-04-16T18:01:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 214 | py | from .models import Bank, Location
def required_context(request):
    """Context processor: expose all banks and locations to every template."""
    return {
        'bank_list': Bank.objects.all(),
        'location_list': Location.objects.all(),
    }
| [
"akshar@agiliq.com"
] | akshar@agiliq.com |
e060e32c1f53318693963055bc73bedd61728875 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/contrib/cv/detection/SSD/configs/retinanet/retinanet_x101_64x4d_fpn_1x_coco.py | b807679c46fa189d66d26fce30557db4a4e06bf8 | [
"GPL-1.0-or-later",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 962 | py | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# MMDetection config: RetinaNet with a ResNeXt-101 64x4d backbone,
# inheriting everything else (neck, head, schedule) from the R-50 base.
_base_ = './retinanet_r50_fpn_1x_coco.py'
model = dict(
    # ImageNet-pretrained weights from the open-mmlab model zoo.
    pretrained='open-mmlab://resnext101_64x4d',
    backbone=dict(
        type='ResNeXt',  # replaces the base config's ResNet-50
        depth=101,
        groups=64,  # cardinality; with base_width=4 this is the "64x4d" variant
        base_width=4,
        num_stages=4,
        out_indices=(0, 1, 2, 3),  # feed all four stage outputs to the FPN
        frozen_stages=1,  # freeze the stem and first stage during training
        norm_cfg=dict(type='BN', requires_grad=True),
        style='pytorch'))
"wangjiangben@huawei.com"
] | wangjiangben@huawei.com |
9c967c77f5b7beacb31b0008b465780a3e1016b0 | 9f79c4f9a8a9154fc3dc9202ab8ed2547a722b5f | /DataTypes&Variables/Print_ASCII.py | 3c27b61fa6794a3af2aeb387cffcd2dfcb906eb1 | [] | no_license | grigor-stoyanov/PythonFundamentals | 31b6da00bd8294e8e802174dca4e62b231134090 | 5ae5f1f1b9ca9500d10e95318a731d3b29950a30 | refs/heads/main | 2023-02-11T12:17:19.010596 | 2021-01-14T22:14:54 | 2021-01-14T22:14:54 | 321,658,096 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 100 | py | start = int(input())
end = int(input())
# Emit the character for every code point from start..end inclusive, each
# followed by a space, with no trailing newline (same bytes as the original
# per-character loop; 'start' is read just above this block).
print("".join(f"{chr(code)} " for code in range(start, end + 1)), end="")
| [
"76039296+codelocks7@users.noreply.github.com"
] | 76039296+codelocks7@users.noreply.github.com |
756b2e0419e746d5f2ba7d94893571724ac1e41f | 81357d11785eb03ec9abceb93e545e5fd9bcc156 | /shop/urls.py | 41b553a2f380016c52011588f46c52fa86ce87ee | [] | no_license | ytshaha/my-first-blog | f63dcc8372aac8cd0b1bfad47a67200b2b19772d | 7ee8c893e8c98cd0c290a1121b94f34110018525 | refs/heads/master | 2023-04-19T15:31:29.996220 | 2021-05-11T18:22:12 | 2021-05-11T18:22:12 | 328,199,497 | 0 | 0 | null | 2021-04-02T07:48:22 | 2021-01-09T16:39:27 | HTML | UTF-8 | Python | false | false | 864 | py | from django.urls import path
from . import views
import django.contrib.auth.views as django_views
# URL namespace: reverse these as 'shop:<name>' in templates and views.
app_name = 'shop'
# Route table for the shop app (mounted under the project's URL prefix).
urlpatterns = [
    path('', views.index, name='index'),
    path('moum/', views.introduction, name='introduction'),
    # path('ticket/', views.buying_ticket, name='buying_ticket'),
    path('ticket/', views.buying_ticket, name='buying_ticket'),
    path('ticket/result/<int:pk>', views.BuyingTicketResultView.as_view(), name='buying_ticket_result'),
    # path('shop_login/', django_views.LoginView.as_view(), name='shop_login'),
    # Built-in logout view; sends the user back to /shop/ afterwards.
    path('accounts/logout/', django_views.LogoutView.as_view(next_page='/shop/'), name='logout'),
    path('event/1/', views.open_event, name='event_1'),
    path('privacy/', views.privacy, name='privacy'),
    path('terms/', views.terms, name='terms'),
    path('policy/', views.policy, name='policy'),
] | [
"ytshaha@naver.com"
] | ytshaha@naver.com |
7c9dd7e6e35c09d734608c28e1631e8577429162 | 595c4816196017a36850770ca2e39b64a2fb7590 | /landsat/msi_landsat5.py | d8a67eba7f3bac42ba368f2d1d58a2a4149bf579 | [
"Apache-2.0"
] | permissive | XingyuXu-cuhk/Landuse_DL | 6feb546b26400733caaadcd4b420ac8557b8fe31 | a993988727387be22e7a88f6f5790de8a88dccd6 | refs/heads/master | 2023-06-06T09:23:22.323195 | 2021-06-18T09:03:16 | 2021-06-18T09:03:16 | 269,514,918 | 0 | 0 | Apache-2.0 | 2020-06-05T02:43:25 | 2020-06-05T02:43:24 | null | UTF-8 | Python | false | false | 4,215 | py | #!/usr/bin/env python
# Filename: ndvi_landsat5
"""
introduction: calculate multispectral indices of Landsat 5,
including Brightness, Greenness, Wetness, NDVI, NDWI, NDMI
The input image is download from Google Eerth Engine
For comparison, we will stack the NDVI of each image and give a name consiting with image date, pathrow, and 'NDVI'
authors: Huang Lingcao
email:huanglingcao@gmail.com
add time: 26 March, 2019
"""
import sys,os
from optparse import OptionParser
import rasterio
import numpy as np
HOME = os.path.expanduser('~')
codes_dir2 = HOME + '/codes/PycharmProjects/DeeplabforRS'
sys.path.insert(0, codes_dir2)
import basic_src.io_function as io_function
import datetime
import struct
# from basic_src.RSImage import RSImageclass
# from msi_landsat8 import get_band_names # get_band_names (img_path):
# from msi_landsat8 import get_band_name # get_band_name (img_path,pro_name):
from msi_landsat8 import cal_two_band # cal_two_band (img_path,band1_name,band2_name,pro_name)
from msi_landsat8 import cal_tasselled_cap # cal_tasselled_cap(img_path,coefficents,pro_name):
# from msi_landsat8 import save_oneband_list # save_oneband_list(save_path,ndvi_list, band_name_list, org_img):
from msi_landsat8 import batch_cal_msi # batch_cal_msi(img_file_list, output_name, cal_function):
def cal_ndvi_landsat5(img_path):
    # Landsat 5 TM band pairing for NDVI: B4 = near-infrared, B3 = red.
    # The normalized-difference math itself lives in the shared cal_two_band helper.
    return cal_two_band(img_path, 'B4','B3','NDVI')
def cal_ndwi_landsat5(img_path):
    # NDWI pairing (green vs NIR): B2 = green, B4 = near-infrared.
    return cal_two_band(img_path, 'B2','B4','NDWI')
def cal_ndmi_landsat5(img_path):
    # NDMI pairing: B4 = near-infrared, B5 = shortwave-infrared (SWIR-1).
    return cal_two_band(img_path, 'B4','B5','NDMI')
## coefficents are from paper: Crist, E. P. (1985).
# A TM tasseled cap equivalent transformation for reflectance factor data.
# Remote Sensing of Environment, 17(3), 301-306.
def cal_brightness_landsat5(img_path):
    """Tasselled-cap 'brightness' component of a Landsat-5 TM scene.

    Coefficients are the six-band reflectance-factor values from
    Crist (1985), cited in the comment block above.
    """
    coefficients = np.array([0.2043, 0.4158, 0.5524, 0.5741, 0.3124, 0.2303])
    return cal_tasselled_cap(img_path, coefficients, 'brightness')
def cal_greenness_landsat5(img_path):
    """Tasselled-cap 'greenness' component of a Landsat-5 TM scene (Crist, 1985)."""
    coefficients = np.array([-0.1603, -0.2819, -0.4934, 0.794, -0.0002, -0.1446])
    return cal_tasselled_cap(img_path, coefficients, 'greenness')
def cal_wetness_landsat5(img_path):
    """Tasselled-cap 'wetness' component of a Landsat-5 TM scene (Crist, 1985)."""
    coefficients = np.array([0.0315, 0.2021, 0.3102, 0.1594, -0.6806, -0.6109])
    return cal_tasselled_cap(img_path, coefficients, 'wetness')
def main(options, args):
    """Compute six Landsat-5 multispectral indices for every GeoTIFF in a folder.

    args[0] is the folder of scenes downloaded from Google Earth Engine; one
    stacked multi-band output file is written per index via batch_cal_msi.
    """
    # Fix: honour the folder given on the command line (as the usage string
    # promises) instead of the hard-coded path to the author's machine that
    # had been left in place of the commented-out `args[0]`.
    img_folder = args[0]

    img_file_list = io_function.get_file_list_by_ext('.tif', img_folder, bsub_folder=False)
    # img_file_list = img_file_list[:2] # for test

    satellite = 'landsat5'

    # Normalized-difference indices.
    batch_cal_msi(img_file_list, satellite + '_ndvi.tif', cal_ndvi_landsat5)
    batch_cal_msi(img_file_list, satellite + '_ndwi.tif', cal_ndwi_landsat5)
    batch_cal_msi(img_file_list, satellite + '_ndmi.tif', cal_ndmi_landsat5)

    # Tasselled-cap components.
    batch_cal_msi(img_file_list, satellite + '_brightness.tif', cal_brightness_landsat5)
    batch_cal_msi(img_file_list, satellite + '_greenness.tif', cal_greenness_landsat5)
    batch_cal_msi(img_file_list, satellite + '_wetness.tif', cal_wetness_landsat5)
if __name__ == "__main__":
    usage = "usage: %prog [options] image_folder "
    parser = OptionParser(usage=usage, version="1.0 2019-3-26")
    parser.description = 'Introduction: calculate MSI from the image downloaded from Google Earth Engine'

    # Declared for symmetry with sibling scripts; main() currently ignores it.
    parser.add_option("-o", "--output",
                      action="store", dest="output",
                      help="the output file path")

    # parser.add_option("-p", "--para",
    #                   action="store", dest="para_file",
    #                   help="the parameters file")

    (options, args) = parser.parse_args()
    # Require at least the image_folder positional argument.
    if len(sys.argv) < 2:
        parser.print_help()
        sys.exit(2)

    ## set parameters files
    # if options.para_file is None:
    #     print('error, no parameters file')
    #     parser.print_help()
    #     sys.exit(2)
    # else:
    #     parameters.set_saved_parafile_path(options.para_file)

    main(options, args)
| [
"huanglingcao@gmail.com"
] | huanglingcao@gmail.com |
5ee65cca03a834f446911ce6e22c47e2f86c728d | ebb71d8710ac1445dc00b1c2a65a1e42979e2718 | /25-introduction-to-time-series-analysis-in-python/4-moving-average-ma-and-arma-models/03-estimating-an-ma-model.py | 642e3f491204bf86580507648592c948dc4a5af6 | [] | no_license | 0ashu0/datacamp-1 | 6473d83afc0ae00dc43116c1889bf065fb923ce4 | 9f0f64427ff07ff5f132886a5f44e19c5045c705 | refs/heads/master | 2020-12-13T07:56:18.687044 | 2019-01-09T07:18:02 | 2019-01-09T07:18:02 | 234,354,666 | 1 | 0 | null | 2020-01-16T15:49:48 | 2020-01-16T15:49:47 | null | UTF-8 | Python | false | false | 1,760 | py | '''
Estimating an MA Model
You will estimate the MA(1) parameter, <theta>, of one of the simulated series that you generated in the earlier exercise. Since the parameters are known for a simulated series, it is a good way to understand the estimation routines before applying it to real data.
For simulated_data_1 with a true <theta> of -0.9, you will print out the estimate of <theta>. In addition, you will also print out the entire output that is produced when you fit a time series, so you can get an idea of what other tests and summary statistics are available in statsmodels.
'''
import numpy as np
from statsmodels.tsa.arima_process import ArmaProcess

# Simulate 1000 observations of an MA(1) process with true theta = -0.9.
# ArmaProcess takes lag polynomials: the AR side is just [1] (no AR terms);
# the MA side [1, -0.9] encodes x_t = e_t - 0.9 * e_{t-1}.
ar_lag_poly = np.array([1])
ma_lag_poly = np.array([1, -0.9])
ma1_process = ArmaProcess(ar_lag_poly, ma_lag_poly)
simulated_data_1 = ma1_process.generate_sample(nsample=1000)

# Exercise: fit an MA(1) — ARMA order (p, q) = (0, 1) — to the simulated
# series, print the full statsmodels summary, then just the estimated
# constant and theta (which should land near the true value of -0.9).

# Import the ARMA module from statsmodels
from statsmodels.tsa.arima_model import ARMA

# Fit an MA(1) model to the first simulated data
results = ARMA(simulated_data_1, order=(0, 1)).fit()

# Print out summary information on the fit
print(results.summary())

# Print out the estimate for the constant and for theta
print("When the true theta=-0.9, the estimate of theta (and the consant) are:")
print(results.params)
| [
"sashakrasnov.nfo@gmail.com"
] | sashakrasnov.nfo@gmail.com |
94d4170d3d0d54d449799c097408c38eeade2496 | f9f1f887629855bbf12ecb0b7358fed5946b3caa | /.history/app_blog_forum/views_20201117210223.py | ff14f3fac7843008b6d907ae22f9c8a8e7ca936e | [] | no_license | hibamohi5/blog_forum | 4f687cee3ca6bdb1d0302b3657a77c01945404b3 | d6380eb7149355c79276b738da7da94c2ee03570 | refs/heads/main | 2023-01-14T18:33:53.043754 | 2020-11-20T01:52:22 | 2020-11-20T01:52:22 | 314,417,118 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,303 | py | from django.shortcuts import render, redirect
from .models import *
from django.contrib import messages
def index(request):
    """Render the landing page template."""
    return render(request, "index.html")
def register_new_user(request):
    """Validate the sign-up form; create the user and log them in, or re-show errors."""
    errors = User.objects.user_registration_validator(request.POST)
    if len(errors) > 0:
        # Flash one message per failed validation rule, then bounce home.
        for key, value in errors.items():
            error_msg = key + ' - ' + value
            messages.error(request, error_msg)
        return redirect("/")
    else:
        # SECURITY NOTE(review): the password is stored in plain text here;
        # it should be hashed (bcrypt / Django auth) before this ships.
        new_user = User.objects.create(
            first_name=request.POST['first_name'],
            last_name=request.POST['last_name'],
            email=request.POST['email'],
            password=request.POST['password']
        )
        # Fix: removed leftover debug `print(new_user.id)`.
        # Log the new user in by stashing their id in the session.
        request.session['user_id'] = new_user.id
        return redirect('/register/view')
def login(request):
    """Check the submitted email/password against the User table and start a session."""
    email_from_post = request.POST['email']
    password_from_post = request.POST['password']
    # Email is not enforced unique, so filter() may return several rows;
    # the first match wins (TODO: add a unique constraint on email).
    users = User.objects.filter(email=email_from_post)
    if len(users) == 0:
        messages.error(request, "email/password does not exist")
        return redirect("/")
    user = users[0]
    # Fix: removed leftover debug `print(user)`.
    # SECURITY NOTE(review): plain-text password comparison — hash instead.
    if (user.password != password_from_post):
        messages.error(request, "email/password does not exist")
        return redirect("/")
    request.session['user_id'] = user.id
    return redirect("/login")
def logout(request):
    """Drop the entire session (logs the user out) and return to the landing page."""
    request.session.clear()
    return redirect("/")
def view_home(request):
    """Home page for a logged-in user; anonymous visitors go back to the landing page."""
    if 'user_id' not in request.session:
        return redirect("/")
    current_user = User.objects.get(id=request.session['user_id'])
    return render(request, "view_home.html", {"user": current_user})
def view_articles(request):
    """Articles page for a logged-in user; anonymous visitors are redirected home."""
    if 'user_id' not in request.session:
        return redirect('/')
    user = User.objects.get(id=request.session['user_id'])
    context = {
        'user': user
    }
    # Fix: the original call was truncated — `render(request, "view")` dropped
    # the context and cut the template name short, so `user` never reached the
    # template. NOTE(review): "view_articles.html" is inferred from the
    # function name (this is a .history autosave); confirm the real template.
    return render(request, "view_articles.html", context)
| [
"hibamohi5@gmail.com"
] | hibamohi5@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.