blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2 values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313 values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107 values | src_encoding stringclasses 20 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 4 6.02M | extension stringclasses 78 values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c897c81c731637190368f66d5e1b1c115ed5e888 | b1f76c7d9831d0307e2cda687e6f40877e31652e | /stepmaker/exceptions.py | 0b53f9bd7d5185c60ec2661a78a99b920e956faf | [
"Apache-2.0"
] | permissive | klmitch/stepmaker | 136a9efdabedce2053c94b91c7f35529dd653988 | 9f024ca2fbb575e0758c70276b441e0f7df26068 | refs/heads/master | 2021-04-28T14:55:03.687765 | 2018-02-18T18:07:19 | 2018-02-18T18:07:19 | 121,976,215 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,241 | py | # Copyright (C) 2018 by Kevin L. Mitchell <klmitch@mit.edu>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
import signal
import six
class StepError(Exception):
    """
    Signal a problem with a step's configuration.

    When an address is supplied, it is appended to the message and
    retained on the ``addr`` attribute for later inspection.
    """

    def __init__(self, msg, addr=None):
        """
        Initialize a ``StepError`` instance.

        :param str msg: A message describing the error.
        :param addr: The address at which the error occurred.
        :type addr: ``StepAddress``
        """

        # Fold the address (if any) into the displayed message
        full_msg = msg if addr is None else '%s (%s)' % (msg, addr)

        super(StepError, self).__init__(full_msg)

        # Keep the address available to callers
        self.addr = addr
# Unique sentinel object indicating that an action was skipped
skipped = object()


class AbortStep(Exception):
    """
    Raised from ``Modifier.pre_call()`` to cut a step short.  The step
    processing logic treats this exception as non-fatal; it simply
    stops running any remaining ``Modifier`` and ``Action`` objects
    for that step.
    """

    def __init__(self, result=skipped):
        """
        Initialize an ``AbortStep`` instance.

        :param result: The result to return from the step.  When
                       omitted, step processing behaves as though the
                       step had been skipped entirely.
        """

        super(AbortStep, self).__init__()

        # The value the aborted step should report
        self.result = result
class ProcessError(Exception):
    """
    An exception raised when a process executed through the facilities
    provided by the ``stepmaker.Environment`` class exits with a
    non-zero return code.  The offending result object is saved on the
    ``result`` attribute.
    """

    def __init__(self, result):
        """
        Initialize a ``ProcessError`` exception.

        :param result: The result of the process execution.
        :type result: ``stepmaker.CompletedProcess``
        """

        # Construct a message.  Following the subprocess convention, a
        # negative return code means the process died to that signal.
        if result.returncode and result.returncode < 0:
            # Died due to a signal; figure out the signal name
            signame = None

            # Try the Python 3 method of resolving the signal name
            # (signal.Signals was added in Python 3.5)
            if six.PY3:  # pragma: no cover
                try:
                    signame = signal.Signals(-result.returncode).name
                except AttributeError:
                    # Doesn't have Signals, we'll fall back to the
                    # Python 2 method
                    pass
                except ValueError:
                    # Not a signal number known to the enum
                    signame = 'unknown signal %d' % -result.returncode

            if signame is None:  # pragma: no cover
                # Python 2 version of signal name lookup: scan the
                # module namespace for a matching SIG* constant
                for name, value in signal.__dict__.items():
                    if (name.startswith('SIG') and
                            not name.startswith('SIG_') and
                            value == -result.returncode):
                        signame = name
                        break

                if signame is None:
                    # Guess we don't know the signal name
                    signame = 'unknown signal %d' % -result.returncode

            super(ProcessError, self).__init__(
                'Command "%s" died with %s' % (result.args[0], signame)
            )
        elif result.returncode:
            # Non-zero error code
            super(ProcessError, self).__init__(
                'Command "%s" returned non-zero exit status %d' %
                (result.args[0], result.returncode)
            )
        else:
            # Raised on a zero exit status -- did it really fail?
            super(ProcessError, self).__init__(
                'Command "%s" successful' % result.args[0]
            )

        # Save the full process result for callers
        self.result = result
| [
"klmitch@mit.edu"
] | klmitch@mit.edu |
726011e89cde94425a4f3765a97b8e7bd24321c1 | 2621798adafdf2c4559059f5a0dda6477f54fdab | /databases/playlist-app/app.py | 4e87d815089e72ffe70d2331742bc3040b4264b8 | [] | no_license | BrandonLMcintosh/DatabaseDJ | 5eeabfb954a383e9ac8403771a7da78c8908bec3 | 1e73fce76722ed292410dce6d91688fe6584dba5 | refs/heads/master | 2023-03-07T06:52:22.503310 | 2021-02-01T19:09:04 | 2021-02-01T19:09:04 | 334,297,896 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,683 | py | from flask import Flask, redirect, render_template, request, flash
from flask_debugtoolbar import DebugToolbarExtension
from models import db, connect_db, Playlist, Song, PlaylistSong
from forms import NewSongForPlaylistForm, SongForm, PlaylistForm
# Application and database configuration
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///playlist-app'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
# Echo every SQL statement to the console (debugging aid)
app.config['SQLALCHEMY_ECHO'] = True

connect_db(app)
db.create_all()

app.config['SECRET_KEY'] = "I'LL NEVER TELL!!"

# Having the Debug Toolbar show redirects explicitly is often useful;
# however, if you want to turn it off, you can uncomment this line:
#
# NOTE(review): the line below is active (not commented out), so
# redirect interception is currently disabled -- the stale comment
# above suggests the opposite; confirm which behavior is intended.
app.config['DEBUG_TB_INTERCEPT_REDIRECTS'] = False
debug = DebugToolbarExtension(app)
@app.route("/")
def root():
    """Homepage: send visitors to the playlist overview."""

    playlists_url = "/playlists"
    return redirect(playlists_url)
##############################################################################
# Playlist routes
@app.route("/playlists")
def show_all_playlists():
    """Render the overview page listing every playlist."""

    all_playlists = Playlist.query.all()
    return render_template("playlists.html", playlists=all_playlists)
@app.route("/playlists/<int:playlist_id>")
def show_playlist(playlist_id):
    """Show detail on a specific playlist.

    :param playlist_id: Primary key of the playlist; responds 404 when
        no such playlist exists.
    """

    # Leftover debug print statements removed -- they dumped the
    # playlist's songs to stdout on every request.
    playlist = Playlist.query.get_or_404(playlist_id)
    return render_template('playlist.html', playlist=playlist)
@app.route("/playlists/add", methods=["GET", "POST"])
def add_playlist():
    """Handle the add-playlist form.

    - GET (or an invalid POST): render the form.
    - Valid POST: persist the new playlist and redirect to the list.
    """

    form = PlaylistForm()

    if request.method == "GET":
        return render_template('new_playlist.html', form=form)
    elif request.method == "POST":
        if not form.validate_on_submit():
            # Validation failed: re-render with error messages
            return render_template('new_playlist.html', form=form)
        new_playlist = Playlist(
            name=form.name.data,
            description=form.description.data,
        )
        db.session.add(new_playlist)
        db.session.commit()
        return redirect('/playlists')
    else:
        flash('something went wrong')
        return redirect('/playlists')
##############################################################################
# Song routes
@app.route("/songs")
def show_all_songs():
    """Render the page listing every song."""

    return render_template("songs.html", songs=Song.query.all())
@app.route("/songs/<int:song_id>")
def show_song(song_id):
    """Render the detail page for one song (404 when it is missing)."""

    requested = Song.query.get_or_404(song_id)
    return render_template('song.html', song=requested)
@app.route("/songs/add", methods=["GET", "POST"])
def add_song():
    """Handle the add-song form.

    - GET (or an invalid POST): render the form.
    - Valid POST: persist the new song and redirect to the song list.
    """

    form = SongForm()

    if request.method == "GET":
        return render_template('new_song.html', form=form)
    elif request.method == "POST":
        if not form.validate_on_submit():
            # Validation failed: re-render with error messages
            return render_template('new_song.html', form=form)
        new_song = Song(title=form.title.data, artist=form.artist.data)
        db.session.add(new_song)
        db.session.commit()
        return redirect('/songs')
    else:
        flash('something went wrong')
        return redirect('/songs')
@app.route("/playlists/<int:playlist_id>/add-song", methods=["GET", "POST"])
def add_song_to_playlist(playlist_id):
    """Add a song to a playlist, then redirect back to that playlist.

    GET shows a form whose choices are restricted to songs not already
    on the playlist; a valid POST appends the chosen song.
    """

    playlist = Playlist.query.get_or_404(playlist_id)
    form = NewSongForPlaylistForm()

    # Restrict form to songs not already on this playlist
    curr_on_playlist = [song.id for song in playlist.songs]
    form.song.choices = (db.session.query(Song.id, Song.title)
                         .filter(Song.id.notin_(curr_on_playlist))
                         .all())

    if request.method == "GET":
        return render_template("add_song_to_playlist.html",
                               playlist=playlist,
                               form=form)
    elif request.method == "POST":
        if form.validate_on_submit():
            song = Song.query.get_or_404(form.song.data)
            playlist.songs.append(song)
            # `playlist` is already tracked by the session, so a
            # separate session.add() is unnecessary; committing
            # persists the new association row.  Leftover debug
            # print statements also removed here.
            db.session.commit()
            return redirect(f"/playlists/{playlist_id}")
        else:
            return render_template('/add_song_to_playlist.html', playlist=playlist, form=form)
    else:
        flash('something went wrong')
        return redirect(f'/playlists/{playlist_id}')
| [
"Brandon.L.Mcintosh@gmail.com"
] | Brandon.L.Mcintosh@gmail.com |
eed75e0d57c80f1d2f9c7899be3f2d660a805599 | 3ecc78f69b63f8107de5ae65762192a5fdd60cc5 | /mhelper.v3.py | 73b3f0207a41be30127e34c6bb743cbbe0d14fe4 | [
"Apache-2.0"
] | permissive | KEKE046/memory-helper | 4d45a41d92aca4044f95c812fd915a812bf99ef1 | 377baaa823bd66de3d44770bb779ead9a5bdc334 | refs/heads/main | 2023-08-25T01:31:38.729101 | 2021-10-29T10:12:34 | 2021-10-29T10:12:34 | 422,525,858 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 43,655 | py | import warnings
# Give immediate feedback when launched as a script, since the heavy
# imports below (sklearn, librosa, nagisa, ...) can take a while.
if __name__ == '__main__':
    print('Loading...')
# Silence all library warnings globally for a cleaner console UI.
warnings.filterwarnings("ignore")
import pyfiglet
import time
from rich.console import Console
from rich import print as rprint
from sklearn.linear_model import Ridge
from pypinyin import lazy_pinyin
import nagisa
import jieba
import regex
import numpy as np
import gtts
import sounddevice as sd
import librosa
import langdetect
from typing import Any, Callable, Dict, List, Tuple, Union
from pathlib import Path
import random
import copy
import re
import json
import hashlib
from tqdm import tqdm
def simple_digest(s: str) -> str:
    """Return a short, human-readable digest of *s*: its first ten
    characters joined to the MD5 hex digest by a hyphen."""
    prefix = s[:10]
    checksum = hashlib.md5(s.encode('utf-8')).hexdigest()
    return '{}-{}'.format(prefix, checksum)
class SerializableObjectMeta(type):
    """Metaclass that gathers annotated fields (and their default
    values) from a class and all of its bases into two class
    attributes -- ``__serializableobject_fields__`` (name -> annotation)
    and ``__serializableobject_values__`` (name -> default) -- which
    ``SerializableObject`` uses for (de)serialization."""

    def __new__(cls, name, bases, attrs):
        annotations = {}
        default_values = {}
        # Walk bases from most-distant to nearest so that closer
        # classes override farther ones; the class's own attrs win last.
        for base in bases[::-1]:
            annotations.update(base.__dict__.get('__annotations__', {}))
            default_values.update(base.__dict__)
        annotations.update(attrs.get('__annotations__', {}))
        default_values.update(attrs)
        # Keep only defaults that correspond to an annotated field.
        default_values = {k: v for k,
                          v in default_values.items() if k in annotations}
        attrs['__serializableobject_fields__'] = annotations
        attrs['__serializableobject_values__'] = default_values
        return type.__new__(cls, name, bases, attrs)
class SerializableObject(metaclass=SerializableObjectMeta):
    '''Abstract base class: subclasses declare fields via class-level
    annotations and can call ``serialize`` to flatten themselves into a
    plain dict, for exchanging with a front end or saving to disk.'''

    def __init__(self, *args: Tuple[Any], **kws: Dict[str, Any]):
        # Bind each declared field in declaration order, preferring
        # positional args, then keyword args, then the class default.
        # Keyword args that are not declared fields are ignored.
        for i, k in enumerate(self.__serializableobject_fields__):
            if i < len(args):
                setattr(self, k, args[i])
            elif k in kws:
                setattr(self, k, kws[k])
            elif k in self.__serializableobject_values__:
                setattr(self, k, self.__serializableobject_values__[k])
            else:
                raise ValueError(f"param {k} not specified")

    def serialize(self):
        """Flatten this object (recursively) into a plain dict."""
        ret = {}
        for k, v in self.__serializableobject_fields__.items():
            if type(v) is SerializableObjectMeta:
                # Field annotated with another SerializableObject
                # subclass: serialize it recursively.
                ret[k] = self.__dict__[k].serialize()
            else:
                ret[k] = self.__dict__[k]
        return ret

    @classmethod
    def deserialize(cls, data):
        """Rebuild an instance from a dict produced by ``serialize``.
        Missing keys fall back to the class defaults in ``__init__``."""
        state_dict = {}
        for k, v in cls.__serializableobject_fields__.items():
            if type(v) is SerializableObjectMeta:
                state_dict[k] = v.deserialize(data[k])
            elif k in data:
                state_dict[k] = data[k]
        return cls(**state_dict)

    def __str__(self):
        msg = self.__class__.__name__ + '('
        msg += ', '.join([k + '=' + str(self.__dict__[k])
                          for k in self.__serializableobject_fields__])
        msg += ')'
        return msg

    def __repr__(self):
        return str(self)

    def __eq__(self, rhs):
        # Structural equality: rhs must be some SerializableObject and
        # agree on every field this object declares.
        if type(rhs.__class__) is not SerializableObjectMeta:
            return False
        for k in self.__serializableobject_fields__:
            if k not in rhs.__serializableobject_fields__:
                return False
            if rhs.__dict__[k] != self.__dict__[k]:
                return False
        return True

    def __ne__(self, rhs):
        return not self.__eq__(rhs)
class MemoryStat(SerializableObject):
    # Spaced-repetition state following the SuperMemo SM-2 scheme:
    # EF is the easiness factor, ``interval`` the current repetition
    # interval (counted in review sessions), and ``upcoming`` counts
    # down the sessions until the item becomes due again.
    EF: float = 2.5
    interval: int = 0
    upcoming: int = 0

    def decrease_tick(self) -> bool:
        """Advance one session; return True when the item is now due."""
        self.upcoming = max(self.upcoming - 1, 0)
        return self.upcoming == 0

    def is_active(self) -> bool:
        """Whether the item is currently due for review."""
        return self.upcoming == 0

    def add_stat(self, q: int):
        """Update EF/interval from quality grade ``q`` (expected 0-5).

        Matches SM-2: a failing grade (q < 3) resets the schedule, EF is
        nudged by the standard quadratic formula (floored at 1.3), and
        the interval progresses 1 -> 6 -> round(EF * interval).
        """
        if q < 3:
            # Failed recall: restart the repetition schedule.
            self.interval = 0
            self.upcoming = 0
        self.EF = max(self.EF + (0.1 - (5 - q) * (0.08 + (5 - q) * 0.02)), 1.3)
        if self.interval == 0:
            self.interval = 1
        elif self.interval == 1:
            self.interval = 6
        else:
            self.interval = int(round(self.EF * self.interval))
        self.upcoming = self.interval
class Question(SerializableObject):
    """One flash-card question with its spaced-repetition state and
    options controlling audio playback and answer matching."""

    title: str = ''
    answer: str = ''
    # ISO language code; auto-detected from the text when left empty.
    language: str = ''
    # Whether TTS audio is played automatically for this question.
    autoplay: bool = False
    # NOTE(review): this class-level MemoryStat() default is a single
    # shared instance -- every Question that is not given its own
    # memory_stat ends up referencing the same object, so mutating it
    # via add_stat() affects all of them; confirm this is intended.
    memory_stat: MemoryStat = MemoryStat()
    question_id: int = 0
    # Format string used to write the question back into markdown.
    reconstruct_pattern: str = '**Question** {title}\n{answer}'
    # Explicit whitelist / blacklist of matcher names (see MatchManager).
    match_method: Union[List[str], None] = None
    match_ignore: Union[List[str], None] = None
    # Hide the question text (dictation mode).
    invisible: bool = False

    def __init__(self, *args, **kws):
        super().__init__(*args, **kws)
        # Detect the language from the text when not given explicitly.
        if not self.language:
            self.language = langdetect.detect(self.title + self.answer)
        self.title = self.title.strip()
        self.answer = self.answer.strip()

    def get_uid(self):
        """Stable identifier derived from the title and answer text."""
        return simple_digest(self.title + '#' + self.answer)
class AudioManager:
    """Downloads text-to-speech audio via gTTS, caches the mp3 files on
    disk, and plays them through sounddevice."""

    def __init__(self, cache_dir: Path):
        cache_dir.mkdir(exist_ok=True, parents=True)
        self.cache_dir = cache_dir
        # Total size in bytes of cached mp3s, tracked incrementally.
        self.cache_size = sum(f.stat().st_size for f in cache_dir.glob('*.mp3'))

    def get_cache_size(self) -> int:
        return self.cache_size

    def get_audio(self, title: str, force_download: bool = False, **params: Dict[str, Any]) -> Path:
        """Return the cached mp3 path for ``title``, downloading via
        gTTS when absent or when ``force_download`` is set.  Extra
        keyword params (e.g. ``lang``) are passed to gTTS."""
        name = simple_digest(title)
        path = self.cache_dir.joinpath(f'{name}.mp3')
        if not path.exists() or force_download:
            # '*' characters are markdown emphasis, not meant to be spoken.
            tts = gtts.gTTS(title.replace('*', ''), **params)
            tts.save(path)
            if not force_download:
                # NOTE(review): size is only added for fresh downloads;
                # forced re-downloads never adjust cache_size -- confirm.
                self.cache_size += path.stat().st_size
        return path

    def play_audio(self, data: str, force_download: bool = False, **params: Dict[str, Any]):
        """Stop any current playback and play TTS audio for ``data``
        without blocking."""
        sd.stop()
        path = self.get_audio(data, force_download, **params)
        data, fs = librosa.load(path)
        sd.play(data, fs, blocking=False)
# Warm up jieba at import time: the first cut() call lazily builds the
# dictionary, so trigger it here instead of during the first match.
list(jieba.cut('测试结巴分词'))
class MatchManager:
    """Compares a user's answer against a Question's ground truth using
    a cascade of increasingly lenient matchers (full / token / pinyin /
    char) and turns the outcome into a markdown diagnostic message."""

    def __init__(self):
        # Ordered (name, matcher) pairs; match_answer tries them in
        # order and stops at the first success.
        self.match_method: List[Tuple[str, Callable[[Question, str], Tuple[bool, str]]]] = [
            ('full-match', self.full_match),
            ('token-match', self.token_match),
            ('pinyin-match', self.pinyin_match),
            ('char-match', self.char_match),
        ]

    def clean_word(self, s):
        # Collapse all Unicode punctuation and whitespace runs to a
        # single space (requires the third-party ``regex`` module for \p).
        return regex.sub(r'[\p{P}\s]+', ' ', s.strip())

    def split_word_zh(self, s):
        # Chinese word segmentation via jieba.
        return list(jieba.cut(s.lower()))

    def split_word_ja(self, s):
        # Japanese word segmentation via nagisa.
        return list(nagisa.tagging(s).words)

    def first_pinyin_zh(self, s):
        # One uppercase pinyin-initial string per segmented word
        # (style 4 -- presumably Style.FIRST_LETTER; confirm against
        # the pypinyin docs).
        words = self.split_word_zh(s)
        return [''.join(lazy_pinyin(w, 4)).upper() for w in words]

    def pattern_match(self, patterns: List[str], data: Union[str, List], tag: str = '') -> Tuple[bool, str]:
        '''
        Check whether every entry of ``patterns`` occurs in ``data``.

        Returns (all_matched, message); the message marks matched runs
        of ``data`` with '*' and lists unmatched patterns in braces.
        '''
        match_mask = np.zeros(len(data))
        unmatched = []
        for pat in patterns:
            if pat in data:
                i = data.index(pat)
                if isinstance(data, str):
                    match_mask[i:i+len(pat)] = 1
                else:
                    match_mask[i] = 1
            else:
                unmatched.append(pat)
        msg = f"**{tag}** "
        # Edges of the mask tell us where to insert the '*' markers.
        edge = np.diff(match_mask, prepend=0, append=0)
        if edge[0]:
            msg += "*"
        for c, e in zip(data, edge[1:]):
            msg += c
            if e:
                msg += "*"
        if unmatched:
            msg += ' {' + ' '.join(unmatched) + '}'
        return len(unmatched) == 0, msg

    def clean_first(func) -> Callable[..., Any]:
        '''
        In-class decorator: strip punctuation/whitespace from both
        question.answer (as ground truth) and the user's answer, then
        call the wrapped function as func(self, answer, gt, question).
        '''
        def inner(self, question: Question, answer: str):
            gt = self.clean_word(question.answer)
            answer = self.clean_word(answer)
            return func(self, answer, gt, question)
        return inner

    def then_pattern_match(tag) -> Callable[..., Tuple[bool, str]]:
        '''In-class decorator factory: the wrapped function returns
        (patterns, data); feed those through ``pattern_match`` with the
        given tag and return its (matched, message) result.'''
        def wrapper(func):
            def inner(self, *args, **kws):
                pat, data = func(self, *args, **kws)
                return self.pattern_match(pat, data, tag)
            return inner
        return wrapper

    @clean_first
    def full_match(self, answer: str, gt: str, question: Question):
        # Exact string equality after cleaning.
        if answer == gt:
            return True, "**full-match** success"
        else:
            return False, "**full-match** failed"

    @clean_first
    @then_pattern_match("char-match")
    def char_match(self, answer: str, gt: str, question: Question):
        # Every character of the answer must appear in the ground truth.
        return list(answer), list(gt)

    @clean_first
    @then_pattern_match("token-match")
    def token_match(self, answer: str, gt: str, question: Question):
        # Tokenize with a language-specific splitter when available
        # (split_word_zh / split_word_ja); otherwise fall back to chars.
        func_name = 'split_word_' + question.language
        if hasattr(self, func_name):
            func = getattr(self, func_name)
            return func(answer), func(gt)
        else:
            return list(answer), list(gt)

    @clean_first
    @then_pattern_match("pinyin-match")
    def pinyin_match(self, answer: str, gt: str, question: Question):
        # The answer's space-separated tokens are matched against the
        # pinyin initials of the Chinese ground truth.
        return answer.split(), ''.join(self.first_pinyin_zh(gt))

    def match_answer(self, question: Question, answer: str):
        """Run the applicable matchers in order; return the first
        success, or (False, all failure messages joined)."""
        all_msg = []
        for name, method in self.match_method:
            # A matcher applies if it is whitelisted by match_method,
            # or (absent a whitelist) not blacklisted by match_ignore.
            need_match = False
            if question.match_method:
                if name in question.match_method:
                    need_match = True
            elif question.match_ignore:
                if name not in question.match_ignore:
                    need_match = True
            else:
                need_match = True
            if need_match:
                mat, msg = method(question, answer)
                if mat:
                    return mat, msg
                else:
                    all_msg.append(msg)
        return False, '\n'.join(all_msg)

    def auto_score(self, question: Question, answer: str, speed_score: float):
        """Combine correctness and typing speed into a 0-5 grade: a
        correct answer scores at least 3, a wrong one at most 3."""
        match, match_msg = self.match_answer(question, answer)
        if match:
            return max(speed_score, 3), match_msg
        else:
            return min(speed_score, 3), match_msg
class LLRegresser(SerializableObject):
    """Log-linear ridge regressor: features are augmented with their
    logs before fitting, and the training history is bounded."""

    # Maximum number of (X, y) observations kept.
    max_history: int = 500
    # Ridge regularization strength.
    alpha: float = 1
    X: Union[List[List[float]], None] = None
    y: Union[List[List[float]], None] = None

    def add_data(self, X: List[float], y: float):
        """Append one observation, evicting the oldest past max_history."""
        if self.X is None:
            self.X = []
        if self.y is None:
            self.y = []
        self.X.append(X)
        self.y.append(y)
        if len(self.X) > self.max_history:
            self.X.pop(0)
            self.y.pop(0)

    def estimate(self, X: List[float]) -> float:
        """Predict y for feature vector ``X``; never less than 0.01.

        Falls back to a crude heuristic (0.1 * sum of the augmented
        features) when there is no usable history or the fit fails.
        """
        cur_X = np.asarray(X)[np.newaxis, :]
        cur_X = np.concatenate([cur_X, np.log(cur_X + 1)], axis=1)
        try:
            X = np.asarray(self.X)
            X = np.concatenate([X, np.log(X + 1)], axis=1)
            y = np.asarray(self.y)
            return max(Ridge(alpha=self.alpha).fit(X, y).predict(cur_X)[0], 0.01)
        except Exception:
            # Previously a bare ``except:``, which also swallowed
            # KeyboardInterrupt/SystemExit; Exception is the widest we
            # should catch for a best-effort fallback.
            return cur_X.sum() * 0.1
class SpeedEstimator:
    """Estimates how long an answer should take to type, keeping one
    LLRegresser per language and persisting them as JSON at ``path``."""

    def __init__(self, path: Path):
        path.parent.mkdir(exist_ok=True, parents=True)
        self.path = path
        # One regressor per language code.
        self.estimators: Dict[str, LLRegresser] = {}
        if self.path.exists():
            self.load()

    def save(self):
        with self.path.open('w') as f:
            json.dump({k: v.serialize()
                       for k, v in self.estimators.items()}, f)

    def load(self):
        with self.path.open() as f:
            data = json.load(f)
            self.estimators = {k: LLRegresser.deserialize(
                v) for k, v in data.items()}

    @staticmethod
    def get_feature(question: Question, answer: str):
        # Simple length-based features over title, ground-truth answer
        # and the user's answer (total length, shortest/most tokens,
        # shortest/most lines).
        def str_feat(s):
            return [
                len(s),
                min(map(len, re.split('\s+', s))),
                len(re.split('\s+', s)),
                min(map(len, s.split('\n'))),
                len(s.split('\n'))
            ]
        return str_feat(question.title) + str_feat(question.answer) + str_feat(answer)

    def add_data(self, question: Question, answer: str, timing: float):
        """Record an observed answering time for the question's language."""
        identifier = question.language
        if identifier not in self.estimators:
            self.estimators[identifier] = LLRegresser()
        X = self.get_feature(question, answer)
        y = timing
        self.estimators[identifier].add_data(X, y)

    def estimate(self, question: Question, answer: str):
        """Predict the expected answering time in seconds."""
        identifier = question.language
        if identifier not in self.estimators:
            self.estimators[identifier] = LLRegresser()
        X = self.get_feature(question, answer)
        return self.estimators[identifier].estimate(X)

    def speed_score(self, question: Question, answer: str, timing: float):
        """Map actual vs. expected time onto a score capped at 5."""
        y = self.estimate(question, answer)
        return min(int(y / timing * 7), 5)
class DataSource:
    """Parses a flash-card markdown file (with ``<!-- ... -->`` control
    comments) into Question objects, merges in memory statistics from a
    JSON side file, and can write both back out.

    Fixes applied in review:
    - ``add_question`` passed the kwarg ``questoin_id`` (typo), which
      SerializableObject silently ignored, so every Question kept
      question_id == 0 and ordering relied on sort stability; it now
      correctly passes ``question_id``.
    - ``set_config`` used mutable default arguments.
    """

    # Baseline parser state; control commands push/pop modified copies.
    default_state = {
        'autoplay': False,
        'question': False,
        'inline': False,
        'language': '',
        'invisible': False,
        'match_method': None,
        'match_ignore': None
    }
    dictation_preset = {
        'autoplay': True,
        'invisible': True
    }
    no_dictation_preset = {
        'autoplay': False,
        'invisible': False
    }

    def __init__(self, markdown_file: Path, database_file: Path):
        markdown_file.parent.mkdir(exist_ok=True, parents=True)
        database_file.parent.mkdir(exist_ok=True, parents=True)
        self.markdown_file = markdown_file
        self.database_file = database_file
        self.q: List[Question] = []
        self.db: Dict[str, Question] = {}
        self._state = copy.copy(self.default_state)
        self.state_stack = []
        self.cmd_pattern = re.compile('(.*?)=(.*?)')
        # One entry per markdown line; None marks a question slot to be
        # filled by generate_markdown().
        self.reconstruct_pattern = []
        self.parse_message = []
        self.current_line = 0
        self.question_id = -1
        self.current_question: Union[Question, None] = None
        # NOTE: 'froce_state' is a long-standing typo for 'force_state';
        # kept for compatibility.  Forced settings override everything.
        self.froce_state = {}

    @property
    def state(self):
        # Forced settings always win over the stacked state.
        self._state.update(self.froce_state)
        return self._state

    @state.setter
    def state(self, value):
        self._state = copy.copy(value)
        self._state.update(self.froce_state)

    def set_force_dictation(self):
        self.froce_state.update(self.dictation_preset)

    def set_force_no_dictation(self):
        self.froce_state.update(self.no_dictation_preset)

    def set_force_voice(self):
        self.froce_state.update(autoplay=True)

    def set_force_no_voice(self):
        self.froce_state.update(autoplay=False)

    def set_config(self, presets=(), forces=()):
        """Apply named presets/forces by dispatching to set_preset_* /
        set_force_* methods when they exist."""
        for preset in presets:
            name = 'set_preset_' + preset.replace('-', '_')
            if hasattr(self, name):
                self.add_message('preset ' + preset)
                getattr(self, name)()
        for force in forces:
            name = 'set_force_' + force.replace('-', '_')
            if hasattr(self, name):
                self.add_message('force ' + force)
                getattr(self, name)()

    def push_stack(self):
        self.state_stack.append(copy.copy(self.state))

    def pop_stack(self):
        self.state = self.state_stack.pop()

    def update_stack(self, *args, **kws):
        self.state.update(*args, **kws)
        self.state.update(self.froce_state)

    # --- handle_* methods are dispatched by name from parse_markdown ---

    def handle_voice(self):
        self.update_stack(autoplay=True)

    def handle_question(self):
        self.update_stack(question=True)

    def handle_inline(self):
        self.update_stack(inline=True)

    def handle_invisible(self):
        self.update_stack(invisible=True)

    def handle_language(self, lang):
        self.update_stack(language=lang)

    def handle_match_method(self, *params):
        self.update_stack(match_method=params)

    def handle_match_ignore(self, *params):
        self.update_stack(match_ignore=params)

    def handle_end_all(self):
        # Reset to the outermost stacked state.
        self.state = self.state_stack[0]
        self.state_stack = []

    def handle_end(self):
        self.pop_stack()

    def handle_dictation(self):
        self.update_stack(**self.dictation_preset)

    def add_question(self, title, answer, **kws):
        """Create a Question from the current parser state, reusing any
        stored memory statistics for an identical title/answer pair."""
        wrap_params = ['language', 'autoplay',
                       'match_method', 'match_ignore', 'invisible']
        wrap_dict = {k: self.state[k] for k in wrap_params}
        self.question_id += 1
        q = Question(
            title=title,
            answer=answer,
            question_id=self.question_id,
            **wrap_dict,
            **kws
        )
        if (uid := q.get_uid()) in self.db:
            q.memory_stat = self.db[uid].memory_stat
        self.current_question = q
        self.q.append(q)
        self.reconstruct_pattern.append(None)
        return q

    def add_message(self, msg):
        self.parse_message.append(
            f'File {self.markdown_file.stem} Line {self.current_line + 1}: {msg}')

    def parse_markdown(self, md):
        """Walk the markdown line by line, interpreting ``<!-- -->``
        control comments and collecting questions."""
        self.state.update(self.froce_state)
        ignore_raw_text = False
        for self.current_line, line in enumerate(md.split('\n')):
            line = line.strip()
            # Fenced code blocks are copied through verbatim.
            if line.startswith('```'):
                ignore_raw_text = not ignore_raw_text
            if ignore_raw_text:
                self.reconstruct_pattern.append(line)
                continue
            inline_command = False
            inline_depth = 0
            ctrl_cmd = ''
            if line.endswith('-->'):
                # Split the content from the trailing control comments.
                idx = line.index('<!--')
                line_new, ctrl_cmd = line[:idx], line[idx:]
                for ctrl_part in re.findall('\s*<!--(.*?)-->\s*', ctrl_cmd):
                    part_tokens = ctrl_part.split('&')
                    if any([x.strip() == 'end-all' for x in part_tokens]):
                        self.handle_end_all()
                    elif any([x.strip() == 'end' for x in part_tokens]):
                        self.handle_end()
                    else:
                        # Each comment group opens one stacked scope.
                        self.push_stack()
                        inline_depth += 1
                        for ctrl in part_tokens:
                            ctrl_tokens = ctrl.strip().split('=')
                            cmd = ctrl_tokens[0].replace('-', '_')
                            if len(ctrl_tokens) > 1:
                                params = [x.strip()
                                          for x in ctrl.split('=')[1].split(',')]
                            else:
                                params = []
                            if hasattr(self, 'handle_' + cmd):
                                getattr(self, 'handle_' + cmd)(*params)
                            else:
                                self.add_message(
                                    f'Unknown Control Command {ctrl}')
                if line_new:
                    # Content plus a trailing command: scopes apply only
                    # to this line and are popped at the bottom.
                    line = line_new
                    inline_command = True
                else:
                    self.reconstruct_pattern.append(line)
                    continue
            if self.state['inline']:
                # Inline mode: "<title> <answer words...>" on one line.
                tokens = re.split('\s+', line)
                self.add_question(
                    title=tokens[0],
                    answer=' '.join(tokens[1:]),
                    reconstruct_pattern='{title} {answer} %s ' % ctrl_cmd.strip(
                    )
                )
                self.current_question = None
            elif self.state['question']:
                # Question mode: "**Label** title" starts a question,
                # following non-empty lines accumulate into the answer.
                if mat := re.fullmatch(r'\*\*(.*?)\*\*(.*?)', line):
                    label, title = mat.groups()
                    self.current_question = self.add_question(
                        title, '', reconstruct_pattern='**%s** {title} %s\n{answer}' % (label, ctrl_cmd))
                elif self.current_question is not None:
                    if line:
                        self.current_question.answer += line + '\n'
                elif line != '':
                    self.add_message(f'Ignore line: {line}')
            else:
                self.reconstruct_pattern.append(line)
            if inline_command:
                for _ in range(inline_depth):
                    self.pop_stack()

    def load(self):
        """Load stored stats (if any), parse the markdown file, and
        return the accumulated parser messages."""
        if self.database_file.exists():
            with self.database_file.open() as f:
                for prob in json.load(f):
                    q: Question = Question.deserialize(prob)
                    self.db[q.get_uid()] = q
        with self.markdown_file.open() as f:
            self.parse_markdown(f.read())
        return '\n'.join(self.parse_message)

    def get_questions(self):
        return self.q

    def generate_markdown(self):
        """Rebuild the markdown text, substituting each question back
        into its recorded slot via its reconstruct_pattern."""
        questions = sorted(self.q, key=lambda x: x.question_id)
        ques_out = []
        for q in questions:
            ques_out.append(q.reconstruct_pattern.format(
                title=q.title, answer=q.answer))
        qidx = 0
        reconstructed = []
        for r in self.reconstruct_pattern:
            if r is None:
                reconstructed.append(ques_out[qidx])
                qidx += 1
            else:
                reconstructed.append(r)
        return '\n'.join(reconstructed)

    def save(self) -> Dict[str, Any]:
        """Write the regenerated markdown and the question database."""
        with self.markdown_file.open('w') as f:
            f.write(self.generate_markdown())
        with self.database_file.open('w') as f:
            json.dump([x.serialize() for x in self.q],
                      f, ensure_ascii=False, indent=4)
class HistoryStat(SerializableObject):
    """Lifetime usage statistics persisted between sessions."""

    total_problems: int = 0
    total_failed_problems: int = 0
    total_answering: int = 0
    total_failed_answering: int = 0
    score_distribution: Union[None, List[int]] = None
    max_combo: int = 0
    total_using_time: float = 0

    def __init__(self, *args, **kws):
        super().__init__(*args, **kws)
        # Lazily build one counter bucket per quality grade (0..5).
        if self.score_distribution is None:
            self.score_distribution = [0 for _ in range(6)]
class StatManager:
    """Owns the persisted HistoryStat, proxies attribute access to it,
    and tracks wall-clock usage time between saves."""

    def __init__(self, file: Path):
        file.parent.mkdir(exist_ok=True, parents=True)
        self.file = file
        self.history_stat = HistoryStat()
        self.load()

    def load(self):
        # Reset the usage-time clock whenever stats are (re)loaded.
        self.last_tick = time.time()
        if self.file.exists():
            with self.file.open() as f:
                self.history_stat = HistoryStat.deserialize(json.load(f))

    def save(self):
        with self.file.open('w') as f:
            # Accumulate elapsed time since the last save/load.
            self.history_stat.total_using_time += time.time() - self.last_tick
            self.last_tick = time.time()
            json.dump(self.history_stat.serialize(), f)

    def __getattr__(self, key):
        # Only called when normal lookup fails: fall through to the
        # wrapped HistoryStat's fields first, then our own __dict__.
        if (hs:=self.__dict__.get('history_stat', None)) is not None:
            if key in hs.__dict__: return hs.__dict__[key]
        return self.__dict__[key]

    def __setattr__(self, key, value):
        # Mirror writes of known stat fields onto the wrapped
        # HistoryStat while always writing to our own __dict__ too.
        # NOTE(review): the self.__dict__ copy shadows the HistoryStat
        # copy for subsequent reads (since __getattr__ then never
        # fires); confirm this double bookkeeping is intended.
        if (hs:=self.__dict__.get('history_stat', None)) is not None:
            if key in hs.__dict__:
                hs.__dict__[key] = value
        self.__dict__[key] = value

    def get_data(self):
        """Return the stats flattened to a plain dict."""
        return self.history_stat.serialize()
class Session:
def __init__(self, data_srcs: List[DataSource], audio_manager: AudioManager, speed_estimator: SpeedEstimator, match_manager: MatchManager, stat_manager: StatManager):
self.state = 'loaded'
self.data_srcs = data_srcs
self.questions: List[Question] = []
for src in data_srcs:
self.questions += src.get_questions()
self.active_questions: List[Question] = []
self._decrease_tick()
self.prob_idx = 0
self.current_round = []
self.next_round = []
self.first_round = True
self.current_prob: Question = None
self.failed_probs = []
self.audio_manager = audio_manager
self.speed_estimator = speed_estimator
self.cache_autoplay_audio()
self.score_func = match_manager.auto_score
self.total_error = 0
self.combo = 0
self.current_timing = None
self.show_all = False
self.stat_manager = stat_manager
def cache_all_audio(self, force_download=False):
for prob in tqdm(self.questions, desc='cacheing audio'):
self.audio_manager.get_audio(
title=prob.title, force_download=force_download, lang=prob.language)
def cache_autoplay_audio(self, force_download=False):
for prob in tqdm(self.questions, desc='cacheing audio'):
if prob.autoplay:
self.audio_manager.get_audio(
title=prob.title, force_download=force_download, lang=prob.language)
def _decrease_tick(self):
for q in self.questions:
q.memory_stat.decrease_tick()
if q.memory_stat.is_active():
self.active_questions.append(q)
def save(self):
for src in self.data_srcs:
src.save()
self.speed_estimator.save()
self.stat_manager.save()
def error_msg(self, reason=''):
return {'state': self.state, "result": 'fail', 'reason': reason}
def success_msg(self, data):
data = data or {}
data['state'] = self.state
data['result'] = 'success'
return data
def when(*state):
def wrapper(func):
def inner(self, *args, **kws):
if self.state not in state:
return self.error_msg('can only be called at {}'.format(','.join(state)))
msg = func(self, *args, **kws)
return self.success_msg(msg)
return inner
return wrapper
@when("loaded")
def start(self) -> Dict[str, bool]:
config = {
'showall': False,
'shuffle': True,
}
if len(self.active_questions) == 0:
config['fastforward'] = False
self.state = 'configuring'
return config
@when("configuring")
def set_config(self, config):
if 'fastforward' in config and config['fastforward']:
while not self.active_questions:
self._decrease_tick()
if config['showall']:
self.active_questions = self.questions
self.show_all = True
if config['shuffle']:
random.shuffle(self.active_questions)
self.state = 'ready'
self.current_round = self.active_questions
self.prob_idx = 0
@when("ready", "round-end")
def next_prob(self):
if self.prob_idx < len(self.current_round):
self.current_prob = self.current_round[self.prob_idx]
self.state = 'answering'
data = self.current_prob.serialize()
data['combo'] = self.combo
data['total_error'] = self.total_error
data['round_idx'] = self.prob_idx
data['round_total'] = len(self.current_round)
data['round_remain'] = len(self.current_round) - self.prob_idx
data['total_remain'] = len(
self.current_round) + len(self.next_round) - self.prob_idx
data['next_round'] = len(self.next_round)
return data
else:
self.current_round = self.next_round
random.shuffle(self.current_round)
self.next_round = []
self.first_round = False
if not self.current_round:
self.state = 'end'
else:
self.state = 'round-end'
self.prob_idx = 0
@when("answering")
def score(self, answer: str, timing: float):
if answer == '':
return {'score': 0, 'message': 'No input'}
estimate = self.speed_estimator.estimate(self.current_prob, answer)
speed_score = self.speed_estimator.speed_score(
self.current_prob, answer, timing)
self.current_answer = answer
self.current_timing = timing
score, msg = self.score_func(self.current_prob, answer, speed_score)
msg = f'Timing: {timing:.2f}, Esti. {estimate:.2f}\n' + msg
return {'score': score, 'message': msg}
@when("answering")
def answer(self, q):
if self.first_round:
self.stat_manager.total_problems += 1
if not self.show_all:
self.current_prob.memory_stat.add_stat(q)
if q <= 3:
self.stat_manager.total_failed_problems += 1
self.failed_probs.append(self.current_prob)
self.stat_manager.total_answering += 1
self.stat_manager.score_distribution[q] += 1
if q >= 4:
self.combo += 1
else:
self.combo = 0
self.stat_manager.max_combo = max(self.stat_manager.max_combo, self.combo)
if q <= 3:
self.stat_manager.total_failed_answering += 1
self.next_round.append(self.current_prob)
self.total_error += 1
if q >= 3 and self.current_timing is not None:
self.speed_estimator.add_data(
self.current_prob, self.current_answer, self.current_timing)
self.current_answer = None
self.current_timing = None
if q >= 2:
self.prob_idx += 1
self.state = 'ready'
return {'combo': self.combo}
@when("round-end")
def get_failed_last_round(self):
return {'failed_probs': [prob.serialize() for prob in self.current_round]}
@when("answering")
def modify_answer(self, answer):
self.current_prob.answer = answer
def get_failed(self):
    """Serialize every problem that was failed at least once this session."""
    collected = []
    for prob in self.failed_probs:
        collected.append(prob.serialize())
    return {'failed_probs': collected}
def get_state(self):
    """Expose the session's current state-machine label."""
    return dict(state=self.state)
class Server:
    """Locates question files and builds study sessions.

    Shared helpers (speed estimator, audio cache, statistics) live in a
    ``.mhelper`` directory under ``root_path``; question files are the
    ``*.md`` files directly under ``search_path``.
    """

    def __init__(self, root_path, search_path=None):
        self.root_path = Path(root_path)
        # Fall back to root_path when no separate search directory is given.
        self.search_path = Path(search_path) if search_path else self.root_path
        self.files: List[Path] = list(self.search_path.glob('*.md'))
        self.speed_estimator = SpeedEstimator(
            self.root_path.joinpath('.mhelper', '.speed-estimator.json'))
        self.audio_manager = AudioManager(
            self.root_path.joinpath('.mhelper', '.audio'))
        self.match_manager = MatchManager()
        self.stat_manager = StatManager(self.root_path.joinpath('.mhelper', '.stat.json'))

    def get_file_names(self):
        """Return the stem (name without extension) of every question file."""
        return [x.stem for x in self.files]

    def new_session(self, indices, presets=None, forces=None):
        """Build a Session from the question files selected by *indices*.

        *presets* / *forces* are option lists forwarded to each data source.
        (None-sentinel defaults replace the old shared mutable ``[]``.)
        """
        presets = [] if presets is None else presets
        forces = [] if forces is None else forces
        srcs = []
        for idx in indices:
            md_file = self.files[idx]
            db_file = md_file.parent.joinpath(
                '.mhelper', md_file.stem + '.json')
            data_src = DataSource(md_file, db_file)
            data_src.set_config(presets=presets, forces=forces)
            print(data_src.load())
            # NOTE(review): the bare attribute access below is a no-op —
            # was a call such as data_src.add_message(...) intended here?
            data_src.add_message
            data_src.save()
            srcs.append(data_src)
        return Session(
            audio_manager=self.audio_manager,
            data_srcs=srcs,
            speed_estimator=self.speed_estimator,
            match_manager=self.match_manager,
            stat_manager=self.stat_manager
        )
class ConsoleFrontend:
    """Interactive terminal UI (rich + pyfiglet) driving Server/Session.

    Screens are methods named after states ('welcome', 'rounding', ...);
    ``start`` dispatches on ``self.state`` until it becomes the empty string.
    """

    best_console_size = (70, 20)

    # --- static banner / prompt text (rich markup; Chinese UI strings) ---
    logo = '[blue]' + pyfiglet.figlet_format("MemoryHelper") + '[/blue]'
    hint_ver = r' [red]v3.0[/red]'
    file_head = r'[green]---------------------------- 请选择文件 ----------------------------[/green]'
    ques_head = r'[green]---------------------------- 问 题 ----------------------------[/green]'
    anse_head = r'[green]---------------------------- 回 答 ----------------------------[/green]'
    scor_head = r'[green]---------------------------- 得 分 ----------------------------[/green]'
    roun_head = r'[green]---------------------------- 一轮结束了 ----------------------------[/green]'
    resu_head = r'[green]---------------------------- 答 案 ----------------------------[/green]'
    end_head = r'[green]---------------------------- 结 束 ----------------------------[/green]'
    stat_head = r'[green]---------------------------- 统 计 ----------------------------[/green]'
    err_input_int = r'[red]请输入一个整数[/red]'
    err_input_set = r'[red]请输入 {intset} 中的一个数[/red]'
    entry_quit = r'退出'
    entry_select_all = r'[green]都来一遍[/green]'
    entry_stat = r'统计'
    entry_about = r'关于'
    hint_sel_file = r'请选择文件(空格间隔多个文件):'
    hint_retry = r'[blue]重试[/blue]'
    hint_combos = {
        10: '[blue]' + pyfiglet.figlet_format('Comb 10\n Good!') + '[/blue]',
        20: '[yellow]' + pyfiglet.figlet_format(' Comb 20\nVery Good!') + '[/yellow]',
        30: '[purple]' + pyfiglet.figlet_format('Comb 30\nPerfect!') + '[/purple]',
        40: '[green]' + pyfiglet.figlet_format('Comb 40\n Excellent!') + '[/green]',
        50: '[pink]' + pyfiglet.figlet_format(' Comb 50\n You Made It!') + '[/pink]',
        100: '[red]' + pyfiglet.figlet_format(' Comb 100\n Unbelievable!') + '[/red]',
        200: '[red][bold]' + pyfiglet.figlet_format(' Comb 200\n Superman!') + '[/bold][/red]',
    }
    hint_invisible = r'[yellow]-- invisible --[/yellow]'
    hint_round = r'本轮: {round_idx}/{round_total} 下一轮: {next_round} 合计错误: {total_error} Combo: {combo}'
    hint_modify = r'[yellow]请输入修改后的答案[/yellow]'
    hint_esti_score = r'[purple]Score:[/purple] {score}'
    hint_input_score = r'InputScore(0~5):'
    hint_show_fails = r'是否查看上一轮错题[0/1]:'
    hint_show_fails_answer = r'用Ctrl-D查看答案'
    hint_round_end = r'休息一下,开始下一轮'
    hint_force_exit = r'[red][bold]强制退出MemoryHelper[/bold][/red]'
    hint_duration = r'[yellow]在{hour:02d}:{min:02d}内回答了{cnt}个问题[/yellow]'
    hint_max_combo = r'[pink] Max Combo: {combo} [/pink]'
    hint_loading = r'[green]读取中...[/green]'
    config_dictation = r'听写选项([bold]0默认[/bold],1强制听写,2强制不听写):'
    config_force_voice = r'声音选项([bold]0默认[/bold],1强制有声,2强制无声):'
    config_showall = r'全部都来一遍([bold]0[/bold]/1): '
    config_shuffle = r'打乱顺序([bold]1[/bold]/0)?'
    config_fastforward = r'没有问题会出现强行要做([bold]0[/bold]/1)?'
    stat_pattern = \
        r'''滚过的总问题数:{total_problems}
失败的总问题数:{total_failed_problems}
总回答数:{total_answering}
失败回答数:{total_failed_answering}
历史最大combo:{max_combo}
使用时间:{total_using_time}
分数分布:{score_distribution}
'''
    about_message = \
        r'''
[purple]KEKE[/purple]的死记硬背辅助软件
最开始是设计来背政治课的,[blue]思修[bold]军理[/bold]史纲[bold]马原[/bold]离谱性[bold]递增[/bold][/blue]
后来加入了[bold]中文匹配[/bold]和[bold]文本到语音转换[/bold],用来听写单词了
[yellow]现在的版本可以支持通用的问答记忆[/yellow]
非常适合[green]打字远快于手写[/green]的程序猿朋友
项目主页: https://github.com/KEKE046/memory-helper
软件遵循 [blue]Apache 2.0[/blue] 协议开源,欢迎提Issue
但提的Issue可能被KEKE[grey]鸽掉[/grey],想要新功能可以自己先尝试写一写'''

    def __init__(self, root_path: Path, search_path: Path):
        """Create the UI rooted at *root_path*, searching *search_path* for files."""
        self.root_path = Path(root_path)
        self.search_path = Path(search_path) if search_path else self.root_path
        self.server = Server(self.root_path, self.search_path)
        self.state = 'welcome'  # name of the next screen method to dispatch
        self.session = None
        self.console = Console()
        self.start_time = None

    def get_int(self, msg, in_set=(), default=0, multiple=False):
        """Prompt until the user types an int (or, with multiple=True, a
        whitespace-separated list of ints) contained in *in_set*.

        Empty input returns *default*; Ctrl-C exits the program.
        (``in_set`` default changed from ``[]`` to ``()`` — it is only read,
        and an immutable default avoids the shared-mutable pitfall.)
        """
        while True:
            try:
                rprint(msg, end='')
                data = input().strip()
                if not data:
                    return default
                if multiple:
                    # Raw string for the regex; '\s' in a plain literal is a
                    # deprecated escape sequence.
                    data = [int(x) for x in re.split(r'\s+', data)]
                    if in_set and all([x in in_set for x in data]):
                        return data
                else:
                    data = int(data)
                    if in_set and data in in_set:
                        return data
            except KeyboardInterrupt:
                rprint(self.hint_force_exit)
                exit(0)
            except Exception:
                # Narrowed from a bare ``except:`` — still covers ValueError
                # from int() and EOFError from input(), but no longer
                # swallows SystemExit.
                rprint(self.err_input_int)
                rprint(self.err_input_set.format(
                    intset=','.join(map(str, in_set))))

    def statistics(self):
        """Screen: show accumulated statistics, wait for Enter."""
        self.console.clear()
        rprint(self.stat_head)
        stat = self.server.stat_manager.get_data()
        rprint(self.stat_pattern.format(**stat))
        input()
        self.state = 'welcome'

    def about(self):
        """Screen: show the about text, wait for Enter."""
        self.console.clear()
        rprint(self.logo)
        rprint(self.hint_ver)
        rprint()
        rprint(self.about_message)
        input()
        self.state = 'welcome'

    def welcome(self):
        """Screen: main menu — pick files, choose force options, build a session."""
        self.console.clear()
        rprint(self.logo)
        rprint(self.hint_ver)
        rprint(self.file_head)
        files = self.server.get_file_names()
        rprint(f'[00] {self.entry_quit}')
        for i, f in enumerate(files):
            rprint(f'[{i + 1:02d}] {f}')
        idx_sel_all = len(files) + 1
        idx_stat = len(files) + 2
        idx_about = len(files) + 3
        rprint(f'[{idx_sel_all:02d}] {self.entry_select_all}')
        rprint(f'[{idx_stat:02d}] {self.entry_stat}')
        rprint(f'[{idx_about:02d}] {self.entry_about}')
        indices = self.get_int(self.hint_sel_file, range(0, idx_about + 1), default=[0], multiple=True)
        if 0 in indices:
            self.state = ''
        elif idx_stat in indices:
            self.state = 'statistics'
        elif idx_about in indices:
            self.state = 'about'
        else:
            if idx_sel_all in indices:
                # Menu entries are 1-based. The previous ``range(len(files))``
                # only covered all files by accident (index -1 wrapping to
                # the last file after the ``x - 1`` shift below).
                indices = range(1, len(files) + 1)
            rprint(self.hint_loading)
            dictation = self.get_int(self.config_dictation, [0, 1, 2])
            forces = [[], ["dictation"], ["no-dictation"]][dictation]
            if dictation != 1:
                voice = self.get_int(self.config_force_voice, [0, 1, 2])
                forces += [[], ["voice"], ["no-voice"]][voice]
            self.session = self.server.new_session(
                [x - 1 for x in indices], forces=forces)
            self.state = 'rounding'

    def start(self):
        """Run the screen state machine until a screen clears ``self.state``."""
        while self.state:
            getattr(self, self.state)()

    @staticmethod
    def wrap_markdown(s):
        """Convert markdown **bold** / *italic* to rich [yellow] / [blue] markup."""
        s = re.sub(r'\*\*(.*?)\*\*',
                   lambda x: '[yellow]' + x.group(1) + '[/yellow]', s)
        s = re.sub(
            r'\*(.*?)\*', lambda x: '[blue]' + x.group(1) + '[/blue]', s)
        return s

    def get_long_input(self):
        """Read a multi-line answer (terminated by a blank line).

        Returns (text, seconds_taken). Ctrl-D restarts the entry;
        Ctrl-C exits the program.
        """
        while True:
            data = []
            try:
                start_time = time.time()
                while True:
                    user_input = input()
                    if user_input == '':
                        break
                    data.append(user_input)
                end_time = time.time()
                data = '\n'.join(data)
                timing = end_time - start_time
                break
            except EOFError:
                rprint('\n' + self.hint_retry, end='')
                input()
            except KeyboardInterrupt:
                rprint(self.hint_force_exit)
                exit(0)
        return data, timing

    def show_combo(self, combo):
        """Show a celebratory figlet banner when *combo* hits a milestone."""
        self.console.clear()
        if combo in self.hint_combos:
            rprint(self.hint_combos[combo])
            input()

    def print_question(self, q):
        """Show one problem, collect the typed answer, score it, and record it.

        Entering -1 at the score prompt lets the user fix the stored answer
        and then re-grade the same problem (recursive call).
        """
        rprint(self.ques_head)
        if q['invisible']:
            rprint(self.hint_invisible)
        else:
            rprint(self.wrap_markdown(q['title']))
        rprint('')
        rprint(self.anse_head)
        if q['autoplay']:
            self.session.audio_manager.play_audio(q['title'])
        answer, timing = self.get_long_input()
        ret = self.session.score(answer, timing)
        score = ret['score']
        score_msg = ret['message']
        rprint(self.scor_head)
        rprint(self.wrap_markdown(score_msg))
        rprint(self.resu_head)
        if q['invisible']:
            # The title was hidden during answering; reveal it now.
            rprint(self.wrap_markdown(q['title']))
        rprint(self.wrap_markdown(q['answer']))
        rprint()
        rprint(self.hint_round.format(**q))
        rprint(self.hint_esti_score.format(score=score))
        score = self.get_int(self.hint_input_score,
                             range(-1, 6), default=score)
        if score == -1:
            rprint(self.hint_modify)
            answer, _ = self.get_long_input()
            self.session.modify_answer(answer)
            q['answer'] = answer
            self.console.clear()
            self.print_question(q)
        else:
            combo = self.session.answer(score)['combo']
            self.max_combo = max(self.max_combo, combo)
            self.show_combo(combo)

    def print_round_end_msg(self):
        """Screen fragment: optional review of last round's failures."""
        rprint(self.roun_head)
        if self.get_int(self.hint_show_fails, [0, 1]):
            failed = self.session.get_failed_last_round()['failed_probs']
            rprint(self.hint_show_fails_answer)
            for prob in failed:
                rprint(self.wrap_markdown(prob['title']))
                try:
                    input()
                except EOFError:
                    # Ctrl-D reveals the answer for this problem.
                    rprint(self.wrap_markdown(prob['answer']))
                    rprint()
        rprint(self.hint_round_end)
        input()

    def print_end_msg(self):
        """Screen fragment: session summary (duration, best combo banner)."""
        rprint(self.end_head)
        duration = self.end_time - self.start_time
        rprint(self.hint_duration.format(hour=int(duration // 60 // 60),
                                         min=int(duration // 60 % 60), cnt=int(self.total_answered)))
        input()
        keys = sorted(list(self.hint_combos.keys()))
        if any([x <= self.max_combo for x in keys]):
            rprint(self.hint_max_combo.format(combo=self.max_combo))
            # Show the highest milestone banner actually reached.
            combo = max([x for x in keys if x <= self.max_combo])
            rprint(self.hint_combos[combo])
            input()

    def rounding(self):
        """Screen: run the whole study session loop until the session ends."""
        self.start_time = time.time()
        self.total_answered = 0
        self.max_combo = 0
        config = self.session.start()
        if 'showall' in config:
            config['showall'] = bool(self.get_int(
                self.config_showall, [0, 1], default=0))
        if 'shuffle' in config:
            config['shuffle'] = bool(self.get_int(
                self.config_shuffle, [0, 1], default=1))
        if not config['showall'] and 'fastforward' in config:
            config['fastforward'] = bool(self.get_int(
                self.config_fastforward, [0, 1], default=0))
        self.session.set_config(config)
        while True:
            self.console.clear()
            ret = self.session.next_prob()
            if ret['state'] == 'end':
                break
            elif ret['state'] == 'answering':
                self.print_question(ret)
            elif ret['state'] == 'round-end':
                self.print_round_end_msg()
            self.session.save()
            self.total_answered += 1
        self.end_time = time.time()
        self.print_end_msg()
        self.state = 'welcome'
if __name__ == '__main__':
import sys
search = sys.argv[1] if len(sys.argv) > 1 else '.'
cli = ConsoleFrontend(Path(__file__).parent, search)
cli.start()
| [
"KEKE_046@pku.edu.cn"
] | KEKE_046@pku.edu.cn |
7b7636db25b9e2e083fd418062f950259431149f | 35244ce6da8ec7e86ab085c2ff17611a36d3bcd4 | /DrawCodes/MaskMakerPro.py | f79f797389960c946574a535f43b2d0b43dfd96e | [] | no_license | MRitter95/GraphCodes | c68a0e45585a22feaecb0b6481ef3cca2ed36539 | 6a561f41e908202362eba0c89964bf914ec9e712 | refs/heads/master | 2023-06-13T08:08:52.742532 | 2021-06-22T20:33:45 | 2021-06-22T20:33:45 | 302,158,298 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 140,807 | py | # -*- coding: utf-8 -*-
"""
Created on Wednesday, Oct. 25 2017
MaskMakerPro. This provides a set of functions for drawing masks
@author: Mattias Fitzpatrick
"""
from . import sdxf
from math import floor
from . import sdxf
from math import sin,cos,pi,floor,asin,acos,tan,atan,sqrt
from .alphanum import alphanum_dict
from random import randrange
class MaskError(Exception):
    """Raised when invalid parameters are passed to a MaskMaker function.

    ``value`` is a human-readable description of the problem. The class now
    derives from ``Exception``: in Python 3 ``raise`` rejects instances of
    plain (non-Exception) classes with a TypeError, so the old definition
    could never actually be raised.
    """

    def __init__(self, value):
        super().__init__(value)
        self.value = value

    def __str__(self):
        return repr(self.value)
#===============================================================================
# POINT-WISE OPERATIONS
#===============================================================================
def rotate_pt(p, angle, center=(0, 0)):
    """Rotate point p=(x, y) CCW by *angle* degrees about *center*."""
    theta = pi * angle / 180
    dx, dy = p[0] - center[0], p[1] - center[1]
    cos_t, sin_t = cos(theta), sin(theta)
    return (center[0] + dx * cos_t - dy * sin_t,
            center[1] + dx * sin_t + dy * cos_t)
def rotate_pts(points, angle, center=(0, 0)):
    """Rotate every point in *points* CCW by *angle* degrees about *center*."""
    return [rotate_pt(pt, angle, center) for pt in points]
def translate_pt(p, offset):
    """Shift point p=(x, y) by the vector offset=(dx, dy)."""
    x, y = p[0], p[1]
    return (x + offset[0], y + offset[1])
def translate_pts(points, offset):
    """Shift every point in *points* by the vector offset=(dx, dy)."""
    dx, dy = offset[0], offset[1]
    # translate_pt inlined for a single pass over the list.
    return [(p[0] + dx, p[1] + dy) for p in points]
def orient_pt(p, angle, offset):
    """Rotate p=(x, y) CCW by *angle* degrees about the origin, then shift by *offset*."""
    # rotate_pt (center at origin) and translate_pt inlined.
    theta = pi * angle / 180
    rx = p[0] * cos(theta) - p[1] * sin(theta)
    ry = p[0] * sin(theta) + p[1] * cos(theta)
    return (rx + offset[0], ry + offset[1])
def orient_pts(points, angle, offset):
    """Apply orient_pt (rotate about origin, then translate) to every point."""
    return [orient_pt(pt, angle, offset) for pt in points]
def scale_pt(p, scale):
    """Scale p=(x, y) componentwise by scale=(sx, sy)."""
    sx, sy = scale[0], scale[1]
    return (p[0] * sx, p[1] * sy)
def scale_pts(points, scale):
    """Scale every point in *points* componentwise by scale=(sx, sy)."""
    sx, sy = scale[0], scale[1]
    # scale_pt inlined for a single pass over the list.
    return [(p[0] * sx, p[1] * sy) for p in points]
def mirror_pt(p, axis_angle, axis_pt):
    """Reflect p=(x, y) across the line through *axis_pt* at *axis_angle* degrees."""
    theta = axis_angle * pi / 180.
    two_theta = 2 * theta
    dx = -axis_pt[0] + p[0]
    dy = -axis_pt[1] + p[1]
    mirrored_x = axis_pt[0] + dx * cos(two_theta) + dy * sin(two_theta)
    mirrored_y = p[1] + 2 * (axis_pt[1] - p[1]) * cos(theta) ** 2 + dx * sin(two_theta)
    return (mirrored_x, mirrored_y)
def mirror_pts(points, axis_angle, axis_pt):
    """Apply mirror_pt to every point in *points*."""
    return [mirror_pt(pt, axis_angle, axis_pt) for pt in points]
#===============================================================================
# MASK- and CHIP GENERATION
#===============================================================================
class WaferMask(sdxf.Drawing):
    """Mask class for placing chips on a wafer with a flat.

    Contains functions which:
        - layout the chips,
        - add chips to the mask,
        - create a manifest of the mask.
    etchtype False makes a chip without dicing borders (positive mask);
    etchtype True is the standard version with dicing borders.
    """

    def __init__(self, name, diameter=50800., flat_distance=24100., wafer_padding=2000, chip_size=(7000., 2000.), dicing_border=200, textsize=(800, 800), etchtype=True):
        sdxf.Drawing.__init__(self)
        self.name = name
        self.fileName = name + ".dxf"
        self.diameter = diameter
        self.flat_distance = flat_distance
        self.textsize = textsize
        self.border_width = 200  # width of line used to align wafer edge
        self.chip_size = chip_size
        self.dicing_border = dicing_border
        self.die_size = (chip_size[0] + dicing_border, chip_size[1] + dicing_border)
        self.wafer_padding = wafer_padding
        self.buffer = self.wafer_padding
        self.etchtype = etchtype
        # Arc angles (degrees, CCW from +x) where the wafer flat starts/stops.
        start_angle = 270. + 180. / pi * acos(2. * flat_distance / diameter)
        stop_angle = 270. - 180. / pi * acos(2. * flat_distance / diameter)
        iradius = (diameter - self.border_width) / 2.
        oradius = (diameter + self.border_width) / 2.
        stop_angle += 360
        # Closed ring outline: outer arc forward, then inner arc back.
        # (Unused corner points starti/starto/stopi/stopo were removed.)
        opts = arc_pts(start_angle, stop_angle, oradius)
        ipts = arc_pts(stop_angle, start_angle, iradius)
        pts = opts
        pts.append(opts[0])
        pts.append(ipts[-1])
        pts.extend(ipts)
        pts.append(opts[0])
        self.append(sdxf.PolyLine(pts))
        self.chip_points = self.get_chip_points()
        self.chip_slots = len(self.chip_points)
        self.current_point = 0
        self.manifest = []
        self.num_chips = 0

    def randomize_layout(self):
        """Shuffle chip_points so chips are inserted (pseudo-)randomly."""
        # NOTE(review): the original defined a fixed ``seed`` that was never
        # used — the swaps below rely on the module-level ``randrange`` and
        # are therefore not reproducible.
        for _ in range(10000):
            i1 = randrange(len(self.chip_points))
            i2 = randrange(len(self.chip_points))
            self.chip_points[i1], self.chip_points[i2] = (
                self.chip_points[i2], self.chip_points[i1])

    def add_chip(self, chip, copies):
        """Add chip design *copies* times into the mask.

        The chip must have a unique name as it is inserted as a DXF block.
        Raises MaskError when there are not enough free slots left.
        """
        if self.etchtype:
            ChipBorder(chip, self.dicing_border / 2)
        self.blocks.append(chip)
        slots_remaining = len(self.chip_points) - self.current_point
        for ii in range(copies):
            if self.current_point >= len(self.chip_points):
                raise MaskError("MaskError: Cannot add %d copies of chip '%s' Only %d slots on mask and %d remaining." % (copies, chip.name, len(self.chip_points), slots_remaining))
            p = self.chip_points[self.current_point]
            self.current_point += 1
            self.append(sdxf.Insert(chip.name, point=p))
            chip.label_chip(self, maskid=self.name, chipid=chip.name + str(ii + 1), offset=p)
            self.num_chips += 1
        self.manifest.append({'chip': chip, 'name': chip.name, 'copies': copies,
                              'short_desc': chip.short_description(),
                              'long_desc': chip.long_description()})
        chip.save(fname=self.name + "-" + chip.name, maskid=self.name, chipid=chip.name)

    def save_manifest(self, name=None):
        """Write a tab-separated manifest of every added chip to <name>_manifest.txt."""
        if name is None: name = self.name
        if name[-4:] != ".txt": name += "_manifest.txt"
        # Context manager guarantees the file is closed even on write errors.
        with open(name, 'w') as f:
            f.write("Mask:\t%s\tTotal Chips:\t%d\n" % (self.name, self.current_point))
            f.write("ID\tCopies\tShort Description\tChip Type\tChip Info\n")
            for m in self.manifest:
                f.write("%(name)s\t%(copies)d\t%(short_desc)s\n" % m)
            for m in self.manifest:
                f.write("______________________\n%(name)s\t%(copies)d\t%(long_desc)s\n\n" % m)

    def save_dxf(self, name=None):
        """Write the mask drawing itself to <name>.dxf."""
        if name is None: name = self.name
        if name[-4:] != ".dxf": name += ".dxf"
        with open(name, 'w') as f:
            f.write(str(self))

    def save(self, name=None):
        """Save both the .dxf drawing and the manifest."""
        self.save_dxf(name)
        self.save_manifest(name)

    def point_inside(self, pt):
        """True if the point is on the usable (padded) wafer area."""
        return (pt[0] ** 2 + pt[1] ** 2 < (self.diameter / 2 - self.buffer) ** 2) and (pt[1] > -self.flat_distance + self.buffer)

    def die_inside(self, pt):
        """True if a die of size self.die_size anchored at *pt* fits on the wafer."""
        return self.point_inside(pt) and self.point_inside(translate_pt(pt, (self.die_size[0], 0))) and self.point_inside(translate_pt(pt, (self.die_size[0], self.die_size[1]))) and self.point_inside(translate_pt(pt, (0, self.die_size[1])))

    def get_chip_points(self):
        """Get insertion points for all of the chips (layout wafer)."""
        max_cols = int((self.diameter - 2 * self.buffer) / self.die_size[0])
        max_rows = int((self.diameter - 2 * self.buffer) / self.die_size[1])
        print("Maximum number of rows=%d and cols=%d" % (max_rows, max_cols))
        # Offset so the grid of dies is centered on the wafer.
        xoffset = -max_cols / 2. * self.die_size[0]
        yoffset = -max_rows / 2. * self.die_size[1]
        chip_points = []
        for ii in range(max_rows):
            for jj in range(max_cols):
                pt = (xoffset + jj * self.die_size[0], yoffset + ii * self.die_size[1])
                if self.die_inside(pt):
                    chip_points.append(translate_pt(pt, (self.dicing_border / 2, self.dicing_border / 2)))
        print("Room for %d chips on wafer." % len(chip_points))
        return chip_points
class Chip(sdxf.Block):
    """A named DXF block holding the structures of a single chip.

    Records convenient reference points (corners, edge midpoints, center)
    and knows how to label itself and save itself to a standalone .dxf.
    """

    def __init__(self, name, size=(7000., 2000.), mask_id_loc=(0, 0), chip_id_loc=(0, 0), textsize=(160, 160)):
        """*size* is a tuple (xsize, ysize) in drawing units."""
        sdxf.Block.__init__(self, name)
        self.name = name
        self.size = size
        self.mask_id_loc = mask_id_loc
        self.chip_id_loc = chip_id_loc
        self.textsize = textsize
        width, height = size[0], size[1]
        # Handy reference points around the chip outline.
        self.left_midpt = (0, height / 2.)
        self.right_midpt = (width, height / 2.)
        self.top_midpt = (width / 2., height)
        self.bottom_midpt = (width / 2., 0)
        self.midpt = (width / 2., height / 2.)
        self.bottomleft_corner = (0, 0)
        self.topleft_corner = (0, height)
        self.topright_corner = (width, height)
        self.bottomright_corner = (width, 0)

    def label_chip(self, drawing, maskid, chipid, offset=(0, 0)):
        """Draw maskid/chipid text into *drawing* at the stored label
        locations, optionally shifted by *offset*. *drawing* may be a
        Drawing or a Block (including this chip itself)."""
        AlphaNumText(drawing, maskid, self.textsize, translate_pt(self.mask_id_loc, offset))
        AlphaNumText(drawing, chipid, self.textsize, translate_pt(self.chip_id_loc, offset))

    def save(self, fname=None, maskid=None, chipid=None):
        """Save this chip to its own .dxf (named after the chip by default)
        and label it with the given ids."""
        if fname is None:
            fname = self.name + '.dxf'
        if not fname.endswith('.dxf'):
            fname += '.dxf'
        drawing = sdxf.Drawing()
        drawing.blocks.append(self)
        drawing.append(sdxf.Insert(self.name, point=(0, 0)))
        self.label_chip(drawing, maskid, chipid)
        drawing.saveas(fname)
class Structure:
    """Tracks a drawing cursor (current position and direction) on a chip.

    ``defaults`` holds fallback parameters (e.g. pin/gap widths) that
    substructures may read.

    Args:
        chip: the Chip (or Drawing) that shapes are appended to.
        start: initial (x, y) position; also recorded as ``last``.
        direction: initial direction in degrees CCW from +x.
        layer, color: drawing attributes recorded for substructures.
        defaults: parameter dict; copied so callers' dicts are never mutated.
            (None-sentinel replaces the old mutable ``{}`` default.)
    """

    def __init__(self, chip, start=(0, 0), direction=0, layer="structures", color=1, defaults=None):
        self.chip = chip
        self.start = start
        self.last = start
        self.last_direction = direction
        self.layer = layer
        self.color = color
        self.defaults = {} if defaults is None else defaults.copy()
        self.structures = []

    def append(self, shape):
        """Convenience passthrough to the chip's append method."""
        self.chip.append(shape)
#===============================================================================
# CPW COMPONENTS
#===============================================================================
class CPWStraight:
    """A straight section of CPW transmission line."""

    def __init__(self, structure, length, pinw=None, gapw=None):
        """Append a straight CPW section of the given *length* to *structure*.

        pinw/gapw default to the structure's stored defaults. Advances the
        structure's cursor by *length* along its current direction.
        """
        if length == 0:
            return
        s = structure
        origin = structure.last
        if pinw is None:
            pinw = structure.defaults['pinw']
        if gapw is None:
            gapw = structure.defaults['gapw']
        x0, y0 = origin[0], origin[1]
        # Upper and lower gap rectangles as closed 5-point outlines.
        upper = [(x0, y0 + pinw / 2),
                 (x0 + length, y0 + pinw / 2),
                 (x0 + length, y0 + pinw / 2 + gapw),
                 (x0, y0 + pinw / 2 + gapw),
                 (x0, y0 + pinw / 2)]
        lower = [(x0, y0 - pinw / 2),
                 (x0 + length, y0 - pinw / 2),
                 (x0 + length, y0 - pinw / 2 - gapw),
                 (x0, y0 - pinw / 2 - gapw),
                 (x0, y0 - pinw / 2)]
        # Rotate into the structure's current direction about the start point.
        upper = rotate_pts(upper, s.last_direction, origin)
        lower = rotate_pts(lower, s.last_direction, origin)
        s.last = rotate_pt((x0 + length, y0), s.last_direction, origin)
        s.append(sdxf.PolyLine(upper))
        s.append(sdxf.PolyLine(lower))
class CPWStraight_Bridges_Layer1:
"A straight section of CPW transmission line, that has markers on either side for making bridges"
def __init__(self, structure,length,br_base,br_width,pinw=None,gapw=None):
" Adds a straight section of CPW transmission line of length = length to the structure"
if length==0: return
s=structure
start=structure.last
if pinw is None: pinw=structure.defaults['pinw']
if gapw is None: gapw=structure.defaults['gapw']
"This shifts the edge of the bridge from the edge of the ground plane"
br_shift = 10.
gap1=[ (start[0],start[1]+pinw/2),
(start[0]+length,start[1]+pinw/2),
(start[0]+length,start[1]+pinw/2+gapw),
(start[0],start[1]+pinw/2+gapw),
(start[0],start[1]+pinw/2)
]
gap2=[ (start[0],start[1]-pinw/2),
(start[0]+length,start[1]-pinw/2),
(start[0]+length,start[1]-pinw/2-gapw),
(start[0],start[1]-pinw/2-gapw),
(start[0],start[1]-pinw/2)
]
if length < 5*br_width:
raise MaskError("Consider fewer bridges!!")
"The commented code makes holes on either side of the resonators"
# br_11=[ (start[0] + length/4 - br_width/2., start[1] - pinw/2 - gapw - br_shift - br_base),
# (start[0] + length/4 + br_width/2., start[1] - pinw/2 - gapw - br_shift - br_base),
# (start[0] + length/4 + br_width/2., start[1] - pinw/2 - gapw - br_shift),
# (start[0] + length/4 - br_width/2., start[1] - pinw/2 - gapw -br_shift),
# (start[0] + length/4 - br_width/2., start[1] - pinw/2 -gapw - br_shift - br_base)
# ]
# br_12=[ (start[0] + length/4 - br_width/2., start[1] + pinw/2 + gapw + br_shift + br_base),
# (start[0] + length/4 + br_width/2., start[1] + pinw/2 + gapw + br_shift + br_base),
# (start[0] + length/4 + br_width/2., start[1] + pinw/2 + gapw + br_shift),
# (start[0] + length/4 - br_width/2., start[1] + pinw/2 + gapw + br_shift),
# (start[0] + length/4 - br_width/2., start[1] + pinw/2 + gapw + br_shift + br_base)
# ]
#
# br_21=[ (start[0] + 3*length/4 - br_width/2., start[1] - pinw/2 - gapw - br_shift - br_base),
# (start[0] + 3*length/4 + br_width/2., start[1] - pinw/2 - gapw - br_shift - br_base),
# (start[0] + 3*length/4 + br_width/2., start[1] - pinw/2 - gapw - br_shift),
# (start[0] + 3*length/4 - br_width/2., start[1] - pinw/2 - gapw -br_shift),
# (start[0] + 3*length/4 - br_width/2., start[1] - pinw/2 -gapw - br_shift - br_base)
# ]
# br_22=[ (start[0] + 3*length/4 - br_width/2., start[1] + pinw/2 + gapw + br_shift + br_base),
# (start[0] + 3*length/4 + br_width/2., start[1] + pinw/2 + gapw + br_shift + br_base),
# (start[0] + 3*length/4 + br_width/2., start[1] + pinw/2 + gapw + br_shift),
# (start[0] + 3*length/4 - br_width/2., start[1] + pinw/2 + gapw + br_shift),
# (start[0] + 3*length/4 - br_width/2., start[1] + pinw/2 + gapw + br_shift + br_base)
# ]
brTop_11=[ (start[0] + length/4. - br_width/2., start[1] - pinw/2. - gapw - br_shift),
(start[0] + length/4 + br_width/2., start[1] - pinw/2. - gapw - br_shift),
(start[0] + length/4 + br_width/2., start[1] + pinw/2. + gapw + br_shift),
(start[0] + length/4 - br_width/2., start[1] + pinw/2. + gapw + br_shift),
(start[0] + length/4. - br_width/2., start[1] - pinw/2. - gapw - br_shift)
]
brTop_12=[ (start[0] + length/4. - br_width/2., start[1] - pinw/2. - gapw - br_shift - br_base),
(start[0] + length/4 + br_width/2., start[1] - pinw/2. - gapw - br_shift - br_base),
(start[0] + length/4 + br_width/2., start[1] + pinw/2. + gapw + br_shift + br_base),
(start[0] + length/4 - br_width/2., start[1] + pinw/2. + gapw + br_shift + br_base),
(start[0] + length/4. - br_width/2., start[1] - pinw/2. - gapw - br_shift - br_base)
]
brTop_21=[ (start[0] + 3*length/4. - br_width/2., start[1] - pinw/2. - gapw - br_shift),
(start[0] + 3*length/4 + br_width/2., start[1] - pinw/2. - gapw - br_shift),
(start[0] + 3*length/4 + br_width/2., start[1] + pinw/2. + gapw + br_shift),
(start[0] + 3*length/4 - br_width/2., start[1] + pinw/2. + gapw + br_shift),
(start[0] + 3*length/4. - br_width/2., start[1] - pinw/2. - gapw - br_shift)
]
brTop_22=[ (start[0] + 3*length/4. - br_width/2., start[1] - pinw/2. - gapw - br_shift - br_base),
(start[0] + 3*length/4 + br_width/2., start[1] - pinw/2. - gapw - br_shift - br_base),
(start[0] + 3*length/4 + br_width/2., start[1] + pinw/2. + gapw + br_shift + br_base),
(start[0] + 3*length/4 - br_width/2., start[1] + pinw/2. + gapw + br_shift + br_base),
(start[0] + 3*length/4. - br_width/2., start[1] - pinw/2. - gapw - br_shift - br_base)
]
brTop_11=rotate_pts(brTop_11,s.last_direction,start)
brTop_21=rotate_pts(brTop_21,s.last_direction,start)
brTop_12=rotate_pts(brTop_12,s.last_direction,start)
brTop_22=rotate_pts(brTop_22,s.last_direction,start)
gap1=rotate_pts(gap1,s.last_direction,start)
gap2=rotate_pts(gap2,s.last_direction,start)
stop=rotate_pt((start[0]+length,start[1]),s.last_direction,start)
s.last=stop
# s.layers.append(sdxf.Layer(name="BridgeLayer1"))
s.append(sdxf.PolyLine(gap1))
s.append(sdxf.PolyLine(gap2))
s.append(sdxf.PolyLine(brTop_11,layer="BridgeLayer1"))
s.append(sdxf.PolyLine(brTop_21,layer="BridgeLayer1"))
s.append(sdxf.PolyLine(brTop_12,layer="BridgeLayer2"))
s.append(sdxf.PolyLine(brTop_22,layer="BridgeLayer2"))
class CPWQubitNotch:
    """A version of CPWStraight that cuts out a notch for a qubit."""

    def __init__(self, structure, notch_width, notch_height, pinw=None, gapw=None):
        """Add a CPW section of length *notch_width* whose upper gap is
        enlarged into a notch of height *notch_height* (for a qubit), and
        draw a cross-shaped alignment marker on each side of the notch.

        Parameters:
            structure: Structure whose cursor is advanced by notch_width.
            notch_height: height of the qubit notch above the pin.
            notch_width: width of the qubit notch along the line.
            pinw, gapw: pin/gap widths; default from structure.defaults.
        """
        if notch_width == 0: return
        s = structure
        start = s.last
        if pinw is None: pinw = structure.defaults['pinw']
        if gapw is None: gapw = structure.defaults['gapw']
        # Marker offset from the notch corner and width of each marker arm.
        align_shift = 20.
        align_width = 10.
        # (A commented-out three-segment variant of gap1 was removed here.)
        # Upper cut: the notch rectangle replaces the normal upper gap.
        gap1 = [(start[0], start[1] + pinw / 2),
                (start[0], start[1] + pinw / 2 + notch_height),
                (start[0] + notch_width, start[1] + pinw / 2 + notch_height),
                (start[0] + notch_width, start[1] + pinw / 2),
                (start[0], start[1] + pinw / 2)
                ]
        # Lower cut: the standard gap rectangle.
        gap2 = [(start[0], start[1] - pinw / 2),
                (start[0] + notch_width, start[1] - pinw / 2),
                (start[0] + notch_width, start[1] - pinw / 2 - gapw),
                (start[0], start[1] - pinw / 2 - gapw),
                (start[0], start[1] - pinw / 2)
                ]
        "Qbit alignment marker"
        # Cross-shaped marker to the upper-left of the notch.
        alignment_marker1 = [(start[0] - align_shift, start[1] + align_shift + notch_height),
                             (start[0] - align_shift - align_width, start[1] + align_shift + notch_height),
                             (start[0] - align_shift - align_width, start[1] + align_shift + align_width + notch_height),
                             (start[0] - align_shift - 2 * align_width, start[1] + align_shift + align_width + notch_height),
                             (start[0] - align_shift - 2 * align_width, start[1] + align_shift + 2 * align_width + notch_height),
                             (start[0] - align_shift - align_width, start[1] + align_shift + 2 * align_width + notch_height),
                             (start[0] - align_shift - align_width, start[1] + align_shift + 3 * align_width + notch_height),
                             (start[0] - align_shift, start[1] + align_shift + 3 * align_width + notch_height),
                             (start[0] - align_shift, start[1] + align_shift + 2 * align_width + notch_height),
                             (start[0] - align_shift + align_width, start[1] + align_shift + 2 * align_width + notch_height),
                             (start[0] - align_shift + align_width, start[1] + align_shift + align_width + notch_height),
                             (start[0] - align_shift, start[1] + align_shift + align_width + notch_height),
                             (start[0] - align_shift, start[1] + align_shift + notch_height)
                             ]
        "Qbit alignment marker"
        # Mirror-image cross marker to the upper-right of the notch.
        alignment_marker2 = [(start[0] + align_shift + notch_width, start[1] + align_shift + notch_height),
                             (start[0] + align_shift + align_width + notch_width, start[1] + align_shift + notch_height),
                             (start[0] + align_shift + align_width + notch_width, start[1] + align_shift + align_width + notch_height),
                             (start[0] + align_shift + 2 * align_width + notch_width, start[1] + align_shift + align_width + notch_height),
                             (start[0] + align_shift + 2 * align_width + notch_width, start[1] + align_shift + 2 * align_width + notch_height),
                             (start[0] + align_shift + align_width + notch_width, start[1] + align_shift + 2 * align_width + notch_height),
                             (start[0] + align_shift + align_width + notch_width, start[1] + align_shift + 3 * align_width + notch_height),
                             (start[0] + align_shift + notch_width, start[1] + align_shift + 3 * align_width + notch_height),
                             (start[0] + align_shift + notch_width, start[1] + align_shift + 2 * align_width + notch_height),
                             (start[0] + align_shift - align_width + notch_width, start[1] + align_shift + 2 * align_width + notch_height),
                             (start[0] + align_shift - align_width + notch_width, start[1] + align_shift + align_width + notch_height),
                             (start[0] + align_shift + notch_width, start[1] + align_shift + align_width + notch_height),
                             (start[0] + align_shift + notch_width, start[1] + align_shift + notch_height)
                             ]
        # Rotate into the structure's current direction and advance the cursor.
        gap1 = rotate_pts(gap1, s.last_direction, start)
        gap2 = rotate_pts(gap2, s.last_direction, start)
        alignment_marker1 = rotate_pts(alignment_marker1, s.last_direction, start)
        alignment_marker2 = rotate_pts(alignment_marker2, s.last_direction, start)
        stop = rotate_pt((start[0] + notch_width, start[1]), s.last_direction, start)
        s.last = stop
        s.append(sdxf.PolyLine(gap1))
        s.append(sdxf.PolyLine(gap2))
        s.append(sdxf.PolyLine(alignment_marker1))
        s.append(sdxf.PolyLine(alignment_marker2))
class CPWQubitNotch_inverted:
    "A version of CPWStraight that cuts out a notch for a qubit"
    def __init__(self,structure,notch_width,notch_height,pinw=None,gapw=None):
        """
        Cut a rectangular qubit notch into a straight CPW section (inverted variant:
        the notch is cut below the center pin, the normal gap is drawn above).

        Parameters
        notch_width = width of the qubit notch along the propagation direction (no-op when 0)
        notch_height = height (depth) of the qubit notch
        pinw, gapw = center-pin and gap widths; taken from structure.defaults when None
        """
        if notch_width == 0: return
        s=structure
        start=s.last
        if pinw is None: pinw=structure.defaults['pinw']
        if gapw is None: gapw=structure.defaults['gapw']
        # Offset of the cross-shaped alignment markers from the notch corner,
        # and the arm width of the crosses.
        align_shift = 20.
        align_width = 10.
        # gap1: rectangle of depth notch_height cut below the center pin (the notch itself).
        gap1=[ (start[0],start[1]-pinw/2),
            (start[0],start[1]-pinw/2-notch_height),
            (start[0]+notch_width,start[1]-pinw/2-notch_height),
            (start[0]+notch_width,start[1]-pinw/2),
            (start[0],start[1]-pinw/2)
            ]
        # gap2: ordinary CPW gap of width gapw above the center pin, spanning the notch.
        gap2=[ (start[0],start[1]+pinw/2),
            (start[0]+notch_width,start[1]+pinw/2),
            (start[0]+notch_width,start[1]+pinw/2+gapw),
            (start[0],start[1]+pinw/2+gapw),
            (start[0],start[1]+pinw/2)
            ]
        "Qbit alignment marker"
        # Closed 13-vertex cross-shaped marker, offset left of the notch and
        # below the notch floor by align_shift.
        alignment_marker1=[ (start[0]- align_shift,start[1] - align_shift - notch_height),
            (start[0] - align_shift - align_width, start[1] - align_shift- notch_height),
            (start[0] - align_shift - align_width,start[1] - align_shift - align_width- notch_height),
            (start[0] - align_shift - 2*align_width,start[1] - align_shift - align_width- notch_height),
            (start[0] - align_shift - 2*align_width,start[1] - align_shift - 2*align_width- notch_height),
            (start[0] - align_shift - align_width,start[1] - align_shift - 2*align_width- notch_height),
            (start[0] - align_shift - align_width,start[1] - align_shift - 3*align_width- notch_height),
            (start[0] - align_shift,start[1] - align_shift - 3*align_width - notch_height),
            (start[0] - align_shift,start[1] - align_shift - 2*align_width - notch_height),
            (start[0] - align_shift + align_width,start[1] - align_shift - 2*align_width- notch_height),
            (start[0] - align_shift + align_width,start[1] - align_shift - align_width- notch_height),
            (start[0] - align_shift,start[1] - align_shift - align_width - notch_height),
            (start[0] - align_shift,start[1] - align_shift - notch_height)
            ]
        "Qbit alignment marker"
        # Mirror-image cross marker on the right-hand side of the notch.
        alignment_marker2=[ (start[0]+ align_shift + notch_width,start[1] - align_shift - notch_height),
            (start[0] + align_shift + align_width+ notch_width, start[1] - align_shift- notch_height),
            (start[0] + align_shift + align_width+ notch_width,start[1] - align_shift - align_width - notch_height),
            (start[0] + align_shift + 2*align_width+ notch_width,start[1] - align_shift - align_width - notch_height),
            (start[0] + align_shift + 2*align_width+ notch_width,start[1] - align_shift - 2*align_width - notch_height),
            (start[0] + align_shift + align_width+ notch_width,start[1] - align_shift - 2*align_width - notch_height),
            (start[0] + align_shift + align_width+ notch_width,start[1] - align_shift - 3*align_width - notch_height),
            (start[0] + align_shift+ notch_width,start[1] - align_shift - 3*align_width - notch_height),
            (start[0] + align_shift+ notch_width,start[1] - align_shift - 2*align_width - notch_height),
            (start[0] + align_shift - align_width+ notch_width,start[1] - align_shift - 2*align_width - notch_height),
            (start[0] + align_shift - align_width+ notch_width,start[1] - align_shift - align_width - notch_height),
            (start[0] + align_shift+ notch_width,start[1] - align_shift - align_width - notch_height),
            (start[0] + align_shift+ notch_width,start[1] - align_shift - notch_height)
            ]
        # Rotate all geometry into the structure's current heading about the anchor.
        gap1=rotate_pts(gap1,s.last_direction,start)
        gap2=rotate_pts(gap2,s.last_direction,start)
        alignment_marker1=rotate_pts(alignment_marker1,s.last_direction,start)
        alignment_marker2=rotate_pts(alignment_marker2,s.last_direction,start)
        # Advance the drawing anchor by notch_width along the current direction.
        stop=rotate_pt((start[0]+notch_width,start[1]),s.last_direction,start)
        s.last=stop
        # Append the closed polylines to the drawing.
        s.append(sdxf.PolyLine(gap1))
        s.append(sdxf.PolyLine(gap2))
        s.append(sdxf.PolyLine(alignment_marker1))
        s.append(sdxf.PolyLine(alignment_marker2))
class CPWLinearTaper:
    """A section of CPW which (linearly) tapers from one set of start_pinw and start_gapw to stop_pinw and stop_gapw over length=length"""
    def __init__(self, structure,length,start_pinw,stop_pinw,start_gapw,stop_gapw):
        """Draw the two tapered gap trapezoids and advance the structure anchor."""
        # A zero-length taper draws nothing and leaves the structure untouched.
        if length==0: return
        struct = structure
        origin = struct.last
        x0 = origin[0]
        y0 = origin[1]
        heading = struct.last_direction
        # One trapezoidal gap on each side of the center pin; `side` selects
        # above (+1) or below (-1) the pin.  Each trapezoid runs from the
        # start widths at x0 to the stop widths at x0+length.
        for side in (+1, -1):
            trapezoid = [
                (x0, y0 + side*start_pinw/2),
                (x0 + length, y0 + side*stop_pinw/2),
                (x0 + length, y0 + side*stop_pinw/2 + side*stop_gapw),
                (x0, y0 + side*start_pinw/2 + side*start_gapw),
                (x0, y0 + side*start_pinw/2),
            ]
            # Rotate into the structure's current heading, then add to the drawing.
            struct.append(sdxf.PolyLine(rotate_pts(trapezoid, heading, origin)))
        # Move the anchor to the far end of the taper.
        struct.last = rotate_pt((x0 + length, y0), heading, origin)
#----------------------------------------------------------------------------------------------------
class Inner_end_cap:
    """Hexagonal inner end cap for one arm of a 3-way coupling capacitor."""
    def __init__(self,structure,cap_length,cap_gap,start_pinw,stop_pinw,start_gapw,stop_gapw,capBuffer):
        if cap_length==0: return
        """
        Class that draws a singlehexagonal endcap for one part of the 3way coupling capacitor
        variables:
        cap_length= linear length of end cap
        cap_gap= width of capacitive gap between end caps
        start_pinw= beginning width of end cap
        stop_pinw= width of end cap before taper
        """
        """
        The issue with this code is that for small cap_gap, the space between the capacitor and ground plane isn't big enough.
        """
        "Load attributes"
        # NOTE(review): capBuffer is accepted for signature parity with the
        # sibling classes but is not used in this variant.
        s=structure
        start=s.last
        # Work with the distance to the cap tip excluding half the capacitive gap.
        cap_length = cap_length - cap_gap/2
        start_taperX= cap_length-(stop_pinw/2)/tan(60*pi/180) # X-pos of where the centerpin taper starts
        start_taperY=((cap_length-start_taperX)+cap_gap/2)*tan(60*pi/180)  # NOTE(review): computed but never used
        "Intersection point calculations"
        # Where the 60-degree tip edge meets the tapering ground-plane edge.
        x_intersect=(start_pinw/2 + start_gapw - sqrt(3)*(cap_length+cap_gap/2))/(-sqrt(3)-(stop_gapw/start_taperX))
        y_intersect=-sqrt(3)*(x_intersect-(cap_length+cap_gap/2))
        "draw points that form the end cap geometry"
        # Closed polygon: outer gap outline out to the tip at x=cap_length+cap_gap/2,
        # then back along the tapered center pin whose point sits at x=cap_length.
        EndCap=[
            (start[0],start[1]+start_pinw/2),
            (start[0],start[1]+start_pinw/2+start_gapw),
            (start[0]+x_intersect,start[1]+y_intersect),
            (start[0]+cap_length+cap_gap/2,start[1]),
            (start[0]+x_intersect,start[1]-y_intersect),
            (start[0],start[1]-start_pinw/2-start_gapw),
            (start[0],start[1]-start_pinw/2),
            (start[0]+start_taperX,start[1]-stop_pinw/2),
            (start[0]+cap_length,start[1]),
            (start[0]+start_taperX,start[1]+stop_pinw/2),
            (start[0],start[1]+start_pinw/2)
            ]
        "rotate structure to proper orientation"
        EndCap=rotate_pts(EndCap,s.last_direction,start)
        "create polylines and append to drawing /connect the dots"
        s.append(sdxf.PolyLine(EndCap))
        "update last anchor position"
        stop=rotate_pt((start[0]+cap_length+cap_gap/2,start[1]),s.last_direction,start)
        s.last=stop
class Inner_end_cap_buffer:
    """Inner_end_cap variant whose pin edges start bufferDistance past the anchor."""
    def __init__(self,structure,cap_length,cap_gap,start_pinw,stop_pinw,start_gapw,stop_gapw,capBuffer,bufferDistance):
        if cap_length==0: return
        """
        Class that draws a singlehexagonal endcap for one part of the 3way coupling capacitor
        variables:
        cap_length= linear length of end cap
        cap_gap= width of capacitive gap between end caps
        start_pinw= beginning width of end cap
        stop_pinw= width of end cap before taper
        """
        """
        The issue with this code is that for small cap_gap, the space between the capacitor and ground plane isn't big enough.
        """
        "Load attributes"
        # NOTE(review): capBuffer is accepted for signature parity but unused here;
        # bufferDistance shifts the inner pin-edge vertices along +x.
        s=structure
        start=s.last
        # Work with the distance to the cap tip excluding half the capacitive gap.
        cap_length = cap_length - cap_gap/2
        start_taperX= cap_length-(stop_pinw/2)/tan(60*pi/180) # X-pos of where the centerpin taper starts
        start_taperY=((cap_length-start_taperX)+cap_gap/2)*tan(60*pi/180)  # NOTE(review): computed but never used
        "Intersection point calculations"
        # Where the 60-degree tip edge meets the tapering ground-plane edge.
        x_intersect=(start_pinw/2 + start_gapw - sqrt(3)*(cap_length+cap_gap/2))/(-sqrt(3)-(stop_gapw/start_taperX))
        y_intersect=-sqrt(3)*(x_intersect-(cap_length+cap_gap/2))
        "draw points that form the end cap geometry"
        # Same closed outline as Inner_end_cap, with the pin edges starting at
        # x=bufferDistance instead of x=0.
        EndCap=[
            (start[0]+bufferDistance,start[1]+start_pinw/2),
            (start[0],start[1]+start_pinw/2),
            (start[0],start[1]+start_pinw/2+start_gapw),
            (start[0]+x_intersect,start[1]+y_intersect),
            (start[0]+cap_length+cap_gap/2,start[1]),
            (start[0]+x_intersect,start[1]-y_intersect),
            (start[0],start[1]-start_pinw/2-start_gapw),
            (start[0],start[1]-start_pinw/2),
            (start[0]+bufferDistance,start[1]-start_pinw/2),
            (start[0]+start_taperX,start[1]-stop_pinw/2),
            (start[0]+cap_length,start[1]),
            (start[0]+start_taperX,start[1]+stop_pinw/2),
            (start[0]+bufferDistance,start[1]+start_pinw/2),
            ]
        "rotate structure to proper orientation"
        EndCap=rotate_pts(EndCap,s.last_direction,start)
        "create polylines and append to drawing /connect the dots"
        s.append(sdxf.PolyLine(EndCap))
        "update last anchor position"
        stop=rotate_pt((start[0]+cap_length+cap_gap/2,start[1]),s.last_direction,start)
        s.last=stop
class Inner_end_cap_bondpad:
    """Inner_end_cap variant whose outer gap outline uses the bondpad (Linear) widths."""
    def __init__(self,structure,cap_length,cap_gap,start_pinw,stop_pinw,start_gapw,stop_gapw,capBuffer,start_pinwLinear,start_gapwLinear):
        if cap_length==0: return
        """
        Class that draws a singlehexagonal endcap for one part of the 3way coupling capacitor
        variables:
        cap_length= linear length of end cap
        cap_gap= width of capacitive gap between end caps
        start_pinw= beginning width of end cap
        stop_pinw= width of end cap before taper
        """
        """
        The issue with this code is that for small cap_gap, the space between the capacitor and ground plane isn't big enough.
        """
        "Load attributes"
        # NOTE(review): capBuffer is accepted for signature parity but unused here.
        # start_pinwLinear/start_gapwLinear replace start_pinw/start_gapw in the
        # drawn outline; start_pinw/start_gapw still set the intersection point.
        s=structure
        start=s.last
        # Work with the distance to the cap tip excluding half the capacitive gap.
        cap_length = cap_length - cap_gap/2
        start_taperX= cap_length-(stop_pinw/2)/tan(60*pi/180) # X-pos of where the centerpin taper starts
        start_taperY=((cap_length-start_taperX)+cap_gap/2)*tan(60*pi/180)  # NOTE(review): computed but never used
        "Intersection point calculations"
        # Where the 60-degree tip edge meets the tapering ground-plane edge.
        x_intersect=(start_pinw/2 + start_gapw - sqrt(3)*(cap_length+cap_gap/2))/(-sqrt(3)-(stop_gapw/start_taperX))
        y_intersect=-sqrt(3)*(x_intersect-(cap_length+cap_gap/2))
        "draw points that form the end cap geometry"
        # Closed polygon: outer outline drawn with the Linear widths, returning
        # along the tapered center pin.
        EndCap=[
            (start[0],start[1]+start_pinwLinear/2),
            (start[0],start[1]+start_pinwLinear/2+start_gapwLinear),
            (start[0]+x_intersect,start[1]+y_intersect),
            (start[0]+cap_length+cap_gap/2,start[1]),
            (start[0]+x_intersect,start[1]-y_intersect),
            (start[0],start[1]-start_pinwLinear/2-start_gapwLinear),
            (start[0],start[1]-start_pinwLinear/2),
            (start[0]+start_taperX,start[1]-stop_pinw/2),
            (start[0]+cap_length,start[1]),
            (start[0]+start_taperX,start[1]+stop_pinw/2),
            (start[0],start[1]+start_pinwLinear/2)
            ]
        "rotate structure to proper orientation"
        EndCap=rotate_pts(EndCap,s.last_direction,start)
        "create polylines and append to drawing /connect the dots"
        s.append(sdxf.PolyLine(EndCap))
        "update last anchor position"
        stop=rotate_pt((start[0]+cap_length+cap_gap/2,start[1]),s.last_direction,start)
        s.last=stop
class Inner_end_cap_bondpad_buffer:
    """Inner_end_cap variant combining the bondpad (Linear) widths with a bufferDistance offset."""
    def __init__(self,structure,cap_length,cap_gap,start_pinw,stop_pinw,start_gapw,stop_gapw,capBuffer,start_pinwLinear,start_gapwLinear,bufferDistance):
        if cap_length==0: return
        """
        Class that draws a singlehexagonal endcap for one part of the 3way coupling capacitor
        variables:
        cap_length= linear length of end cap
        cap_gap= width of capacitive gap between end caps
        start_pinw= beginning width of end cap
        stop_pinw= width of end cap before taper
        """
        """
        The issue with this code is that for small cap_gap, the space between the capacitor and ground plane isn't big enough.
        """
        "Load attributes"
        # NOTE(review): capBuffer is accepted for signature parity but unused here.
        # The outline uses the Linear widths and starts its pin edges at
        # x=bufferDistance; start_pinw/start_gapw only set the intersection point.
        s=structure
        start=s.last
        # Work with the distance to the cap tip excluding half the capacitive gap.
        cap_length = cap_length - cap_gap/2
        start_taperX= cap_length-(stop_pinw/2)/tan(60*pi/180) # X-pos of where the centerpin taper starts
        start_taperY=((cap_length-start_taperX)+cap_gap/2)*tan(60*pi/180)  # NOTE(review): computed but never used
        "Intersection point calculations"
        # Where the 60-degree tip edge meets the tapering ground-plane edge.
        x_intersect=(start_pinw/2 + start_gapw - sqrt(3)*(cap_length+cap_gap/2))/(-sqrt(3)-(stop_gapw/start_taperX))
        y_intersect=-sqrt(3)*(x_intersect-(cap_length+cap_gap/2))
        "draw points that form the end cap geometry"
        EndCap=[
            (start[0]+bufferDistance,start[1]+start_pinwLinear/2),
            (start[0],start[1]+start_pinwLinear/2),
            (start[0],start[1]+start_pinwLinear/2+start_gapwLinear),
            (start[0]+x_intersect,start[1]+y_intersect),
            (start[0]+cap_length+cap_gap/2,start[1]),
            (start[0]+x_intersect,start[1]-y_intersect),
            (start[0],start[1]-start_pinwLinear/2-start_gapwLinear),
            (start[0],start[1]-start_pinwLinear/2),
            (start[0]+bufferDistance,start[1]-start_pinwLinear/2),
            (start[0]+start_taperX,start[1]-stop_pinw/2),
            (start[0]+cap_length,start[1]),
            (start[0]+start_taperX,start[1]+stop_pinw/2),
            (start[0]+bufferDistance,start[1]+start_pinwLinear/2)
            ]
        "rotate structure to proper orientation"
        EndCap=rotate_pts(EndCap,s.last_direction,start)
        "create polylines and append to drawing /connect the dots"
        s.append(sdxf.PolyLine(EndCap))
        "update last anchor position"
        stop=rotate_pt((start[0]+cap_length+cap_gap/2,start[1]),s.last_direction,start)
        s.last=stop
class Outer_Pacman_cap:
    """Pacman-shaped outer capacitor that wraps around an inner hexagonal end cap."""
    def __init__(self,structure,cap_length,cap_gap,start_pinw,stop_pinw,start_gapw,stop_gapw,cap_gap_ext=0):
        if cap_length==0: return
        """
        Draws a pacman shaped capacitor that fits to the end of the
        variables:
        cap_length= linear length of end cap
        cap_gap= width of capacitive gap b/n end caps
        start_pinw= beginning width of end cap
        stop_pinw= width of end cap before taper
        """
        """
        The issue with this code is that for small cap_gap, the space between the capacitor and ground plane isn't big enough.
        """
        "Load attributes"
        s=structure
        start=s.last
        # Work with the distance to the cap tip excluding half the capacitive gap.
        cap_length = cap_length - cap_gap/2
        start_taperX= cap_length-(stop_pinw/2)/tan(60*pi/180) # X-pos of where the centerpin taper starts
        start_taperY=((cap_length-start_taperX)+cap_gap/2)*tan(60*pi/180)  # NOTE(review): computed but never used
        "Intersection point calculations"
        # Where the 60-degree tip edge meets the tapering ground-plane edge.
        x_intersect=(start_pinw/2 + start_gapw - sqrt(3)*(cap_length+cap_gap/2))/(-sqrt(3)-(stop_gapw/start_taperX))
        y_intersect=-sqrt(3)*(x_intersect-(cap_length+cap_gap/2))
        "draw points that form the end cap geometry"
        # Closed outline: flat-tipped center pin out to x=cap_length, then the
        # surrounding "pacman" gap extended by cap_gap_ext past the tip.
        EndCap=[
            (start[0], start[1]+start_pinw/2),
            (start[0] + cap_length-(stop_pinw/2.)/sqrt(3), start[1]+stop_pinw/2),
            (start[0] + cap_length-(stop_pinw/2.)/sqrt(3)+2.*(stop_pinw/2.)/sqrt(3.), start[1] + stop_pinw/2),
            (start[0] + cap_length, start[1]),
            (start[0] + cap_length-(stop_pinw/2.)/sqrt(3)+2.*(stop_pinw/2.)/sqrt(3.), start[1]-stop_pinw/2),
            (start[0] + cap_length-(stop_pinw/2.)/sqrt(3), start[1]-stop_pinw/2),
            (start[0] , start[1] - start_pinw/2),
            (start[0], start[1]-start_pinw/2-start_gapw),
            (start[0] + x_intersect, start[1] - y_intersect),
            (start[0] + cap_length + cap_gap/2 + y_intersect/sqrt(3) + cap_gap_ext, start[1] - y_intersect),
            (start[0] + cap_length + cap_gap/2 + cap_gap_ext,start[1]),
            (start[0] + cap_length + cap_gap/2 + y_intersect/sqrt(3) + cap_gap_ext, start[1] + y_intersect),
            (start[0] + x_intersect, start[1] + y_intersect),
            (start[0],start[1]+start_pinw/2 + start_gapw),
            (start[0], start[1] + start_pinw/2)
            ]
        "rotate structure to proper orientation"
        EndCap=rotate_pts(EndCap,s.last_direction,start)
        "create polylines and append to drawing /connect the dots"
        s.append(sdxf.PolyLine(EndCap))
        "update last anchor position"
        stop=rotate_pt((start[0] + cap_length + cap_gap/2 + cap_gap_ext,start[1]),s.last_direction,start)
        s.last=stop
class CPWBend:
    """A CPW bend"""
    def __init__(self,structure,turn_angle,pinw=None,gapw=None,radius=None,polyarc=True,segments=60):
        """creates a CPW bend with pinw/gapw/radius
            @param turn_angle: turn_angle is in degrees, positive is CCW, negative is CW
            @param polyarc: if True draw the arcs as closed polylines, otherwise as DXF Arc entities
            @param segments: number of segments used per polyline arc
        """
        #load default values if necessary
        if turn_angle==0: return
        s=structure
#        print('radius',radius)
        if radius is None: radius=s.defaults['radius']
        if pinw is None: pinw=s.defaults['pinw']
        if gapw is None: gapw=s.defaults['gapw']
        self.structure=structure
        self.turn_angle=turn_angle
        self.pinw=pinw
        self.gapw=gapw
        self.radius=radius
        self.segments=segments
        self.start=s.last
        self.start_angle=s.last_direction
        self.stop_angle=self.start_angle+self.turn_angle
        # asign: +1 for a CCW (left) turn, -1 for a CW (right) turn.
        if turn_angle>0: self.asign=1
        else: self.asign=-1
        #DXF uses the angle of the radial vector for its start and stop angles
        #so we have to rotate our angles by 90 degrees to get them right
        #also it only knows about arcs with CCW sense to them, so we have to rotate our angles appropriately
        self.astart_angle=self.start_angle-self.asign*90
        self.astop_angle=self.stop_angle-self.asign*90
        #calculate location of Arc center
        self.center=rotate_pt( (self.start[0],self.start[1]+self.asign*self.radius),self.start_angle,self.start)
        if polyarc: self.poly_arc_bend()
        else: self.arc_bend()
        # Advance the structure's anchor and heading to the end of the bend.
        self.structure.last=rotate_pt(self.start,self.stop_angle-self.start_angle,self.center)
        self.structure.last_direction=self.stop_angle
    def arc_bend(self):
        # Draw the bend as true DXF Arc entities bounded by radial end lines.
        #print "start: %d, stop: %d" % (start_angle,stop_angle)
        if self.turn_angle>0:
            self.astart_angle=self.start_angle-90
            self.astop_angle=self.stop_angle-90
            #calculate location of Arc center
            self.center=rotate_pt( (self.start[0],self.start[1]+self.radius),self.start_angle,self.start)
        else:
            # DXF arcs are always CCW, so a CW turn swaps start/stop angles.
            self.astart_angle=self.stop_angle+90
            self.astop_angle=self.start_angle+90
        #make endlines for inner arc
        #start first gap
        points1=[ (self.start[0],self.start[1]+self.pinw/2.),
                  (self.start[0],self.start[1]+self.pinw/2.+self.gapw)
                ]
        points1=rotate_pts(points1,self.start_angle,self.start)
        # End line of the first gap: start line swept through the full turn.
        points2=rotate_pts(points1,self.stop_angle-self.start_angle,self.center)
        #start 2nd gap
        points3=[ (self.start[0],self.start[1]-self.pinw/2.),
                  (self.start[0],self.start[1]-self.pinw/2.-self.gapw)
                ]
        points3=rotate_pts(points3,self.start_angle,self.start)
        points4=rotate_pts(points3,self.stop_angle-self.start_angle,self.center)
        #make inner arcs
        self.structure.append(sdxf.Line(points1))
        self.structure.append(sdxf.Arc(self.center,self.radius+self.pinw/2.,self.astart_angle,self.astop_angle))
        self.structure.append(sdxf.Arc(self.center,self.radius+self.pinw/2.+self.gapw,self.astart_angle,self.astop_angle))
        self.structure.append(sdxf.Line(points2))
        self.structure.append(sdxf.Line(points3))
        self.structure.append(sdxf.Arc(self.center,self.radius-self.pinw/2.,self.astart_angle,self.astop_angle))
        self.structure.append(sdxf.Arc(self.center,self.radius-self.pinw/2.-self.gapw,self.astart_angle,self.astop_angle))
        self.structure.append(sdxf.Line(points4))
    def poly_arc_bend(self):
        # Draw each gap as one closed polyline: outer arc forward, inner arc back.
        #lower gap
        pts1=arc_pts(self.astart_angle,self.astop_angle,self.radius+self.pinw/2.+self.gapw,self.segments)
        pts1.extend(arc_pts(self.astop_angle,self.astart_angle,self.radius+self.pinw/2.,self.segments))
        pts1.append(pts1[0])
        pts2=arc_pts(self.astart_angle,self.astop_angle,self.radius-self.pinw/2.,self.segments)
        pts2.extend(arc_pts(self.astop_angle,self.astart_angle,self.radius-self.pinw/2.-self.gapw,self.segments))
        pts2.append(pts2[0])
        # arc_pts generates points about the origin; shift them to the bend center.
        self.structure.append(sdxf.PolyLine(translate_pts(pts1,self.center)))
        self.structure.append(sdxf.PolyLine(translate_pts(pts2,self.center)))
#class TaperedCPWFingerCap:
# def __init__(self, structure,num_fingers,finger_length=None,finger_width=None,finger_gap=None,gapw=None):
class CPWWiggles:
    """CPW Wiggles (meanders)"""
    def __init__(self,structure,num_wiggles,total_length,start_up=True,radius=None,pinw=None,gapw=None):
        """
        Draw a meandered CPW of length total_length built from num_wiggles excursions.

        @param num_wiggles: a wiggle is from the center pin up/down and back
        @param total_length: The total length of the meander
        @param start_up: Start with a CCW 90 degree turn (True) or a CW turn (False)
        """
        s = structure
        anchor = structure.last  # kept for parity with the sibling classes (unused)
        if pinw is None:
            pinw = s.defaults['pinw']
        if gapw is None:
            gapw = s.defaults['gapw']
        if radius is None:
            radius = s.defaults['radius']
        # Length budget:
        #   total_length = (1+num_wiggles)*(pi*radius)      half-turn arcs
        #                + 2*num_wiggles*vlength            vertical runs
        #                + 2*(num_wiggles-1)*radius         spacer segments
        vlength = (total_length - ((1 + num_wiggles) * (pi * radius) + 2 * (num_wiggles - 1) * radius)) / (2 * num_wiggles)
        if vlength < 0:
            print("Warning: length of vertical segments is less than 0, increase total_length or decrease num_wiggles")
        updown = 1 if start_up else -1
        CPWBend(s, updown * 90, pinw, gapw, radius)
        for count in range(num_wiggles):
            parity = 2 * (count % 2) - 1  # alternates -1, +1, -1, ...
            CPWStraight(s, vlength, pinw, gapw)
            CPWBend(s, parity * updown * 180, pinw, gapw, radius)
            CPWStraight(s, vlength, pinw, gapw)
            if count < num_wiggles - 1:
                CPWStraight(s, 2 * radius, pinw, gapw)
        # Final 90-degree bend returns to the original propagation direction.
        CPWBend(s, -parity * updown * 90, pinw, gapw, radius)
class CPWWigglesByLength:
    """An updated version of CPWWiggles which is more general.
    Specifies a meander by length but allows for starting at different angles
    and also allows meanders which are symmetric or asymmetric about the center pin.
    """
    def __init__(self,structure,num_wiggles,total_length,start_bend_angle=None,symmetric=True,radius=None,pinw=None,gapw=None):
        """
        @param num_wiggles: a wiggle is from the center pin up/down and back
        @param total_length: The total length of the meander
        @param start_bend_angle: Start with a start_bend_angle degree turn (CCW)
        @param symmetric: If True then meander symmetric about current direction, other wise only above or below depending on start_bend_angle
        """
        s=structure
        start=structure.last
        if pinw is None: pinw=s.defaults['pinw']
        if gapw is None: gapw=s.defaults['gapw']
        if radius is None: radius=s.defaults['radius']
        # Degenerate meander: draw nothing, but still expose vlength for callers.
        if num_wiggles == 0 or total_length == 0:
            self.vlength=0
            return
        if start_bend_angle is None:
            start_bend_angle=0
        # asign records the sense (CCW/CW) of the initial bend.
        if start_bend_angle>0:
            asign=1
        else:
            asign=-1
        # Solve total_length = initial-bend arcs + 180-degree arcs (+ spacers)
        # for the vertical run length vlength.
        if symmetric:
            vlength=(total_length-2*(start_bend_angle*pi/180*radius)-num_wiggles*pi*radius-2*radius*(num_wiggles-1))/(2*num_wiggles)
        else:
            vlength=(total_length-2*(start_bend_angle*pi/180*radius)-pi*radius*(2*num_wiggles-1))/(2*num_wiggles)
        if vlength<0:
            raise MaskError("Warning: length of vertical segments is less than 0, increase total_length or decrease num_wiggles")
        self.vlength=vlength
        CPWBend(s,start_bend_angle,pinw,gapw,radius)
        for ii in range(num_wiggles):
            # isign alternates the turn direction for symmetric meanders;
            # asymmetric meanders always turn the same way.
            if symmetric:
                isign=2*(ii%2)-1
            else:
                isign=-1
            CPWStraight(s,vlength,pinw,gapw)
            CPWBend(s,isign*asign*180,pinw,gapw,radius)
            CPWStraight(s,vlength,pinw,gapw)
            if ii<num_wiggles-1:
                if symmetric:
                    CPWStraight(s,2*radius,pinw,gapw) #if symmetric must account for initial bend height
                else:
                    CPWBend(s,asign*180,pinw,gapw,radius) #if asymmetric must turn around
        # Undo the initial bend so the meander exits parallel to its entry direction.
        CPWBend(s,-isign*start_bend_angle,pinw,gapw,radius)
class CPWWigglesByLength_EndStraight:
    """An updated version of CPWWigglesByLength.
    At the end of the wiggles, the cpw does not curve back to the original direction defined by the start_bend_angle,
    but stays straight along the current direction.
    """
    def __init__(self,structure,num_wiggles,total_length,start_bend_angle=None,symmetric=True,radius=None,pinw=None,gapw=None):
        """
        @param num_wiggles: a wiggle is from the center pin up/down and back
        @param total_length: The total length of the meander
        @param start_bend_angle: Start with a start_bend_angle degree turn (CCW)
        @param symmetric: If True then meander symmetric about current direction, other wise only above or below depending on start_bend_angle
        """
        s=structure
        start=structure.last
        if pinw is None: pinw=s.defaults['pinw']
        if gapw is None: gapw=s.defaults['gapw']
        if radius is None: radius=s.defaults['radius']
        # Degenerate meander: draw nothing, but still expose vlength for callers.
        if num_wiggles == 0 or total_length == 0:
            self.vlength=0
            return
        if start_bend_angle is None:
            start_bend_angle=0
        # asign records the sense (CCW/CW) of the initial bend.
        if start_bend_angle>0:
            asign=1
        else:
            asign=-1
        # Solve total_length = initial-bend arcs + 180-degree arcs (+ spacers)
        # for the vertical run length vlength.
        if symmetric:
            vlength=(total_length-2*(start_bend_angle*pi/180*radius)-num_wiggles*pi*radius-2*radius*(num_wiggles-1))/(2*num_wiggles)
        else:
            vlength=(total_length-2*(start_bend_angle*pi/180*radius)-pi*radius*(2*num_wiggles-1))/(2*num_wiggles)
        if vlength<0:
            raise MaskError("Warning: length of vertical segments is less than 0, increase total_length or decrease num_wiggles")
        self.vlength=vlength
        CPWBend(s,start_bend_angle,pinw,gapw,radius)
        for ii in range(num_wiggles):
            # isign alternates the turn direction for symmetric meanders;
            # asymmetric meanders always turn the same way.
            if symmetric:
                isign=2*(ii%2)-1
            else:
                isign=-1
            CPWStraight(s,vlength,pinw,gapw)
            CPWBend(s,isign*asign*180,pinw,gapw,radius)
            CPWStraight(s,vlength,pinw,gapw)
            if ii<num_wiggles-1:
                if symmetric:
                    CPWStraight(s,2*radius,pinw,gapw) #if symmetric must account for initial bend height
                else:
                    CPWBend(s,asign*180,pinw,gapw,radius) #if asymmetric must turn around#
        # Final bend by +start_bend_angle (rather than -isign*start_bend_angle as
        # in CPWWigglesByLength), leaving the exit direction as this variant intends.
        CPWBend(s,start_bend_angle,pinw,gapw,radius)
        # Fixed py2->py3 conversion artifact: print(('vlength=', vlength)) printed
        # the tuple repr "('vlength=', x)"; print the label and value instead.
        print('vlength=', vlength)
class drawBondPad:
    def __init__(self,drawing,pos,Ang,pinw,gapw,bond_pad_length=None,launcher_pinw=None,launcher_gapw=None,taper_length=None,launcher_padding=None,launcher_radius=None):
        """
        Created on 08/09/2011
        @author: Brendon Rose
        Script appends a BondPad on drawing and position pos and Angle Ang relative to the positive x-axis CCW is positive
        @param pinw, gapw: target center-pin/gap widths the launcher tapers down to
        Remaining parameters default to the standard launcher dimensions when None.
        """
        "Set Self-attributes"
        # Launcher parameters fall back to defaults when not supplied.
        # (PEP 8: test for None with "is None" rather than "== None".)
        if bond_pad_length is None: bond_pad_length = 400.
        if launcher_pinw is None: launcher_pinw = 150.
        if launcher_gapw is None: launcher_gapw = 67.305
        if taper_length is None: taper_length = 300.
        if launcher_padding is None: launcher_padding = 67.
        if launcher_radius is None: launcher_radius = 125.
        s = drawing #define structure for writing bond pad to
        s.last = pos #Position to put bond pad
        s.last_direction = Ang #Angle to put bond pad (degrees, CCW from +x)
        launcher_length=taper_length+bond_pad_length+launcher_padding  # NOTE(review): computed but never used
        "Draw the BondPad and a curly wire to offset launcher"
        # Closed back wall (pinw=0 section), then the pad, then a linear taper
        # down to the requested pinw/gapw.
        CPWStraight(s,length=launcher_padding,pinw=0,gapw=launcher_pinw/2 + launcher_gapw)
        CPWStraight(s,length=bond_pad_length,pinw=launcher_pinw,gapw=launcher_gapw)
        CPWLinearTaper(s,length=taper_length,start_pinw=launcher_pinw,start_gapw=launcher_gapw,stop_pinw=pinw,stop_gapw=gapw)
class CPWWigglesByLength_KagRes1:
    """
    An updated version of CPWWigglesByLength.
    """
    def __init__(self,structure,num_wiggles,total_length,br_base,br_width,lattice_shift=None,start_bend_angle=None,symmetric=True,radius=None,pinw=None,gapw=None):
#    def __init__(self,structure,num_wiggles,total_length,lattice_shift=None,start_bend_angle=None,symmetric=True,radius=None,pinw=None,gapw=None):
        """
        @param num_wiggles: a wiggle is from the center pin up/down and back
        @param total_length: The total length of the meander
        @param br_base, br_width: bridge dimensions forwarded to CPWStraight_Bridges_Layer1
        @param lattice_shift: length removed evenly from the interior vertical runs (default 0)
        @param start_bend_angle: Start with a start_bend_angle degree turn (CCW)
        @param symmetric: If True then meander symmetric about current direction, other wise only above or below depending on start_bend_angle
        """
        s=structure
        start=structure.last
        if lattice_shift is None: lattice_shift = 0
        if pinw is None: pinw=s.defaults['pinw']
        if gapw is None: gapw=s.defaults['gapw']
        if radius is None: radius=s.defaults['radius']
        # Degenerate meander: draw nothing, but still expose vlength for callers.
        if num_wiggles == 0 or total_length == 0:
            self.vlength=0
            return
        if start_bend_angle is None:
            start_bend_angle=0
        # asign records the sense (CCW/CW) of the initial bend.
        if start_bend_angle>0:
            asign=1
        else:
            asign=-1
        # Vertical run length solved from the total-length budget for this geometry.
        if symmetric:
            vlength=(total_length- radius*(num_wiggles)*pi - 2.*radius*(num_wiggles- 1.))/(2.*num_wiggles -1.)
        else:
            vlength=(total_length-(start_bend_angle*pi/180*radius)-pi*radius*(2*num_wiggles-1))/(2*num_wiggles)
        if vlength<0:
            raise MaskError("Warning: length of vertical segments is less than 0, increase total_length or decrease num_wiggles")
        self.vlength=vlength
        CPWBend(s,start_bend_angle,pinw,gapw,radius)
        for ii in range(num_wiggles):
            if symmetric:
                isign=2*(ii%2)-1
            else:
                isign=-1
            if ii <num_wiggles- 1:
                # Interior wiggles: bridged straights shortened by the lattice shift.
                CPWStraight_Bridges_Layer1(s,vlength-lattice_shift/(2*(num_wiggles - 1)),br_base,br_width,pinw,gapw)
#                CPWStraight(s,vlength-lattice_shift/(2*(num_wiggles - 1)),pinw,gapw)
                CPWBend(s,isign*asign*180,pinw,gapw,radius)
                CPWStraight_Bridges_Layer1(s,vlength-lattice_shift/(2*(num_wiggles - 1)),br_base,br_width,pinw,gapw)
#                CPWStraight(s,vlength-lattice_shift/(2*(num_wiggles - 1)),pinw,gapw)
                if symmetric:
                    CPWStraight(s,2*radius,pinw,gapw) #if symmetric must account for initial bend height
                else:
                    CPWBend(s,asign*180,pinw,gapw,radius) #if asymmetric must turn around
            else:
                # Last wiggle: full-length bridged run, then a 90-degree exit bend.
                CPWStraight_Bridges_Layer1(s,vlength,br_base,br_width,pinw,gapw)
#                CPWStraight(s,vlength,pinw,gapw)
                CPWBend(s,isign*asign*90,pinw,gapw,radius)
class CPWWigglesByLength_KagRes2:
    """An updated version of CPWWigglesByLength.
    """
    def __init__(self,structure,num_wiggles,total_length,br_base,br_width,lattice_shift=None,start_bend_angle=None,symmetric=True,radius=None,pinw=None,gapw=None):
#    def __init__(self,structure,num_wiggles,total_length,lattice_shift=None,start_bend_angle=None,symmetric=True,radius=None,pinw=None,gapw=None):
        """
        @param num_wiggles: a wiggle is from the center pin up/down and back
        @param total_length: The total length of the meander
        @param br_base, br_width: bridge dimensions forwarded to CPWStraight_Bridges_Layer1
        @param lattice_shift: length removed evenly from the interior vertical runs (default 0)
        @param start_bend_angle: Start with a start_bend_angle degree turn (CCW)
        @param symmetric: If True then meander symmetric about current direction, other wise only above or below depending on start_bend_angle
        """
        s=structure
        start=structure.last
        if lattice_shift is None: lattice_shift = 0
        if pinw is None: pinw=s.defaults['pinw']
        if gapw is None: gapw=s.defaults['gapw']
        if radius is None: radius=s.defaults['radius']
        # Degenerate meander: draw nothing, but still expose vlength for callers.
        if num_wiggles == 0 or total_length == 0:
            self.vlength=0
            return
        if start_bend_angle is None:
            start_bend_angle=0
        # asign records the sense (CCW/CW) of the initial bend.
        if start_bend_angle>0:
            asign=1
        else:
            asign=-1
        vlength_overflow=0.0  # NOTE(review): set but never used
        # Vertical run length solved from the total-length budget, including the
        # extra 60-degree exit bends and diagonal segment of the last wiggle.
        if symmetric:
            vlength=(total_length- radius*(num_wiggles + 2./3.)*pi - 2.*radius*(num_wiggles- 1. + 1./sqrt(3.)))/(2.*num_wiggles -1. +2./sqrt(3.))
        else:
            vlength=(total_length-(start_bend_angle*pi/180*radius)-pi*radius*(2*num_wiggles-1))/(2*num_wiggles)
        if vlength<0:
            raise MaskError("Warning: length of vertical segments is less than 0, increase total_length or decrease num_wiggles")
        self.vlength=vlength
        CPWBend(s,start_bend_angle,pinw,gapw,radius)
        for ii in range(num_wiggles):
            if symmetric:
                isign=2*(ii%2)-1
            else:
                isign=-1
            if ii<num_wiggles-1:
                # Interior wiggles: bridged straights shortened by the lattice shift.
                CPWStraight_Bridges_Layer1(s,vlength-lattice_shift/(2*(num_wiggles - 1)),br_base,br_width,pinw,gapw)
#                CPWStraight(s,vlength-lattice_shift/(2*(num_wiggles-1)),pinw,gapw)
                CPWBend(s,isign*asign*180,pinw,gapw,radius)
                CPWStraight_Bridges_Layer1(s,vlength-lattice_shift/(2*(num_wiggles - 1)),br_base,br_width,pinw,gapw)
#                CPWStraight(s,vlength-lattice_shift/(2*(num_wiggles-1)),pinw,gapw)
                if symmetric:
                    CPWStraight(s,2*radius,pinw,gapw) #if symmetric must account for initial bend height
                else:
                    CPWBend(s,asign*180,pinw,gapw,radius) #if asymmetric must turn around
            else:
                # Last wiggle: bridged run, then 90+60-degree bends, a diagonal
                # segment, and a -60-degree bend to set the exit direction.
                CPWStraight_Bridges_Layer1(s,vlength,br_base,br_width,pinw,gapw)
#                CPWStraight(s,vlength,pinw,gapw)
                CPWBend(s,isign*asign*90,pinw,gapw,radius)
                CPWBend(s,isign*asign*60,pinw,gapw,radius)
                CPWStraight(s,2.0/sqrt(3)*(vlength+radius),pinw,gapw)
                CPWBend(s,-1*isign*asign*60,pinw,gapw,radius)
#            if ii == num_wiggles:
#                CPWBend(s,90,pinw,gapw,radius)
        #CPWBend(s,-isign*start_bend_angle,pinw,gapw,radius)
class CPWWigglesByArea:
    """CPW Wiggles which fill an area specified by (length,width)"""
    def __init__(self,structure,length,width,start_up=True,radius=None,pinw=None,gapw=None):
        """Fit as many wiggles as possible into a length-by-width rectangle,
        centering the meander with straight padding on both sides."""
        s = structure
        defaults = s.defaults
        pinw = defaults['pinw'] if pinw is None else pinw
        gapw = defaults['gapw'] if gapw is None else gapw
        radius = defaults['radius'] if radius is None else radius
        # How many wiggles fit: length = 2*(n+1)*radius, solved for n.
        n = int(floor(length/(2*radius)-1))
        pad = length-2*(n+1)*radius
        vert = (width-4*radius)/2
        meander_len = (1+n)*(pi*radius)+2*n*vert+2*(n-1)*radius
        # Expose the computed geometry both as attributes and as a dict.
        self.num_wiggles = n
        self.padding = pad
        self.vlength = vert
        self.total_length = meander_len
        self.properties = {'num_wiggles': n, 'padding': pad, 'vlength': vert, 'total_length': meander_len}
        CPWStraight(s, pad/2, pinw, gapw)
        CPWWiggles(s, n, meander_len, start_up, radius, pinw, gapw)
        CPWStraight(s, pad/2, pinw, gapw)
class CPWPaddedWiggles:
    """Meander intended to fill (length,width) with exactly cpw_length of CPW.

    NOTE(review): implementation is unfinished (see "#to be finished" below);
    currently it only validates that cpw_length fits the allotted area and
    draws nothing.
    """
    def __init__(self,structure,length,width,cpw_length,start_up=True,radius=None,pinw=None,gapw=None):
        s=structure
        if pinw is None:
            pinw=s.defaults['pinw']
        if gapw is None:
            gapw=s.defaults['gapw']
        if radius is None:
            radius=s.defaults['radius']
        # cpw_length must be at least the straight-through length plus one bend's worth.
        if cpw_length<length+(2*pi-4)*radius:
            raise MaskError("Error in CPWPaddedWiggles: cpw_length=%f needs less than one wiggle!" %(cpw_length))
        #calculate maximum length possible in area
        num_wiggles=int(floor(length/(2*radius)-1))
        padding=length-2*(num_wiggles+1)*radius
        vlength=(width-4*radius)/2
        max_length=(1+num_wiggles)*(pi*radius)+2*num_wiggles*vlength+2*(num_wiggles-1)*radius
        if cpw_length > max_length:
            raise MaskError("Error in CPWPaddedWiggles: cpw_length=%f > max_length=%f that can be fit into alotted area!" %(cpw_length,max_length))
        #to be finished
class ChipBorder(Structure):
    """Chip border for dicing"""
    def __init__(self,chip,border_thickness,layer="border",color=1):
        # Draw into a dedicated layer/color so the border can be isolated for dicing.
        Structure.__init__(self,chip,layer=layer,color=color)
        # Outer dimensions: the chip grown by border_thickness on every side.
        chip_size=(chip.size[0]+2*border_thickness,chip.size[1]+2*border_thickness)
        # pts1: top strip of the border frame.
        pts1=[ (0,chip_size[1]),
               (chip_size[0],chip_size[1]),
               (chip_size[0],chip_size[1]-border_thickness),
               (0,chip_size[1]-border_thickness),
               (0,chip_size[1])
               ]
        # Shift so the frame is centered on the chip (origin at the chip corner).
        pts1=translate_pts(pts1,(-border_thickness,-border_thickness))
        # pts2: bottom strip.
        pts2=[ (0,0),
               (chip_size[0],0),
               (chip_size[0],border_thickness),
               (0,border_thickness),
               (0,0)
               ]
        pts2=translate_pts(pts2,(-border_thickness,-border_thickness))
        # pts3: left strip (between the top and bottom strips).
        pts3=[ (0,border_thickness),
               (border_thickness,border_thickness),
               (border_thickness,chip_size[1]-border_thickness),
               (0,chip_size[1]-border_thickness),
               (0,border_thickness)
               ]
        pts3=translate_pts(pts3,(-border_thickness,-border_thickness))
        # pts4: right strip (between the top and bottom strips).
        pts4=[ (chip_size[0]-border_thickness,border_thickness),
               (chip_size[0],border_thickness),
               (chip_size[0],chip_size[1]-border_thickness),
               (chip_size[0]-border_thickness,chip_size[1]-border_thickness),
               (chip_size[0]-border_thickness,border_thickness)
               ]
        pts4=translate_pts(pts4,(-border_thickness,-border_thickness))
        # Append the four closed rectangles forming the frame.
        self.append(sdxf.PolyLine(pts1))
        self.append(sdxf.PolyLine(pts2))
        self.append(sdxf.PolyLine(pts3))
        self.append(sdxf.PolyLine(pts4))
class CPWGapCap:
    """A CPW gap capacitor (really just a gap in the CPW center pin with no padding)."""

    def __init__(self, gap,pinw=None,gapw=None,capacitance=0.0):
        # gap: length of the break in the center pin (also stored as .length)
        # pinw/gapw: CPW dimensions; filled from structure defaults at draw time if None
        # capacitance: assumed capacitance (F), used only by ext_Q()
        self.type='gap'
        self.gap=gap
        self.pinw=pinw
        self.gapw=gapw
        self.capacitance=capacitance
        self.length=gap

    def description(self):
        """Return a one-line, tab-separated summary of the capacitor parameters."""
        return "Type:\t%s\tAssumed Capacitance:\t%f\tGap Distance:\t%f\tPin Width:\t%f\t,Gap Width:\t%f\t" % (
            self.type,self.capacitance,self.gap,self.pinw,self.gapw
            )

    def draw(self,structure):
        """Draw the gap at the structure's current anchor and advance it by ``gap``."""
        s=structure
        start=structure.last
        if self.pinw is None: self.pinw=structure.defaults['pinw']
        if self.gapw is None: self.gapw=structure.defaults['gapw']
        pinw=self.pinw
        gapw=self.gapw
        # Single rectangle spanning the full pin + both gaps, over the gap length.
        gpoints=[ (0,pinw/2+gapw),
                  (self.gap,pinw/2+gapw),
                  (self.gap,-pinw/2-gapw),
                  (0,-pinw/2-gapw),
                  (0,pinw/2+gapw)
                ]
        gpoints=orient_pts(gpoints,s.last_direction,start)
        #create polylines and append to drawing
        s.append(sdxf.PolyLine(gpoints))
        #update last anchor position
        s.last=orient_pt((self.gap,0),s.last_direction,start)

    def ext_Q(self,frequency,impedance=50,resonator_type=0.5):
        """External quality factor for this coupling capacitor.

        frequency is in GHz.  Returns 0 when no capacitance is set.

        BUG FIX: the original signature was ``def ext_Q(frequency, ...)`` --
        it omitted ``self`` while the body read ``self.capacitance``, so any
        call raised NameError (and bound the instance to ``frequency``).
        """
        if self.capacitance==0:
            return 0
        frequency=frequency*1e9
        q=2.*pi*frequency*self.capacitance*impedance
        Q=0
        if q!=0:
            Q=resonator_type*pi*1/(q**2)
        return Q
class CPWInductiveShunt:
    """An inductive shunt: a ladder of meandered segments shorting the CPW
    center pin to ground, with optional linear tapers and padding."""

    def __init__(self,num_segments, segment_length, segment_width, segment_gap, taper_length = 0, pinw=None, inductance = 0.0):
        # inductance: assumed shunt inductance (H), used only by ext_Q()
        self.type='inductive shunt'
        self.inductance = inductance
        self.num_segments = num_segments
        self.segment_length = segment_length
        self.segment_width = segment_width
        self.segment_gap = segment_gap
        self.taper_length = taper_length
        self.pinw=pinw
        #self.gapw=gapw
        # Effective gap width: the segment ladder stack, or the bare segment
        # length when there are no segments.
        if (num_segments >0 ):
            self.gapw = (num_segments+1)*segment_gap+num_segments*segment_width
        else:
            self.gapw = segment_length

    def description(self):
        """Return a one-line, tab-separated summary of the shunt parameters.

        NOTE(review): formats ``self.pinw`` with %f -- raises TypeError if
        pinw was left as None and draw() has not run yet.
        """
        return "type:\t%s\tAssumed Inductance:\t%f pH\t# of segments:\t%d\tSegment length:\t%f\tSegment width:\t%f\tSegment gap:\t%f\tTotal inductor length:\t%f\tPin width:\t%f\tGap width:\t%f\tTaper length:\t%f" % (
            self.type,self.inductance*1e12,self.num_segments,self.segment_length,self.segment_width,self.segment_gap,self.segment_length*self.num_segments+(self.num_segments+1)*self.segment_gap,self.pinw,self.gapw,self.taper_length
            )

    def draw(self,structure,pad_to_length = 0, flipped= False):
        """Draw the shunt; ``pad_to_length`` pads with straight CPW (before the
        shunt normally, after it when ``flipped``)."""
        s=structure
        if self.pinw is None: self.pinw=s.defaults['pinw']
        pinw=self.pinw
        gapw=self.gapw
        self.flipped = flipped
        if pad_to_length < self.segment_length+self.taper_length:
            self.padding=0
        else:
            self.padding=pad_to_length-self.segment_length-self.taper_length
        if not self.flipped: CPWStraight(s,self.padding)
        # Taper from the feedline dimensions to the shunt's pin/gap widths.
        CPWLinearTaper(s,length=self.taper_length,start_pinw=s.defaults['pinw'],start_gapw=s.defaults['gapw'],stop_pinw=pinw,stop_gapw=gapw)
        start=structure.last
        if self.num_segments >0:
            # One meander cell; alternate cells are offset by segment_width
            # so consecutive segments connect on opposite ends.
            gap = [ (0,0), (self.segment_length-self.segment_width,0), (self.segment_length-self.segment_width,self.segment_gap), (0,self.segment_gap), (0,0) ]
            gaps=[]
            if self.flipped:
                flipped=1
            else:
                flipped=0
            for ii in range (self.num_segments+1):
                # Upper half of the ladder.
                gaps.append(
                    orient_pts(
                        translate_pts(gap, (self.segment_width*((ii+flipped)%2),+pinw/2.0+ii*(self.segment_gap+self.segment_width))),
                        s.last_direction,start)
                    )
                # Mirror cell on the lower half.
                gaps.append(
                    orient_pts(
                        translate_pts(gap,(self.segment_width*((ii+flipped)%2),-(pinw/2.0+self.segment_gap+ii*(self.segment_gap+self.segment_width)))),
                        s.last_direction,start)
                    )
            for pts in gaps:
                s.append(sdxf.PolyLine(pts))
            s.last=orient_pt((self.segment_length,0),s.last_direction,start)
        else: #If num_segments == 0 then draw a single straight shunt bar
            ugap1 = [ (0,pinw/2.), (0, pinw/2.+self.segment_length), (self.segment_gap, pinw/2.+self.segment_length), (self.segment_gap, pinw/2.), (0,pinw/2.0) ]
            ugap2 = translate_pts(ugap1,(self.segment_width+self.segment_gap,0))
            lgap1 = mirror_pts(ugap1,0,(self.segment_width+self.segment_gap,0))
            lgap2 = mirror_pts(ugap2,0,(self.segment_width+self.segment_gap,0))
            ugap1 = orient_pts(ugap1,s.last_direction,s.last)
            ugap2 = orient_pts(ugap2,s.last_direction,s.last)
            lgap1 = orient_pts(lgap1,s.last_direction,s.last)
            lgap2 = orient_pts(lgap2,s.last_direction,s.last)
            for pts in [ugap1,ugap2,lgap1,lgap2]:
                s.append(sdxf.PolyLine(pts))
            s.last=orient_pt((2*self.segment_gap+self.segment_width,0),s.last_direction,s.last)
        # Taper back out to the feedline dimensions.
        CPWLinearTaper(s,length=self.taper_length,start_pinw=pinw,start_gapw=gapw,stop_pinw=s.defaults['pinw'],stop_gapw=s.defaults['gapw'])
        if self.flipped: CPWStraight(s,self.padding)

    def ext_Q (self,frequency, impedance=50, resonator_type=0.5):
        """External Q of the shunt at ``frequency`` (GHz) for a half- or
        quarter-wave resonator.

        BUG FIX: the original fell off the end and returned None when
        ``self.inductance == 0``; now returns 0.0, consistent with the
        ``ext_Q`` methods of the capacitive couplers in this file.
        """
        if self.inductance == 0:
            return 0.0
        if resonator_type==0.5:
            return (2/pi)*(impedance/(self.inductance*2*pi*frequency*1e9))**2
        if resonator_type==0.25:
            return (2./pi)*(impedance/(2*pi*frequency*1e9*self.inductance))**2
        return 0.0
def rectangle_points(size, orientation=0, center=(0, 0)):
    """Return the closed 5-point outline of a ``size=(width, height)``
    rectangle, rotated by ``orientation`` degrees about ``center``."""
    half_w = size[0] / 2.
    half_h = size[1] / 2.
    outline = [(-half_w, -half_h), (half_w, -half_h), (half_w, half_h),
               (-half_w, half_h), (-half_w, -half_h)]
    return orient_pts(outline, orientation, center)
class CPWFingerCap:
    """A CPW finger (interdigitated) capacitor.

    Fingers alternate between the two sides of the gap; the effective
    center-pin width is the sum of all finger widths plus the gaps between
    them.  Linear tapers at both ends match back to the structure defaults.
    """
    def __init__(self,num_fingers,finger_length,finger_width,finger_gap,taper_length = 0, gapw=None,capacitance=0.0):
        self.type='finger'
        self.capacitance=capacitance        #simulated capacitance (F), used only by ext_Q()
        self.num_fingers=num_fingers        #number of fingers
        if num_fingers<2:
            raise MaskError("CPWFingerCap must have at least 2 fingers!")
        self.finger_length=finger_length    #length of fingers
        self.finger_width=finger_width      #width of each finger
        self.finger_gap=finger_gap
        self.gapw = gapw                    #gap between "center pin" and gnd planes; scaled from defaults in draw() if None
        self.pinw = num_fingers*finger_width+ (num_fingers-1)*finger_gap    #effective center pin width sum of finger gaps and widths
        self.length=finger_length+finger_gap
        self.taper_length=taper_length
        self.total_length=finger_length+finger_gap+2.*taper_length

    def description(self):
        """Return a one-line, tab-separated summary of the capacitor parameters."""
        return "type:\t%s\tAssumed Capacitance:\t%f\t# of fingers:\t%d\tFinger Length:\t%f\tFinger Width:\t%f\tFinger Gap:\t%f\tTotal Pin Width:\t%f\tGap Width:\t%f\tTaper Length:\t%f" % (
            self.type,self.capacitance*1e15,self.num_fingers,self.finger_length,self.finger_width,self.finger_gap,self.pinw,self.gapw,self.taper_length
            )

    def draw(self,structure):
        """Draw the capacitor (taper in, outer gaps, finger gaps, taper out)
        at the structure's current anchor and advance it."""
        s=structure
        pinw=self.pinw
        # If gapw was not given, scale it so the local pin/gap ratio matches
        # the structure defaults.
        if self.gapw is None: self.gapw=self.pinw*s.defaults['gapw']/s.defaults['pinw']
        gapw=self.gapw
        CPWLinearTaper(structure,length=self.taper_length,start_pinw=s.defaults['pinw'],start_gapw=s.defaults['gapw'],stop_pinw=pinw,stop_gapw=gapw)
        start=structure.last
        center_width=self.num_fingers*self.finger_width+ (self.num_fingers-1)*self.finger_gap
        length=self.finger_length+self.finger_gap
        # Outer ground gaps above and below the finger region.
        gap1=[ (start[0],start[1]-center_width/2),
               (start[0]+length,start[1]-center_width/2),
               (start[0]+length,start[1]-center_width/2-gapw),
               (start[0],start[1]-center_width/2-gapw),
               (start[0],start[1]-center_width/2)
            ]

        gap2=[ (start[0],start[1]+center_width/2),
               (start[0]+length,start[1]+center_width/2),
               (start[0]+length,start[1]+center_width/2+gapw),
               (start[0],start[1]+center_width/2+gapw),
               (start[0],start[1]+center_width/2)
            ]

        gap1=rotate_pts(gap1,s.last_direction,start)
        gap2=rotate_pts(gap2,s.last_direction,start)
        stop=rotate_pt((start[0]+length,start[1]),s.last_direction,start)
        s.last=stop

        s.append(sdxf.PolyLine(gap1))
        s.append(sdxf.PolyLine(gap2))

        #draw finger gaps
        for ii in range(self.num_fingers-1):
            # Alternate finger orientation so fingers interdigitate.
            if ii%2==0:
                pts=self.left_finger_points(self.finger_width,self.finger_length,self.finger_gap)
            else:
                pts=self.right_finger_points(self.finger_width,self.finger_length,self.finger_gap)
            pts=translate_pts(pts,start)
            pts=translate_pts(pts,(0,ii*(self.finger_width+self.finger_gap)-self.pinw/2))
            pts=rotate_pts(pts,s.last_direction,start)
            s.append(sdxf.PolyLine(pts))

        #draw last little box to separate sides
        pts = [ (0,0),(0,self.finger_width),(self.finger_gap,self.finger_width),(self.finger_gap,0),(0,0)]
        pts=translate_pts(pts,start)
        #if odd number of fingers add box on left otherwise on right
        pts=translate_pts(pts,( ((self.num_fingers+1) %2)*(length-self.finger_gap),(self.num_fingers-1)*(self.finger_width+self.finger_gap)-self.pinw/2))
        pts=rotate_pts(pts,s.last_direction,start)
        s.append(sdxf.PolyLine(pts))

        CPWLinearTaper(s,length=self.taper_length,start_pinw=pinw,start_gapw=gapw,stop_pinw=s.defaults['pinw'],stop_gapw=s.defaults['gapw'])

    def left_finger_points(self,finger_width,finger_length,finger_gap):
        """Outline of the gap around one finger anchored on the left side
        (local coordinates; caller translates/rotates into place)."""
        pts= [  (0,0),
                (0,finger_width+finger_gap),
                (finger_length+finger_gap,finger_width+finger_gap),
                (finger_length+finger_gap,finger_width),
                (finger_gap,finger_width),
                (finger_gap,0),
                (0,0)
            ]

        return pts

    def right_finger_points(self,finger_width,finger_length,finger_gap):
        """Mirror of left_finger_points: gap around a finger anchored on the
        right side (local coordinates)."""
        pts = [ (finger_length+finger_gap,0),
                (finger_length+finger_gap,finger_width+finger_gap),
                (0,finger_width+finger_gap),
                (0,finger_width),
                (finger_length,finger_width),
                (finger_length,0),
                (finger_length+finger_gap,0)
                ]
        return pts

    def ext_Q(self,frequency,impedance=50,resonator_type=0.5):
        """External quality factor at ``frequency`` (GHz); 0 if no capacitance set.

        NOTE(review): uses Q = 1/(resonator_type*pi*q^2), whereas
        CPWGapCap.ext_Q and CPWLCoupler.ext_Q use Q = resonator_type*pi/q^2.
        One of the two conventions is presumably wrong -- confirm against the
        intended coupling-Q formula before relying on absolute values.
        """
        if self.capacitance==0:
            return 0
        frequency=frequency*1e9
        q=2.*pi*frequency*self.capacitance*impedance
        Q=0
        if q!=0:
            Q=1/(resonator_type*pi) *1/ (q**2)
        return Q
class CPWLCoupler:
    """A structure which is coupled to a CPW via an L coupler, used for medium to high Q hangers."""

    def __init__(self,coupler_length,separation,flipped=False,padding_type=None,pad_to_length=None,pinw=None,gapw=None,radius=None,spinw=None,sgapw=None,capacitance=0.0):
        # coupler_length/separation: geometry of the L coupling section
        # pinw/gapw/radius: coupled-resonator CPW parameters (defaults at draw time)
        # spinw/sgapw: feedline CPW parameters (taken from structure defaults at draw time)
        # capacitance: assumed coupling capacitance (F), used only by ext_Q()
        self.type='L'
        self.coupler_length=coupler_length
        self.separation=separation
        self.padding_type=padding_type
        self.pad_to_length=pad_to_length
        self.pinw=pinw
        self.gapw=gapw
        self.radius=radius
        self.spinw=spinw
        self.sgapw=sgapw
        self.capacitance=capacitance
        self.flipped=flipped

    def description(self):
        """Return a one-line, tab-separated summary of the coupler parameters."""
        return "Type:\t%s\tEstimated Capacitance:\t%f\tCoupler Length:\t%f\tCoupler Separation:\t%f\tPin Width:\t%f\tGap Width:\t%f\tRadius:\t%f\tFeedline Pin Width:\t%f\tFeedline Gap Width:\t%f\t" % (
            self.type,self.capacitance,self.coupler_length,self.separation,self.pinw,self.gapw,self.radius,self.spinw,self.sgapw
            )

    def draw(self,structure,padding_type=None,pad_to_length=0):
        """Draws the coupler and creates the new structure (self.coupled_structure) for building onto"""
        s=structure
        if self.pinw is None: self.pinw=s.defaults['pinw']
        if self.gapw is None: self.gapw=s.defaults['gapw']
        if self.radius is None: self.radius=s.defaults['radius']
        self.padding_type=padding_type
        self.pad_to_length=pad_to_length
        self.spinw=s.defaults['pinw']
        self.sgapw=s.defaults['gapw']

        start=s.last
        start_dir=s.last_direction
        # The coupled structure runs anti-parallel to the feedline.
        lstart_dir=start_dir+180

        if self.flipped: flip_sign=-1
        else:            flip_sign=1

        offset_length=0
        if padding_type=='center': offset_length=pad_to_length/2
        lstart=(offset_length+self.coupler_length+self.gapw+self.radius,flip_sign*self.separation)
        # BUG FIX: original read a bare ``gapw`` (undefined local) here,
        # raising NameError for padding_type=='right'; the attribute is self.gapw.
        if padding_type=='right':  lstart=(pad_to_length-self.gapw,lstart[1])
        lstart=rotate_pt(lstart,start_dir)
        lstart=translate_pt(lstart,start)

        self.coupled_structure=Structure(s.chip,start=lstart,direction=lstart_dir,layer=s.layer,color=s.color,defaults=s.defaults)
        cs=self.coupled_structure
        cs.defaults['pinw']=self.pinw
        cs.defaults['gapw']=self.gapw
        cs.defaults['radius']=self.radius

        #Continue the feedline
        self.feed_length=self.coupler_length+self.radius
        if (not self.pad_to_length is None) and (self.pad_to_length > self.feed_length):
            self.feed_length=self.pad_to_length

        CPWStraight(s,self.feed_length,self.spinw,self.sgapw)

        #make the coupler
        CPWGapCap(gap=self.gapw).draw(cs)
        CPWStraight(cs,self.coupler_length)
        CPWBend(cs,-90*flip_sign)

    def ext_Q(self,frequency,impedance=50,resonator_type=0.5):
        """External quality factor at ``frequency`` (GHz); 0 if no capacitance set."""
        if self.capacitance==0:
            return 0
        frequency=frequency*1e9
        q=2.*pi*frequency*self.capacitance*impedance
        Q=0
        if q!=0:
            Q=resonator_type*pi*1/(q**2)
        return Q
class CPWTee(Structure):
    """CPWTee makes a Tee structure with padding.

    After drawing, the instance itself is a new Structure anchored at the end
    of the stub (perpendicular to the feedline), so callers can continue
    building from the stub.
    """
    def __init__(self,structure,stub_length=None,feed_length=None,flipped=False,pinw=None,gapw=None,spinw=None,sgapw=None):
        """
        stub_length is from center
        flipped determines whether stub is on left or right of wrt current direction
        pinw/gapw are the usual for the stub
        spinw/sgapw are the usual for the continuing part
        """
        s=structure
        #print sgapw
        if pinw is None: pinw=s.defaults['pinw']
        if gapw is None: gapw=s.defaults['gapw']
        if spinw is None: spinw=s.defaults['pinw']
        if sgapw is None: sgapw=s.defaults['gapw']
        #print "pinw: %f, gapw: %f, spinw: %f, sgapw: %f" % (pinw,gapw,spinw,sgapw)

        #minimum feed_length is
        if (feed_length is None) or (feed_length < 2*gapw+pinw):
            feed_length=2*gapw+pinw

        #minimum stub_length is
        # NOTE(review): the guard tests stub_length < gapw+spinw but assigns
        # gapw+spinw/2 -- mismatch looks unintentional; confirm intended minimum.
        if (stub_length is None) or (stub_length < gapw+spinw):
            stub_length=gapw+spinw/2
        #print "pinw: %f, gapw: %f, spinw: %f, sgapw: %f" % (pinw,gapw,spinw,sgapw)

        start=s.last
        start_dir=s.last_direction
        # Stub direction: +90 deg off the feedline, or -90 deg when flipped
        # (shapes are also mirrored by drawing at angle+180).
        if flipped:
            lstart_dir=start_dir-90
            angle=start_dir+180
        else:
            lstart_dir=start_dir+90
            angle=start_dir

        #Bottom part of feed_line
        pts1=[ (-feed_length/2.,-spinw/2.), (-feed_length/2.,-sgapw-spinw/2.0), (feed_length/2.,-sgapw-spinw/2.0),(feed_length/2.,-spinw/2.), (-feed_length/2.,-spinw/2.)]
        #Top of feed_line (two pieces leaving an opening for the stub)
        pts2=[ (-feed_length/2,spinw/2.), (-pinw/2.-gapw,spinw/2.), (-pinw/2.-gapw,gapw+spinw/2.), (-feed_length/2.,gapw+spinw/2.), (-feed_length/2,spinw/2.) ]
        pts3=[ (feed_length/2,spinw/2.), (pinw/2.+gapw,spinw/2.), (pinw/2.+gapw,gapw+spinw/2.), (feed_length/2.,gapw+spinw/2.), (feed_length/2,spinw/2.) ]
        #stub
        pts4=[ (-pinw/2.,spinw/2.), (-pinw/2.,stub_length), (-pinw/2.-gapw,stub_length), (-pinw/2.-gapw,spinw/2.), (-pinw/2.,spinw/2.) ]
        pts5=[ (pinw/2.,spinw/2.), (pinw/2.,stub_length), (pinw/2.+gapw,stub_length), (pinw/2.+gapw,spinw/2.), (pinw/2.,spinw/2.) ]

        shapes=[pts1,pts2,pts3,pts4,pts5]

        center=orient_pt((feed_length/2.,0),s.last_direction,s.last)
        for pts in shapes:
            pts=orient_pts(pts,angle,center)
            s.append(sdxf.PolyLine(pts))

        # Advance the original structure past the tee, then re-anchor self at
        # the stub end so callers can continue building off the stub.
        s.last=orient_pt((feed_length,0),s.last_direction,s.last)
        lstart=orient_pt((stub_length,0),lstart_dir,center)

        Structure.__init__(self,s.chip,start=lstart,direction=lstart_dir,layer=s.layer,color=s.color,defaults=s.defaults)
        self.defaults['pinw']=pinw
        self.defaults['gapw']=gapw
class FingerCoupler(Structure):
    """Finger coupler a CPWTee plus finger capacitor...not used yet..."""
    def __init__(self,structure,cap_desc,stub_length=None,padding_length=None,flipped=False,pinw=None,gapw=None,taper_length=0,spinw=None,sgapw=None):
        # BUG FIX: the unbound base-class call was missing ``self`` as the
        # first argument, so ``structure`` was bound to self and every later
        # argument shifted by one.  Argument order otherwise preserved
        # (spinw/sgapw land in CPWTee's pinw/gapw slots, as the original wrote).
        CPWTee.__init__(self,structure,stub_length,padding_length,flipped,spinw,sgapw)
        # BUG FIX: original read structure['pinw'] / structure['gapw'];
        # defaults live in structure.defaults everywhere else in this file.
        if pinw is None: pinw=structure.defaults['pinw']
        if gapw is None: gapw=structure.defaults['gapw']

        # Taper to the capacitor dimensions, draw it, taper back out.
        CPWLinearTaper(self,taper_length,self.defaults['pinw'],cap_desc.pinw,self.defaults['gapw'],cap_desc.gapw)
        cap_desc.draw_cap(self)
        CPWLinearTaper(self,taper_length,cap_desc.pinw,pinw,cap_desc.gapw,gapw)
#===============================================================================
# NEW CLASSES FOR CHANNEL STRUCTURES & TWO-LAYER PHOTOLITHOGRAPHY
#===============================================================================
class LShapeAlignmentMarks:
    """L-shaped photolithography alignment marker."""

    def __init__(self, structure, width, armlength):
        """Draw an L of arm width ``width`` and arm length ``armlength`` at
        the structure's current anchor, then advance the anchor by armlength."""
        if width == 0 or armlength == 0:
            return

        s = structure
        origin = s.last
        x0, y0 = origin
        half = width / 2.
        reach = armlength - half  # arm extent measured from the anchor

        outline = [
            (x0 - half, y0 - half),
            (x0 + reach, y0 - half),
            (x0 + reach, y0 + half),
            (x0 + half, y0 + half),
            (x0 + half, y0 + reach),
            (x0 - half, y0 + reach),
            (x0 - half, y0 - half),
        ]
        outline = rotate_pts(outline, s.last_direction, origin)

        s.last = rotate_pt((x0 + armlength, y0), s.last_direction, origin)
        s.append(sdxf.PolyLine(outline))
#----------------------------------------------------------------------------
class ArrowAlignmentMarks_L1:
    """Arrow/triangle alignment mark for the first layer."""

    def __init__(self, structure, height, width, buffer=30):
        """Draw a triangle of the given height and base width, offset from the
        anchor by ``buffer``; advance the anchor by ``height``."""
        if height == 0 or width == 0:
            return

        s = structure
        origin = s.last
        x0, y0 = origin
        base_x = x0 + buffer

        arrow = [(base_x, y0),
                 (base_x, y0 + width),
                 (base_x + height, y0 + width / 2),
                 (base_x, y0)]
        arrow = rotate_pts(arrow, s.last_direction, origin)

        s.last = rotate_pt((x0 + height, y0), s.last_direction, origin)
        s.append(sdxf.PolyLine(arrow))
#----------------------------------------------------------------------------
class ArrowAlignmentMarks_L2:
    """Arrow/triangle alignment mark for the second layer: a rectangle with a
    triangle whose tip points back at it (complements ArrowAlignmentMarks_L1)."""

    def __init__(self, structure, height, width, buffer=30):
        """Draw the box+triangle pair at the current anchor; advance the
        anchor by ``height``."""
        if height == 0 or width == 0:
            return

        s = structure
        origin = s.last
        x0, y0 = origin
        box_end = x0 + buffer + height

        box = [(x0, y0), (x0, y0 + width),
               (box_end, y0 + width), (box_end, y0), (x0, y0)]
        arrow = [(box_end, y0 + width / 2),
                 (box_end + height, y0),
                 (box_end + height, y0 + width),
                 (box_end, y0 + width / 2)]

        box = rotate_pts(box, s.last_direction, origin)
        arrow = rotate_pts(arrow, s.last_direction, origin)

        s.last = rotate_pt((x0 + height, y0), s.last_direction, origin)
        s.append(sdxf.PolyLine(box))
        s.append(sdxf.PolyLine(arrow))
#----------------------------------------------------------------------------
class Channel:
    """A simple channel of given width and length"""

    def __init__(self, structure, length, channelw):
        """Append a rectangular channel of width ``channelw`` and length
        ``length`` at the current anchor, then advance the anchor."""
        if length == 0 or channelw == 0:
            return

        s = structure
        origin = s.last
        x0, y0 = origin
        half = channelw / 2.

        outline = [(x0, y0 - half),
                   (x0 + length, y0 - half),
                   (x0 + length, y0 + half),
                   (x0, y0 + half),
                   (x0, y0 - half)]
        outline = rotate_pts(outline, s.last_direction, origin)

        s.last = rotate_pt((x0 + length, y0), s.last_direction, origin)
        s.append(sdxf.PolyLine(outline))
#----------------------------------------------------------------------------
class ChannelLinearTaper:
    """A section of channel which (linearly) tapers from width=start_channelw
    to stop_channelw over length=length"""

    def __init__(self, structure, length, start_channelw, stop_channelw):
        if length == 0:
            return

        s = structure
        origin = s.last
        x0, y0 = origin

        # Symmetric trapezoid about the channel axis.
        trapezoid = [
            (x0, y0 - start_channelw / 2),
            (x0 + length, y0 - stop_channelw / 2),
            (x0 + length, y0 + stop_channelw / 2),
            (x0, y0 + start_channelw / 2),
            (x0, y0 - start_channelw / 2),
        ]
        trapezoid = rotate_pts(trapezoid, s.last_direction, origin)

        s.append(sdxf.PolyLine(trapezoid))
        s.last = rotate_pt((x0 + length, y0), s.last_direction, origin)
#------------------------------------------------------------------------------------------
#-------------------------------------------------------------------------------------------------
class ChannelLauncher:
    """Launcher for a channel: a pad of length=pad_length and width=padwidth,
    a taper of length=taper_length from padwidth down to channelwidth, and
    straight channel padding out to pad_to_length."""

    def __init__(self, structure, flipped=False, pad_length=500, taper_length=400, pad_to_length=1000, padwidth=300, channelwidth=None):
        s = structure

        # Leftover straight channel after pad + taper; clamp at zero.
        padding = max(pad_to_length - pad_length - taper_length, 0)
        self.length = pad_length + taper_length if padding == 0 else pad_to_length

        if flipped:
            # Channel first, widening taper, pad last.
            Channel(s, length=padding, channelw=channelwidth)
            ChannelLinearTaper(s, length=taper_length, start_channelw=channelwidth, stop_channelw=padwidth)
            Channel(s, length=pad_length, channelw=padwidth)
        else:
            # Pad first, narrowing taper, channel last.
            Channel(s, length=pad_length, channelw=padwidth)
            ChannelLinearTaper(s, length=taper_length, start_channelw=padwidth, stop_channelw=channelwidth)
            Channel(s, length=padding, channelw=channelwidth)
#-------------------------------------------------------------------------------------------------
class ChannelBend:
    """A Channel bend - adapted from CPWBend.

    Reuses the CPW bend machinery with pinw=0 and gapw=channelw/2, so the
    "gap" arcs trace the two channel walls.
    """
    def __init__(self,structure,turn_angle,channelw=None,radius=None,polyarc=True,segments=60):
        """creates a channel bend with channelw/radius
            @param turn_angle: turn_angle is in degrees, positive is CCW, negative is CW
        """
        #load default values if necessary
        if turn_angle==0: return

        s=structure

        if radius is None: radius=s.defaults['radius']
        if channelw is None: channelw=s.defaults['channelw']

        self.structure=structure
        self.turn_angle=turn_angle
        self.channelw=channelw
        self.radius=radius
        self.segments=segments
        # Model the channel as a CPW with zero center pin: the whole channel
        # width is "gap".
        self.pinw=0
        self.gapw=channelw/2

        self.start=s.last
        self.start_angle=s.last_direction
        self.stop_angle=self.start_angle+self.turn_angle

        if turn_angle>0: self.asign=1
        else:            self.asign=-1

        #DXF uses the angle of the radial vector for its start and stop angles
        #so we have to rotate our angles by 90 degrees to get them right
        #also it only knows about arcs with CCW sense to them, so we have to rotate our angles appropriately
        self.astart_angle=self.start_angle-self.asign*90
        self.astop_angle=self.stop_angle-self.asign*90
        #calculate location of Arc center
        self.center=rotate_pt( (self.start[0],self.start[1]+self.asign*self.radius),self.start_angle,self.start)

        if polyarc: self.poly_arc_bend()
        else:       self.arc_bend()

        # Advance the structure anchor to the end of the bend.
        self.structure.last=rotate_pt(self.start,self.stop_angle-self.start_angle,self.center)
        self.structure.last_direction=self.stop_angle

    def arc_bend(self):
        """Draw the bend as true DXF Arc entities plus end-cap lines."""
        #print "start: %d, stop: %d" % (start_angle,stop_angle)

        if self.turn_angle>0:
            self.astart_angle=self.start_angle-90
            self.astop_angle=self.stop_angle-90
            #calculate location of Arc center
            self.center=rotate_pt( (self.start[0],self.start[1]+self.radius),self.start_angle,self.start)
        else:
            # DXF arcs are always CCW, so swap start/stop for CW bends.
            self.astart_angle=self.stop_angle+90
            self.astop_angle=self.start_angle+90

        #make endlines for inner arc
        #start first gap
        #points1=[   (self.start[0],self.start[1]+self.pinw/2.),
        #            (self.start[0],self.start[1]+self.pinw/2.+self.gapw)
        #        ]
        # End-cap line across the full channel width at the bend start...
        points1=[   (self.start[0],self.start[1]+self.gapw),
                    (self.start[0],self.start[1]-self.gapw)
                ]

        points1=rotate_pts(points1,self.start_angle,self.start)
        # ...and the matching cap at the bend end.
        points2=rotate_pts(points1,self.stop_angle-self.start_angle,self.center)

        #start 2nd gap
        #points3=[   (self.start[0],self.start[1]-self.pinw/2.),
        #            (self.start[0],self.start[1]-self.pinw/2.-self.gapw)
        #        ]
        #points3=rotate_pts(points3,self.start_angle,self.start)
        #points4=rotate_pts(points3,self.stop_angle-self.start_angle,self.center)

        #make inner arcs
        self.structure.append(sdxf.Line(points1))
        self.structure.append(sdxf.Arc(self.center,self.radius+self.pinw/2.,self.astart_angle,self.astop_angle))
        self.structure.append(sdxf.Arc(self.center,self.radius+self.pinw/2.+self.gapw,self.astart_angle,self.astop_angle))
        self.structure.append(sdxf.Line(points2))

        #self.structure.append(sdxf.Line(points3))
        #self.structure.append(sdxf.Arc(self.center,self.radius-self.pinw/2.,self.astart_angle,self.astop_angle))
        #self.structure.append(sdxf.Arc(self.center,self.radius-self.pinw/2.-self.gapw,self.astart_angle,self.astop_angle))
        #self.structure.append(sdxf.Line(points4))

    def poly_arc_bend(self):
        """Draw the bend as two closed polyline annular sectors (one per
        channel wall region), each approximated with ``segments`` points."""
        #lower gap
        pts1=arc_pts(self.astart_angle,self.astop_angle,self.radius+self.pinw/2.+self.gapw,self.segments)
        pts1.extend(arc_pts(self.astop_angle,self.astart_angle,self.radius+self.pinw/2.,self.segments))
        pts1.append(pts1[0])

        pts2=arc_pts(self.astart_angle,self.astop_angle,self.radius-self.pinw/2.,self.segments)
        pts2.extend(arc_pts(self.astop_angle,self.astart_angle,self.radius-self.pinw/2.-self.gapw,self.segments))
        pts2.append(pts2[0])

        self.structure.append(sdxf.PolyLine(translate_pts(pts1,self.center)))
        self.structure.append(sdxf.PolyLine(translate_pts(pts2,self.center)))
#-------------------------------------------------------------------------------------------------
class ChannelWiggles:
    """Channel Wiggles (meanders) = adapted from CPWWiggles"""
    def __init__(self,structure,num_wiggles,total_length,start_up=True,radius=None,channelw=None,endbending1=True,endbending2=True,inverted=False):
        """
        @param num_wiggles: a wiggle is from the center pin up/down and back
        @param total_length: The total length of the meander
        @param start_up: Start with a CCW 90 degree turn or a CW turn
        @param endbending: gives you the option of wheither or not to have an additional 90 degree bend back to horizontal at the two ends
        """
        s=structure
        start=structure.last
        if channelw is None:   channelw=s.defaults['channelw']
        if radius is None: radius=s.defaults['radius']

        #calculate vertical segment length:
        #total length=number of 180 degree arcs + number of vertical segs + vertical radius spacers
        #total_length=(1+num_wiggles)*(pi*radius)+2*num_wiggles*vlength+2*(num_wiggles-1)*radius
        vlength=(total_length-((1+num_wiggles)*(pi*radius)+2*(num_wiggles-1)*radius))/(2*num_wiggles)
        if vlength<0: print("Warning: length of vertical segments is less than 0, increase total_length or decrease num_wiggles")

        if start_up:  asign=1
        else:         asign=-1

        if endbending1:
            ChannelBend(s,asign*90,channelw,radius)
        for ii in range(num_wiggles):
            isign=2*(ii%2)-1
            if inverted:
                isign=-(2*(ii%2)-1)
            Channel(s,vlength,channelw)
            ChannelBend(s,isign*asign*180,channelw,radius)
            Channel(s,vlength,channelw)
            if ii<num_wiggles-1:
                Channel(s,2*radius,channelw)
        # NOTE(review): this reuses ``isign`` from the final loop iteration;
        # if num_wiggles==0 and endbending2 is True, isign is undefined and
        # this raises NameError -- confirm num_wiggles >= 1 at call sites.
        if endbending2:
            ChannelBend(s,-isign*asign*90,channelw,radius)
#-------------------------------------------------------------------------------------------------
class ChannelTee(Structure):
    """ChannelTee makes a Tee structure with padding.

    After drawing, the instance itself is a new Structure anchored at the end
    of the stub (perpendicular to the feedline), so callers can continue
    building from the stub.
    """
    def __init__(self,structure,stub_length=None,feed_length=None,flipped=False,channelw=None):
        """
        stub_length is from center
        flipped determines whether stub is on left or right of wrt current direction
        channelw is the channel width for both the feedline and the stub
        """
        s=structure

        if channelw is None: channelw=s.defaults['channelw']

        #minimum feed_length is
        if (feed_length is None) or (feed_length < channelw):
            feed_length=channelw

        #minimum stub_length is
        if (stub_length is None) or (stub_length < channelw):
            stub_length=channelw
        #print "pinw: %f, gapw: %f, spinw: %f, sgapw: %f" % (pinw,gapw,spinw,sgapw)

        start=s.last
        start_dir=s.last_direction
        # Stub direction: +90 deg off the feedline, or -90 deg when flipped
        # (shapes are also mirrored by drawing at angle+180).
        if flipped:
            lstart_dir=start_dir-90
            angle=start_dir+180
        else:
            lstart_dir=start_dir+90
            angle=start_dir

        #feed_line
        pts1=[ (-feed_length/2.,-channelw/2.), (-feed_length/2.,channelw/2.), (feed_length/2.,channelw/2.),(feed_length/2.,-channelw/2.), (-feed_length/2.,-channelw/2.)]
        #stub
        pts2=[ (-channelw/2.,channelw/2),(-channelw/2.,stub_length),(channelw/2.,stub_length),(channelw/2.,channelw/2.),(-channelw/2.,channelw/2.) ]

        shapes=[pts1,pts2]

        center=orient_pt((feed_length/2.,0),s.last_direction,s.last)
        for pts in shapes:
            pts=orient_pts(pts,angle,center)
            s.append(sdxf.PolyLine(pts))

        # Advance the original structure past the tee, then re-anchor self at
        # the stub end so callers can continue building off the stub.
        s.last=orient_pt((feed_length,0),s.last_direction,s.last)
        lstart=orient_pt((stub_length,0),lstart_dir,center)

        Structure.__init__(self,s.chip,start=lstart,direction=lstart_dir,layer=s.layer,color=s.color,defaults=s.defaults)
        self.defaults['channelw']=channelw
#----------------------------------------------------------------------------------
class CenterPinTee(Structure):
    """CenterPinTee makes a CPW Tee structure with coupling notches
    (microchannels) attached alongside the stub.

    After drawing, the instance itself is a new Structure anchored at the end
    of the stub, so callers can continue building from the stub.
    """
    def __init__(self,structure,stub_length=None,feed_length=None,flipped=False,pinw=None,gapw=None,spinw=None,sgapw=None,notchwidth=10,couplinglength=100,channelwidth=8):
        """
        stub_length is from center
        flipped determines whether stub is on left or right of wrt current direction
        pinw/gapw are the usual for the stub
        spinw/sgapw are the usual for the continuing part
        """
        s=structure
        #print sgapw
        if pinw is None: pinw=s.defaults['pinw']
        if gapw is None: gapw=s.defaults['gapw']
        if spinw is None: spinw=s.defaults['pinw']
        if sgapw is None: sgapw=s.defaults['gapw']
        #print "pinw: %f, gapw: %f, spinw: %f, sgapw: %f" % (pinw,gapw,spinw,sgapw)

        #minimum feed_length is
        if (feed_length is None) or (feed_length < 2*gapw+pinw):
            feed_length=2*gapw+pinw

        #minimum stub_length is
        # NOTE(review): guard tests gapw+spinw but assigns gapw+spinw/2 --
        # same mismatch as CPWTee; confirm intended minimum.
        if (stub_length is None) or (stub_length < gapw+spinw):
            stub_length=gapw+spinw/2
        #print "pinw: %f, gapw: %f, spinw: %f, sgapw: %f" % (pinw,gapw,spinw,sgapw)

        start=s.last
        start_dir=s.last_direction
        # Stub direction: +90 deg off the feedline, or -90 deg when flipped.
        if flipped:
            lstart_dir=start_dir-90
            angle=start_dir+180
        else:
            lstart_dir=start_dir+90
            angle=start_dir

        #Bottom part of feed_line
        pts1=[ (-feed_length/2.,-spinw/2.), (-feed_length/2.,-sgapw-spinw/2.0), (feed_length/2.,-sgapw-spinw/2.0),(feed_length/2.,-spinw/2.), (-feed_length/2.,-spinw/2.)]
        #Top of feed_line (two pieces leaving an opening for the stub)
        pts2=[ (-feed_length/2,spinw/2.), (-pinw/2.-gapw,spinw/2.), (-pinw/2.-gapw,gapw+spinw/2.), (-feed_length/2.,gapw+spinw/2.), (-feed_length/2,spinw/2.) ]
        pts3=[ (feed_length/2,spinw/2.), (pinw/2.+gapw,spinw/2.), (pinw/2.+gapw,gapw+spinw/2.), (feed_length/2.,gapw+spinw/2.), (feed_length/2,spinw/2.) ]
        #stub
        pts4=[ (-pinw/2.,spinw/2.), (-pinw/2.,stub_length), (-pinw/2.-gapw,stub_length), (-pinw/2.-gapw,spinw/2.), (-pinw/2.,spinw/2.) ]
        pts5=[ (pinw/2.,spinw/2.), (pinw/2.,stub_length), (pinw/2.+gapw,stub_length), (pinw/2.+gapw,spinw/2.), (pinw/2.,spinw/2.) ]
        # Coupling notches continuing past the stub end on both sides.
        pts6=[ (-pinw/2.,stub_length), (-pinw/2.,stub_length+couplinglength), (-pinw/2.-notchwidth,stub_length+couplinglength), (-pinw/2.-notchwidth,stub_length), (-pinw/2.,stub_length) ]
        pts7=[ (pinw/2.,stub_length), (pinw/2.,stub_length+couplinglength), (pinw/2.+notchwidth,stub_length+couplinglength), (pinw/2.+notchwidth,stub_length), (pinw/2.,stub_length) ]

        shapes=[pts1,pts2,pts3,pts4,pts5,pts6,pts7]

        # NOTE(review): unlike CPWTee (which centers at feed_length/2), the
        # shapes here are oriented about the anchor itself -- confirm intended.
        center=orient_pt((0,0),s.last_direction,s.last)
        for pts in shapes:
            pts=orient_pts(pts,angle,center)
            s.append(sdxf.PolyLine(pts))

        # Advance the original structure past the tee, then re-anchor self at
        # the stub end so callers can continue building off the stub.
        s.last=orient_pt((feed_length,0),s.last_direction,s.last)
        lstart=orient_pt((stub_length,0),lstart_dir,center)

        Structure.__init__(self,s.chip,start=lstart,direction=lstart_dir,layer=s.layer,color=s.color,defaults=s.defaults)
        self.defaults['pinw']=pinw
        self.defaults['gapw']=gapw
#-------------------------------------------------------------------------------------------------
class CCDChannelTee(Structure):
    """CCDChannelTee makes a tee structure with microchannels attached;
    This is the first layer structure, i.e. everything that's connected
    to the center pin of the cavity, second layer see below

    Parameters (lengths in drawing units):
        structure    -- parent Structure; provides anchor point/direction and CPW defaults
        stub_length  -- stub length measured from the feed-line center (clamped to >= gapw+spinw/2)
        feed_length  -- straight feed section length (clamped to >= 2*gapw+pinw)
        flipped      -- mirror the tee to the other side of the feed line
        pinw/gapw    -- CPW pin/gap widths of the stub
        spinw/sgapw  -- CPW pin/gap widths of the straight feed section
        ccdwidth/ccdlength -- outer dimensions of the CCD region
        channelwidth -- width of one microchannel finger
    """
    def __init__(self,structure,stub_length=None,feed_length=None,flipped=False,pinw=None,gapw=None,spinw=None,sgapw=None,ccdwidth=100,ccdlength=100,channelwidth=8):
        s=structure
        #print sgapw
        # fall back to the parent structure's CPW defaults
        if pinw is None: pinw=s.defaults['pinw']
        if gapw is None: gapw=s.defaults['gapw']
        if spinw is None: spinw=s.defaults['pinw']
        if sgapw is None: sgapw=s.defaults['gapw']
        #minimum feed_length is
        if (feed_length is None) or (feed_length < 2*gapw+pinw):
            feed_length=2*gapw+pinw
        #minimum stub_length is
        if (stub_length is None) or (stub_length < gapw+spinw):
            stub_length=gapw+spinw/2
        start=s.last
        start_dir=s.last_direction
        # the stub leaves perpendicular to the feed; 'flipped' mirrors the tee
        if flipped:
            lstart_dir=start_dir-90
            angle=start_dir+180
        else:
            lstart_dir=start_dir+90
            angle=start_dir
        #Bottom part of feed_line
        pts1=[ (-feed_length/2.,-spinw/2.), (-feed_length/2.,-sgapw-spinw/2.0), (feed_length/2.,-sgapw-spinw/2.0),(feed_length/2.,-spinw/2.), (-feed_length/2.,-spinw/2.)]
        #Top of feed_line
        pts2=[ (-feed_length/2,spinw/2.), (-pinw/2.-gapw,spinw/2.), (-pinw/2.-gapw,gapw+spinw/2.), (-feed_length/2.,gapw+spinw/2.), (-feed_length/2,spinw/2.) ]
        pts3=[ (feed_length/2,spinw/2.), (pinw/2.+gapw,spinw/2.), (pinw/2.+gapw,gapw+spinw/2.), (feed_length/2.,gapw+spinw/2.), (feed_length/2,spinw/2.) ]
        #stub
        pts4=[ (-pinw/2.,spinw/2.), (-pinw/2.,stub_length), (-pinw/2.-gapw,stub_length), (-pinw/2.-gapw,spinw/2.), (-pinw/2.,spinw/2.) ]
        pts5=[ (pinw/2.,spinw/2.), (pinw/2.,stub_length), (pinw/2.+gapw,stub_length), (pinw/2.+gapw,spinw/2.), (pinw/2.,spinw/2.) ]
        #channels/CCD
        pts6=[(-pinw/2.,stub_length),(-pinw/2.,stub_length+gapw),(-pinw/2.-ccdwidth/2.,stub_length+gapw),(-pinw/2.-ccdwidth/2.,stub_length),(-pinw/2.,stub_length)]
        pts7=[(pinw/2.,stub_length),(pinw/2.,stub_length+gapw),(pinw/2.+ccdwidth/2.,stub_length+gapw),(pinw/2.+ccdwidth/2.,stub_length),(pinw/2.,stub_length)]
        pts8=[(-pinw/2.-ccdwidth/2.+gapw,stub_length+gapw),(-pinw/2.-ccdwidth/2.+gapw,stub_length+gapw+ccdlength-gapw),(-pinw/2.-ccdwidth/2.,stub_length+gapw+ccdlength-gapw),(-pinw/2.-ccdwidth/2.,stub_length+gapw),(-pinw/2.-ccdwidth/2.+gapw,stub_length+gapw)]
        pts9=[(pinw/2.+ccdwidth/2.-gapw,stub_length+gapw),(pinw/2.+ccdwidth/2.-gapw,stub_length+gapw+ccdlength-gapw),(pinw/2.+ccdwidth/2.,stub_length+gapw+ccdlength-gapw),(pinw/2.+ccdwidth/2.,stub_length+gapw),(pinw/2.+ccdwidth/2.-gapw,stub_length+gapw)]
        pts10=[(-pinw/2.,stub_length+ccdlength),(-pinw/2.,stub_length+gapw+ccdlength),(-pinw/2.-ccdwidth/2.,stub_length+gapw+ccdlength),(-pinw/2.-ccdwidth/2.,stub_length+ccdlength),(-pinw/2.,stub_length+ccdlength)]
        pts11=[(pinw/2.,stub_length+ccdlength),(pinw/2.,stub_length+gapw+ccdlength),(pinw/2.+ccdwidth/2.,stub_length+gapw+ccdlength),(pinw/2.+ccdwidth/2.,stub_length+ccdlength),(pinw/2.,stub_length+ccdlength)]
        shapes=[pts1,pts2,pts3,pts4,pts5,pts6,pts7,pts8,pts9,pts10,pts11]
        # 'numberofchannels' counts finger+space pairs that fit across the CCD opening;
        # padding centers the resulting comb inside the opening
        numberofchannels=(ccdwidth-2*gapw+pinw-channelwidth)/(2*channelwidth)
        numberofchannels=int(round(float(numberofchannels)))
        totalchannelwidth=(2*numberofchannels-1)*channelwidth
        padding=((ccdwidth+pinw-2*gapw)-totalchannelwidth)/2.
        innerwidthstart=-pinw/2.-ccdwidth/2.+2*channelwidth+gapw #inner width of structure measured from left
        self.numberofchannels=numberofchannels
        self.channelwidth=channelwidth
        # one rectangular outline per channel finger, stepped 2*channelwidth apart
        for j in range(numberofchannels):
            pts_temp=[(innerwidthstart+channelwidth+padding,stub_length+gapw+channelwidth),
                      (innerwidthstart+channelwidth+padding,stub_length+gapw+ccdlength-2*channelwidth-gapw),
                      (innerwidthstart+padding,stub_length+gapw+ccdlength-2*channelwidth-gapw),
                      (innerwidthstart+padding,stub_length+gapw+channelwidth),
                      (innerwidthstart+channelwidth+padding,stub_length+gapw+channelwidth)]
            pts_temp=translate_pts(pts_temp,((j-1)*2*channelwidth,0))
            shapes.append(pts_temp)
        # cross bar closing the comb at the far end
        pts12=[(-innerwidthstart-padding+2*channelwidth,stub_length+gapw+ccdlength-2*channelwidth-gapw),
               (-innerwidthstart-padding+2*channelwidth,stub_length+gapw+ccdlength-2*channelwidth-gapw+channelwidth),
               (innerwidthstart+padding-2*channelwidth,stub_length+gapw+ccdlength-2*channelwidth-gapw+channelwidth),
               (innerwidthstart+padding-2*channelwidth,stub_length+gapw+ccdlength-2*channelwidth-gapw),
               (-innerwidthstart-padding+2*channelwidth,stub_length+gapw+ccdlength-2*channelwidth-gapw)]
        shapes.append(pts12)
        # rotate/translate every outline into the current drawing frame and emit it
        center=orient_pt((0,0),s.last_direction,s.last)
        for pts in shapes:
            pts=orient_pts(pts,angle,center)
            s.append(sdxf.PolyLine(pts))
        # advance the feed line past the tee; re-anchor this Structure at the stub end
        s.last=orient_pt((feed_length,0),s.last_direction,s.last)
        lstart=orient_pt((stub_length,0),lstart_dir,center)
        Structure.__init__(self,s.chip,start=lstart,direction=lstart_dir,layer=s.layer,color=s.color,defaults=s.defaults)
        self.defaults['pinw']=pinw
        self.defaults['gapw']=gapw
#-------------------------------------------------------------------------------------------------
class CCDChannelTeeL2(Structure):
    """CCDChannelTee makes a tee structure with microchannels attached
    this is the second layer for the thin electrodes

    Draws only the thin-electrode outlines of CCDChannelTee, inset by
    `electrodewidth` from the first-layer channel edges; the geometry
    parameters must match the first-layer CCDChannelTee they overlay.
    """
    def __init__(self,structure,stub_length=None,feed_length=None,flipped=False,pinw=None,gapw=None,spinw=None,sgapw=None,ccdwidth=100,ccdlength=100,channelwidth=8,electrodewidth=3):
        """
        stub_length is from center
        flipped determines whether stub is on left or right of wrt current direction
        pinw/gapw are the usual for the stub
        spinw/sgapw are the usual for the continuing part
        electrodewidth is the inset of the thin electrode from the channel edge
        """
        s=structure
        #print sgapw
        # fall back to the parent structure's CPW defaults
        if pinw is None: pinw=s.defaults['pinw']
        if gapw is None: gapw=s.defaults['gapw']
        if spinw is None: spinw=s.defaults['pinw']
        if sgapw is None: sgapw=s.defaults['gapw']
        #print "pinw: %f, gapw: %f, spinw: %f, sgapw: %f" % (pinw,gapw,spinw,sgapw)
        #minimum feed_length is
        if (feed_length is None) or (feed_length < 2*gapw+pinw):
            feed_length=2*gapw+pinw
        #minimum stub_length is
        if (stub_length is None) or (stub_length < gapw+spinw):
            stub_length=gapw+spinw/2
        #print "pinw: %f, gapw: %f, spinw: %f, sgapw: %f" % (pinw,gapw,spinw,sgapw)
        start=s.last
        start_dir=s.last_direction
        if flipped:
            lstart_dir=start_dir-90
            angle=start_dir+180
        else:
            # NOTE(review): unlike CCDChannelTee (which uses start_dir+90 here),
            # the unflipped stub direction is start_dir -- confirm this is intended.
            lstart_dir=start_dir
            angle=start_dir
        #useful definitions
        # same comb layout math as CCDChannelTee so the layers line up
        numberofchannels=(ccdwidth-2*gapw+pinw-channelwidth)/(2*channelwidth)
        numberofchannels=int(round(float(numberofchannels)))
        totalchannelwidth=(2*numberofchannels-1)*channelwidth
        padding=((ccdwidth+pinw-2*gapw)-totalchannelwidth)/2.
        innerwidthstart=-pinw/2.-ccdwidth/2.+2*channelwidth+gapw #inner width of structure measured from left
        self.numberofchannels=numberofchannels
        self.channelwidth=channelwidth
        shapes=[]
        #make the fingers
        for j in range(numberofchannels):
            # each finger outline is the first-layer channel shrunk by electrodewidth
            pts_temp=[(innerwidthstart+channelwidth+padding-electrodewidth,stub_length+gapw+channelwidth+electrodewidth),
                      (innerwidthstart+channelwidth+padding-electrodewidth,stub_length+gapw+ccdlength-2*channelwidth-gapw+electrodewidth),
                      (innerwidthstart+padding+electrodewidth,stub_length+gapw+ccdlength-2*channelwidth-gapw+electrodewidth),
                      (innerwidthstart+padding+electrodewidth,stub_length+gapw+channelwidth+electrodewidth),
                      (innerwidthstart+channelwidth+padding-electrodewidth,stub_length+gapw+channelwidth+electrodewidth)]
            pts_temp=translate_pts(pts_temp,((j-1)*2*channelwidth,0))
            shapes.append(pts_temp)
        # thin cross bar at the far end of the comb
        pts1=[(-innerwidthstart+2*channelwidth-padding-electrodewidth,stub_length+gapw+ccdlength-2*channelwidth-gapw+electrodewidth),
              (-innerwidthstart+2*channelwidth-padding-electrodewidth,stub_length+gapw+ccdlength-2*channelwidth-gapw+channelwidth-electrodewidth),
              (innerwidthstart-2*channelwidth+padding+electrodewidth,stub_length+gapw+ccdlength-2*channelwidth-gapw+channelwidth-electrodewidth),
              (innerwidthstart-2*channelwidth+padding+electrodewidth,stub_length+gapw+ccdlength-2*channelwidth-gapw+electrodewidth),
              (-innerwidthstart+2*channelwidth-padding-electrodewidth,stub_length+gapw+ccdlength-2*channelwidth-gapw+electrodewidth)]
        shapes.append(pts1)
        # rotate/translate outlines into the drawing frame and emit them
        center=orient_pt((0,0),s.last_direction,s.last)
        for pts in shapes:
            pts=orient_pts(pts,angle,center)
            s.append(sdxf.PolyLine(pts))
        s.last=orient_pt((feed_length,0),s.last_direction,s.last)
        lstart=orient_pt((stub_length,0),lstart_dir,center)
        Structure.__init__(self,s.chip,start=lstart,direction=lstart_dir,layer=s.layer,color=s.color,defaults=s.defaults)
        self.defaults['pinw']=pinw
        self.defaults['gapw']=gapw
#-------------------------------------------------------------------------------------------------
class ChannelReservoirL1(Structure):
    """ChannelReservoir - first layer
    width=total width of reservoir
    length=total length of reservoir
    channelw=width of individual channels

    Draws a ladder of short channel stubs on both sides of a central
    channel; `length` is snapped to a whole number of channel periods.
    """
    def __init__(self,structure,flipped=False,width=100,length=100,channelw=8):
        s=structure
        start=s.last
        start_dir=s.last_direction
        if flipped:
            lstart_dir=start_dir-90
            angle=start_dir+180
        else:
            lstart_dir=start_dir+90
            angle=start_dir
        #note: numberofchannels is twice the true number of channels since
        #it also contains the spacing between the channels
        numberofchannels=length/(2*channelw)
        numberofchannels=int(round(float(numberofchannels)))
        # snap length to an integer number of channel periods
        length=numberofchannels*2*channelw-channelw
        self.numberofchannels=numberofchannels
        # one channel stub on each side of the center line
        leftchannel=[(-width/2.,0),(-channelw/2.,0),(-channelw/2.,channelw),(-width/2.,channelw),(-width/2.,0)]
        rightchannel=[(width/2.,0),(channelw/2.,0),(channelw/2.,channelw),(width/2.,channelw),(width/2.,0)]
        # add the first channels on lhs and rhs side of center
        shapes=[leftchannel,rightchannel]
        # add the other channels by translation
        for j in range(1,numberofchannels):
            pts_lhs=translate_pts(leftchannel,(0,j*2*channelw))
            pts_rhs=translate_pts(rightchannel,(0,j*2*channelw))
            shapes.append(pts_lhs)
            shapes.append(pts_rhs)
        # central channel running the full (snapped) length
        centerbox=[(-channelw/2,0),(channelw/2.,0),(channelw/2.,length),(-channelw/2.,length),(-channelw/2.,0)]
        shapes.append(centerbox)
        center=orient_pt((0,0),s.last_direction,s.last)
        for pts in shapes:
            pts=orient_pts(pts,angle,center)
            s.append(sdxf.PolyLine(pts))
        s.last=orient_pt((0,length),s.last_direction,s.last)
        lstart=orient_pt((0,0),lstart_dir,center)
        Structure.__init__(self,s.chip,start=lstart,direction=lstart_dir,layer=s.layer,color=s.color,defaults=s.defaults)
#-------------------------------------------------------------------------------------------------
class ChannelReservoirL2(Structure):
    """ChannelReservoir - second layer
    width=total width of reservoir
    length=total length of reservoir
    channelw=width of individual channels

    Thin-electrode layer matching ChannelReservoirL1: each electrode is a
    strip of width `electrodewidth` centered inside the corresponding
    first-layer channel, so the geometry parameters must match L1.
    """
    def __init__(self,structure,flipped=False,width=100,length=100,channelw=8,electrodewidth=2):
        s=structure
        start=s.last
        start_dir=s.last_direction
        if flipped:
            lstart_dir=start_dir-90
            angle=start_dir+180
        else:
            lstart_dir=start_dir+90
            angle=start_dir
        #note: numberofchannels is twice the true number of channels since
        #it also contains the spacing between the channels
        numberofchannels=length/(2*channelw)
        numberofchannels=int(round(float(numberofchannels)))
        length=numberofchannels*2*channelw-channelw
        self.numberofchannels=numberofchannels
        # delta centers the thin electrode inside the wider L1 channel
        delta=(channelw-electrodewidth)/2.
        leftchannel=[(-width/2.+delta,delta),(-channelw/2.+delta,delta),(-channelw/2.+delta,delta+electrodewidth),(-width/2.+delta,delta+electrodewidth),(-width/2.+delta,delta)]
        rightchannel=[(width/2.-delta,delta),(channelw/2.-delta,delta),(channelw/2.-delta,delta+electrodewidth),(width/2.-delta,delta+electrodewidth),(width/2.-delta,delta)]
        # add the first channels on lhs and rhs side of center
        shapes=[leftchannel,rightchannel]
        # add the other channels by translation
        for j in range(1,numberofchannels):
            pts_lhs=translate_pts(leftchannel,(0,j*2*channelw))
            pts_rhs=translate_pts(rightchannel,(0,j*2*channelw))
            shapes.append(pts_lhs)
            shapes.append(pts_rhs)
        # central electrode strip running the full length
        centerbox=[(-electrodewidth/2,0),(electrodewidth/2.,0),(electrodewidth/2.,length),(-electrodewidth/2.,length),(-electrodewidth/2.,0)]
        shapes.append(centerbox)
        center=orient_pt((0,0),s.last_direction,s.last)
        for pts in shapes:
            pts=orient_pts(pts,angle,center)
            s.append(sdxf.PolyLine(pts))
        s.last=orient_pt((0,length),s.last_direction,s.last)
        lstart=orient_pt((0,0),lstart_dir,center)
        Structure.__init__(self,s.chip,start=lstart,direction=lstart_dir,layer=s.layer,color=s.color,defaults=s.defaults)
#-------------------------------------------------------------------------------------------------
class ChannelFingerCap:
    """A Channel finger capacitor.

    Describes and draws an interdigitated finger capacitor embedded in a
    channel.  The effective center pin width is the sum of all finger
    widths and the gaps between them; tapers are drawn on both ends to
    match the channel width.
    """
    def __init__(self,num_fingers,finger_length,finger_width,finger_gap,taper_length=10,channelw=2,capacitance=0.0):
        self.type='Channel finger cap'
        self.capacitance=capacitance        #simulated capacitance (F)
        self.num_fingers=num_fingers        #number of fingers
        if num_fingers<2:
            raise MaskError("ChannelFingerCap must have at least 2 fingers!")
        self.finger_length=finger_length    #length of fingers
        self.finger_width=finger_width      #width of each finger
        self.finger_gap=finger_gap          #gap between adjacent fingers
        self.pinw = num_fingers*finger_width+ (num_fingers-1)*finger_gap    #effective center pin width sum of finger gaps and widths
        self.length=finger_length+finger_gap
        self.taper_length=taper_length
        self.gapw=channelw                  #channel width on either side of the cap
    def description(self):
        """One-line, tab-separated summary (capacitance reported in fF)."""
        return "type:\t%s\tAssumed Capacitance:\t%f\t# of fingers:\t%d\tFinger Length:\t%f\tFinger Width:\t%f\tFinger Gap:\t%f\tTotal Pin Width:\t%f\tTaper Length:\t%f" % (
            self.type,self.capacitance*1e15,self.num_fingers,self.finger_length,self.finger_width,self.finger_gap,self.pinw,self.taper_length
            )
    def draw(self,structure):
        """Draw the capacitor into `structure`, tapering in and back out.

        Fix: removed unused locals (`pinw`, `center_width`, `length`) that
        were computed but never read.
        """
        s=structure
        # taper from the channel width up to the full finger-comb width
        ChannelLinearTaper(s,length=self.taper_length,start_channelw=self.gapw,stop_channelw=self.pinw)
        start=s.last
        #draw finger gaps
        pts1=self.left_finger_points(self.finger_width,self.finger_length,self.finger_gap)
        pts1=translate_pts(pts1,start)
        pts1=rotate_pts(pts1,s.last_direction,start)
        s.append(sdxf.PolyLine(pts1))
        pts2=self.right_finger_points(self.finger_width,self.finger_length,self.finger_gap)
        pts2=translate_pts(pts2,start)
        pts2=rotate_pts(pts2,s.last_direction,start)
        s.append(sdxf.PolyLine(pts2))
        # advance past the finger region
        stop=rotate_pt((start[0]+self.finger_length+self.finger_gap,start[1]),s.last_direction,start)
        s.last=stop
        # NOTE(review): the exit taper ends at gapw+2.5, not gapw -- the 2.5
        # offset looks deliberate but undocumented; confirm before changing.
        ChannelLinearTaper(s,length=self.taper_length,start_channelw=self.pinw,stop_channelw=self.gapw+2.5)
    def left_finger_points(self,finger_width,finger_length,finger_gap):
        """Outline of the finger attached to the left (entry) side."""
        pts= [ (0,self.pinw/2.),
               (finger_length,self.pinw/2.),
               (finger_length,self.pinw/2.-finger_width),
               (0,self.pinw/2.-finger_width),
               (0,self.pinw/2.)
             ]
        return pts
    def right_finger_points(self,finger_width,finger_length,finger_gap):
        """Outline of the finger attached to the right (exit) side."""
        pts= [ (finger_gap,-self.pinw/2.),
               (finger_gap+finger_length,-self.pinw/2.),
               (finger_gap+finger_length,-self.pinw/2.+finger_width),
               (finger_gap,-self.pinw/2.+finger_width),
               (finger_gap,-self.pinw/2.)
             ]
        return pts
    def ext_Q(self,frequency,impedance=50,resonator_type=0.5):
        """External quality factor of the coupler at `frequency` (GHz).

        Returns 0 when no capacitance has been assigned.
        """
        if self.capacitance==0:
            return 0
        frequency=frequency*1e9     # GHz -> Hz
        q=2.*pi*frequency*self.capacitance*impedance
        Q=0
        if q!=0:
            Q=1/(resonator_type*pi) *1/ (q**2)
        return Q
#-------------------------------------------------------------------------------------------------
class ForkCoupler(Structure):
    """makes a fork-shaped structure of electrodes
    fork_width is the total width of the fork"""
    def __init__(self,structure,fork_width=None,fork_length=None,flipped=False,finger_width=None,channelw=None):
        """
        fork_width   -- total width of the fork (clamped to >= channelw)
        fork_length  -- length of the two fork fingers (clamped to >= channelw)
        flipped      -- mirror the fork relative to the current direction
        finger_width -- width of the cross bar and fingers (default channelw/2)
        channelw     -- taken from the structure defaults when not given
        """
        s=structure
        start=s.last
        start_dir=s.last_direction
        if channelw is None: channelw=s.defaults['channelw']
        #minimum fork_width is
        if (fork_width is None) or (fork_width < channelw):
            fork_width=channelw
        if (fork_length is None) or (fork_length < channelw):
            fork_length=channelw
        if finger_width is None:
            finger_width=channelw/2.
        if flipped:
            lstart_dir=start_dir-90
            angle=start_dir+180
        else:
            lstart_dir=start_dir
            angle=start_dir-90
        #fork vertical
        pts1=[ (-fork_width/2.,0), (-fork_width/2.,finger_width), (fork_width/2.,finger_width),(fork_width/2.,0), (-fork_width/2.,0)]
        #fork finger one
        pts2=[ (-fork_width/2.,finger_width),(-fork_width/2.,finger_width+fork_length),(-fork_width/2.+finger_width,finger_width+fork_length),(-fork_width/2.+finger_width,finger_width),(-fork_width/2.,finger_width)]
        #fork finger two
        pts3=[ (fork_width/2.,finger_width),(fork_width/2.,finger_width+fork_length),(fork_width/2.-finger_width,finger_width+fork_length),(fork_width/2.-finger_width,finger_width),(fork_width/2.,finger_width)]
        shapes=[pts1,pts2,pts3]
        # rotate/translate outlines into the drawing frame and emit them
        center=orient_pt((0,0),s.last_direction,s.last)
        for pts in shapes:
            pts=orient_pts(pts,angle,center)
            s.append(sdxf.PolyLine(pts))
        s.last=orient_pt((fork_length,0),s.last_direction,s.last)
        lstart=orient_pt((0,0),lstart_dir,center)
        Structure.__init__(self,s.chip,start=lstart,direction=lstart_dir,layer=s.layer,color=s.color,defaults=s.defaults)
        #s.last=orient_pt((0,0),s.last_direction,s.last)
        #lstart=orient_pt((0,0),s.last_direction,s.last)
        #Structure.__init__(self,s.chip,start=lstart,direction=0,layer=s.layer,color=s.color,defaults=s.defaults)
        #self.defaults['channelw']=channelw
#=======================================================================
# MISC COMPONENTS/CLASSES
#=======================================================================
class CapDesc:
    """Description of a capacitor, including physical geometry and simulated capacitance
    valid types are ('gap','finger','L')
    !deprecated!CPWLinearTaper

    Pure data holder; draw_cap() dispatches to CPWFingerCap when there are
    fingers and to CPWGapCap otherwise.
    """
    def __init__(self,capacitance,cap_gap,gapw,num_fingers=0,finger_length=0,finger_width=0,type='gap'):
        self.capacitance=capacitance #simulated capacitance
        self.num_fingers=num_fingers #number of fingers (0 means gap cap)
        self.finger_length=finger_length #length of fingers
        self.finger_width=finger_width #width of each finger
        self.cap_gap = cap_gap #gap between fingers or center pins
        self.finger_gap=cap_gap #for convenience set this to finger_gap
        self.gapw = gapw #gap between "center pin" and gnd planes
        self.pinw = num_fingers*finger_width+ (num_fingers-1)*cap_gap #effective center pin width sum of finger gaps and widths
    def draw_cap(self,structure):
        # finger cap when fingers were specified, plain gap cap otherwise
        if self.num_fingers>0:
            CPWFingerCap(structure,self.num_fingers,self.finger_length,self.finger_width,self.cap_gap,self.gapw)
        else:
            CPWGapCap(structure,self.cap_gap)
class AlphaNum:
    """A polyline representation of an alphanumeric character, does not use structures

    Looks the character up in `alphanum_dict` (glyphs defined on a 16x16
    grid), scales it to `size`, orients it at `point`, and appends the
    polylines directly to `drawing`.  Blank characters draw nothing.
    """
    def __init__(self,drawing,letter,size,point,direction=0):
        if (letter=='') or (letter==' '):
            return
        #s=structure
        # glyph coordinates are on a 16x16 grid -> scale to the requested size
        scaled_size=(size[0]/16.,size[1]/16.)
        for pts in alphanum_dict[letter.lower()]:
            mpts = scale_pts(pts,scaled_size)
            mpts = orient_pts(mpts,direction,point)
            drawing.append(sdxf.PolyLine(mpts))
        #s.last=orient_pt( (size[0],0),s.last_direction,s.last)
class AlphaNumText:
    """Renders a text string in polylines, does not use structures

    Draws each character with AlphaNum, advancing by one character width
    per letter.  With centered=True the whole string is centered on
    `point` (along the text direction).  text=None draws nothing.
    """
    def __init__(self,drawing,text,size,point,centered=False,direction=0):
        self.text=text
        if text is None:
            return
        if centered:
            # shift the start back by half the total string width
            offset=(-size[0]*text.__len__()/2.,0)
            point=orient_pt(offset,direction,point)
        for letter in text:
            AlphaNum(drawing,letter,size,point,direction)
            point=orient_pt( (size[0],0),direction,point)
class AlignmentCross:
    """Draws a plus-shaped alignment mark of the given line width and
    overall size, centered at `point`, directly into `drawing`."""
    def __init__(self,drawing,linewidth,size,point):
        lw=linewidth/2.
        w=size[0]/2.
        h=size[1]/2.
        # 12-corner outline of a plus sign, traced clockwise and closed
        pts=[ (-lw,-h), (lw,-h), (lw,-lw),(w,-lw),(w,lw),(lw,lw),(lw,h),(-lw,h),(-lw,lw),(-w,lw),(-w,-lw),(-lw,-lw),(-lw,-h)]
        pts=translate_pts(pts,point)
        drawing.append(sdxf.PolyLine(pts))
def arc_pts(start_angle,stop_angle,radius,segments=360):
    """Return `segments` points on a circular arc of the given radius.

    The points are evenly spaced in angle from start_angle to stop_angle
    (degrees), inclusive at both ends.
    """
    span = stop_angle - start_angle
    def point_at(k):
        # fraction k/(segments-1) sweeps 0..1 across the arc
        theta = (start_angle + k / (segments - 1.) * span) * pi / 180.
        return (radius * cos(theta), radius * sin(theta))
    return [point_at(k) for k in range(segments)]
class fluxWebBlock(sdxf.Block):
    """fluxWebBlock is block that will be tiled to
    create the flux webbing

    Defines one square hole of side `holeL`, centered in a `period` x
    `period` cell; the block is meant to be inserted as a cols x rows
    array covering `chipSize`.
    """
    def __init__(self,name,holeL=5.,period=10.,chipSize=(7000.,2000.)):
        self.name=name
        self.holeL=holeL
        self.period=period
        self.chipSize=chipSize
        # number of whole tiling periods that fit on the chip
        self.cols=int(floor(chipSize[0]/period))
        self.rows=int(floor(chipSize[1]/period))
        self.layer='fluxweb'
        self.color=4
        self.base=(0,0)
        sdxf.Block.__init__(self,self.name,self.layer,self.base)
        # center the square hole inside its unit cell
        offset = (period-holeL)/2.
        holePoints =[ (offset,offset),
                      (offset+holeL,offset),
                      (offset+holeL,offset+holeL),
                      (offset,offset+holeL),
                      (offset,offset),
                    ]
        self.append(sdxf.PolyLine(holePoints,layer=self.layer,color=self.color))
class QuarterMask(sdxf.Drawing):
    """Mask class for placing chips on a 1"x1" sapphire quarter.

    Lays out a cols x rows grid of chip slots separated by `dicing_border`
    and draws a padded border box around the whole pattern.  Chips are
    added with add_chip() and fill the slots in order (or shuffled order
    after randomize_layout()).
    """
    def __init__(self,name,chip_size=(7000.,2000.),dicing_border=350,cols=3,rows=10,labelToggle=True):
        sdxf.Drawing.__init__(self)
        self.name=name
        self.fileName=name+".dxf"
        self.chip_size=chip_size
        self.dicing_border=dicing_border
        self.cols=cols
        self.rows=rows
        self.labelToggle = labelToggle
        #Creates Border Box
        patternW = cols*chip_size[0]+(cols+1)*dicing_border
        patternH = rows*chip_size[1]+(rows+1)*dicing_border
        borderPadding = 5000.
        border=Structure(self,start=(0,0),color=3,layer="border")
        box=[ (0-borderPadding,0-borderPadding),
              (patternW+borderPadding,0-borderPadding),
              (patternW+borderPadding,patternH+borderPadding),
              (0-borderPadding,patternH+borderPadding),
              (0-borderPadding,0-borderPadding)
            ]
        border.append(sdxf.PolyLine(box,layer=border.layer,color=border.color))
        #Creates list of chip insert locations
        # row-major grid of lower-left corners for each chip slot
        chip_points=[]
        for ii in range(rows):
            for jj in range(cols):
                x=jj*(chip_size[0]+dicing_border)+dicing_border
                y=ii*(chip_size[1]+dicing_border)+dicing_border
                pt = (x,y)
                chip_points.append(pt)
        self.chip_points=chip_points
        self.chip_slots=chip_points.__len__()
        self.current_point=0
        self.manifest=[]
        self.num_chips=0
    def add_chip(self,chip,copies=1):
        """Adds chip design 'copies' times into mask. chip must have a unique name as it will be inserted as a block

        Raises MaskError if there are not enough free slots.  Also writes
        the chip's own drawing to disk via chip.save().
        """
        #generate flux web block definition
        if chip.makeWeb:
            flux=fluxWebBlock(chip.name+'WEB',holeL=chip.fluxHoleLength,period=chip.fluxPeriod,chipSize=chip.size)
            self.blocks.append(flux)
        #add blocks to drawing
        self.blocks.append(chip)
        slots_remaining=self.chip_points.__len__()-self.current_point
        for ii in range (copies):
            if self.current_point>= self.chip_points.__len__():
                raise MaskError("MaskError: Cannot add %d copies of chip '%s' Only %d slots on mask and %d remaining." % (copies,chip.name,self.chip_points.__len__(),slots_remaining))
            p=self.chip_points[self.current_point]
            self.current_point+=1
            self.append(sdxf.Insert(chip.name,point=p))
            # tile the flux web array over the chip footprint
            if chip.makeWeb: self.append(sdxf.Insert(flux.name,point=p,cols=flux.cols,colspacing=flux.period,rows=flux.rows,rowspacing=flux.period))
            if self.labelToggle: chip.label_chip(self,maskid=self.name,chipid=chip.name+str(ii+1),offset=p)
            self.num_chips+=1
        #self.manifest.append({'chip':chip,'name':chip.name,'copies':copies,'short_desc':chip.short_description(),'long_desc':chip.long_description()})
        #print "%s\t%d\t%s" % (chip.name,copies,chip.short_description())
        chip.save(fname=self.name+"-"+chip.name,maskid=self.name,chipid=chip.name)
    def randomize_layout(self):
        """Shuffle the order of the chip_points array so that chips will be inserted (pseudo-)randomly"""
        # NOTE(review): `seed` is assigned but never passed to the RNG, so the
        # shuffle is NOT reproducible -- confirm whether random.seed(seed) was intended.
        seed=124279234
        for ii in range(10000):
            i1=randrange(self.chip_points.__len__())
            i2=randrange(self.chip_points.__len__())
            tp=self.chip_points[i1]
            self.chip_points[i1]=self.chip_points[i2]
            self.chip_points[i2]=tp
"""
Updated functions to create various structures with advance protection options
they'll have the same name except with a p in front
"""
class pDrawBondPad:
    """Draws a bond pad (straight launcher section plus taper down to the
    CPW dimensions) at an arbitrary position/angle, using the protected
    ("p") CPW primitives below."""
    def __init__(self,drawing,pos,Ang,bond_pad_length=None,launcher_pinw=None,launcher_gapw=None,taper_length=None, pinw=None, gapw=None):
        """
        Created on 08/09/2011
        @author: Brendon Rose
        Script appends a BondPad on drawing and position pos and Angle Ang relative to the positive x-axis CCW is positive
        """
        "Set Self-attributes"
        #Launcher parameters set to default if nothing was input
        if bond_pad_length == None: bond_pad_length = 400.
        if launcher_pinw == None: launcher_pinw = 150.
        if launcher_gapw == None: launcher_gapw = 67.305
        if taper_length == None: taper_length = 300.
        #if launcher_padding == None: launcher_padding = 350.
        #if launcher_radius == None: launcher_radius = 125.
        if pinw == None: pinw = drawing.defaults['pinw']
        if gapw == None: gapw = drawing.defaults['gapw']
        s = drawing #define structure for writting bond pad to
        # reposition the drawing cursor to the requested pad location/angle
        s.last = pos #Position to put bond pad
        s.last_direction = Ang #Angle to put bond pad
        #launcher_length=taper_length+bond_pad_length+launcher_padding
        "Draw the BondPad and a curly wire to offset launcher"
        pCPWStraight(s,length=bond_pad_length,pinw=launcher_pinw,gapw=launcher_gapw)
        pCPWLinearTaper(s,length=taper_length,start_pinw=launcher_pinw,start_gapw=launcher_gapw,stop_pinw=pinw,stop_gapw=gapw)
class pCPWStraight:
    """A straight section of CPW transmission line

    In addition to the two CPW gap rectangles this also draws two
    "protect" rectangles on layer "ProtectLayer", extending `protect`
    beyond the gaps and leaving a hole of width 2*centerPinHoleWidth
    centered on the pin.
    """
    def __init__(self, structure,length,pinw=None,gapw=None,protect=None,centerPinHoleWidth=None):
        """ Adds a straight section of CPW transmission line of length = length to the structure"""
        if length==0: return
        s=structure
        start=structure.last
        # fall back to the structure defaults for any unspecified width
        if pinw is None: pinw=structure.defaults['pinw']
        if gapw is None: gapw=structure.defaults['gapw']
        if protect is None: protect=structure.defaults['protect']
        if centerPinHoleWidth is None: centerPinHoleWidth=structure.defaults['centerPinHoleWidth']
        # upper and lower gap rectangles of the CPW
        gap1=[ (start[0],start[1]+pinw/2),
               (start[0]+length,start[1]+pinw/2),
               (start[0]+length,start[1]+pinw/2+gapw),
               (start[0],start[1]+pinw/2+gapw),
               (start[0],start[1]+pinw/2)
             ]
        gap2=[ (start[0],start[1]-pinw/2),
               (start[0]+length,start[1]-pinw/2),
               (start[0]+length,start[1]-pinw/2-gapw),
               (start[0],start[1]-pinw/2-gapw),
               (start[0],start[1]-pinw/2)
             ]
        gap1=rotate_pts(gap1,s.last_direction,start)
        gap2=rotate_pts(gap2,s.last_direction,start)
        # advance the drawing cursor to the end of the section
        stop=rotate_pt((start[0]+length,start[1]),s.last_direction,start)
        s.last=stop
        s.append(sdxf.PolyLine(gap1))
        s.append(sdxf.PolyLine(gap2))
        """adding code to create protect box"""
        # protect boxes span from the pin-hole edge out past the gaps
        prow = pinw+2*gapw+2*protect
        pro_inner = pinw/2-centerPinHoleWidth
        pro1=[ (start[0],start[1]+pro_inner),
               (start[0]+length,start[1]+pro_inner),
               (start[0]+length,start[1]+prow/2),
               (start[0],start[1]+prow/2),
               (start[0],start[1]+pro_inner)
             ]
        pro2=[ (start[0],start[1]-pro_inner),
               (start[0]+length,start[1]-pro_inner),
               (start[0]+length,start[1]-prow/2),
               (start[0],start[1]-prow/2),
               (start[0],start[1]-pro_inner)
             ]
        pro1=rotate_pts(pro1,s.last_direction,start)
        pro2=rotate_pts(pro2,s.last_direction,start)
        s.append(sdxf.PolyLine(pro1,layer="ProtectLayer"))
        s.append(sdxf.PolyLine(pro2,layer="ProtectLayer"))
class pCPWLinearTaper:
    """A section of CPW which (linearly) tapers from one set of start_pinw and start_gapw to stop_pinw and stop_gapw over length=length

    Also draws the matching tapered "protect" boxes on layer
    "ProtectLayer", mirroring pCPWStraight's protection scheme.
    """
    def __init__(self, structure,length,start_pinw,stop_pinw,start_gapw,stop_gapw,protect=None,centerPinHoleWidth=None):
        if length==0: return
        if protect is None: protect=structure.defaults['protect']
        if centerPinHoleWidth is None: centerPinHoleWidth=structure.defaults['centerPinHoleWidth']
        #load attributes
        s=structure
        start=s.last
        #define geometry of gaps
        # each gap is a trapezoid interpolating start widths to stop widths
        gap1= [
            (start[0],start[1]+start_pinw/2),
            (start[0]+length,start[1]+stop_pinw/2),
            (start[0]+length,start[1]+stop_pinw/2+stop_gapw),
            (start[0],start[1]+start_pinw/2+start_gapw),
            (start[0],start[1]+start_pinw/2)
            ]
        gap2= [
            (start[0],start[1]-start_pinw/2),
            (start[0]+length,start[1]-stop_pinw/2),
            (start[0]+length,start[1]-stop_pinw/2-stop_gapw),
            (start[0],start[1]-start_pinw/2-start_gapw),
            (start[0],start[1]-start_pinw/2)
            ]
        #rotate structure to proper orientation
        gap1=rotate_pts(gap1,s.last_direction,start)
        gap2=rotate_pts(gap2,s.last_direction,start)
        #create polylines and append to drawing
        s.append(sdxf.PolyLine(gap1))
        s.append(sdxf.PolyLine(gap2))
        #update last anchor position
        stop=rotate_pt((start[0]+length,start[1]),s.last_direction,start)
        s.last=stop
        """adding code to create protect box"""
        # protect trapezoids interpolate the straight-section protect geometry
        start_prow = start_pinw+2*start_gapw+2*protect
        start_pro_inner = start_pinw/2-centerPinHoleWidth
        stop_prow = stop_pinw+2*stop_gapw+2*protect
        stop_pro_inner = stop_pinw/2-centerPinHoleWidth
        pro1=[ (start[0],start[1]+start_pro_inner),
               (start[0]+length,start[1]+stop_pro_inner),
               (start[0]+length,start[1]+stop_prow/2),
               (start[0],start[1]+start_prow/2),
               (start[0],start[1]+start_pro_inner)
             ]
        pro2=[ (start[0],start[1]-start_pro_inner),
               (start[0]+length,start[1]-stop_pro_inner),
               (start[0]+length,start[1]-stop_prow/2),
               (start[0],start[1]-start_prow/2),
               (start[0],start[1]-start_pro_inner)
             ]
        pro1=rotate_pts(pro1,s.last_direction,start)
        pro2=rotate_pts(pro2,s.last_direction,start)
        s.append(sdxf.PolyLine(pro1,layer="ProtectLayer"))
        s.append(sdxf.PolyLine(pro2,layer="ProtectLayer"))
"maritter@umd.edu"
] | maritter@umd.edu |
aced241806907aec705128d3774a0a81da9b26ed | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5706278382862336_0/Python/neilw4/base.py | 6cd2e86d9431e61edda3533f65f23cfb2d36240a | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,035 | py | #!/usr/bin/python
import sys
def memo(f):
    """Memoizing decorator: cache f's results keyed by its positional args."""
    results = {}
    def wrapper(*args):
        if args not in results:
            results[args] = f(*args)
        return results[args]
    return wrapper
def memo(f):
    """Memoizing decorator: cache f's results keyed by its positional args.

    Bug fix: the previous definition here was a stray copy of the *inner*
    cache function -- it shadowed the correct `memo` decorator defined
    above and referenced `cache`/`f` that do not exist in its scope, so
    any call raised NameError.  Re-defined as a proper memoizing
    decorator so the module-level name `memo` keeps its intended meaning.
    """
    cache = {}
    def memf(*x):
        if x not in cache:
            cache[x] = f(*x)
        return cache[x]
    return memf
def valid(p, q, g):
    """True when p * 2**g is an exact multiple of q."""
    doubled = p << g
    return doubled % q == 0
def solve(l):
    """Solve one case.  `l` is a fraction string "p/q".

    Returns the smallest number of doublings i such that p * 2**i >= q,
    or "impossible" when p/q can never reach 1 by doubling (i.e. the
    reduced denominator is not a power of two, equivalently q does not
    divide p * 2**40).

    Bug fix: the search previously ran over xrange(0, g) and fell off the
    end (implicitly returning None) for the extreme case p/q == 2**-40,
    whose answer is exactly g == 40; the loop now includes i == g.
    """
    parts = l.split('/')
    p = int(parts[0])
    q = int(parts[1])
    g = 40
    # reachable iff q | p * 2**40 (same check as valid(p, q, g))
    if (p * (2 ** g)) % q != 0:
        return "impossible"
    for i in range(g + 1):
        if p * (2 ** i) >= q:
            return i
# --- contest-style driver (Python 2: note the print statement below) ---
#needs an input file
infname = sys.argv[1]
inf = open(infname)
#assumes infname ends with .in (slices off the 3-char ".in" suffix)
outfname = infname[:-3] + ".out"
#output file can be specified separately
if len(sys.argv) > 2:
    outfname = sys.argv[2]
outf = open(outfname, "w")
case = 1
#ignore 1st line (presumably the case count; we just read until EOF instead)
inf.readline()
# NOTE(review): inf/outf are never closed -- acceptable for a one-shot
# script, but output could be lost if the interpreter dies before flushing.
while True:
    line = inf.readline()
    if line == '':
        break
    # one case per line; solve() returns an int or the string "impossible"
    sol = "Case #" + str(case) + ": " + str(solve(line.strip()))
    print sol
    outf.write(sol + "\n")
    case += 1
| [
"eewestman@gmail.com"
] | eewestman@gmail.com |
9082848ae2d0cc2948f499a7e0d5ab47e3aea76a | 7109eecfb78e0123b534ef960dbf42be38e49514 | /x7-src/engine/engine/db/__init__.py | 092a2b6c0406d609cd15150f7c8c97faf8669621 | [
"Apache-2.0"
] | permissive | wendy-king/x7_compute_venv | a6eadd9a06717090acea3312feebcbc9d3925e88 | 12d74f15147868463954ebd4a8e66d5428b6f56d | refs/heads/master | 2016-09-06T16:58:13.897069 | 2012-01-31T01:26:27 | 2012-01-31T01:26:27 | 3,310,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 883 | py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DB abstraction for Engine
"""
from engine.db.api import *
| [
"king_wendy@sina.com"
] | king_wendy@sina.com |
012bb09a813815387f8b28d569baf76eef101074 | 7b983f76554c0357fa0d6ec4087b953e1357b242 | /venv/bin/Swarm Dock/swarm_dock_get_results.py | 43712dcf7ddc06f100c151bb76fbc3c44b2769c9 | [
"MIT"
] | permissive | lpreuett/ser499_bioinformatics | 15967e3f043a6dabce9a239e7596ef3cfeff8ddf | 93fbed08a49851bb6cc484594fe2180b8a6bce1f | refs/heads/master | 2021-04-30T05:58:39.194963 | 2018-05-02T06:04:30 | 2018-05-02T06:04:30 | 121,423,012 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,885 | py | import urllib, sys, os, tarfile
import urllib.request  # bug fix: a bare 'import urllib' does not make urllib.request usable

debug = False
OUTPUT_DIR = './Swarm Dock/output'
RESULTS_FILENAME = 'sds/clusters_standard.txt'

if len(sys.argv) != 4:
    print('Usage: swarm_dock_get_results.py <link_to_results> <receptor pdb id> <ligand pdb id>')
    sys.exit(1)

# get download_link
download_link = sys.argv[1]
# build download_link to results download:
# path component 6 becomes "<component 5>.tar.gz" (the job's archive name)
download_link_split = download_link.split('/')
download_link_split[6] = download_link_split[5] + '.tar.gz'
download_link = "/".join(download_link_split)

# receptor PDB id: strip an optional ":chain" suffix and upper-case
rec = sys.argv[2]
if ':' in rec:
    rec = rec.split(':')[0]
rec = rec.upper()

# ligand PDB id: same normalization
lig = sys.argv[3]
if ':' in lig:
    lig = lig.split(':')[0]
lig = lig.upper()

if debug:
    print('download_link: {}'.format(download_link))

# ensure the output directory exists
if not os.path.isdir(OUTPUT_DIR):
    os.makedirs(OUTPUT_DIR)

out_file_name = rec + '_' + lig + '.tar.gz'
out_file_path = OUTPUT_DIR + '/' + out_file_name

# download the archive only if we don't already have it
if not os.path.isfile(out_file_path):
    # bug fix: replaced the deprecated (and, with the old import, unreachable)
    # urllib.request.URLopener().retrieve with urlretrieve
    urllib.request.urlretrieve(download_link, out_file_path)

# scan the standard cluster results for the first cluster with <= 3 members
results = None
with tarfile.open(out_file_path) as tar:     # bug fix: archive is now closed
    results_file = tar.extractfile(RESULTS_FILENAME)
    is_first_line = True
    for line in results_file:
        if is_first_line:
            # skip the header line
            is_first_line = False
            continue
        decoded_line = line.decode('utf8')
        # the member list sits between '[' and ']' and is '|'-separated
        members_start = decoded_line.find('[')
        members_finish = decoded_line.find(']')
        members = decoded_line[members_start+1:members_finish].split('|')
        if len(members) <= 3:
            # keep all whitespace-separated columns except 0 and 3
            results = decoded_line.split(' ')
            results = results[1:3] + results[4:]
            results = ' '.join(results)
            break

print('results: {}'.format(results))
"lpreuett@asu.edu"
] | lpreuett@asu.edu |
def ReadInFile(strpath):
    """Read a sudoku grid file and return every digit as a flat list of ints.

    All non-digit characters (separators, newlines) are ignored; for a
    full grid the result is 81 values in row-major order.

    Bug fix: the file handle was previously never closed; it is now
    managed with a `with` block.
    """
    with open(strpath, "r") as file_object:
        return [int(c) for line in file_object for c in line if c.isdigit()]
def Print(matrix):
    """Print the flat 81-cell board, nine values per row."""
    for position, value in enumerate(matrix, start=1):
        print(value, end=" ")
        # A blank line after every ninth value ends the board row.
        if position % 9 == 0:
            print("\n")
def UpdateGridWithNum(matrix, row, column, num):
    """Write `num` into cell (row, column) only if it violates no Sudoku rule."""
    if PassesAllSudokuRules(matrix, row, column, num):
        matrix[GetIndex(row, column)] = num
def GetIndex(row, col):
    """Map (row, col) board coordinates to an index into the flat 81-cell list."""
    return 9 * int(row) + int(col)
def PassesRowRule(matrix, row, num):
    """Return True if `num` does not already appear anywhere in `row`.

    Bug fixed: the original iterated range(start, start + 8), which is
    exclusive of the last index and therefore never checked the final cell
    of the row (column 8).
    """
    start = row * 9
    for index in range(start, start + 9):
        if matrix[index] == num:
            return False
    return True
def PassesColumnRule(matrix, column, num):
    """Return True if `num` does not already appear anywhere in `column`.

    Bug fixed: the original stepped through range(column, column + 9*8, 9),
    which covers only 8 of the 9 rows, so a duplicate in the bottom row
    (row 8) was never detected.
    """
    # column + 81 (exclusive) with stride 9 visits all nine rows.
    for index in range(column, column + 81, 9):
        if matrix[index] == num:
            return False
    return True
def PassesSubgridRule(matrix, row, column, num):
    """Return True if `num` is absent from the 3x3 subgrid containing
    cell (row, column).

    Bug fixed: the original computed the subgrid bounds with special cases
    for the last row/column (e.g. `else 0` set the upper column bound to
    zero when column == 8), producing empty or truncated ranges, so clashes
    in the bottom and right subgrids were never detected.
    """
    # Top-left corner of the 3x3 box containing the cell.
    first_row = row - (row % 3)
    first_col = column - (column % 3)
    for r in range(first_row, first_row + 3):
        for c in range(first_col, first_col + 3):
            if matrix[r * 9 + c] == num:
                return False
    return True
def PassesAllSudokuRules(matrix, row, column, num):
    """Check the row, column and subgrid rules in turn; report the first
    violation found and return False, otherwise return True."""
    # Lambdas keep the short-circuit behaviour: later rules are only
    # evaluated when the earlier ones pass.
    rule_checks = (
        ("row", lambda: PassesRowRule(matrix, row, num)),
        ("column", lambda: PassesColumnRule(matrix, column, num)),
        ("subgrid", lambda: PassesSubgridRule(matrix, row, column, num)),
    )
    for location, passes in rule_checks:
        if not passes():
            print("Number " + str(num) + " twice in " + location)
            return False
    return True
| [
"bgeorgeashton@gmail.com"
] | bgeorgeashton@gmail.com |
741589e2c36bb1ecd0e7f7062a505f0e372325fd | cbe24f0473f2b6212dbb165657dff0d99bc4d108 | /buildPythonBindings.py | a947414f8143f2c15c5d3ced5657168bf3892b22 | [
"MIT"
] | permissive | SArehalli/SimpleLED | 4612cd4c35e42f8bc986075424885580b2373a6d | d26d72a289a6b4853acaf5d24167a83ab580db3d | refs/heads/master | 2021-01-10T16:16:00.279724 | 2016-03-22T03:42:03 | 2016-03-22T03:42:03 | 43,258,133 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,745 | py | #! /usr/bin/env python
import sys
from pybindgen import Module, retval, param
def module_gen(name):
    """Emit pybindgen C++ binding code for the Display API into file `name`."""
    def int_params(*arg_names):
        # Shorthand for a list of plain ``int`` parameters.
        return [param('int', n) for n in arg_names]

    bindings = Module('display')
    bindings.add_include('"display.h"')

    display_cls = bindings.add_class('Display')
    display_cls.add_constructor([param('int', 'height'),
                                 param('int', 'width')])

    # (method name, return value, arguments), in registration order.
    for method_name, returns, arguments in (
            ('stop', None, []),
            ('getHeight', retval('int'), []),
            ('getWidth', retval('int'), []),
            ('getValue', retval('int'), int_params('row', 'col')),
            ('setValue', None, int_params('row', 'col', 'color')),
            ('drawRectangle', None,
             int_params('x', 'y', 'width', 'height', 'color')),
            ('drawTriangle', None,
             int_params('x', 'y', 'width', 'height', 'color')),
            ('clear', None, []),
    ):
        display_cls.add_method(method_name, returns, arguments)

    bindings.add_function('loop', retval('int'),
                          [param('Display *', 'disp',
                                 transfer_ownership=False)])
    bindings.generate(name)
if __name__ == "__main__":
    # module_gen() requires the output filename; the original called it with
    # no argument, which always raised TypeError.  Take the name from the CLI.
    module_gen(sys.argv[1])
| [
"sgarehal@ucsd.edu"
] | sgarehal@ucsd.edu |
d8d9bb0d0d9d1b227024baf5aeb248df7c7324a5 | 1c824de3a57e8a62961d1a70e2473cb2d1b8654c | /1.1 Is Unique.py | 3b6585a10fb36959c6706ebab7fbf89f15c663ef | [] | no_license | Lawrencehudson23/ctci-algorithms | 8a1168ce87402d06fbfe264c6a67903e43dc1cdb | b31438402f98c54f426e1ab0a2019829cf1f7472 | refs/heads/master | 2022-05-22T10:01:51.419166 | 2020-04-28T18:42:49 | 2020-04-28T18:42:49 | 259,419,796 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 358 | py | # 1.1 Is Unique: Implement an algorithm to determine if a string has all unique characters. What if you
# cannot use additional data structures?
# Hints: #44, #117, #132
def isUnique(str):
    """Return True if every character of the string occurs at most once.

    NOTE: the parameter shadows the builtin ``str``; kept for interface
    compatibility with existing callers.
    """
    # A set drops duplicates, so the lengths differ iff some character
    # repeats.  (The original kept a dict and tested membership twice per
    # character, which was redundant.)
    return len(set(str)) == len(str)
# Demo: "abcd" has no repeated characters, so this prints True.
print(isUnique("abcd"))
| [
"Lawrencehudson23@gmail.com"
] | Lawrencehudson23@gmail.com |
882039e20687f5fc342b590337a912e08d38dc56 | 9a50f51a58e9a21e39e69ae09ddb9425a6bbffe4 | /widgets/PlayerBoxUI.py | e106a09bcdd6494d966f1831fa9e523367f25faf | [] | no_license | qizidog/vehicle_identification | fe5328aa0a10b3850e401ed2daa45bcc16596c0d | 50327c775d1d403675279b3cb89484dcc13782f9 | refs/heads/master | 2021-07-11T09:57:39.988469 | 2020-09-27T08:52:46 | 2020-09-27T08:52:46 | 199,362,628 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27,659 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'PlayerBoxUI.ui'
#
# Created by: PyQt5 UI code generator 5.9.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """Auto-generated (pyuic5) widget layout for the vehicle-analysis window.

    Per the header warning, regenerate this class from the .ui file instead of
    editing it by hand.  It only builds/arranges widgets and sets their
    user-visible text; it contains no application logic.
    """

    def setupUi(self, MainWindow):
        """Create and arrange all widgets, menus, actions and toolbars on MainWindow."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(1538, 671)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
        self.gridLayout.setObjectName("gridLayout")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout()
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.verticalLayout = QtWidgets.QVBoxLayout()
        self.verticalLayout.setObjectName("verticalLayout")
        # Two stacked video display labels on the left-hand side.
        self.label_frameBox = QtWidgets.QLabel(self.centralwidget)
        self.label_frameBox.setMinimumSize(QtCore.QSize(1021, 205))
        self.label_frameBox.setSizeIncrement(QtCore.QSize(100, 60))
        self.label_frameBox.setBaseSize(QtCore.QSize(1021, 205))
        self.label_frameBox.setAutoFillBackground(False)
        self.label_frameBox.setFrameShape(QtWidgets.QFrame.Box)
        self.label_frameBox.setFrameShadow(QtWidgets.QFrame.Plain)
        self.label_frameBox.setText("")
        self.label_frameBox.setScaledContents(False)
        self.label_frameBox.setObjectName("label_frameBox")
        self.verticalLayout.addWidget(self.label_frameBox)
        self.label2 = QtWidgets.QLabel(self.centralwidget)
        self.label2.setMinimumSize(QtCore.QSize(1021, 205))
        self.label2.setSizeIncrement(QtCore.QSize(100, 60))
        self.label2.setBaseSize(QtCore.QSize(1021, 205))
        self.label2.setAutoFillBackground(False)
        self.label2.setFrameShape(QtWidgets.QFrame.Box)
        self.label2.setFrameShadow(QtWidgets.QFrame.Plain)
        self.label2.setText("")
        self.label2.setScaledContents(False)
        self.label2.setObjectName("label2")
        self.verticalLayout.addWidget(self.label2)
        self.horizontalLayout_2.addLayout(self.verticalLayout)
        # Right-hand tab widget: per-vehicle table (tab 1) and traffic summary (tab 2).
        self.tabWidget = QtWidgets.QTabWidget(self.centralwidget)
        self.tabWidget.setMaximumSize(QtCore.QSize(605, 16777215))
        self.tabWidget.setObjectName("tabWidget")
        self.tab_1 = QtWidgets.QWidget()
        self.tab_1.setObjectName("tab_1")
        self.tableWidget_micro = QtWidgets.QTableWidget(self.tab_1)
        self.tableWidget_micro.setGeometry(QtCore.QRect(0, 0, 481, 471))
        self.tableWidget_micro.setMinimumSize(QtCore.QSize(431, 0))
        self.tableWidget_micro.setObjectName("tableWidget_micro")
        self.tableWidget_micro.setColumnCount(3)
        self.tableWidget_micro.setRowCount(10)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_micro.setVerticalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_micro.setVerticalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_micro.setVerticalHeaderItem(2, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_micro.setVerticalHeaderItem(3, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_micro.setVerticalHeaderItem(4, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_micro.setVerticalHeaderItem(5, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_micro.setVerticalHeaderItem(6, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_micro.setVerticalHeaderItem(7, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_micro.setVerticalHeaderItem(8, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_micro.setVerticalHeaderItem(9, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_micro.setHorizontalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_micro.setHorizontalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_micro.setHorizontalHeaderItem(2, item)
        self.tabWidget.addTab(self.tab_1, "")
        self.tab_2 = QtWidgets.QWidget()
        self.tab_2.setObjectName("tab_2")
        self.tableWidget_macro = QtWidgets.QTableWidget(self.tab_2)
        self.tableWidget_macro.setGeometry(QtCore.QRect(0, 0, 481, 471))
        self.tableWidget_macro.setMinimumSize(QtCore.QSize(431, 0))
        self.tableWidget_macro.setObjectName("tableWidget_macro")
        self.tableWidget_macro.setColumnCount(3)
        self.tableWidget_macro.setRowCount(8)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_macro.setVerticalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_macro.setVerticalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_macro.setVerticalHeaderItem(2, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_macro.setVerticalHeaderItem(3, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_macro.setVerticalHeaderItem(4, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_macro.setVerticalHeaderItem(5, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_macro.setVerticalHeaderItem(6, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_macro.setVerticalHeaderItem(7, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_macro.setHorizontalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_macro.setHorizontalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_macro.setHorizontalHeaderItem(2, item)
        item = QtWidgets.QTableWidgetItem()
        item.setTextAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignVCenter)
        self.tableWidget_macro.setItem(0, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setTextAlignment(QtCore.Qt.AlignCenter)
        self.tableWidget_macro.setItem(0, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setTextAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignVCenter)
        self.tableWidget_macro.setItem(1, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setTextAlignment(QtCore.Qt.AlignCenter)
        self.tableWidget_macro.setItem(1, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setTextAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignVCenter)
        self.tableWidget_macro.setItem(2, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setTextAlignment(QtCore.Qt.AlignCenter)
        self.tableWidget_macro.setItem(2, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setTextAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignVCenter)
        self.tableWidget_macro.setItem(3, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setTextAlignment(QtCore.Qt.AlignCenter)
        self.tableWidget_macro.setItem(3, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setTextAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignVCenter)
        self.tableWidget_macro.setItem(4, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setTextAlignment(QtCore.Qt.AlignCenter)
        self.tableWidget_macro.setItem(4, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setTextAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignVCenter)
        self.tableWidget_macro.setItem(5, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setTextAlignment(QtCore.Qt.AlignCenter)
        self.tableWidget_macro.setItem(5, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setTextAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignVCenter)
        self.tableWidget_macro.setItem(6, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setTextAlignment(QtCore.Qt.AlignCenter)
        self.tableWidget_macro.setItem(6, 1, item)
        self.tabWidget.addTab(self.tab_2, "")
        self.horizontalLayout_2.addWidget(self.tabWidget)
        self.verticalLayout_2.addLayout(self.horizontalLayout_2)
        # Bottom playback bar: play/pause button, seek slider, frame counter.
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setSizeConstraint(QtWidgets.QLayout.SetMaximumSize)
        self.horizontalLayout.setSpacing(9)
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.pushButton_play_pause = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_play_pause.setMinimumSize(QtCore.QSize(25, 25))
        self.pushButton_play_pause.setMaximumSize(QtCore.QSize(25, 25))
        self.pushButton_play_pause.setFocusPolicy(QtCore.Qt.NoFocus)
        self.pushButton_play_pause.setText("")
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(":/button/.img/pause.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        icon.addPixmap(QtGui.QPixmap(":/button/.img/play.png"), QtGui.QIcon.Normal, QtGui.QIcon.On)
        icon.addPixmap(QtGui.QPixmap(":/button/.img/pause.png"), QtGui.QIcon.Selected, QtGui.QIcon.Off)
        icon.addPixmap(QtGui.QPixmap(":/button/.img/play.png"), QtGui.QIcon.Selected, QtGui.QIcon.On)
        self.pushButton_play_pause.setIcon(icon)
        self.pushButton_play_pause.setCheckable(True)
        self.pushButton_play_pause.setChecked(True)
        self.pushButton_play_pause.setDefault(False)
        self.pushButton_play_pause.setObjectName("pushButton_play_pause")
        self.horizontalLayout.addWidget(self.pushButton_play_pause)
        self.horizontalSlider = QtWidgets.QSlider(self.centralwidget)
        self.horizontalSlider.setMaximumSize(QtCore.QSize(16777215, 10))
        self.horizontalSlider.setOrientation(QtCore.Qt.Horizontal)
        self.horizontalSlider.setObjectName("horizontalSlider")
        self.horizontalLayout.addWidget(self.horizontalSlider)
        self.label_cur_num = QtWidgets.QLabel(self.centralwidget)
        self.label_cur_num.setMaximumSize(QtCore.QSize(16777215, 25))
        self.label_cur_num.setObjectName("label_cur_num")
        self.horizontalLayout.addWidget(self.label_cur_num)
        self.verticalLayout_2.addLayout(self.horizontalLayout)
        self.gridLayout.addLayout(self.verticalLayout_2, 0, 0, 1, 1)
        MainWindow.setCentralWidget(self.centralwidget)
        # Menu bar, toolbar and status bar.
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 1538, 30))
        self.menubar.setObjectName("menubar")
        self.menu_file = QtWidgets.QMenu(self.menubar)
        self.menu_file.setObjectName("menu_file")
        self.menu_edit = QtWidgets.QMenu(self.menubar)
        self.menu_edit.setObjectName("menu_edit")
        self.menu_help = QtWidgets.QMenu(self.menubar)
        self.menu_help.setObjectName("menu_help")
        self.menu_filter = QtWidgets.QMenu(self.menubar)
        self.menu_filter.setObjectName("menu_filter")
        self.menu = QtWidgets.QMenu(self.menubar)
        self.menu.setObjectName("menu")
        MainWindow.setMenuBar(self.menubar)
        self.toolBar = QtWidgets.QToolBar(MainWindow)
        self.toolBar.setMouseTracking(True)
        self.toolBar.setObjectName("toolBar")
        MainWindow.addToolBar(QtCore.Qt.TopToolBarArea, self.toolBar)
        self.statusBar = QtWidgets.QStatusBar(MainWindow)
        self.statusBar.setObjectName("statusBar")
        MainWindow.setStatusBar(self.statusBar)
        # Actions (menu/toolbar entries), each with its icon resource.
        self.action_open = QtWidgets.QAction(MainWindow)
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap(":/action/.img/open.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.action_open.setIcon(icon1)
        self.action_open.setObjectName("action_open")
        self.action_exit = QtWidgets.QAction(MainWindow)
        icon2 = QtGui.QIcon()
        icon2.addPixmap(QtGui.QPixmap(":/action/.img/exit.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.action_exit.setIcon(icon2)
        self.action_exit.setObjectName("action_exit")
        self.action_rotate = QtWidgets.QAction(MainWindow)
        self.action_rotate.setCheckable(False)
        self.action_rotate.setEnabled(False)
        icon3 = QtGui.QIcon()
        icon3.addPixmap(QtGui.QPixmap(":/action/.img/rotate.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.action_rotate.setIcon(icon3)
        self.action_rotate.setObjectName("action_rotate")
        self.action_viewCap = QtWidgets.QAction(MainWindow)
        self.action_viewCap.setCheckable(False)
        self.action_viewCap.setEnabled(False)
        icon4 = QtGui.QIcon()
        icon4.addPixmap(QtGui.QPixmap(":/action/.img/clip.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.action_viewCap.setIcon(icon4)
        self.action_viewCap.setObjectName("action_viewCap")
        self.action_drawbox = QtWidgets.QAction(MainWindow)
        self.action_drawbox.setCheckable(False)
        self.action_drawbox.setEnabled(False)
        icon5 = QtGui.QIcon()
        icon5.addPixmap(QtGui.QPixmap(":/action/.img/select.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.action_drawbox.setIcon(icon5)
        self.action_drawbox.setObjectName("action_drawbox")
        self.action_save = QtWidgets.QAction(MainWindow)
        icon6 = QtGui.QIcon()
        icon6.addPixmap(QtGui.QPixmap(":/action/.img/save.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.action_save.setIcon(icon6)
        self.action_save.setObjectName("action_save")
        self.action_ColorThreshholder = QtWidgets.QAction(MainWindow)
        self.action_ColorThreshholder.setCheckable(True)
        self.action_ColorThreshholder.setChecked(True)
        self.action_ColorThreshholder.setObjectName("action_ColorThreshholder")
        self.action_ContourFilter = QtWidgets.QAction(MainWindow)
        self.action_ContourFilter.setCheckable(True)
        self.action_ContourFilter.setChecked(True)
        self.action_ContourFilter.setObjectName("action_ContourFilter")
        self.action_MorphologyEx = QtWidgets.QAction(MainWindow)
        self.action_MorphologyEx.setObjectName("action_MorphologyEx")
        self.action_Blur = QtWidgets.QAction(MainWindow)
        self.action_Blur.setObjectName("action_Blur")
        self.action_readConfig = QtWidgets.QAction(MainWindow)
        icon7 = QtGui.QIcon()
        icon7.addPixmap(QtGui.QPixmap(":/action/.img/readconfig.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.action_readConfig.setIcon(icon7)
        self.action_readConfig.setObjectName("action_readConfig")
        self.action_saveConfig = QtWidgets.QAction(MainWindow)
        icon8 = QtGui.QIcon()
        icon8.addPixmap(QtGui.QPixmap(":/action/.img/saveconfig.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.action_saveConfig.setIcon(icon8)
        self.action_saveConfig.setObjectName("action_saveConfig")
        self.action_getbkg = QtWidgets.QAction(MainWindow)
        self.action_getbkg.setObjectName("action_getbkg")
        self.action_frameDiff = QtWidgets.QAction(MainWindow)
        self.action_frameDiff.setCheckable(True)
        self.action_frameDiff.setObjectName("action_frameDiff")
        self.action_MOG2 = QtWidgets.QAction(MainWindow)
        self.action_MOG2.setCheckable(True)
        self.action_MOG2.setObjectName("action_MOG2")
        self.action_autoDetect = QtWidgets.QAction(MainWindow)
        self.action_autoDetect.setCheckable(True)
        self.action_autoDetect.setChecked(True)
        self.action_autoDetect.setObjectName("action_autoDetect")
        self.action_calib = QtWidgets.QAction(MainWindow)
        self.action_calib.setEnabled(False)
        icon9 = QtGui.QIcon()
        icon9.addPixmap(QtGui.QPixmap(":/action/.img/ruler.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.action_calib.setIcon(icon9)
        self.action_calib.setObjectName("action_calib")
        self.action_trace = QtWidgets.QAction(MainWindow)
        self.action_trace.setObjectName("action_trace")
        self.action_bkgDiff = QtWidgets.QAction(MainWindow)
        self.action_bkgDiff.setCheckable(True)
        self.action_bkgDiff.setObjectName("action_bkgDiff")
        self.action_help = QtWidgets.QAction(MainWindow)
        self.action_help.setObjectName("action_help")
        self.action_selectROI = QtWidgets.QAction(MainWindow)
        self.action_selectROI.setEnabled(False)
        icon10 = QtGui.QIcon()
        icon10.addPixmap(QtGui.QPixmap(":/action/.img/roi.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.action_selectROI.setIcon(icon10)
        self.action_selectROI.setObjectName("action_selectROI")
        self.action_lanes = QtWidgets.QAction(MainWindow)
        self.action_lanes.setEnabled(False)
        icon11 = QtGui.QIcon()
        icon11.addPixmap(QtGui.QPixmap(":/action/.img/lane.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.action_lanes.setIcon(icon11)
        self.action_lanes.setObjectName("action_lanes")
        self.action_morph = QtWidgets.QAction(MainWindow)
        self.action_morph.setCheckable(True)
        self.action_morph.setObjectName("action_morph")
        # Populate menus and the toolbar with the actions above.
        self.menu_file.addAction(self.action_open)
        self.menu_file.addAction(self.action_save)
        self.menu_file.addAction(self.action_exit)
        self.menu_file.addSeparator()
        self.menu_file.addAction(self.action_readConfig)
        self.menu_file.addAction(self.action_saveConfig)
        self.menu_edit.addAction(self.action_rotate)
        self.menu_edit.addAction(self.action_viewCap)
        self.menu_edit.addAction(self.action_selectROI)
        self.menu_edit.addAction(self.action_calib)
        self.menu_edit.addAction(self.action_lanes)
        self.menu_edit.addSeparator()
        self.menu_edit.addAction(self.action_getbkg)
        self.menu_help.addAction(self.action_help)
        self.menu_filter.addAction(self.action_morph)
        self.menu_filter.addAction(self.action_ContourFilter)
        self.menu_filter.addSeparator()
        self.menu_filter.addAction(self.action_trace)
        self.menu.addAction(self.action_drawbox)
        self.menu.addAction(self.action_autoDetect)
        self.menu.addAction(self.action_ColorThreshholder)
        self.menu.addAction(self.action_frameDiff)
        self.menu.addAction(self.action_MOG2)
        self.menu.addAction(self.action_bkgDiff)
        self.menubar.addAction(self.menu_file.menuAction())
        self.menubar.addAction(self.menu_edit.menuAction())
        self.menubar.addAction(self.menu.menuAction())
        self.menubar.addAction(self.menu_filter.menuAction())
        self.menubar.addAction(self.menu_help.menuAction())
        self.toolBar.addAction(self.action_open)
        self.toolBar.addAction(self.action_save)
        self.toolBar.addAction(self.action_exit)
        self.toolBar.addSeparator()
        self.toolBar.addAction(self.action_rotate)
        self.toolBar.addAction(self.action_viewCap)
        self.toolBar.addAction(self.action_selectROI)
        self.toolBar.addAction(self.action_calib)
        self.toolBar.addAction(self.action_lanes)
        self.toolBar.addAction(self.action_drawbox)
        self.toolBar.addSeparator()

        self.retranslateUi(MainWindow)
        self.tabWidget.setCurrentIndex(1)
        # Wire fixed signal/slot pairs; remaining slots connect by name.
        self.action_exit.triggered.connect(MainWindow.close)
        self.horizontalSlider.valueChanged['int'].connect(self.label_cur_num.setNum)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        """Set all user-visible strings (Chinese UI text, tooltips, shortcuts)."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        item = self.tableWidget_micro.verticalHeaderItem(0)
        item.setText(_translate("MainWindow", "1"))
        item = self.tableWidget_micro.verticalHeaderItem(1)
        item.setText(_translate("MainWindow", "2"))
        item = self.tableWidget_micro.verticalHeaderItem(2)
        item.setText(_translate("MainWindow", "3"))
        item = self.tableWidget_micro.verticalHeaderItem(3)
        item.setText(_translate("MainWindow", "4"))
        item = self.tableWidget_micro.verticalHeaderItem(4)
        item.setText(_translate("MainWindow", "5"))
        item = self.tableWidget_micro.verticalHeaderItem(5)
        item.setText(_translate("MainWindow", "6"))
        item = self.tableWidget_micro.verticalHeaderItem(6)
        item.setText(_translate("MainWindow", "7"))
        item = self.tableWidget_micro.verticalHeaderItem(7)
        item.setText(_translate("MainWindow", "8"))
        item = self.tableWidget_micro.verticalHeaderItem(8)
        item.setText(_translate("MainWindow", "9"))
        item = self.tableWidget_micro.verticalHeaderItem(9)
        item.setText(_translate("MainWindow", "10"))
        item = self.tableWidget_micro.horizontalHeaderItem(0)
        item.setText(_translate("MainWindow", "车辆编号"))
        item = self.tableWidget_micro.horizontalHeaderItem(1)
        item.setText(_translate("MainWindow", "位置"))
        item = self.tableWidget_micro.horizontalHeaderItem(2)
        item.setText(_translate("MainWindow", "速度"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_1), _translate("MainWindow", "车辆信息"))
        item = self.tableWidget_macro.verticalHeaderItem(0)
        item.setText(_translate("MainWindow", "1"))
        item = self.tableWidget_macro.verticalHeaderItem(1)
        item.setText(_translate("MainWindow", "2"))
        item = self.tableWidget_macro.verticalHeaderItem(2)
        item.setText(_translate("MainWindow", "3"))
        item = self.tableWidget_macro.verticalHeaderItem(3)
        item.setText(_translate("MainWindow", "4"))
        item = self.tableWidget_macro.verticalHeaderItem(4)
        item.setText(_translate("MainWindow", "5"))
        item = self.tableWidget_macro.verticalHeaderItem(5)
        item.setText(_translate("MainWindow", "6"))
        item = self.tableWidget_macro.verticalHeaderItem(6)
        item.setText(_translate("MainWindow", "7"))
        item = self.tableWidget_macro.horizontalHeaderItem(0)
        item.setText(_translate("MainWindow", "项目"))
        item = self.tableWidget_macro.horizontalHeaderItem(1)
        item.setText(_translate("MainWindow", "数值"))
        __sortingEnabled = self.tableWidget_macro.isSortingEnabled()
        self.tableWidget_macro.setSortingEnabled(False)
        item = self.tableWidget_macro.item(0, 0)
        item.setText(_translate("MainWindow", "  车辆计数"))
        item = self.tableWidget_macro.item(1, 0)
        item.setText(_translate("MainWindow", "  交通量"))
        item = self.tableWidget_macro.item(2, 0)
        item.setText(_translate("MainWindow", "  空间平均速度"))
        item = self.tableWidget_macro.item(3, 0)
        item.setText(_translate("MainWindow", "  时间平均速度"))
        item = self.tableWidget_macro.item(4, 0)
        item.setText(_translate("MainWindow", "  车流密度"))
        item = self.tableWidget_macro.item(5, 0)
        item.setText(_translate("MainWindow", "  车头间距"))
        item = self.tableWidget_macro.item(6, 0)
        item.setText(_translate("MainWindow", "  车头时距"))
        self.tableWidget_macro.setSortingEnabled(__sortingEnabled)
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "交通信息"))
        self.pushButton_play_pause.setShortcut(_translate("MainWindow", "Space"))
        self.label_cur_num.setText(_translate("MainWindow", "frame num"))
        self.menu_file.setTitle(_translate("MainWindow", "文件"))
        self.menu_edit.setTitle(_translate("MainWindow", "编辑"))
        self.menu_help.setTitle(_translate("MainWindow", "帮助"))
        self.menu_filter.setTitle(_translate("MainWindow", "调整"))
        self.menu.setTitle(_translate("MainWindow", "目标检测"))
        self.toolBar.setWindowTitle(_translate("MainWindow", "toolBar"))
        self.action_open.setText(_translate("MainWindow", "打开视频"))
        self.action_open.setStatusTip(_translate("MainWindow", "选择需要打开的视频文件"))
        self.action_exit.setText(_translate("MainWindow", "退出"))
        self.action_exit.setStatusTip(_translate("MainWindow", "退出系统"))
        self.action_exit.setWhatsThis(_translate("MainWindow", "退出系统"))
        self.action_rotate.setText(_translate("MainWindow", "调整画面角度"))
        self.action_rotate.setStatusTip(_translate("MainWindow", "绘制一条基准线,自动将基准线调整为水平位置"))
        self.action_viewCap.setText(_translate("MainWindow", "截选观察区"))
        self.action_viewCap.setStatusTip(_translate("MainWindow", "选取视频图像的显示区域"))
        self.action_drawbox.setText(_translate("MainWindow", "手动框选目标"))
        self.action_drawbox.setStatusTip(_translate("MainWindow", "手动框选追踪目标"))
        self.action_save.setText(_translate("MainWindow", "保存路径"))
        self.action_save.setToolTip(_translate("MainWindow", "选择保存路径"))
        self.action_save.setStatusTip(_translate("MainWindow", "选择保存车辆信息的路径,若未选择,则默认不保存车辆数据"))
        self.action_save.setWhatsThis(_translate("MainWindow", "选择保存车辆信息的路径,若未选择,则默认不保存车辆数据"))
        self.action_save.setShortcut(_translate("MainWindow", "Ctrl+S"))
        self.action_ColorThreshholder.setText(_translate("MainWindow", "色彩分割"))
        self.action_ContourFilter.setText(_translate("MainWindow", "轮廓筛选"))
        self.action_ContourFilter.setStatusTip(_translate("MainWindow", "设置参数对自动检测到的车辆目标进行过滤"))
        self.action_MorphologyEx.setText(_translate("MainWindow", "形态学处理"))
        self.action_Blur.setText(_translate("MainWindow", "模糊处理"))
        self.action_readConfig.setText(_translate("MainWindow", "读取配置文件"))
        self.action_readConfig.setStatusTip(_translate("MainWindow", "读取电脑中的.ini配置文件"))
        self.action_saveConfig.setText(_translate("MainWindow", "保存配置文件"))
        self.action_saveConfig.setStatusTip(_translate("MainWindow", "选择一个路径用于将当前设置参数保存为.ini配置文件"))
        self.action_getbkg.setText(_translate("MainWindow", "背景提取"))
        self.action_frameDiff.setText(_translate("MainWindow", "帧间差分"))
        self.action_MOG2.setText(_translate("MainWindow", "MOG2"))
        self.action_autoDetect.setText(_translate("MainWindow", "自动检测模式"))
        self.action_autoDetect.setStatusTip(_translate("MainWindow", "开启该功能,则系统将使用指定方式自动检测显示区域中的车辆目标"))
        self.action_calib.setText(_translate("MainWindow", "比例标注"))
        self.action_calib.setStatusTip(_translate("MainWindow", "在图像中绘制直线,并为其标注实际长度"))
        self.action_trace.setText(_translate("MainWindow", "路径绘制"))
        self.action_bkgDiff.setText(_translate("MainWindow", "背景差分"))
        self.action_help.setText(_translate("MainWindow", "使用说明"))
        self.action_selectROI.setText(_translate("MainWindow", "选取检测区"))
        self.action_lanes.setText(_translate("MainWindow", "车道标注"))
        self.action_morph.setText(_translate("MainWindow", "形态学处理"))
import widgets.ui_icon_rc
| [
"451559450q@gmail.com"
] | 451559450q@gmail.com |
7504a286ab73b4c060e27ddd13924ec97e028b97 | d77896ec7aebc4f1da5ad820ce98068681bc7b9d | /Space_Shooter/alien.py | 1942c81ca635498829525db5adbcbe743306329c | [] | no_license | aquaman48/Python | 5474c6a79420149469660be0435e271820b3bd92 | 884c8c62945b35fc96aaa9a36a9b6c3507ce1e76 | refs/heads/main | 2023-04-14T13:13:06.694630 | 2021-04-18T05:26:50 | 2021-04-18T05:26:50 | 330,811,963 | 0 | 0 | null | 2021-03-14T18:31:13 | 2021-01-18T23:32:53 | Python | UTF-8 | Python | false | false | 1,181 | py | """This file will be the contents of the Alien class"""
import pygame
from pygame.sprite import Sprite
class Alien(Sprite):
    """A single alien in the invading fleet."""

    def __init__(self, ai_game):
        """Load the alien sprite and place it near the top-left corner."""
        super().__init__()
        self.screen = ai_game.screen
        self.settings = ai_game.settings

        # Sprite image and its bounding rectangle.
        self.image = pygame.image.load('images/alien.bmp')
        self.rect = self.image.get_rect()

        # Start one alien-width/height in from the top-left corner; the
        # fleet-building code repositions each alien afterwards.
        self.rect.x = self.rect.width
        self.rect.y = self.rect.height

        # Horizontal position tracked as a float for sub-pixel movement.
        self.x = float(self.rect.x)

    def check_edges(self):
        """Return True when the alien touches either screen edge."""
        bounds = self.screen.get_rect()
        touching_right = self.rect.right >= bounds.right
        touching_left = self.rect.left <= 0
        if touching_right or touching_left:
            return True

    def update(self):
        """Slide the alien horizontally in the fleet's current direction."""
        self.x += self.settings.alien_speed * self.settings.fleet_direction
        self.rect.x = self.x
| [
"noreply@github.com"
] | noreply@github.com |
512f01a1261eb1c96485dc9c80c20b5d387c5e0a | 71ddc215db07f311e7028cedcaaaaa08b92d5022 | /how_to_find_in_list_int_float_str.py | 61b074edfa7607a79552fc823c11540059116f88 | [] | no_license | kabitakumari20/list_logical | 026a17e80c8feeeccf9f4141882eb6a31b80b082 | af86c6609a2b20f0019e0bd33e498ab34c546fbd | refs/heads/main | 2023-05-31T23:49:08.922831 | 2021-06-08T11:15:30 | 2021-06-08T11:15:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 358 | py | list=[2, 3.5,4.3,"hello world", 5, 4.3]
empty1 = []  # str elements
empty2 = []  # int elements
empty3 = []  # float elements
# Classify each element of the module-level `list` (note: it shadows the
# builtin) by its actual type.  The original compared every element against
# str()/int()/float() conversions of itself, which is fragile -- e.g. it put
# integral floats such as 2.0 into the int bucket.  isinstance tests the
# type directly and replaces the manual while/index loop.
for i, element in enumerate(list):
    if isinstance(element, str):
        empty1.append(element)
    elif isinstance(element, int):
        empty2.append(element)
    elif isinstance(element, float):
        empty3.append(element)
    else:
        # Mirror the original fallback: report the index of anything unhandled.
        print(i)
print(empty1)
print(empty2)
print(empty3)
"kabita20@navgurukul.org"
] | kabita20@navgurukul.org |
56adc16268f6d244ae04e2901531e17daad768f0 | 45457e15240a580bd24d5973ba23d368a2f34bbe | /Final sample/sample_2.py | 3142dcb3f7dad19189d200a18a53517b2a2423c9 | [] | no_license | brmuch/COMP9021 | 44a58deac1c7455fb13f9651ac5ac67da8d9fc87 | 2572756722a6b67b6facd021b02191a20b705d38 | refs/heads/master | 2020-04-04T11:08:43.889574 | 2019-06-19T02:58:17 | 2019-06-19T02:58:17 | 155,879,828 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,212 | py | banknotes = {}
banknote_values = [1, 2, 5, 10, 20, 50, 100]


def recursive_method(num, amount):
    """Greedily split `amount` using banknote_values[num] down to $1 notes.

    The per-denomination counts are written into the module-level
    ``banknotes`` dict; nothing is returned.
    """
    if num == 0:
        # Base case: whatever remains is paid out in $1 notes.
        if amount > 0:
            banknotes[1] = amount
        return
    value = banknote_values[num]
    if amount >= value:
        banknotes[value] = amount // value
        recursive_method(num - 1, amount % value)
    else:
        recursive_method(num - 1, amount)
def f(N):
    '''
    >>> f(20)
    Here are your banknotes:
    $20: 1
    >>> f(40)
    Here are your banknotes:
    $20: 2
    >>> f(42)
    Here are your banknotes:
    $2: 1
    $20: 2
    >>> f(43)
    Here are your banknotes:
    $1: 1
    $2: 1
    $20: 2
    >>> f(45)
    Here are your banknotes:
    $5: 1
    $20: 2
    >>> f(2537)
    Here are your banknotes:
    $2: 1
    $5: 1
    $10: 1
    $20: 1
    $100: 25
    '''
    # Greedy breakdown from the largest note down.  Unlike the original,
    # all state is local: f() no longer depends on (and mutates) the
    # module-level ``banknotes`` dict shared with recursive_method(), so
    # repeated calls cannot interfere with each other.
    banknote_values = [1, 2, 5, 10, 20, 50, 100]
    print('Here are your banknotes:')
    remaining = N
    counts = {}
    for value in reversed(banknote_values):
        quantity, remaining = divmod(remaining, value)
        if quantity:
            counts[value] = quantity
    for value in sorted(counts):
        print(f"${value}: {counts[value]}")
if __name__ == '__main__':
    # Run the examples embedded in f()'s docstring as regression tests.
    import doctest
    doctest.testmod()
| [
"br20130906@gmail.com"
] | br20130906@gmail.com |
697c73285ea86570272d2a116bb2cf7710ad416f | d82cd4bc0121c913f0c7d4b86b8ef635c3ca9a98 | /Snakefile | e418a704963c0ef42a694754765138f1f7b656b5 | [
"Apache-2.0"
] | permissive | SamBryce-Smith/polyAsite_workflow | 3548a98d70f7f92d3312a8049694e9847817c021 | 6ea639e16d176e81591339cf657201a61aebd7a8 | refs/heads/master | 2023-05-09T21:26:57.879106 | 2021-06-03T17:13:41 | 2021-06-03T17:13:41 | 373,532,751 | 0 | 0 | Apache-2.0 | 2021-06-03T17:16:20 | 2021-06-03T14:17:54 | Python | UTF-8 | Python | false | false | 99,601 | #configfile: "config.yaml"
# don't provide a default config file
# to ensure that the executing person knows what it is doing
# and provides the intended information
from snakemake.utils import makedirs
from snakemake.utils import listfiles
import pandas as pd
import numpy as np
import string
import random
import os
################################################################################
# Mapping subpipeline
################################################################################
include: "segemehl/Snakefile"
################################################################################
# Functions for exceptions/branching
################################################################################
#-------------------------------------------------------------------------------
# A-seq2 samples go through 5p-Adapter trimming (select_for_valid_5p_configuration)
#-------------------------------------------------------------------------------
def trim_5p_adapter_input(wildcards):
    """Input function: FASTA file fed into 5' adapter trimming for a sample.

    NOTE(review): both branches currently return the identical raw
    "<sample>.fa.gz" path, so the QuantSeq_REV special case is a no-op.
    Presumably a protocol-specific intermediate was intended here --
    confirm against the QuantSeq_REV processing chain.
    """
    if samples.loc[wildcards.sample, "protocol"] == "QuantSeq_REV":
        return os.path.join(config["samples_dir"],
                            wildcards.sample,
                            wildcards.sample + ".fa.gz")
    else:
        return os.path.join(config["samples_dir"],
                            wildcards.sample,
                            wildcards.sample + ".fa.gz")
def trim_adapter_input(wildcards):
    """Input function: pick the FASTA fed into 3' adapter trimming.

    Protocols with a dedicated 5'-processing step hand over their
    intermediate file; every other protocol starts from the raw FASTA.
    """
    suffix_by_protocol = {
        "A-seq2": ".5ptrimmed.A-seq2.fa.gz",
        "3'READS": ".5ptrimmed.3READS.fa.gz",
        "PAS-Seq": ".leadingTs_trimmed.fa.gz",
    }
    protocol = samples.loc[wildcards.sample, "protocol"]
    suffix = suffix_by_protocol.get(protocol, ".fa.gz")
    return os.path.join(config["samples_dir"],
                        wildcards.sample,
                        wildcards.sample + suffix)
def get_reverse_compl_input(wildcards):
    """Input function: file to reverse-complement, depending on protocol."""
    protocol = samples.loc[wildcards.sample, "protocol"]
    if protocol in ("PAS-Seq", "SAPAS"):
        # These protocols trim extra 3' nucleotides first.
        suffix = ".trimmed_add_nuc.fa.gz"
    elif protocol in ("DRS", "3P-Seq"):
        # Raw reads are used directly.
        suffix = ".fa.gz"
    elif protocol == "QuantSeq_REV":
        suffix = ".5ptrimmed.fa.gz"
    else:
        suffix = ".trimmed.fa.gz"
    return os.path.join(config["samples_dir"],
                        wildcards.sample,
                        wildcards.sample + suffix)
def get_valid_3p_3PSeq_file(wildcards):
    """Input function for 3P-Seq 3'-configuration filtering.

    Reverse-complemented samples use the rev_cmpl intermediate,
    otherwise the raw FASTA is used.
    """
    needs_revcompl = samples.loc[wildcards.sample, "reverse_compl"]
    suffix = ".trimmed.rev_cmpl.fa.gz" if needs_revcompl else ".fa.gz"
    return os.path.join(config["samples_dir"],
                        wildcards.sample,
                        wildcards.sample + suffix)
def get_input_polyAtail_removal(wildcards):
    """Input function: file whose poly(A) tail leftovers get trimmed."""
    protocol = samples.loc[wildcards.sample, "protocol"]
    if protocol == "3P-Seq":
        suffix = ".valid_3p_configuration.fa.gz"
    elif protocol == "PAPERCLIP":
        # PAPERCLIP samples only pass 5' trimming when a 5' adapter is known.
        if pd.isna(samples.loc[wildcards.sample, "fiveAdapter"]):
            suffix = ".fa.gz"
        else:
            suffix = ".5ptrimmed.fa.gz"
    elif protocol == "3'-Seq (Mayr)":
        suffix = ".trimmed.fa.gz"
    else:
        suffix = ".trimmed.rev_cmpl.fa.gz"
    return os.path.join(config["samples_dir"],
                        wildcards.sample,
                        wildcards.sample + suffix)
#--------------------------------------------------------------------------------
# Reverse complement will NOT be applied for Aseq, Mayr and 3P-seq;
# those go from trim_3p_adapter directly to get_valid_reads
# 3READS,3P-Seq,QuantSeq_REV,PAPERCLIP, Mayr: additionally, the poly(A) tail is trimmed
#--------------------------------------------------------------------------------
def get_reads_after_trimming(wildcards):
    """Input function: reads after all trimming steps for a sample.

    NOTE(review): the first branch tests for protocol "3P-seq" (lower-case
    's') while the later branch and the rest of the file use "3P-Seq".
    If the samples table only ever contains "3P-Seq", the first comparison
    can never match and 3P-Seq samples always take the trimmed_tail path --
    verify the intended spelling against the samples table.
    """
    if(samples.loc[wildcards.sample, "protocol"] == "A-seq" or
       samples.loc[wildcards.sample, "protocol"] == "3P-seq"):
        return os.path.join(config["samples_dir"],
                            wildcards.sample,
                            wildcards.sample + ".trimmed.fa.gz")
    elif samples.loc[wildcards.sample, "protocol"] == "3'-Seq (Mayr)":
        return os.path.join(config["samples_dir"],
                            wildcards.sample,
                            wildcards.sample + ".trimmed_tail.fa.gz")
    elif( samples.loc[wildcards.sample, "protocol"] == "3'READS" or
          samples.loc[wildcards.sample, "protocol"] == "3P-Seq" or
          samples.loc[wildcards.sample, "protocol"] == "PAPERCLIP" or
          samples.loc[wildcards.sample, "protocol"] == "QuantSeq_REV"):
        return os.path.join(config["samples_dir"],
                            wildcards.sample,
                            wildcards.sample + ".trimmed_tail.fa.gz")
    else:
        return os.path.join(config["samples_dir"],
                            wildcards.sample,
                            wildcards.sample + ".trimmed.rev_cmpl.fa.gz")
#-------------------------------------------------------------------------------
# the valid reads change depending on whether reads are
# also length filtered
#-------------------------------------------------------------------------------
def get_valid_reads(wildcards):
    """Input function: validated read set for a sample.

    A-seq and Mayr samples are additionally max-length filtered;
    all other protocols use the plain valid-read file.
    """
    protocol = samples.loc[wildcards.sample, "protocol"]
    length_filtered = protocol in ("A-seq", "3'-Seq (Mayr)")
    suffix = ".valid_maxLength.fa.gz" if length_filtered else ".valid.fa.gz"
    return os.path.join(config["samples_dir"],
                        wildcards.sample,
                        wildcards.sample + suffix)
def get_ds_patterns_for_ipAssignment(wildcards):
    """Build the --ds_pattern CLI options for internal-priming assignment.

    3'READS samples use their own pattern list from the config; an unset
    (None) config entry yields an empty option string.
    """
    if samples.loc[wildcards.sample, "protocol"] == "3'READS":
        config_key = 'IP.downstream_patterns.3READS'
    else:
        config_key = 'IP.downstream_patterns'
    patterns = config[config_key]
    if patterns is None:
        return ""
    return " ".join("--ds_pattern=%s" % pattern for pattern in patterns)
def get_excluded_chromosomes(wildcards):
    """Build the --exclude option listing chromosomes to skip.

    Starts from config['excluded_chr'] (may be None) and adds the female
    chromosome for female samples, or removes it for all other samples.
    Returns "" when nothing is excluded.
    """
    # dict keys act as an order-preserving set of chromosome names.
    excluded = dict.fromkeys(config['excluded_chr'] or [])
    if samples.loc[wildcards.sample, "sex"] == "F":
        excluded[config['female_chr']] = None
    else:
        excluded.pop(config['female_chr'], None)
    if not excluded:
        return ""
    return "--exclude=" + ":".join(excluded)
#-------------------------------------------------------------------------------
# local rules
#-------------------------------------------------------------------------------
# Rules run directly on the submit host instead of being dispatched to the cluster.
localrules: create_log_dir, create_log_dir_atlas, download_fastq_se, download_fastq_pe,
    change_fastq_to_fq_se, download_genome, download_annotation, fetch_chr_sizes_ucsc,
    make_track_info, complete_preprocessing, complete_clustering, complete_tracks, finish
# Sample annotation table; rows are indexed by sample name, '#' starts a comment line.
samples = pd.read_table(config['atlas.samples_table'], index_col=0, comment='#')
# Release-specific output directory: <atlas_dir>/<organism>/<genome>/<release_name>.
atlas_outputdir = os.path.join(config['atlas_dir'],
                               config['organism'],
                               config['genome'],
                               config['atlas.release_name'])
################################################################################
# target rule
################################################################################
# Top-level target: collects per-sample preprocessing sentinels plus the
# atlas clustering and track sentinels. Commented-out inputs are optional
# count/track outputs that can be re-enabled.
rule finish:
    ##LOCAL##
    ##No Singularity support required##
    input:
        #ip_cnt = expand( os.path.join(config["samples_dir"],
        #                              "counts",
        #                              "{sample}_" + config['genome'] + ".ip3pSites.out"),
        #                 sample = samples.index),
        #track_info = expand( os.path.join(config["samples_dir"],
        #                                  "{sample}",
        #                                  config['genome'],
        #                                  "{sample}.track_info.txt"),
        #                     sample = samples.index),
        prepro_cmplt = expand( os.path.join(config["samples_dir"],
                                            "{sample}",
                                            config['genome'],
                                            "{sample}.prepro_cmplt.txt"),
                               sample = samples.index),
        clst_cmplt = os.path.join(atlas_outputdir,
                                  "clst_cmplt.txt"),
        tracks_cmplt = os.path.join(atlas_outputdir,
                                    "tracks_cmplt.txt")
        #noBG_cnt = expand( os.path.join(config["samples_dir"],
        #                                "counts",
        #                                "{sample}_" + config['genome'] + ".noBG3pSites.out"),
        #                   sample = samples.index),
        #cluster_stats = os.path.join( atlas_outputdir,
        #                              "counts",
        #                              "clusters.stats.out" ),
        #track_info_cl = os.path.join(atlas_outputdir,
        #                             ("clusters."+ config['genome'] + "_"
        #                              + config['atlas.release_name']
        #                              + ".track_info.txt")),
        #final_atlas = os.path.join(atlas_outputdir,
        #                           "clusters.bed.gz")
################################################################################
# individual rules (if possible in chronological order)
################################################################################
#-------------------------------------------------------------------------------
# create dir for logfiles (samples)
#-------------------------------------------------------------------------------
# Creates the per-sample cluster-log directories and touches a sentinel file
# that downstream rules depend on.
rule create_log_dir:
    ##LOCAL##
    ##No Singularity support required##
    ''' This step creates the log directory, if necessary.
        This is required when jobs are submitted and the
        job output should be written to these files.
    '''
    params:
        cluster_samples_log = os.path.join(config["samples_dir"],
                                           "logs",
                                           "cluster_logs"),
        cluster_countings_log = os.path.join(config["samples_dir"],
                                             "logs",
                                             "cluster_logs",
                                             "counting")
    output:
        # Sentinel only; the real outputs are the directories created below.
        dirs_samples_created = touch(os.path.join(config["samples_dir"],
                                                  "logs",
                                                  "created_log_dir.out"))
    shell:
        '''
        mkdir -p {params.cluster_samples_log}
        mkdir -p {params.cluster_countings_log}
        '''

#-------------------------------------------------------------------------------
# create dir for logfiles (atlas)
#-------------------------------------------------------------------------------
# Same as create_log_dir, but for the atlas-level output directory.
rule create_log_dir_atlas:
    ##LOCAL##
    ##No Singularity support required##
    ''' This step creates the log directory, if necessary.
        This is required when jobs are submitted and the
        job output should be written to these files.
    '''
    params:
        cluster_atlas_log = os.path.join(atlas_outputdir,
                                         "logs",
                                         "cluster_logs")
    output:
        dirs_atlas_created = touch(os.path.join(atlas_outputdir,
                                                "logs",
                                                "created_log_dir.out")),
    shell:
        '''
        mkdir -p {params.cluster_atlas_log}
        '''
#-------------------------------------------------------------------------------
# download the genome sequence
#-------------------------------------------------------------------------------
# Downloads the genome FASTA, decompresses it, and writes a "clean" copy whose
# sequence headers are truncated at the first whitespace.
rule download_genome:
    ##LOCAL##
    ##Singularity provided: zavolab_minimal:1, not tested##
    output:
        genome = os.path.join(config['genome_fasta_raw']),
        # Random suffix avoids collisions when several downloads run in parallel.
        temp_genome = temp( "genome." + ''.join(random.choice(string.ascii_uppercase) for _ in range(6)) + ".fa.gz"),
        clean = os.path.join(config['genome_fasta'])
    singularity:
        "docker://cjh4zavolab/zavolab_minimal:1"
    params:
        url = config['genome_fasta_url']
    resources:
        load = 20 # With "--resources load=100", max 5 instances of this rule are run in parallel!
    shell:
        '''
        wget -O {output.temp_genome} \
        {params.url} \
        &> /dev/null &&
        gzip -cd {output.temp_genome} \
        > {output.genome} &&
        sed 's/\s.*//' {output.genome} \
        > {output.clean}
        '''

#-------------------------------------------------------------------------------
# download the gene annotation file
#-------------------------------------------------------------------------------
# Downloads and decompresses the GTF gene annotation.
rule download_annotation:
    ##LOCAL##
    ##Singularity provided: zavolab_minimal:1, not tested##
    output:
        anno = config['gene_annotation'],
        temp_anno = temp( "gene_anno." + ''.join(random.choice(string.ascii_uppercase) for _ in range(6)) + ".gtf.gz")
    singularity:
        "docker://cjh4zavolab/zavolab_minimal:1"
    params:
        url = config['gene_anno_url']
    resources:
        load = 20 # With "--resources load=100", max 5 instances of this rule are run in parallel!
    shell:
        '''
        wget -O {output.temp_anno} \
        {params.url} \
        &> /dev/null &&
        gzip -cd {output.temp_anno} \
        > {output.anno}
        '''
#-------------------------------------------------------------------------------
# get filtered version of annotation
#-------------------------------------------------------------------------------
# Filters the GTF annotation by transcript biotype and (optionally) transcript
# support level via a perl helper script.
rule get_filtered_annotation:
    ##Singularity needed: perl, gzip##
    ## Singularity provided: zavolab_minimal:1, not tested ##
    input:
        anno = os.path.join(config['gene_annotation']),
        script = os.path.join( config["script_dir"],
                               "rs-filter-gtf-by-type-and-support.pl")
    output:
        filtered_anno = config['gene_annotation_filtered']
    singularity:
        "docker://cjh4zavolab/zavolab_minimal:1"
    params:
        type_id = config['transcript_biotype_id'],
        # One --type option per accepted transcript biotype.
        types = lambda wildcards: " ".join(["--type=" + i for i in config['transcript_type']]),
        # Support-level filtering is optional; empty strings disable it.
        tr_supp_level_id = "--support_level_id=" + config['transcript_support_level_id'] \
            if config['transcript_support_level_id'] is not None else "",
        tr_supp_level = "--support_level=" + config['transcript_support_level'] \
            if config['transcript_support_level'] is not None else "",
        cluster_log = os.path.join(config['annotation_dir'],
                                   "filter_anno.log")
    shell:
        '''
        perl {input.script} \
        --type_id={params.type_id} \
        {params.types} \
        {params.tr_supp_level_id} {params.tr_supp_level} \
        {input.anno} \
        > {output.filtered_anno}
        '''
# ################################################################################
# # preprae mongoDB collection
# ################################################################################
# rule check_samples_in_mongoDB:
# # check for each sample if it is already in the mongoDB
# # (genome and organism specific)
# input:
# dirs_created = os.path.join(atlas_outputdir, "logs", "created_log.tmp" )
# output:
# checked_db = os.path.join(atlas_outputdir,
# "logs",
# "mongoDB_samples_checked.log")
# params:
# samples = samples.index,
# organism = config['organism'],
# genome = config['genome']
################################################################################
# samples preprocessing
################################################################################
#-------------------------------------------------------------------------------
# download fastq files (paired-end data)
#-------------------------------------------------------------------------------
# Downloads paired-end FASTQ files for one SRR run from ENA via aspera.
rule download_fastq_pe:
    ##LOCAL##
    input:
        script = os.path.join( config["script_dir"],
                               "rs-download-fastq-files-from-ena-via-ascp.py")
    output:
        sample_fq = expand(os.path.join(config["samples_dir"],
                                        "{{sample_name}}",
                                        "{{sample_id}}_{read}.fastq.gz"),
                           read = [1,2])
    singularity:
        "docker://cjh4zavolab/aspera:5"
    params:
        outdir = os.path.join(config["samples_dir"],
                              "{sample_name}"),
        srr_id = "{sample_id}"
    resources:
        load = 20 # With "--resources load=100", max 5 instances of this rule are run in parallel!
    log:
        os.path.join(config["samples_dir"],
                     "logs",
                     "download_fq",
                     "{sample_name}.log")
    shell:
        '''
        python3 {input.script} \
        --srr_id {params.srr_id} \
        --outdir {params.outdir} \
        --paired \
        2> {log}
        '''
        # FIX: the original was missing the line-continuation backslash after
        # "--outdir {params.outdir}", so "--paired" (and the 2> redirect) were
        # executed as a separate, failing shell command and the download was
        # never run in paired mode.
#-------------------------------------------------------------------------------
# download fastq file (single_end data)
#-------------------------------------------------------------------------------
# Downloads a single-end FASTQ file for one SRR run from ENA via aspera.
rule download_fastq_se:
    ##LOCAL##
    input:
        script = os.path.join( config["script_dir"],
                               "rs-download-fastq-files-from-ena-via-ascp.py")
    output:
        sample_fq = os.path.join(config["samples_dir"],
                                 "{sample_name}",
                                 "{sample_id}.fastq.gz")
    singularity:
        "docker://cjh4zavolab/aspera:5"
    params:
        outdir = os.path.join(config["samples_dir"],
                              "{sample_name}"),
        srr_id = "{sample_id}"
    resources:
        load = 20 # With "--resources load=100", max 5 instances of this rule are run in parallel!
    log:
        os.path.join(config["samples_dir"],
                     "logs",
                     "download_fq",
                     "{sample_name}.log")
    shell:
        '''
        python3 {input.script} \
        --srr_id {params.srr_id} \
        --outdir {params.outdir} \
        2> {log}
        '''

#-------------------------------------------------------------------------------
# convert file names:
# - from SRR id to GSM/SRA id
# - from fastq.gz to fq.gz
#-------------------------------------------------------------------------------
# Concatenates multiple SRR run files belonging to one sample (if any) and
# softlinks the result under the sample's own name.
rule change_fastq_to_fq_se:
    ##LOCAL##
    #ATTENTION: For some samples, multiple SRR run files (aka fastq files) belong to
    #           a single sample. If this is the case, they are concatenated here
    #           before the softlink is established
    ##No Singularity support required##
    input:
        sample_fq = lambda wildcards: expand(os.path.join(config["samples_dir"],
                                                          wildcards.sample,
                                                          "{srr}.fastq.gz"),
                                             srr = samples.loc[wildcards.sample, "SRR"].split(","))
    output:
        sample_fq = os.path.join(config["samples_dir"],
                                 "{sample}",
                                 "{sample}.fq.gz")
    params:
        file_dir = os.path.join(config["samples_dir"],
                                "{sample}"),
        sample_srr = lambda wildcards: samples.loc[wildcards.sample, "SRR"],
        first_srr = lambda wildcards: samples.loc[wildcards.sample, "SRR"].split(",")[0],
        sample_id = "{sample}"
    shell:
        '''
        cd {params.file_dir}
        IFS=',' read -ra SRR <<< "{params.sample_srr}"
        if [[ "${{#SRR[@]}}" > "1" ]];then
            first_file="${{SRR[0]}}.fastq.gz"
            for i in $(seq 1 $((${{#SRR[@]}}-1))); do curr_file="${{SRR[$i]}}.fastq.gz"; cat ${{curr_file}} >> ${{first_file}};done
        fi
        ln -fs {params.first_srr}.fastq.gz {params.sample_id}.fq.gz
        cd -
        '''
#-------------------------------------------------------------------------------
# convert fq to fasta
# hint: I (Ralf) do not use fastq_to_fasta anymore because I had issues with the
# number of output reads and I believe it has to do with the -Q33 option
# I use to indicate the correct offset for the quality scores
# (see here: https://www.biostars.org/p/120311/ )
#-------------------------------------------------------------------------------
# Converts FASTQ to FASTA (via awk helper) and renames reads to sequential
# counts with fastx_renamer.
rule fq2fasta_se:
    input:
        dirs_samples_created = os.path.join(config["samples_dir"],
                                            "logs",
                                            "created_log_dir.out"),
        sample_fq = os.path.join(config["samples_dir"],
                                 "{sample}",
                                 "{sample}.fq.gz"),
        script = os.path.join( config["script_dir"],
                               "rs-fastq_to_fasta_awk.sh")
    output:
        sample_fa = os.path.join(config["samples_dir"],
                                 "{sample}",
                                 "{sample}.fa.gz")
    singularity:
        "docker://cjh4zavolab/fastx:0.0.14"
    params:
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    log:
        os.path.join(config["samples_dir"],
                     "logs",
                     "fq2fa",
                     "{sample}.log")
    shell:
        '''
        (zcat {input.sample_fq} \
        | {input.script} \
        | fastx_renamer -n COUNT -z \
        > {output.sample_fa}) 2> {log}
        '''
        # FIX: "2> {log}" previously sat on its own shell line after the
        # closing parenthesis, so it ran as a stand-alone command that merely
        # truncated the log file while the pipeline's stderr went uncaptured.
        # It is now attached to the subshell, matching the other rules.
#-------------------------------------------------------------------------------
# get number of raw reads
#-------------------------------------------------------------------------------
# Counts raw reads (FASTA headers) and writes "reads.raw.nr<TAB>N".
rule raw_read_cnt_se:
    ##No Singularity support required##
    input:
        sample_fa = os.path.join(config["samples_dir"],
                                 "{sample}",
                                 "{sample}.fa.gz")
    output:
        raw_cnt = temp(os.path.join(config["samples_dir"],
                                    "counts",
                                    "{sample}.raw.nr.out" ) )
    params:
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "counting",
                                   "{sample}.log")
    run:
        import gzip
        n = 0
        # Each ">" header line corresponds to one read.
        with gzip.open(input.sample_fa, "rt") as infile:
            n = sum([1 for line in infile if line.startswith(">")])
        with open(output.raw_cnt, "w") as out:
            out.write("reads.raw.nr\t%i\n" % n)

#-------------------------------------------------------------------------------
# get length of raw reads
#-------------------------------------------------------------------------------
# Appends the raw read length to the counts file via an external script.
rule raw_read_length_se:
    ##No Singularity support required##
    input:
        raw_cnt = os.path.join(config["samples_dir"],
                               "counts",
                               "{sample}.raw.nr.out" ),
        input_fa = os.path.join(config["samples_dir"],
                                "{sample}",
                                "{sample}.fa.gz")
    output:
        raw_len = temp(os.path.join(config["samples_dir"],
                                    "counts",
                                    "{sample}.raw.len.out" ) )
    params:
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "counting",
                                   "{sample}.log")
    script:
        os.path.join(config['snakemake_script_dir'],
                     "raw-read-length.py")
#-------------------------------------------------------------------------------
# filter reads without expected 5' start
#-------------------------------------------------------------------------------
# A-seq2: keeps only reads that start with the expected 5' adapter and trims it.
rule select_for_valid_5p_configuration:
    input:
        sample_fa = os.path.join(config["samples_dir"],
                                 "{sample}",
                                 "{sample}.fa.gz"),
        script = os.path.join( config["script_dir"],
                               "rs-filter-by-5p-adapter.pl")
    output:
        selected_5p = os.path.join(config["samples_dir"],
                                   "{sample}",
                                   "{sample}.5ptrimmed.A-seq2.fa.gz")
    singularity:
        "docker://cjh4zavolab/select_valid_5p:3"
    params:
        adapt = config['to_trim_from_5p_Aseq2'],
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    log:
        os.path.join(config["samples_dir"],
                     "logs",
                     "trim_5p_part",
                     "{sample}.log")
    shell:
        '''
        (zcat {input.sample_fa} \
        | perl {input.script} \
        --adapter={params.adapt} \
        | gzip > {output.selected_5p}) 2> {log}
        '''

#-------------------------------------------------------------------------------
# trim 4 nucleotides from the read start (used for 3' READS)
# 3' READS: according to doi:10.1038/nmeth.2288
# each valid read (from rev sequencing which was applied in these samples;
# BUT: new samples have to be checked whether they were still reverse sequenced)
# should have 4 random nt at the 5' end followed by remaining Ts from the
# reverse transcription of the poly(A) tail;
# according to the published protocol, only reads with at least 2 nongenomic
# As were considered valid
# hence, here we select valid 5' configuration as: ....TT and the remaining
# part of the poly(A) tail is trimmed later after reverse complementation
#-------------------------------------------------------------------------------
# 3'READS: selects reads matching the sample's 5' pattern (e.g. "....TT") and
# trims it; the rest of the poly(A) tail is removed after reverse complementing.
rule select_for_valid_5p_configuration_3READS:
    input:
        sample_fa = os.path.join(config["samples_dir"],
                                 "{sample}",
                                 "{sample}.fa.gz"),
        script = os.path.join( config["script_dir"],
                               "rs-filter-by-5p-adapter.pl")
    output:
        trimmed_5p = os.path.join(config["samples_dir"],
                                  "{sample}",
                                  "{sample}.5ptrimmed.3READS.fa.gz")
    singularity:
        "docker://cjh4zavolab/select_valid_5p:3"
    params:
        # Per-sample 5' pattern from the samples table.
        adapt = lambda wildcards: samples.loc[ wildcards.sample, "fiveAdapter"],
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    log:
        os.path.join(config["samples_dir"],
                     "logs",
                     "trim_5p_part",
                     "{sample}.log")
    shell:
        '''
        (zcat {input.sample_fa} \
        | perl {input.script} \
        --adapter={params.adapt} \
        | gzip > {output.trimmed_5p}) 2> {log}
        '''
#-------------------------------------------------------------------------------
# trim leading Ts
# (used for samples from PAS-Seq)
#-------------------------------------------------------------------------------
# PAS-Seq: trims leading T stretches (reverse-transcribed poly(A)) from reads.
rule trim_leading_Ts:
    input:
        sample_fa = os.path.join(config["samples_dir"],
                                 "{sample}",
                                 "{sample}.fa.gz"),
        script = os.path.join( config["script_dir"],
                               "rs-trim-5p-T.pl")
    output:
        nuc_trimmed = os.path.join(config["samples_dir"],
                                   "{sample}",
                                   "{sample}.leadingTs_trimmed.fa.gz")
    singularity:
        "docker://cjh4zavolab/zavolab_minimal:1"
    params:
        adapt = "T",
        minLen=config['min_length'],
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    log:
        # NOTE(review): this log file is declared but the shell command below
        # does not redirect anything into it -- confirm whether "2> {log}"
        # was intended, as in the neighbouring rules.
        os.path.join(config["samples_dir"],
                     "logs",
                     "trim_5p_T",
                     "{sample}.log")
    shell:
        '''
        zcat {input.sample_fa} \
        | perl {input.script} \
        --minLen={params.minLen} \
        --nuc={params.adapt} \
        | gzip > {output.nuc_trimmed}
        '''

#-------------------------------------------------------------------------------
# trim the 5' adapter
# Currently only QuantSeq_REV
#-------------------------------------------------------------------------------
# Trims the sample's 5' adapter with cutadapt, enforcing a minimum read length.
rule trim_5p_adapter_se:
    input:
        in_fa = trim_5p_adapter_input
    output:
        no_5p_adapter = os.path.join(config["samples_dir"],
                                     "{sample}",
                                     "{sample}.5ptrimmed.fa.gz")
    singularity:
        "docker://zavolab/cutadapt:1.16"
    params:
        # A "*" in the table entry separates an anchor part from the adapter;
        # only the part after "*" is passed to cutadapt.
        adapt = lambda wildcards: samples.loc[ wildcards.sample, "fiveAdapter"] if not "*" in samples.loc[ wildcards.sample, "fiveAdapter"] else samples.loc[ wildcards.sample, "fiveAdapter"].split("*")[1],
        minLen=config['min_length'],
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    resources:
        time = 6
    log:
        os.path.join(config["samples_dir"],
                     "logs",
                     "trim_5p_adapter",
                     "{sample}.log")
    shell:
        '''
        cutadapt \
        -g {params.adapt} \
        --minimum-length {params.minLen} \
        -o {output.no_5p_adapter} \
        {input.in_fa} \
        &> {log}
        '''
#-------------------------------------------------------------------------------
# trim the 3' adapter
#-------------------------------------------------------------------------------
# Trims the 3' adapter (and, where applicable, the 5' adapter) with cutadapt.
rule trim_adapter_se:
    input:
        in_fa = trim_adapter_input
    output:
        no_3p_adapter = os.path.join(config["samples_dir"],
                                     "{sample}",
                                     "{sample}.trimmed.fa.gz")
    singularity:
        "docker://zavolab/cutadapt:1.16"
    params:
        # "*" in the table entry splits off an anchor; only the part after "*" is used.
        adapt = lambda wildcards: samples.loc[ wildcards.sample, "threeAdapter"] if not "*" in samples.loc[ wildcards.sample, "threeAdapter"] else samples.loc[ wildcards.sample, "threeAdapter"].split("*")[1],
        # 5' adapter is skipped when unset or for 3'READS (handled in its own rule).
        five_p_adapt = lambda wildcards: "" if (pd.isna( samples.loc[ wildcards.sample, "fiveAdapter"]) or (samples.loc[ wildcards.sample, "protocol"] == "3'READS")) else "-g " + samples.loc[ wildcards.sample, "fiveAdapter"],
        minLen=config['min_length'],
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    resources:
        time = 6
    log:
        os.path.join(config["samples_dir"],
                     "logs",
                     "trim_3p_adapter",
                     "{sample}.log")
    shell:
        '''
        cutadapt \
        -a {params.adapt} \
        {params.five_p_adapt} \
        --minimum-length {params.minLen} \
        -o {output.no_3p_adapter} \
        {input.in_fa} \
        &> {log}
        '''

#-------------------------------------------------------------------------------
# trim additional nucleotides that might occur between the
# 3' end and the 3' adapter
#-------------------------------------------------------------------------------
# Removes extra nucleotides between the read 3' end and the adapter (perl helper).
rule trim_additional_3p_nuc:
    ##Singularity required: perl##
    input:
        no_3p_adapter = os.path.join(config["samples_dir"],
                                     "{sample}",
                                     "{sample}.trimmed.fa.gz"),
        script = os.path.join( config["script_dir"],
                               "ag-trimm-3p-end.pl")
    output:
        nuc_trimmed = os.path.join(config["samples_dir"],
                                   "{sample}",
                                   "{sample}.trimmed_add_nuc.fa.gz")
    singularity:
        "docker://cjh4zavolab/zavolab_minimal:1"
    params:
        # The bracketed part before "*" encodes the nucleotides to strip.
        adapt = lambda wildcards: samples.loc[ wildcards.sample, "threeAdapter"].split("*")[0].rstrip("]").lstrip("["),
        minLen=config['min_length'],
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    log:
        # NOTE(review): declared but not written to by the shell command below;
        # confirm whether a "2> {log}" redirect was intended.
        os.path.join(config["samples_dir"],
                     "logs",
                     "trim_3p_nuc",
                     "{sample}.log")
    shell:
        '''
        zcat {input.no_3p_adapter} \
        | perl {input.script} \
        --minLen={params.minLen} \
        --nuc={params.adapt} \
        | gzip > {output.nuc_trimmed}
        '''
#-------------------------------------------------------------------------------
# reverse complement
#-------------------------------------------------------------------------------
# Reverse-complements reads with fastx_reverse_complement.
rule reverse_complement:
    input:
        input_seqs = get_reverse_compl_input
    output:
        rev_cmpl = os.path.join(config["samples_dir"],
                                "{sample}",
                                "{sample}.trimmed.rev_cmpl.fa.gz")
    singularity:
        "docker://cjh4zavolab/fastx:0.0.14"
    params:
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    log:
        os.path.join(config["samples_dir"],
                     "logs",
                     "rev_cmpl",
                     "{sample}.log")
    shell:
        '''
        zcat {input.input_seqs} \
        | fastx_reverse_complement -z \
        -o {output.rev_cmpl} \
        &> {log}
        '''

#-------------------------------------------------------------------------------
# select valid 3' configuration for 3P-Seq samples
#-------------------------------------------------------------------------------
# 3P-Seq: keeps only reads ending in the expected 3' pattern and trims it.
rule select_for_valid_3p_configuration_3PSeq:
    input:
        in_fa = get_valid_3p_3PSeq_file,
        script = os.path.join( config["script_dir"],
                               "rs-filter-by-3p-adapter.pl")
    output:
        selected_3p = os.path.join(config["samples_dir"],
                                   "{sample}",
                                   "{sample}.valid_3p_configuration.fa.gz")
    singularity:
        "docker://cjh4zavolab/select_valid_5p:3"
    params:
        adapt = config['to_trim_from_3p_3PSeq'],
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    log:
        os.path.join(config["samples_dir"],
                     "logs",
                     "trim_3p_part",
                     "{sample}.log")
    shell:
        '''
        (zcat {input.in_fa} \
        | perl {input.script} \
        --adapter={params.adapt} \
        | gzip > {output.selected_3p}) 2> {log}
        '''
#-------------------------------------------------------------------------------
# remove putative leftOvers of the poly(A) tail from the read 3' ends
# 3'READS: 2 As were cleaved already initially as Ts from the 5' end
# 3P-Seq: 2 As were cleaved already initially
#-------------------------------------------------------------------------------
# Trims residual poly(A) stretches from read 3' ends with cutadapt.
rule remove_polyAtail:
    input:
        no_3p_adapter = get_input_polyAtail_removal
    output:
        no_polyAtail = os.path.join(config["samples_dir"],
                                    "{sample}",
                                    "{sample}.trimmed_tail.fa.gz")
    singularity:
        "docker://zavolab/cutadapt:1.16"
    params:
        # Long A-run used as the "adapter" so cutadapt strips the tail.
        adapt = "AAAAAAAAAAAAAA",
        error_rate = config['polyA_trimming_errorRate'],
        minLen=config['min_length'],
        min_overlap = config['polyA_minOverlap'],
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    resources:
        time = 6
    log:
        os.path.join(config["samples_dir"],
                     "logs",
                     "trim_polyAtail_adapter",
                     "{sample}.log")
    shell:
        '''
        cutadapt \
        --adapter {params.adapt} \
        --minimum-length {params.minLen} \
        --overlap {params.min_overlap} \
        -e {params.error_rate} \
        -o {output.no_polyAtail} \
        {input.no_3p_adapter} \
        &> {log}
        '''

#-------------------------------------------------------------------------------
# get number of reads after 3' adapter trimming
#-------------------------------------------------------------------------------
# Counts reads after adapter trimming and appends to the counts file.
rule no_3pAdapter_read_cnt_se:
    ##No Singularity support required##
    input:
        prev_cnt = os.path.join(config["samples_dir"],
                                "counts",
                                "{sample}.raw.len.out" ),
        in_fa = get_reads_after_trimming
    output:
        trimmed_cnt = temp(os.path.join(config["samples_dir"],
                                        "counts",
                                        "{sample}.after_trim.out" ))
    params:
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "counting",
                                   "{sample}.log")
    run:
        import gzip
        n = 0
        # One ">" header per read.
        with gzip.open(input.in_fa, "rt") as infile:
            n = sum([1 for line in infile if line.startswith(">")])
        # Carry the previous counts forward and append this step's count.
        with open(output.trimmed_cnt, "w") as out:
            with open(input.prev_cnt, "r") as cnt:
                out.write("%s" % cnt.read() )
            out.write("reads.trim.out\t%i\n" % n)
#-------------------------------------------------------------------------------
# collect high confident reads
#-------------------------------------------------------------------------------
# Filters reads to the high-confidence set (N content, A content, last nucleotide).
rule get_valid_reads:
    ##Singularity needed: perl, zcat, gzip##
    ##Singularity provided: zavolab_minimal:1, not tested##
    '''
    valid reads have:
    not more than 2 Ns
    A-content: maximum 80%
    a 3' nucleotide other than A
    '''
    input:
        valid_rds_in = get_reads_after_trimming,
        script_filter = os.path.join( config["script_dir"],
                                      "ag-filter-seqs-by-nucleotide-composition.pl"),
        script_last = os.path.join( config["script_dir"],
                                    "ag-filter-seqs-by-last-nuc.pl")
    output:
        valid_reads = os.path.join(config["samples_dir"],
                                   "{sample}",
                                   "{sample}.valid.fa.gz")
    singularity:
        "docker://cjh4zavolab/zavolab_minimal:1"
    params:
        maxN = config['maxN'],
        maxAcontent = config['maxAcontent'],
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    log:
        os.path.join(config["samples_dir"],
                     "logs",
                     "get_valid_reads",
                     "{sample}.log")
    shell:
        # NOTE(review): options here use the space form ("--max {v} --nuc N")
        # while get_valid_reads_with_maxLength uses "--max={v}"; Getopt::Long
        # accepts both, but consider unifying the style.
        '''
        (zcat {input.valid_rds_in} \
        | perl {input.script_filter} \
        --max {params.maxN} --nuc N \
        | perl {input.script_filter} \
        --max {params.maxAcontent} --nuc A \
        | perl {input.script_last} \
        | gzip > {output.valid_reads}) 2> {log}
        '''

#-------------------------------------------------------------------------------
# collect high confident reads
#-------------------------------------------------------------------------------
# Same as get_valid_reads, plus a maximum-length filter (A-seq / Mayr samples).
rule get_valid_reads_with_maxLength:
    ##Singularity needed: perl, zcat, gzip##
    ##Singularity provided: zavolab_minimal:1, not tested##
    '''
    valid reads have:
    not more than 2 Ns
    A-content: maximum 80%
    a 3' nucleotide other than A
    a length shorter than a given maximum
    '''
    input:
        valid_rds_in = get_reads_after_trimming,
        script_filter = os.path.join( config["script_dir"],
                                      "ag-filter-seqs-by-nucleotide-composition.pl"),
        script_len_filter = os.path.join( config["script_dir"],
                                          "ag-filter-seqs-by-length.pl"),
        script_last = os.path.join( config["script_dir"],
                                    "ag-filter-seqs-by-last-nuc.pl")
    output:
        valid_reads = os.path.join(config["samples_dir"],
                                   "{sample}",
                                   "{sample}.valid_maxLength.fa.gz")
    singularity:
        "docker://cjh4zavolab/zavolab_minimal:1"
    params:
        # Max length = read length minus the configured sense-strand shortening.
        maxLen = lambda wildcards: int(samples.loc[wildcards.sample, "readlen"]) - int(config['min_sense_strand_shortening']),
        maxN = config['maxN'],
        maxAcontent = config['maxAcontent'],
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    log:
        os.path.join(config["samples_dir"],
                     "logs",
                     "get_valid_reads",
                     "{sample}.log")
    shell:
        '''
        (zcat {input.valid_rds_in} \
        | perl {input.script_len_filter} --max={params.maxLen} \
        | perl {input.script_filter} \
        --max={params.maxN} --nuc=N \
        | perl {input.script_filter} \
        --max={params.maxAcontent} --nuc=A \
        | perl {input.script_last} \
        | gzip > {output.valid_reads}) 2> {log}
        '''
#-------------------------------------------------------------------------------
# count valid reads
#-------------------------------------------------------------------------------
# Counts reads remaining after validity filtering and appends to the counts file.
rule valid_read_cnt:
    ##No Singularity support required##
    '''
    count the reads after filtering for valid read configuration
    '''
    input:
        prev_cnt = os.path.join(config["samples_dir"],
                                "counts",
                                "{sample}.after_trim.out" ),
        in_fa = get_valid_reads
    output:
        valid_cnt = temp(os.path.join(config["samples_dir"],
                                      "counts",
                                      "{sample}.valid.out" ))
    params:
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "counting",
                                   "{sample}.log")
    run:
        import gzip
        n = 0
        # One ">" header per read.
        with gzip.open(input.in_fa, "rt") as infile:
            n = sum([1 for line in infile if line.startswith(">")])
        # Carry forward previous counts, then append this step's count.
        with open(output.valid_cnt, "w") as out, open(input.prev_cnt, "r") as cnt:
            out.write("%s" % cnt.read() )
            out.write("reads.valid.nr\t%i\n" % n)
################################################################################
# Mapping subpipeline will be called here.
# Currently, unique and multi mappers are returned. This behaviour can be changed
# by setting/removing flag --keep-mm in rule remove_inferiors in the subpipeline.
#
# SEGEMEHL MAPPING
# #
# # Author: adapted from mir-map by Paula Iborra de Toledo
# # Maintainer: christina.herrmann@unibas.ch
# # Date: 2019-05-01
# #
# # This workflow processes appropriate genome and annotation files,
# # performs mapping to genome and transcriptome separately,
# # and finally selects the best mappers.
# #
# # INPUT: transcriptome and genome fasta files, gtf annotation, filtered reads (.fa.gz)
# # OUTPUT: bed.gz of mapped reads, sorted by position
#
# # If used as subworkflow via 'include', don't provide config file!
# # Configs are specified in config.yaml of main Snakefile!
# # configfile: "segemehl_config.yaml"
# ##################################################################################
#-------------------------------------------------------------------------------
# only consider unique mappers
#-------------------------------------------------------------------------------
rule select_unique_mappers:
    ##Singularity needed: python2##
    ##packages needed: argpase, gzip##
    ## Singularity provided: python:2.7-slim, not tested ##
    '''
    Keep only uniquely mapping reads.

    Runs the python2 helper script on the gzipped BED of all mapped reads
    and writes a gzipped BED containing only the unique mappers.
    '''
    input:
        reads_bed = os.path.join(config["samples_dir"],
                            "{sample}",
                            config['genome'],
                            "{sample}.reads.bed.gz"),
        script = os.path.join( config["script_dir"],
                               "rs-select-unique-mappers.py")
    output:
        unique_bed = os.path.join(config["samples_dir"],
                            "{sample}",
                            config['genome'],
                            "{sample}.reads.unique.bed.gz")
    singularity:
        "docker://python:2.7-slim"
    params:
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    resources:
        mem = config['unique_mappers.total_RAM']
    log:
        os.path.join(config["samples_dir"],
                     "logs",
                     "select_unique",
                     "{sample}.log")
    shell:
        '''
        (python {input.script} \
        --bed {input.reads_bed} \
        | gzip > {output.unique_bed}) 2>> {log}
        '''
#-------------------------------------------------------------------------------
# count mapped reads
# (they are written to the appropriate counts file only in the next rule)
#-------------------------------------------------------------------------------
rule mapped_read_cnt:
    ##No Singularity needed##
    '''
    Count unique and multi mappers.

    The number of distinct read names (BED column 4) in the full reads BED
    gives the total number of mapped reads; subtracting the number of
    unique mappers yields the multi mappers. Both counts are appended to a
    copy of the previous counts file.
    '''
    input:
        prev_cnt = os.path.join(config["samples_dir"],
                                "counts",
                                "{sample}.valid.out" ),
        unique_bed = os.path.join(config["samples_dir"],
                            "{sample}",
                            config['genome'],
                            "{sample}.reads.unique.bed.gz"),
        reads_bed = os.path.join(config["samples_dir"],
                            "{sample}",
                            config['genome'],
                            "{sample}.reads.bed.gz")
    output:
        mapped_cnt = temp( os.path.join(config["samples_dir"],
                                  "counts",
                                  "{sample}_" + config['genome'] + ".mapped.out") )
    params:
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "counting",
                                   "{sample}.log")
    resources:
        mem = config['mapped_read_nt.total_RAM']
    run:
        import gzip
        # Distinct read names = all mapped reads. Iterate the file lazily
        # (no readlines()) and collect names in a set instead of a dict
        # whose values were never used; also drop the unused "mapped" dict.
        with gzip.open(input.reads_bed, "rt") as in_all:
            total_mapped = {line.split("\t")[3] for line in in_all}
        # unique mappers: one BED line per read
        with gzip.open(input.unique_bed, "rt") as in_bed:
            unique = sum(1 for line in in_bed)
        multi = len(total_mapped) - unique
        with open(output.mapped_cnt, "w") as out, open(input.prev_cnt, "r") as cnt:
            out.write("%s" % cnt.read() )
            out.write("reads.mapped.uniqueMappers.nr\t%i\n" % unique)
            out.write("reads.mapped.multiMappers.nr\t%i\n" % multi)
#-------------------------------------------------------------------------------
# get 3' ends
#-------------------------------------------------------------------------------
rule get_3p_ends:
    ##Singularity needed: perl##
    ## Singularity provided: zavolab_minimal:1, not tested ##
    '''Only 3' ends with the following characteristics are reported:
    minimum the last 4 nt map perfectly to the genome
    the read was found to be valid before

    For DRS-protocol samples the script is called with --correction=1,
    otherwise --correction=0 (exact correction semantics are defined in
    the perl script). Selected chromosomes can be excluded via
    get_excluded_chromosomes.
    '''
    input:
        unique_bed = os.path.join(config["samples_dir"],
                            "{sample}",
                            config['genome'],
                            "{sample}.reads.unique.bed.gz"),
        script = os.path.join( config["script_dir"],
                               "cjh-get-3pEnds-from-bed.pl")
    output:
        end_sites = os.path.join(config["samples_dir"],
                                 "{sample}",
                                 config['genome'],
                                 "{sample}.3pSites.bed.gz")
    singularity:
        "docker://cjh4zavolab/zavolab_minimal:1"
    params:
        correction = lambda wildcards: "1" if samples.loc[wildcards.sample, "protocol"] == "DRS" else "0",
        exclude_chr = get_excluded_chromosomes,
        min_align = config['min_3p_align'],
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    resources:
        mem = config['get_3p_ends.total_RAM']
    log:
        os.path.join(config["samples_dir"],
                     "logs",
                     "get_3p_ends",
                     "{sample}.log")
    shell:
        '''
        (perl {input.script} \
        {params.exclude_chr} \
        --correction={params.correction} \
        --strict \
        --min_align={params.min_align} \
        {input.unique_bed} \
        | gzip > {output.end_sites}) 2>> {log}
        '''
#-------------------------------------------------------------------------------
# count the number of single 3' ends
# The difference from the unique-mappers count is reads
# that don't map perfectly in the last 4 nucleotides
#-------------------------------------------------------------------------------
rule raw_3pSites_cnt:
    ##No Singularity needed##
    '''
    Count raw 3' end sites and their read support, per strand.

    Sites (BED lines) and supporting reads (BED score column) are tallied
    separately for plus and minus strand and appended to the counts file.
    '''
    input:
        prev_cnt = os.path.join(config["samples_dir"],
                                "counts",
                                "{sample}_" + config['genome'] + ".mapped.out"),
        end_sites = os.path.join(config["samples_dir"],
                                 "{sample}",
                                 config['genome'],
                                 "{sample}.3pSites.bed.gz")
    output:
        sites_cnt = temp( os.path.join(config["samples_dir"],
                                 "counts",
                                 "{sample}_" + config['genome'] + ".raw3pSites.out") )
    params:
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "counting",
                                   "{sample}.log")
    run:
        import gzip
        plus = 0
        plus_reads = 0
        minus = 0
        minus_reads = 0
        with gzip.open(input.end_sites, "rt") as in_bed:
            for line in in_bed:
                F = line.rstrip().split("\t")
                # BED column 6 (F[5]) is the strand, column 5 (F[4]) the read support
                if F[5] == "+":
                    plus += 1
                    plus_reads += float(F[4])
                else:
                    minus += 1
                    minus_reads += float(F[4])
        # NOTE(review): read supports are accumulated as floats but written
        # with %i, i.e. truncated toward zero — confirm this is intended.
        with open(output.sites_cnt, "w") as out, open(input.prev_cnt, "r") as cnt:
            out.write("%s" % cnt.read() )
            out.write("sites.highconfidence.number.plus\t%i\n" % plus)
            out.write("sites.highconfidence.number.minus\t%i\n" % minus)
            out.write("sites.highconfidence.reads.plus\t%i\n" % plus_reads)
            out.write("sites.highconfidence.reads.minus\t%i\n" % minus_reads)
#-------------------------------------------------------------------------------
# extract the sequences that surround the 3' ends
#-------------------------------------------------------------------------------
rule fetch_flanking_seqs:
    ## Singularity available, not tested##
    ## Singularity needed: bedtools 2.27, perl##
    '''
    Extract genomic sequences flanking each 3' end site.

    The perl script fetches, for every site in the BED file, the sequence
    from params.upstream_ext nt upstream to params.downstream_ext nt
    downstream from the genome FASTA.
    '''
    input:
        genome = config["genome_fasta"],
        ends = os.path.join(config["samples_dir"],
                            "{sample}",
                            config['genome'],
                            "{sample}.3pSites.bed.gz"),
        script = os.path.join( config["script_dir"],
                               "rs-fetch-flanking-fasta.pl")
    output:
        seqs = os.path.join(config["samples_dir"],
                            "{sample}",
                            config['genome'],
                            "{sample}.3pSites.bed.seqs.gz")
    singularity:
        "docker://cjh4zavolab/bedtools:2.27"
    params:
        upstream_ext = config['IP.upstream_region'],
        downstream_ext = config['IP.downstream_region'],
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    resources:
        mem = config['fetch_seqs.total_RAM']
    log:
        os.path.join(config["samples_dir"],
                     "logs",
                     "fetch_flanking_region_seqs",
                     "{sample}.log")
    shell:
        '''
        (perl {input.script} \
        --genome={input.genome} \
        --upstream={params.upstream_ext} \
        --downstream={params.downstream_ext} \
        {input.ends} \
        | gzip > {output.seqs}) 2>> {log}
        '''
#-------------------------------------------------------------------------------
# assign internal priming sites
#-------------------------------------------------------------------------------
rule assign_IP_sites:
    ## Singularity needed: perl##
    ## Singularity provided: zavolab_minimal:1, not tested ##
    '''
    Flag internal priming (IP) candidate sites.

    Based on the flanking sequences, the perl script marks sites as
    internal priming candidates using the configured A-content thresholds
    (total and consecutive As) and downstream patterns.
    '''
    input:
        seqs = os.path.join(config["samples_dir"],
                            "{sample}",
                            config['genome'],
                            "{sample}.3pSites.bed.seqs.gz"),
        script = os.path.join( config["script_dir"],
                               "ag-assign-internal-priming-sites.pl")
    output:
        ip_assigned = os.path.join(config["samples_dir"],
                                   "{sample}",
                                   config['genome'],
                                   "{sample}.3pSites.ip.bed.gz")
    singularity:
        "docker://cjh4zavolab/zavolab_minimal:1"
    params:
        upstream_ext = config['IP.upstream_region'],
        downstream_ext = config['IP.downstream_region'],
        tot_As = config['IP.total_As'],
        consec_As = config['IP.consecutive_As'],
        ds_patterns = get_ds_patterns_for_ipAssignment,
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    log:
        os.path.join(config["samples_dir"],
                     "logs",
                     "assign_IP_sites",
                     "{sample}.log")
    shell:
        '''
        (perl {input.script} \
        --upstream_len={params.upstream_ext} \
        --downstream_len={params.downstream_ext} \
        --consecutive_As={params.consec_As} \
        --total_As={params.tot_As} \
        {params.ds_patterns} \
        {input.seqs} \
        | gzip > {output.ip_assigned}) 2>> {log}
        '''
#-------------------------------------------------------------------------------
# count number of IP sites
#-------------------------------------------------------------------------------
rule IP_3pSites_cnt:
    ##No Singularity needed##
    '''
    Count internal-priming (IP) flagged sites and reads, per strand.

    Only lines whose BED name column (F[3]) equals "IP" are tallied;
    results are appended to a copy of the previous counts file.
    '''
    input:
        prev_cnt = os.path.join(config["samples_dir"],
                                "counts",
                                "{sample}_" + config['genome'] + ".raw3pSites.out"),
        end_sites = os.path.join(config["samples_dir"],
                                 "{sample}",
                                 config['genome'],
                                 "{sample}.3pSites.ip.bed.gz")
    output:
        ip_cnt = os.path.join(config["samples_dir"],
                              "counts",
                              "{sample}_" + config['genome'] + ".ip3pSites.out")
    params:
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "counting",
                                   "{sample}.log")
    run:
        import gzip
        plus = 0
        plus_reads = 0
        minus = 0
        minus_reads = 0
        with gzip.open(input.end_sites, "rt") as in_bed:
            for line in in_bed:
                F = line.rstrip().split("\t")
                # name column marks internal priming candidates
                if F[3] == "IP":
                    # column 6 is the strand, column 5 the read support
                    if F[5] == "+":
                        plus += 1
                        plus_reads += float(F[4])
                    else:
                        minus += 1
                        minus_reads += float(F[4])
        with open(output.ip_cnt, "w") as out, open(input.prev_cnt, "r") as cnt:
            out.write("%s" % cnt.read() )
            out.write("sites.highconfidence.internalpriming.number.plus\t%i\n" % plus)
            out.write("sites.highconfidence.internalpriming.number.minus\t%i\n" % minus)
            out.write("sites.highconfidence.internalpriming.reads.plus\t%i\n" % plus_reads)
            out.write("sites.highconfidence.internalpriming.reads.minus\t%i\n" % minus_reads)
#-------------------------------------------------------------------------------
# Target rule for pre-processing
#-------------------------------------------------------------------------------
rule complete_preprocessing:
    ## LOCAL ##
    '''
    Target rule: write a marker file once pre-processing has finished.

    NOTE(review): whether the '\\n' escapes in the echo string are
    interpreted depends on the shell's echo implementation — confirm the
    marker file looks as intended.
    '''
    input:
        counts = os.path.join(config["samples_dir"],
                              "counts",
                              "{sample}_" + config['genome'] + ".ip3pSites.out")
    output:
        prepro_cmplt = os.path.join(config["samples_dir"],
                                    "{sample}",
                                    config['genome'],
                                    "{sample}.prepro_cmplt.txt")
    shell:
        '''
        echo '#########################\n \
        Pre-processing completed.\n#########################\n \
        Created "{input.counts}"' \
        > {output.prepro_cmplt}
        '''
################################################################################
# combining all samples into the full atlas
################################################################################
#-------------------------------------------------------------------------------
# merge all samples to a full set of 3' end sites
#-------------------------------------------------------------------------------
rule pool_samples:
    ## Singularity needed: perl##
    ## Singularity provided: zavolab_minimal:1, not tested ##
    '''
    Merge the per-sample 3' end site files into one pooled table.

    The perl script is run with --noip (presumably excluding
    internal-priming flagged sites; semantics defined in ag-pool-sites.pl).
    Requested memory scales with the number of samples.

    NOTE(review): touch() is normally used on outputs; its use on the
    dirs_atlas_created input looks unusual — confirm it behaves as intended.
    '''
    input:
        dirs_atlas_created = touch(os.path.join(atlas_outputdir,
                                                "logs",
                                                "created_log_dir.out")),
        files = expand( os.path.join(config["samples_dir"],
                                     "{sample}",
                                     config['genome'],
                                     "{sample}.3pSites.ip.bed.gz"),
                        sample = samples.index),
        counts = expand( os.path.join(config["samples_dir"],
                                      "counts",
                                      "{sample}_" + config['genome'] + ".ip3pSites.out"), sample = samples.index),
        script = os.path.join( config["script_dir"],
                               "ag-pool-sites.pl")
    output:
        pooled_sites = os.path.join( atlas_outputdir,
                                     "3pSites.tsv.gz" )
    singularity:
        "docker://cjh4zavolab/zavolab_minimal:1"
    params:
        cluster_log = os.path.join(atlas_outputdir,
                                   "logs",
                                   "cluster_logs",
                                   "pool_samples.log")
    resources:
        mem = lambda wildcards: int( 1.8 * len(samples.index) ),
        time = config['pool_samples.time']
    log:
        os.path.join( atlas_outputdir,
                      "logs",
                      "pool_samples.log")
    shell:
        '''
        (perl {input.script} \
        --noip \
        {input.files} \
        | gzip > {output.pooled_sites}) 2>> {log}
        '''
#-------------------------------------------------------------------------------
# get overall number of unique 3' end sites (without IP sites)
#-------------------------------------------------------------------------------
rule get_unique_3pSites_cnt:
    ##No Singularity needed##
    '''
    Count the pooled 3' end sites (non-header lines of the pooled table)
    and write the count to a fresh counts file for the atlas.
    '''
    input:
        pooled_sites = os.path.join( atlas_outputdir,
                                     "3pSites.tsv.gz" )
    output:
        pooled_sites_cnt = os.path.join( atlas_outputdir,
                                         "counts",
                                         "pooled_3p_ends.nr.out" )
    params:
        cluster_log = os.path.join( atlas_outputdir,
                                    "logs",
                                    "cluster_logs",
                                    "counting_sites.log")
    run:
        import gzip
        n = 0
        # header lines start with "#"; everything else is one site per line
        with gzip.open(input.pooled_sites, "rt") as infile:
            n = sum([1 for line in infile if not line.startswith("#")])
        with open(output.pooled_sites_cnt, "w") as out:
            out.write("3pSites.pooled:\t%i\n" % n)
#-------------------------------------------------------------------------------
# assign poly(A) signals
#-------------------------------------------------------------------------------
rule assign_polyA_signals:
    ##Singularity needed: perl, bedtools2.27##
    ## Singularity provided: bedtools:2.27 ##
    '''
    Assign poly(A) signals to the 3' end sites. Check for signals in the region
    of -60 to +10 around each 3' end site. This region is hardcoded in "ag-assign-polyAsignals.pl".
    The set of signal motifs is taken from config['polyA_signals'] and
    passed to the script as repeated --motif options.
    NOTE: Order of PAS in column 82 of output file might not be preserved when repeating the run.
    '''
    input:
        pooled_sites = os.path.join( atlas_outputdir,
                                     "3pSites.tsv.gz"),
        script = os.path.join( config["script_dir"],
                               "ag-assign-polyAsignals.pl")
    output:
        sites_with_pas = os.path.join( atlas_outputdir,
                                       "3pSites.PAS.tsv.gz")
    singularity:
        "docker://cjh4zavolab/bedtools:2.27"
    params:
        signals = " ".join(["--motif=%s" % sig for sig in config['polyA_signals'] ]),
        genome = config['genome_fasta'],
        cluster_log = os.path.join(atlas_outputdir,
                                   "logs",
                                   "cluster_logs",
                                   "assign_polyA_signals.log")
    log:
        os.path.join( atlas_outputdir,
                      "logs",
                      "assign_polyA_signals.log")
    shell:
        '''
        (perl {input.script} \
        {params.signals} \
        --genome={params.genome} \
        {input.pooled_sites} \
        | gzip > {output.sites_with_pas}) 2>> {log}
        '''
#-------------------------------------------------------------------------------
# define sample-specific backgrounds
#-------------------------------------------------------------------------------
rule sample_specific_bg:
    ## Singularity needed: perl##
    ## Singularity provided: zavolab_minimal:1, not tested ##
    '''Based on the annotated poly(A) signals,
    iterate over the 3' ends from highest to lowest supported end
    determine the minimum number of reads per 3' end such that
    among all 3' end sites with at least this minimum number of reads
    x % have at least one annotated poly(A) signal
    (x is taken from config['sample.BG_polyAsignal_cutoff']).
    '''
    input:
        sites_with_pas = os.path.join( atlas_outputdir,
                                       "3pSites.PAS.tsv.gz"),
        script = os.path.join( config["script_dir"],
                               "rs-find-sample-specific-cutoff.pl")
    output:
        sites_filtered = os.path.join( atlas_outputdir,
                                       "filteredSites",
                                       "{sample}.filtered.tsv" )
        #sites_filtered = temp( os.path.join( atlas_outputdir,
        #                               "filteredSites",
        #                               "{sample}.filtered.tsv" ) )
    singularity:
        "docker://cjh4zavolab/zavolab_minimal:1"
    params:
        cutoff = config['sample.BG_polyAsignal_cutoff'],
        upstream_reg = config['sample.BG_upstream_clustering'],
        downstream_reg = config['sample.BG_downstream_clustering'],
        cluster_log = os.path.join(atlas_outputdir,
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    resources:
        mem = config['sample.BG_total_RAM']
    log:
        os.path.join( atlas_outputdir,
                      "logs",
                      "sample_specific_bg",
                      "{sample}.log")
    shell:
        '''
        perl {input.script} \
        --cutoff={params.cutoff} \
        --upstream={params.upstream_reg} \
        --downstream={params.downstream_reg} \
        --sample={wildcards.sample} \
        {input.sites_with_pas} \
        > {output.sites_filtered} 2>> {log}
        '''
#-------------------------------------------------------------------------------
# merge the sample-specific results to a new overall table of 3' end sites
#-------------------------------------------------------------------------------
rule create_noBG_3pSites_table:
    ##No Singularity needed##
    '''
    Merge the per-sample background-filtered site lists back into a single
    table, using the dedicated snakemake script. Requested memory scales
    with the number of samples.
    '''
    input:
        filtered = expand( os.path.join( atlas_outputdir,
                                         "filteredSites",
                                         "{sample}.filtered.tsv" ),
                           sample = samples.index),
        raw_table = os.path.join( atlas_outputdir,
                                  "3pSites.PAS.tsv.gz")
    output:
        table_adjusted = os.path.join(atlas_outputdir,
                              "3pSites.PAS.filtered.tsv.gz")
        #table_adjusted = temp( os.path.join(atlas_outputdir,
        #                      "3pSites.PAS.filtered.tsv.gz") )
    params:
        cluster_log = os.path.join(atlas_outputdir,
                                   "logs",
                                   "cluster_logs",
                                   "create_noBG_3pSites_table.log")
    resources:
        mem = lambda wildcards: ( int( len(samples.index) / 50 ) + 1) * 12,
        time = config["noBG_table.time"]
    script:
        os.path.join( config["snakemake_script_dir"],
                      "merge-sample-bg-files-stable.py")
#-------------------------------------------------------------------------------
# delete 3' end sites without cutoff-corrected read support from any sample
#-------------------------------------------------------------------------------
rule delete_noReadSupport_rows:
    ##No Singularity needed##
    '''
    Drop 3' end sites no sample supports after background correction.

    Header lines ("#") are passed through unchanged. A data row is kept
    if at least one sample column (columns 4 .. n-2; the last two columns
    are not sample counts) has a value greater than zero.
    '''
    input:
        table_adjusted = os.path.join(atlas_outputdir,
                              "3pSites.PAS.filtered.tsv.gz")
        #table_adjusted = temp( os.path.join(atlas_outputdir,
        #                      "3pSites.PAS.filtered.tsv.gz") )
    output:
        table_filtered = os.path.join(atlas_outputdir,
                                      "3pSites.PAS.noBG.tsv.gz")
    params:
        cluster_log = os.path.join(atlas_outputdir,
                                   "logs",
                                   "cluster_logs",
                                   "delete_noReadSupport_rows_3pSites_table.log")
    run:
        import gzip
        with gzip.open(output.table_filtered, "wt") as out_file, gzip.open(input.table_adjusted, "rt") as infile:
            for line in infile:
                # header lines are copied verbatim
                if line.startswith("#"):
                    out_file.write(line)
                    continue
                fields = line.rstrip().split("\t")
                # keep the site if any sample column still has read support;
                # any() short-circuits on the first supported sample
                # (the old "read_sum" name was misleading: it counted
                # supported samples, not reads)
                if any(float(v) > 0 for v in fields[3:-2]):
                    out_file.write(line)
#-------------------------------------------------------------------------------
# For each SAMPLE
# get background-corrected number of 3' sites
# get number of sites with PAS
#-------------------------------------------------------------------------------
rule get_noBG_3pSites_per_sample:
    '''
    Per-sample counts after background correction.

    Locates this sample's column from the "#<col>;..." header lines of the
    noBG table, then tallies sites and reads overall, and separately for
    sites with an annotated PAS (second-to-last column != "NA").
    '''
    input:
        noBG_sites = os.path.join( atlas_outputdir,
                                   "3pSites.PAS.noBG.tsv.gz" ),
        prev_cnt = os.path.join(config["samples_dir"],
                                "counts",
                                "{sample}_" + config['genome'] + ".ip3pSites.out")
    output:
        noBG_cnt = os.path.join(config["samples_dir"],
                                "counts",
                                "{sample}_" + config['genome'] + ".noBG3pSites.out")
    params:
        sample = "{sample}",
        cluster_log = os.path.join(config["samples_dir"],
                                   "logs",
                                   "cluster_logs",
                                   "counting",
                                   "{sample}.log")
    run:
        import gzip
        sites = 0
        reads = 0
        pas = 0
        pas_reads = 0
        col = 0
        with gzip.open(input.noBG_sites,"rt") as all_sites:
            for line in all_sites:
                if line.startswith("#"):
                    # header line for this sample: "#<column>;<...>"
                    # NOTE(review): substring test — a sample name that is a
                    # prefix of another (e.g. "s1" vs "s10") could match the
                    # wrong header; confirm sample names are unambiguous
                    if params.sample in line:
                        F = line.rstrip().split(";")
                        col = int(F[0].lstrip("#"))
                else:
                    # col must be known before the first data line
                    if col == 0:
                        print("Column for sample could not be identified!")
                        print(params.sample)
                        exit()
                    else:
                        line_list = line.rstrip().split("\t")
                        if line_list[col] != "0":
                            sites += 1
                            reads += int(line_list[col])
                            # second-to-last column: PAS annotation ("NA" = none)
                            if line_list[-2] != "NA":
                                pas += 1
                                pas_reads += int(line_list[col])
        with open(output.noBG_cnt, "w") as out, open(input.prev_cnt, "r") as cnt:
            out.write("%s" % cnt.read() )
            out.write("sites.noBG.all.reads\t%i\n" % reads)
            out.write("sites.noBG.all.number\t%i\n" % sites)
            out.write("sites.noBG.withPAS.reads\t%i\n" % pas_reads)
            out.write("sites.noBG.withPAS.number\t%i\n" % pas)
            if sites != 0:
                out.write("sites.noBG.withPAS.percent\t%i\n" % (pas/sites*100)) # For put in mongo we need int
            else:
                out.write("sites.noBG.withPAS.percent\t%i\n" % sites)
#-------------------------------------------------------------------------------
# For the ATLAS
# get background-corrected number of unique 3' end sites (without IP sites)
#-------------------------------------------------------------------------------
rule get_noBG_3pSites_cnt:
    ##No Singularity needed##
    '''
    Count the background-corrected 3' end sites (non-header lines of the
    noBG table) and append the count to the atlas counts file.
    '''
    input:
        prev_cnt = os.path.join( atlas_outputdir,
                                 "counts",
                                 "pooled_3p_ends.nr.out"),
        noBG_sites = os.path.join( atlas_outputdir,
                                   "3pSites.PAS.noBG.tsv.gz" )
    output:
        noBG_sites_cnt = os.path.join( atlas_outputdir,
                                       "counts",
                                       "noBG_3p_ends.nr.out" )
    params:
        cluster_log = os.path.join( atlas_outputdir,
                                    "logs",
                                    "cluster_logs",
                                    "counting_noBG_sites.log")
    run:
        import gzip
        n = 0
        # one site per non-header line
        with gzip.open(input.noBG_sites, "rt") as infile:
            n = sum([1 for line in infile if not line.startswith("#")])
        with open(output.noBG_sites_cnt, "w") as out, open(input.prev_cnt, "r") as cnt:
            out.write("%s" % cnt.read() )
            out.write("3pSites.noBG:\t%i\n" % n)
#-------------------------------------------------------------------------------
# cluster individual closely spaced 3' end sites
#-------------------------------------------------------------------------------
rule cluster_sites:
    ##Singularity needed: perl##
    ## Singularity provided: zavolab_minimal:1, not tested ##
    '''
    Cluster closely spaced individual 3' end sites into primary clusters,
    using the configured upstream/downstream clustering windows.
    '''
    input:
        table_filtered = os.path.join(atlas_outputdir,
                                      "3pSites.PAS.noBG.tsv.gz"),
        script = os.path.join( config["script_dir"],
                               "ag-generate-clusters.pl")
    output:
        primary_clusters = os.path.join( atlas_outputdir,
                                         "clusters.primary.tsv.gz" )
    singularity:
        "docker://cjh4zavolab/zavolab_minimal:1"
    params:
        upstream_ext = config['CL.upstream_clustering'],
        downstream_ext = config['CL.downstream_clustering'],
        cluster_log = os.path.join(atlas_outputdir,
                                   "logs",
                                   "cluster_logs",
                                   "cluster_sites.log")
    resources:
        mem = config['CL.total_RAM'],
        time = config['CL.time']
    log:
        os.path.join( atlas_outputdir,
                      "logs",
                      "cluster_sites.log")
    shell:
        '''
        (perl {input.script} \
        --upstream={params.upstream_ext} \
        --downstream={params.downstream_ext} \
        {input.table_filtered} \
        | gzip > {output.primary_clusters}) 2> {log}
        '''
#-------------------------------------------------------------------------------
# get number of primary clusters
#-------------------------------------------------------------------------------
rule get_prim_clusters_cnt:
    ##No Singularity needed##
    '''
    Count the primary clusters (non-header lines) and append the count to
    the atlas counts file.
    '''
    input:
        prev_cnt = os.path.join( atlas_outputdir,
                                 "counts",
                                 "noBG_3p_ends.nr.out"),
        clusters = os.path.join( atlas_outputdir,
                                 "clusters.primary.tsv.gz" )
    output:
        clusters_cnt = os.path.join( atlas_outputdir,
                                     "counts",
                                     "prim_clusters.nr.out" )
    params:
        cluster_log = os.path.join( atlas_outputdir,
                                    "logs",
                                    "cluster_logs",
                                    "counting_prim_clusters.log")
    run:
        import gzip
        n = 0
        # one cluster per non-header line
        with gzip.open(input.clusters, "rt") as infile:
            n = sum([1 for line in infile if not line.startswith("#")])
        with open(output.clusters_cnt, "w") as out, open(input.prev_cnt, "r") as cnt:
            out.write("%s" % cnt.read() )
            out.write("clusters.primary:\t%i\n" % n)
#-------------------------------------------------------------------------------
# merge closely spaced clusters
#-------------------------------------------------------------------------------
rule merge_clusters:
    ##Singularity needed: perl##
    ## Singularity provided: zavolab_minimal:1, not tested ##
    ''' ATTENTION:
    The script expects the input file to be formatted
    according to ag-generate-clusters.pl from the A-seq-processing pipeline
    -> all data is accessed with hard coded indices
    clusters are further merged if:
    - an IP candidate has another downstream cluster that shares all its
      PAS with the IP candidate
    - a cluster shares all its PAS with the next cluster upstream
    - two clusters with independent PAS have a combined length smaller
      than the maxsize
    Keep all un-merged clusters without PAS
    '''
    input:
        primary_clusters = os.path.join( atlas_outputdir,
                                         "clusters.primary.tsv.gz"),
        script = os.path.join( config["script_dir"],
                               "rs-merge-clusters.pl")
    output:
        merged_clusters = os.path.join( atlas_outputdir,
                                        "clusters.merged.tsv.gz")
    singularity:
        "docker://cjh4zavolab/zavolab_minimal:1"
    params:
        maxsize = config['CL.max_cluster_size'],
        minDistToPAS = config['CL.min_dist_to_PAS'],
        cluster_log = os.path.join(atlas_outputdir,
                                   "logs",
                                   "cluster_logs",
                                   "merge_clusters.log")
    resources:
        mem = config['CL.merge_RAM'],
        time = config['CL.time']
    log:
        os.path.join( atlas_outputdir,
                      "logs",
                      "merge_clusters.log")
    shell:
        '''
        (perl {input.script} \
        --minDistToPAS={params.minDistToPAS} \
        --maxsize={params.maxsize} \
        {input.primary_clusters} \
        | gzip > {output.merged_clusters}) 2> {log}
        '''
#-------------------------------------------------------------------------------
# get number of merged clusters
#-------------------------------------------------------------------------------
rule get_merged_clusters_cnt:
    ##No Singularity needed##
    '''
    Count the merged clusters (non-header lines) and append the count to
    the atlas counts file.
    '''
    input:
        prev_cnt = os.path.join( atlas_outputdir,
                                 "counts",
                                 "prim_clusters.nr.out"),
        clusters = os.path.join( atlas_outputdir,
                                 "clusters.merged.tsv.gz" )
    output:
        clusters_cnt = os.path.join( atlas_outputdir,
                                     "counts",
                                     "merged_clusters.nr.out" )
    params:
        cluster_log = os.path.join( atlas_outputdir,
                                    "logs",
                                    "cluster_logs",
                                    "counting_merged_clusters.log")
    run:
        import gzip
        n = 0
        # one cluster per non-header line
        with gzip.open(input.clusters, "rt") as infile:
            n = sum([1 for line in infile if not line.startswith("#")])
        with open(output.clusters_cnt, "w") as out, open(input.prev_cnt, "r") as cnt:
            out.write("%s" % cnt.read() )
            out.write("clusters.merged:\t%i\n" % n)
#-------------------------------------------------------------------------------
# annotate the location of the clusters with respect to the
# filtered annotation
# annotated are (with the first and last having the highest and lowest
# priority, respectively: TE-terminal exon, EX-exon,
# IN-intron, DS-up to n nt downstream of TE,
# AE-antisense exon, AI-antisense intron, AU-antisense upstream,
# IG-intergenic
#-------------------------------------------------------------------------------
rule annotate_gene_features:
    ##No Singularity needed##
    '''
    Annotate each merged cluster with its gene-feature category (see the
    comment block above this rule for the category priority) based on the
    GTF annotation and the configured downstream range.
    '''
    input:
        merged_clusters = os.path.join( atlas_outputdir,
                                        "clusters.merged.tsv.gz"),
        script = os.path.join( config["script_dir"],
                               "rs-annotate-gene-features-tsv.py"),
        #anno_filtered = config['gene_annotation_filtered']
        anno = config['gene_annotation']
    output:
        clusters_annotated = os.path.join( atlas_outputdir,
                                           "clusters.anno.tsv.gz")
    params:
        downstream_region = config['ds_range'],
        cluster_log = os.path.join(atlas_outputdir,
                                   "logs",
                                   "cluster_logs",
                                   "annotate_clusters.log")
    log:
        os.path.join( atlas_outputdir,
                      "logs",
                      "annotate_clusters.log")
    # The pipeline is wrapped in a subshell so that 2> {log} captures the
    # python script's stderr (its --verbose output) as well; previously the
    # redirection applied only to gzip, the last command of the pipeline.
    # This matches the pattern used by the other rules in this file.
    shell:
        '''
        (python {input.script} \
        --verbose \
        --gtf {input.anno} \
        --ds-range {params.downstream_region} \
        --input {input.merged_clusters} \
        | gzip > {output.clusters_annotated}) \
        2> {log}
        '''
#-------------------------------------------------------------------------------
# calculate cluster support measures
#-------------------------------------------------------------------------------
rule cluster_support:
    ''' ATTENTION: This script requires the original design file.
    The correct order of samples must be given.

    Computes cluster support measures (semantics defined in
    cjh-get-clusters-support.py); the plain-text result is written first
    and gzipped afterwards, keeping only the gzipped file.
    '''
    input:
        clusters_annotated = os.path.join( atlas_outputdir,
                                           "clusters.anno.tsv.gz"),
        script = os.path.join( config["script_dir"],
                               "cjh-get-clusters-support.py")
    output:
        clusters_support = os.path.join( atlas_outputdir,
                                         "clusters.support.tsv.gz"),
        clusters_temp = temp(os.path.join( atlas_outputdir,
                                           "clusters.support.tsv"))
    singularity:
        "docker://python:3.6.9-slim-stretch"
    params:
        design_file = config['atlas.samples_table'],
        cluster_log = os.path.join(atlas_outputdir,
                                   "logs",
                                   "cluster_logs",
                                   "clusters_support.log")
    log:
        os.path.join( atlas_outputdir,
                      "logs",
                      "clusters_support.log")
    shell:
        '''
        python {input.script} \
        --verbose \
        --design={params.design_file} \
        --in {input.clusters_annotated} \
        --out {output.clusters_temp} \
        2> {log} &&
        gzip -c {output.clusters_temp} \
        > {output.clusters_support}
        '''
#-------------------------------------------------------------------------------
# make a bed file with cluster tpms for each sample
#-------------------------------------------------------------------------------
rule make_bed:
    '''
    Write a per-sample BED file of clusters from the support table.

    The {path} wildcard lets the same rule produce the atlas-level and
    per-sample BED files; the plain BED is written first and gzipped
    afterwards, keeping only the gzipped file.
    '''
    input:
        clusters = os.path.join( atlas_outputdir,
                                 "clusters.support.tsv.gz"),
        script = os.path.join( config["script_dir"],
                               "cjh-bed-per-sample-from-clusters.py")
    output:
        samples_bed = os.path.join("{path}",
                                   "{sample}.clusters.bed.gz"),
        samples_temp = temp(os.path.join("{path}",
                                         "{sample}.clusters.bed"))
    singularity:
        "docker://python:3.6.9-slim-stretch"
    params:
        id = "{sample}",
        cluster_log = os.path.join(atlas_outputdir,
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    log:
        os.path.join(atlas_outputdir,
                     "logs",
                     "{sample}.log")
    shell:
        '''
        python {input.script} \
        -i {input.clusters} \
        -s {params.id} \
        -o {output.samples_temp} \
        2> {log} &&
        gzip -c {output.samples_temp} \
        > {output.samples_bed}
        '''
#-------------------------------------------------------------------------------
# sort bed
#-------------------------------------------------------------------------------
rule sort_bed:
    ## Singularity: bedtools ##
    '''
    Sort a clusters BED file by position with bedtools sortBed and attach
    the genome build and release name to the output file name.
    '''
    input:
        bed = os.path.join("{path}",
                           "{sample}.clusters.bed.gz")
    output:
        sorted_bed = os.path.join("{path}",
                                  "{sample}.clusters." + config['ucsc_db'] + "."
                                  + config['atlas.release_name'] + ".bed.gz")
    singularity:
        "docker://cjh4zavolab/bedtools:2.27"
    params:
        cluster_log = os.path.join(atlas_outputdir,
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    log:
        os.path.join(atlas_outputdir,
                     "logs",
                     "{sample}.log")
    # Capture the pipeline's stderr in {log}: the log directive was declared
    # but the shell command never wrote to it, unlike the sibling rules.
    shell:
        '''
        (sortBed \
        -i {input.bed} \
        | gzip \
        > {output.sorted_bed}) \
        2> {log}
        '''
#-------------------------------------------------------------------------------
# get number of final clusters
# count PAS and annotations
#-------------------------------------------------------------------------------
rule get_final_clusters_stats:
    ##No Singularity needed##
    '''
    Final cluster statistics for the atlas BED: total clusters, clusters
    with a PAS, PAS percentage, and per-annotation-category counts.
    '''
    input:
        prev_cnt = os.path.join( atlas_outputdir,
                                 "counts",
                                 "merged_clusters.nr.out"),
        clusters_bed = os.path.join( atlas_outputdir,
                                     "atlas.clusters." + config['ucsc_db'] + "."
                                     + config['atlas.release_name'] + ".bed.gz")
    output:
        clusters_cnt = os.path.join( atlas_outputdir,
                                     "counts",
                                     "clusters.stats.out" )
    params:
        cluster_log = os.path.join( atlas_outputdir,
                                    "logs",
                                    "cluster_logs",
                                    "clusters_stats.log")
    run:
        import gzip
        n = 0
        p = 0
        # annotation categories; priorities documented above annotate_gene_features
        annos = {'TE': 0,
                 'EX': 0,
                 'IN': 0,
                 'DS': 0,
                 'AE': 0,
                 'AI': 0,
                 'AU': 0,
                 'IG': 0}
        with gzip.open(input.clusters_bed, "rt") as infile:
            for line in infile:
                # Count clusters
                n += 1
                # Count clusters with PAS
                # NOTE(review): substring test — "NA" anywhere in the line
                # (e.g. inside an ID) would wrongly mark a cluster as PAS-less;
                # confirm "NA" only ever appears in the PAS column
                if not "NA" in line:
                    p += 1
                # For each cluster get annotation (BED column 10)
                # NOTE(review): an unexpected annotation value raises KeyError here
                a = line.split('\t')[9]
                annos[a] += 1
        with open(output.clusters_cnt, "w") as out, open(input.prev_cnt, "r") as cnt:
            out.write("{}".format(cnt.read() ))
            out.write("clusters.all:\t{:d}\n".format(n))
            out.write("clusters.PAS.nr:\t{:d}\n".format(p))
            out.write("clusters.PAS.percent:\t{:d}\n".format(int(p/n*100))) # For put in mongo we need int
            for k in annos.keys():
                out.write("clusters.annos.%s:\t%s\n" % (k, annos[k]))
#-------------------------------------------------------------------------------
# Target rule clustering
#-------------------------------------------------------------------------------
rule complete_clustering:
    # Target rule: requires every clustering output and writes a small
    # human-readable marker file so later stages can depend on one file.
    input:
        noBG_cnt = expand( os.path.join(config["samples_dir"],
                                "counts",
                                "{sample}_" + config['genome'] + ".noBG3pSites.out"),
                        sample = samples.index),
        cluster_stats = os.path.join( atlas_outputdir,
                                "counts",
                                "clusters.stats.out" ),
        clusters_bed = os.path.join( atlas_outputdir,
                              "atlas.clusters." + config['ucsc_db'] + "."
                              + config['atlas.release_name'] + ".bed.gz"),
        samples_bed = expand( os.path.join(config["samples_dir"],
                                   "{sample}",
                                   config['genome'],
                                   "{sample}.clusters." + config['ucsc_db'] + "."
                                   + config['atlas.release_name'] + ".bed.gz"),
                          sample = samples.index)
    output:
        clst_cmplt = os.path.join(atlas_outputdir,
                                  "clst_cmplt.txt")
    shell:
        '''
        echo '#########################\n \
        Clustering completed.\n \
        #########################\n \
        Created "{input.noBG_cnt}"\n \
        "{input.cluster_stats}"\n \
        "{input.clusters_bed}"\n \
        "{input.samples_bed}"\n' \
        > {output.clst_cmplt}
        '''
################################################################################
# Make files for visualization of custom tracks in UCSC genome browser
################################################################################
#-------------------------------------------------------------------------------
# get the UCSC chromosome sizes file
#-------------------------------------------------------------------------------
rule fetch_chr_sizes_ucsc:
    ##LOCAL##
    ##Singularity needed: wget##
    # Download the chromosome-sizes table for the configured UCSC
    # assembly; required later by bedGraphToBigWig.
    params:
        url = config['ucsc_chromSizes_URL']
    output:
        chr_sizes_ucsc = config["ucsc_chromSizes_file"]
    singularity:
        "docker://cjh4zavolab/zavolab_minimal:1"
    shell:
        '''
        wget -O {output.chr_sizes_ucsc} \
        {params.url} \
        &> /dev/null
        '''
#-------------------------------------------------------------------------------
# prepare bedGraph for bigWig
#-------------------------------------------------------------------------------
rule clusters_bedGraph:
    # Split the annotated clusters table into per-sample plus/minus
    # strand bedGraph files for UCSC track generation.
    # NOTE(review): runs inside a python:2.7 image, so the helper script
    # is expected to be Python-2 compatible.
    input:
        clusters = os.path.join( atlas_outputdir,
                              "clusters.anno.tsv.gz"),
        script = os.path.join( config["script_dir"],
                            "cjh-bedGraph-from-tsv.py")
    output:
        plus = os.path.join(atlas_outputdir,
                            "tracks",
                            "{sample}_plus.bedGraph"),
        minus = os.path.join(atlas_outputdir,
                             "tracks",
                             "{sample}_minus.bedGraph")
    singularity:
        "docker://python:2.7-slim"
    params:
        id = "{sample}",
        # Space-separated "internal:UCSC" chromosome-name mapping passed
        # to the script so output uses UCSC naming.
        chr_names = lambda wildcards: " ".join([(str(c) +
                                                 ":" +
                                                 config['chromosome_names'][c]) for c in config['chromosome_names']]),
        cluster_log = os.path.join( atlas_outputdir,
                                 "logs",
                                 "cluster_logs",
                                 "{sample}.log")
    log:
        os.path.join(atlas_outputdir,
                     "logs",
                     "{sample}.log")
    shell:
        '''
        python {input.script} \
        -i {input.clusters} \
        -s {params.id} \
        --chr-names {params.chr_names} \
        -p {output.plus} \
        -m {output.minus} \
        2> {log}
        '''
#-------------------------------------------------------------------------------
# sort bedGraphs
#-------------------------------------------------------------------------------
rule sort_bed_4_big:
    ## Singularity: bedtools ##
    # Coordinate-sort a per-sample strand bedGraph; bedGraphToBigWig
    # (rule prepare_bigWig) requires sorted input.
    input:
        ucsc_bed = os.path.join(atlas_outputdir,
                                "tracks",
                                "{sample}_{strand}.bedGraph")
    output:
        sorted_bed = os.path.join(atlas_outputdir,
                                  "tracks",
                                  "{sample}_{strand}.sorted.bedGraph")
    singularity:
        "docker://cjh4zavolab/bedtools:2.27"
    params:
        cluster_log = os.path.join(atlas_outputdir,
                                   "logs",
                                   "cluster_logs",
                                   "{sample}.log")
    log:
        os.path.join(atlas_outputdir,
                     "logs",
                     "{sample}.log")
    shell:
        '''
        sortBed \
        -i {input.ucsc_bed} \
        > {output.sorted_bed}
        '''
#-------------------------------------------------------------------------------
# prepare bigWig from bedGraph
#-------------------------------------------------------------------------------
rule prepare_bigWig:
    ##Singularity needed: bedToBigWig##
    # Convert a sorted per-sample strand bedGraph into bigWig using the
    # UCSC chromosome sizes fetched by fetch_chr_sizes_ucsc.
    input:
        ucsc_bed = os.path.join(atlas_outputdir,
                                "tracks",
                                "{sample}_{strand}.sorted.bedGraph"),
        chr_sizes = config['ucsc_chromSizes_file']
    output:
        bigWig = os.path.join(atlas_outputdir,
                              "tracks",
                              "{sample}_{strand}." + config['ucsc_db'] + "." + config['atlas.release_name'] + ".bw")
    singularity:
        "docker://kerstenbreuer/kent-bedgraphtobigwig"
    params:
        cluster_log = os.path.join(atlas_outputdir,
                                   "logs",
                                   "cluster_logs",
                                   "bed2bigWig.log")
    log:
        os.path.join(atlas_outputdir,
                     "logs",
                     "bed2bigWig.log")
    shell:
        '''
        bedGraphToBigWig \
        {input.ucsc_bed} \
        {input.chr_sizes} \
        {output.bigWig}
        '''
#-------------------------------------------------------------------------------
# Make accessory files containing track line info
#-------------------------------------------------------------------------------
rule make_track_info:
    ## LOCAL ##
    ##No Singularity needed##
    # Write the two UCSC custom-track header lines (plus/minus strand)
    # pointing at the public download URLs of the sample's bigWig files.
    input:
        bigplus = os.path.join(atlas_outputdir,
                               "tracks",
                               "{sample}_plus." + config['ucsc_db'] + "." + config['atlas.release_name'] + ".bw"),
        bigminus = os.path.join(atlas_outputdir,
                                "tracks",
                                "{sample}_minus." + config['ucsc_db'] + "." + config['atlas.release_name'] + ".bw")
    params:
        name = "{sample}",
        atlas_public_name = config['ucsc_db'] + "." + config['atlas.release_name'],
        url = os.path.join(config["polyasite_download_url"],
                           "tracks",
                           config["genome"]),
        plus = "{sample}_plus." + config['ucsc_db'] + "." + config['atlas.release_name'] + ".bw",
        minus = "{sample}_minus." + config['ucsc_db'] + "." + config['atlas.release_name'] + ".bw",
    output:
        track_info = os.path.join(atlas_outputdir,
                                  "tracks",
                                  ("{sample}." + config['ucsc_db'] + "." + config['atlas.release_name'] + ".track_info.txt"))
    run:
        # The backslash-continued string literals keep the leading
        # whitespace of the continuation lines inside the track line.
        with open(output.track_info, "wt") as out:
            out.write('track type=bigWig name="%s: poly(A) clusters plus strand %s" \
                visibility="full" color="4,177,216" maxHeightPixels="128:60:8"\
                bigDataUrl="%s/%s"\n'\
                % (params.name, params.atlas_public_name, params.url, params.plus))
            out.write('track type=bigWig name="%s: poly(A) clusters minus strand %s" \
                visibility="full" color="241,78,50" maxHeightPixels="128:60:8"\
                bigDataUrl="%s/%s"\n' \
                % (params.name, params.atlas_public_name, params.url, params.minus))
#-------------------------------------------------------------------------------
# Target rule tracks
#-------------------------------------------------------------------------------
rule complete_tracks:
    # Target rule: requires all track-info files (atlas + per-sample)
    # and writes a marker/summary file for downstream dependencies.
    input:
        atlas_track = os.path.join(atlas_outputdir,
                                   "tracks",
                                   ("atlas." + config['ucsc_db'] + "." + config['atlas.release_name'] + ".track_info.txt")),
        sample_tracks = expand( os.path.join(atlas_outputdir,
                                    "tracks",
                                    ("{sample}." + config['ucsc_db'] + "." + config['atlas.release_name'] + ".track_info.txt")),
                            sample = samples.index),
    output:
        tracks_cmplt = os.path.join(atlas_outputdir,
                                    "tracks_cmplt.txt")
    shell:
        '''
        echo '#########################\n \
        Track files completed.\n \
        #########################\n \
        Created "{input.atlas_track}"\n \
        "{input.sample_tracks}"\n' \
        > {output.tracks_cmplt}
        '''
#-------------------------------------------------------------------------------
# How did it go?
#-------------------------------------------------------------------------------
# Workflow exit handlers: confirm success, or point the user at the
# snakemake log file on failure.
onsuccess:
    print("Workflow finished, no error")
onerror:
    # `log` is already the log-file path string. The original wrote
    # `% {log}`, which built a one-element set and printed its repr
    # (e.g. "{'/path/log'}") instead of the path itself.
    print("An error occurred, check log at %s." % log)
| [
"christina.herrmann@unibas.ch"
] | christina.herrmann@unibas.ch | |
fcb4c6ebca02a66d08e3dd8fc564224541dfb1df | d3bb87661f3322e90a49d34961d42a6deb615aff | /get-gmus-attributes/best_attributes.py | 91f63044c642737f2f1522a1e884cf20601735cb | [] | no_license | UW-Macrostrat/map-processing | 07b0a248305488ec3e76f0424f1357ccf993ce91 | 0d33dabf4fecd4c9c3b8573e4db914470d38d191 | refs/heads/master | 2020-05-17T00:04:41.262573 | 2015-07-10T15:11:16 | 2015-07-10T15:11:16 | 30,981,903 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,432 | py | import os
import psycopg2
from psycopg2.extensions import AsIs
import sys
import subprocess
sys.path = [os.path.join(os.path.dirname(__file__), os.pardir)] + sys.path
import credentials
# Connect to Postgres
# Fills the gmus.units best_* columns by reconciling the original (*)
# and reprocessed (new_*) attribute columns, step by step from the
# safest rule to manual per-id overrides. The inline "nulls after stepN"
# comments record how many rows remained unresolved at the time.
pg_conn = psycopg2.connect(dbname=credentials.pg_db, user=credentials.pg_user, host=credentials.pg_host, port=credentials.pg_port)
pg_cur = pg_conn.cursor()
# 1. Low hanging fruit. Fill best_* if * or new_* are the same
pg_cur.execute("UPDATE gmus.units SET best_unit_name = unit_name WHERE unit_name = new_unit_name");
pg_cur.execute("UPDATE gmus.units SET best_unitdesc = unitdesc WHERE unitdesc = new_unitdesc");
pg_cur.execute("UPDATE gmus.units SET best_unit_com = unit_com WHERE unit_com = new_unit_com");
pg_cur.execute("UPDATE gmus.units SET best_strat_unit = strat_unit WHERE strat_unit = new_strat_unit");
pg_conn.commit()
# nulls after step1 (6283 rows in gmus.units):
# best_unit_name - 36
# best_unitdesc - 574
# best_unit_com - 2685 (500 not empty)
# best_strat_unit - 5263 (13 not empty)
# 2. If len(thing) > 1 and len(other thing) < 1, set best_thing = thing; Do inverse as well
pg_cur.execute("UPDATE gmus.units SET best_unit_name = unit_name WHERE char_length(unit_name) > 1 and new_unit_name is null");
pg_cur.execute("UPDATE gmus.units SET best_unitdesc = unitdesc WHERE char_length(unitdesc) > 1 and new_unitdesc is null");
pg_cur.execute("UPDATE gmus.units SET best_unit_com = unit_com WHERE char_length(unit_com) > 1 and new_unit_com is null");
pg_cur.execute("UPDATE gmus.units SET best_strat_unit = strat_unit WHERE char_length(strat_unit) > 1 and new_strat_unit is null");
# nulls after step2 (6283 rows in gmus.units):
# best_unit_name - 18
# best_unitdesc - 557 (299 not empty)
# best_unit_com - 2668 (483 not empty)
# best_strat_unit - 5261 (11 not empty)
# 3. char_length(unitdesc) < 255 and char_length(new_unitdesc) > 255, use new_unitdesc
# (prefer the longer description when the original looks truncated at 255)
pg_cur.execute("UPDATE gmus.units SET best_unitdesc = new_unitdesc WHERE char_length(unitdesc) < 255 AND char_length(new_unitdesc) > 255")
pg_cur.execute("UPDATE gmus.units SET best_unit_com = new_unit_com WHERE char_length(unit_com) < 255 AND char_length(new_unit_com) > 255")
# nulls after step2 (6283 rows in gmus.units):
# best_unit_name - 18
# best_unitdesc - 288 (30 not empty)
# best_unit_com - 2304 (119 not empty)
# best_strat_unit - 5261 (11 not empty)
# 4. Remaining generic fallbacks plus hand-curated per-id decisions.
pg_cur.execute("update gmus.units set best_strat_unit = new_strat_unit where char_length(new_strat_unit) > char_length(strat_unit) and best_strat_unit is null");
pg_cur.execute("update gmus.units set best_unit_com = new_unit_com where unit_com is not null AND best_unit_com is null");
pg_cur.execute("update gmus.units set best_unit_name = new_unit_name where id in (3276, 6106, 4538, 1877, 4533, 6254, 6253)");
# A + B
pg_cur.execute("update gmus.units set best_unit_name = concat(unit_name, ' | ', new_unit_name) where id in (6238, 4968, 6135, 5120)");
# A
pg_cur.execute("update gmus.units set best_unit_name = unit_name where id in (2733, 2732, 2736, 2735, 2737, 2734, 5115)");
pg_cur.execute("update gmus.units set best_unitdesc = concat(unitdesc, ' | ', new_unitdesc) WHERE id in (6238, 4968, 6135, 5120) and best_unitdesc is null");
pg_cur.execute("update gmus.units set best_unitdesc = new_unitdesc WHERE best_unitdesc is null");
# Leftovers
pg_cur.execute("update gmus.units set best_unit_name = new_unit_name where unit_link = 'SDJms;0'");
# Steps 2 onward are committed together here (single transaction).
pg_conn.commit()
| [
"jczaplew@gmail.com"
] | jczaplew@gmail.com |
0776d0727b2dcb2702c95cf752b9ffe9895b22c9 | 6b5f8d3678c78e4d6fbc51431d656b3c8c82a76a | /apps/torneo/migrations/0001_initial.py | 1b44f0dc482b6112bc898c07d6bf2e14efc7018a | [] | no_license | EderChu/LigaDeportiva | 13116290129d90bac90ed7d29375acd7453979dc | 747b88d9761591104cba051731c8a1c4542a7816 | refs/heads/master | 2021-01-18T04:03:18.388480 | 2015-12-11T19:33:11 | 2015-12-11T19:33:11 | 47,946,846 | 0 | 0 | null | 2015-12-14T02:12:53 | 2015-12-14T02:12:53 | null | UTF-8 | Python | false | false | 5,206 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial schema for the ``torneo`` app (auto-generated by Django).

    Creates Arbitro, CampoDeportivo, Fixture, Persona, PrecioPago and
    Torneo, then wires the Fixture->Torneo and Arbitro->Persona links.

    NOTE(review): auto-generated migrations should not be edited by
    hand; field-name typos such as ``campodeportiv`` and
    ``fecha_nacimineto`` can only be fixed in the models plus a
    follow-up migration.
    """

    dependencies = [
        ('equipo', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Arbitro',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('experiencia', models.IntegerField()),
            ],
            options={
                'verbose_name': 'Arbitro',
                'verbose_name_plural': 'Arbitros',
            },
        ),
        migrations.CreateModel(
            name='CampoDeportivo',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('nombre', models.CharField(max_length=50, verbose_name=b'Nombre')),
                ('direccion', models.CharField(max_length=100, verbose_name=b'Direccion')),
                ('propietario', models.CharField(max_length=50, verbose_name=b'Propietario')),
                ('capacidad', models.IntegerField()),
                ('descripcion', models.CharField(max_length=50, verbose_name=b'Descripcion')),
            ],
            options={
                'verbose_name': 'CampoDeportivo',
                'verbose_name_plural': 'CampoDeportivos',
            },
        ),
        migrations.CreateModel(
            name='Fixture',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('fecha', models.DateField(null=True, blank=True)),
                ('hora', models.TimeField()),
                ('arbitro', models.ForeignKey(to='torneo.Arbitro')),
                ('campodeportiv', models.ForeignKey(to='torneo.CampoDeportivo')),
                ('elocal', models.ForeignKey(to='equipo.Equipo')),
                ('evisitante', models.ForeignKey(related_name='Equipo_evisitante', to='equipo.Equipo')),
            ],
            options={
                'verbose_name': 'Fixture',
                'verbose_name_plural': 'Fixtures',
            },
        ),
        migrations.CreateModel(
            name='Persona',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('dni', models.CharField(max_length=8, verbose_name=b'Dni')),
                ('nombres', models.CharField(max_length=50, verbose_name=b'Nombre')),
                ('apellidos', models.CharField(max_length=50, verbose_name=b'Apellidos')),
                ('email', models.EmailField(max_length=254, verbose_name=b'Correo Electronico')),
                ('direccion', models.CharField(max_length=50, verbose_name=b'Direccion')),
                ('sexo', models.CharField(max_length=1, verbose_name=b'sexo', choices=[(b'M', b'MASCULINO'), (b'F', b'FEMENINO')])),
                ('telefono', models.CharField(max_length=10, verbose_name=b'Telefono')),
                ('fecha_nacimineto', models.DateField(null=True, verbose_name=b'Fecha de nacimiento', blank=True)),
                ('foto', models.ImageField(upload_to=b'')),
            ],
            options={
                'verbose_name': 'Persona',
                'verbose_name_plural': 'Personas',
            },
        ),
        migrations.CreateModel(
            name='PrecioPago',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('concepto', models.CharField(max_length=15, verbose_name=b'Concepto', choices=[(b'inscripcion', b'Inscripcion'), (b'amonestacion', b'Amonestacion'), (b'reclamo', b'Reclamo'), (b'arbitraje', b'Arbitraje')])),
                ('monto', models.DecimalField(verbose_name=b'Precio', max_digits=5, decimal_places=2)),
            ],
            options={
                'verbose_name': 'Precios',
                'verbose_name_plural': 'Precioss',
            },
        ),
        migrations.CreateModel(
            name='Torneo',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('nombre', models.CharField(max_length=70, verbose_name=b'Nombre')),
                ('denominacion', models.CharField(max_length=100, verbose_name=b'Denominacion')),
                ('fecha_inicio', models.DateField(verbose_name=b'Fecha Inicio')),
                ('fecha_fin', models.DateField(verbose_name=b'Fecha Fin')),
                ('bases', models.FileField(upload_to=b'', verbose_name=b'Bases')),
            ],
            options={
                'verbose_name': 'Torneo',
                'verbose_name_plural': 'Torneos',
            },
        ),
        migrations.AddField(
            model_name='fixture',
            name='torneo',
            field=models.ForeignKey(to='torneo.Torneo'),
        ),
        migrations.AddField(
            model_name='arbitro',
            name='persona',
            field=models.OneToOneField(to='torneo.Persona'),
        ),
    ]
| [
"scam1121@gmail.com"
] | scam1121@gmail.com |
e891c9a2c341878657997783d07af08d24b6558d | 4321f053da142c2faa27db42a0e2fff9544f76c2 | /urlshorterner/shortener/migrations/0001_initial.py | 8f99a99eb66b5e10e1546527d4a485a93e8e2c47 | [] | no_license | ELOBO5/url_shortener | b2dd48fdbf0a126fe72b815c813c9335a9e4b07a | 3e5ef0ac9196ce9bb9185e1ec32099fc9839bb66 | refs/heads/master | 2023-08-15T00:41:21.504266 | 2021-10-07T08:40:19 | 2021-10-07T08:40:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 558 | py | # Generated by Django 3.2.7 on 2021-10-04 10:13
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the URL shortener: one Shortener model with a
    long URL and its unique short code (auto-generated by Django)."""

    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Shortener',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('long_url', models.URLField()),
                ('short_url', models.CharField(max_length=15, unique=True)),
            ],
        ),
    ]
| [
"harris.mawardi@yahoo.co.uk"
] | harris.mawardi@yahoo.co.uk |
1cde0b81137596a71bf451ead8674e6f871f0355 | f5572734ef861c9e4aba0afaf8e3ada765eda04c | /pvc/ecat_smooth.py | 55ebdcfff6a545413b6b1ce1d85716f2c0b60385 | [
"MIT"
] | permissive | fengfeng11246/PetProcessing | fd9aa5c65a123a8551ba389d7b2f1fdb02ce6cb1 | be6d6467a54bd883f39f672864c0184576b3a93c | refs/heads/master | 2020-08-01T10:53:33.928853 | 2013-01-16T01:44:46 | 2013-01-16T01:44:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,423 | py | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
#!/usr/bin/env python
import os
import nibabel
import numpy as np
import scipy.weave as weave
from scipy.weave import converters
import numpy
class PetPsf():
    """Apply the spatially varying point-spread function (PSF) of the
    LBL ECAT PET scanner to an image: position-dependent 2D smoothing in
    each x-y slice (convolve_xy) followed by 1D smoothing along z
    (convolve_z), then save_result.

    NOTE(review): Python 2 only -- uses `print` statements, integer
    division for array indices, and scipy.weave (removed from
    SciPy >= 0.19).
    """
    # resolution determined for out Pet scanner
    # FWHM values in mm; the three entries per direction are tabulated at
    # increasing radius (the interpolation below switches at 100 mm) --
    # TODO confirm the exact radii they were measured at.
    _ecat_resolution = {'radial' : [3.64, 4.10, 7.40 ],
                        'tangential' : [3.64, 4.05, 4.46 ],
                        'axial' : [3.97, 5.36, 6.75 ] }
    _halfwidth = 7; # predefined by previous testing on scanner
    _base_divisor = 2.35482 #standard constant (FWHM -> sigma: 2*sqrt(2*ln 2))
    def __init__(self, infile):
        """this class loads in am image, and applies the point spread
        function specific to the PET scanner up at LBL to the image
        Parameters
        ----------
        infile : file of image to apply pet scanner specific psf
        """
        self.img = nibabel.load(infile)
        dat = self.img.get_data().squeeze()
        self.dat = np.nan_to_num(dat) #deal with NAN in matrix
        self.matrix_dim = self.dat.shape
        ndim = len(self.dat.shape)
        self.voxel_res = self.img.get_header().get_zooms()[:ndim]
        # convert the tabulated FWHM resolutions to gaussian sigmas (mm)
        rad = [x /self._base_divisor for x in self._ecat_resolution['radial'] ]
        tan = [x / self._base_divisor for x in self._ecat_resolution['tangential'] ]
        axial = [x / self._base_divisor for x in self._ecat_resolution['axial'] ]
        self.radial = np.array(rad)
        self.tan = np.array(tan)
        self.axial = np.array(axial)
        # find minimum value across 3 arrays
        # treated as smoothing already present uniformly in the data; it
        # is subtracted in quadrature in _calc_sigma
        self.minres = min(rad + tan + axial)
        self.uniform_sm = self.minres
        self.uniform_sm_sq = self.minres**2
        # size of the data array
        self.average_pixel_size = (self.voxel_res[1] + self.voxel_res[0]) / 2.0
        # per-mm sigma gradients between tabulated radii.
        # NOTE(review): for 3-element arrays `[:-2]` keeps only the first
        # element, so broadcasting makes the second delta (r2-r0)/100;
        # `[:-1]` (giving (r2-r1)/100) looks like what was intended --
        # confirm before relying on the >100 mm branch.
        self.deltarad = (self.radial[1:] - self.radial[:-2]) / 100.0
        self.deltatan = (self.tan[1:] - self.tan[:-2]) / 100.0
        self.deltaaxial = (self.axial[1:] - self.axial[:-2]) / 100.0
    def _calc_sigma(self, insigma, norm):
        """calculates true sigma based on constraints
        and converts to pixel widths using norm
        Parameters
        ----------
        insigma : the sigma calculated for a specific region
        norm : pixel dim, or avg pixel dim for direction in which
               sigma is being calculated
        """
        # subtract the uniform smoothing in quadrature; clamp at zero
        # (a zero sigma later yields a delta kernel, i.e. no smoothing)
        sigma_sq = insigma**2
        sigma_sq = sigma_sq - self.uniform_sm_sq
        if sigma_sq > 0.0:
            out = np.sqrt(sigma_sq)
        else:
            out = 0.0
        return out / norm
    def compute_xy_kernel(self,x,y):
        """Find the smoothing kernel specific to a given x,y coordinate
        Parameters
        ----------
        x : x voxel
        y : y voxel
        """
        center_pixel = np.array(self.matrix_dim[:2]) / 2.0 # center voxel in a slice
        xydim = np.prod(self.matrix_dim[:2]) #number of voxels in a slice (unused)
        # offset from slice center in mm; radius/angle give the polar
        # position used to interpolate the scanner resolution tables
        dx = (x - center_pixel[0]) * self.voxel_res[0]
        dy = (center_pixel[1] - y) * self.voxel_res[1]
        radius = np.sqrt(dx**2 + dy**2 )# in mm
        print radius
        # ^ NOTE(review): leftover debug output (Python 2 print statement)
        if radius > 0.0:
            angle = np.arctan2(dy,dx)
        else:
            angle = 0.0
        # piecewise-linear interpolation of sigma vs radius, break at 100 mm
        if radius < 100.0:
            sigma_radial = self.radial[0] + self.deltarad[0] * radius
            sigma_tan = self.tan[0] + self.deltatan[0] * radius
        else:
            sigma_radial = self.radial[1] + self.deltarad[1] * (radius - 100.0)
            sigma_tan = self.tan[1] + self.deltatan[1] * (radius - 100.0)
        # convert mm sigmas to pixel units before building the kernel
        sigma_radial = self._calc_sigma(sigma_radial,self.average_pixel_size)
        sigma_tan = self._calc_sigma(sigma_tan, self.average_pixel_size)
        kern = self._calc_gauss2d_kernel(sigma_radial, sigma_tan, angle)
        return kern
    def _calc_gauss2d_kernel(self, sigma_rad, sigma_tan, angle):
        """
        returns N X N gaussian kernel N = halfwidth * 2 + 1
        (flattened to length N*N, normalized to sum to 1)
        sigmas are in matrix dim widths, NOT mm
        angle : float
            rotation value (radians) aligning the kernel with the
            radial/tangential directions at this voxel
        """
        length = self._halfwidth * 2 +1
        length_sq = length**2
        kern = np.zeros(length_sq)
        halfwidth = self._halfwidth
        # if either sigma is zero, return a delta function
        # or dont alter the data
        if (sigma_rad <= 0.0) or (sigma_tan <= 0.0):
            # NOTE(review): Python-2 integer division for the index here
            kern[length * length / 2] = 1.0
            print 'returning delta'
            return kern
        cos_theta = np.cos(angle)
        sin_theta = np.sin(angle)
        sigma_rad_sq = sigma_rad**2
        sigma_tan_sq = sigma_tan**2
        # enumerate all (row, col) offsets of the N x N grid, rotate them
        # into (u, v) radial/tangential coordinates, evaluate the gaussian
        tmpjnk = np.zeros((length, length))
        indx, indy = np.nonzero(tmpjnk == 0)
        u = (indx - halfwidth)* cos_theta - (indy - halfwidth) * sin_theta
        v = (indx - halfwidth) * sin_theta + (indy - halfwidth) * cos_theta
        val = np.exp(-.5 * u * u / sigma_rad_sq)
        val = val * np.exp(-.5 * v * v / sigma_tan_sq)
        val = val / np.sum(val)
        return val
    def convolve_xy(self):
        """ convolve the xy specific 2D kernel with points in
        the data slices in the x-y plane
        returns result as an array in case you want
        to save it or check it out
        (also stored as self.xy_psf for convolve_z; a halfwidth-wide
        border of the output is left at zero)
        """
        xmax, ymax, zmax = self.matrix_dim
        psfdat = np.zeros(self.matrix_dim)
        # skip the border so the kernel window never leaves the volume
        xrange = np.arange(self._halfwidth, xmax - self._halfwidth)
        yrange = np.arange(self._halfwidth, ymax - self._halfwidth)
        for x in xrange:
            for y in yrange:
                # kernel depends only on (x, y), reuse it for all slices
                kern = self.compute_xy_kernel(x,y)
                half_kern = kern.shape[0]/2
                for z in np.arange(zmax):
                    tmp = self.dat[x-self._halfwidth:x+self._halfwidth+1,
                                   y-self._halfwidth:y+self._halfwidth+1,
                                   z].ravel()
                    psfdat[x,y,z] = np.sum(tmp * kern)
        self.xy_psf = psfdat
        return psfdat
    def wconvolve_xy(self):
        """convolve the xy specific 2D kernel with points in
        the data slices in the x-y plane
        returns result as an array in case you want
        to save it or check it out
        USES WEAVE for SPEED

        NOTE(review): this looks unfinished/broken -- the inline C calls
        fprintf without a stream argument, never assigns into psfdat
        (the assignment is commented out below), and scipy.weave no
        longer exists in modern SciPy. Use convolve_xy instead.
        """
        xmax, ymax, zmax = self.matrix_dim
        mysum = 0.0
        psfdat = np.zeros(self.matrix_dim)
        xrange = np.arange(self._halfwidth, xmax - self._halfwidth)
        yrange = np.arange(self._halfwidth, ymax - self._halfwidth)
        dat = self.dat
        code = \
            """
            int xind, yind;
            fprintf("%d,%d",(x,y));
            for (int z = 0; z< zmax; z++){
                for (int i = 0; i<15; i++){
                    xind = x - i;
                    xind -= 7;
                    yind = y - i;
                    yind -= 7;
                    fprintf("%d,%d",(x,y));
                    fprintf("dat = %f",dat(xind, yind,z));
                    fprintf("\\n");
                    fprintf("kernel = %f", kernel(i));
                    fprintf("\\n");
                }
            }
            """
        for x in xrange:
            for y in yrange:
                kernel = self.compute_xy_kernel(x,y)
                weave.inline(code, ['x','y','zmax','mysum','psfdat','kernel','dat'],
                             type_converters=converters.blitz,
                             compiler='gcc')
        self.xy_psf = psfdat
        return psfdat
    #tmp = dat(x-(i-7), y-(i-7),z)
    #psfdat(x,y,z) = mysum;
    def compute_z_kernel(self, x, y):
        """
        compute z location specific kernel
        (1D axial kernel; depends on the voxel's radius in the slice)
        Parameters
        ----------
        x : x coordinate of voxel
        y : y coordinate of voxel
        """
        halfwidth = self._halfwidth
        center_pixel = np.array(self.matrix_dim[:2]) / 2.0 # center voxel in a slice
        dx = (x - center_pixel[0]) * self.voxel_res[0]
        dy = (center_pixel[1] - y) * self.voxel_res[1]
        radius = np.sqrt( dx**2 + dy**2)
        # same piecewise interpolation as compute_xy_kernel, axial table
        if radius < 100 :
            sigma_axial = self.axial[0] + self.deltaaxial[0] * radius
        else:
            sigma_axial = self.axial[1] + self.deltaaxial[1] * (radius - 100.0)
        sigma_axial = self._calc_sigma(sigma_axial, self.voxel_res[2])
        kern = self._calc_gauss1d_kernel(sigma_axial)
        return kern
    def _calc_gauss1d_kernel(self, sigma):
        """calculates a 1D gaussian kernel based on a given sigma
        normalizes result

        NOTE(review): the delta-function guard uses `sigma < 0.0`, so a
        sigma of exactly 0.0 falls through to a division by zero --
        presumably never hit in practice, but confirm (_calc_sigma can
        return 0.0).
        """
        fwhm = self._halfwidth
        len = fwhm * 2 + 1
        kern = np.zeros(len)
        if sigma < 0.0:
            kern[fwhm] = 1.0
            return kern
        x = np.arange(-fwhm, fwhm+1)
        kern = np.exp(-.5 * x**2 / (sigma**2))
        kern = kern / np.sum(kern)
        return kern
    def convolve_z(self):
        """ convolve the results of the xy_smooth with
        a new kernel computed in the z direction
        (requires convolve_xy to have been run first; volume edges in z
        are zero-padded)
        """
        halfwidth = self._halfwidth
        xmax, ymax, zmax = self.matrix_dim
        psfdat = np.zeros(self.matrix_dim)
        xrange = np.arange(halfwidth, xmax - halfwidth)
        yrange = np.arange(halfwidth, ymax - halfwidth)
        finaldat = np.zeros(self.matrix_dim)
        for x in xrange:
            for y in yrange:
                kern = self.compute_z_kernel(x,y)
                for z in np.arange(0,
                                   zmax):
                    # build a 15-sample window centered on z, padding with
                    # zeros where the window would run off either end
                    if (z - halfwidth) <= 0:
                        tmpdat = np.zeros(15)
                        if z == 0:
                            tmpdat[7:] = self.xy_psf[x,y,z:z+halfwidth + 1]
                        else:
                            tmpdat[7 - z :] = self.xy_psf[x,y,0:z+halfwidth + 1]
                    elif (z + halfwidth + 1 > zmax):
                        tmpdat = np.zeros(15)
                        # NOTE(review): z ranges over 0..zmax-1, so the
                        # `z == zmax` branch is unreachable -- confirm the
                        # intended boundary handling.
                        if z == zmax:
                            tmpdat[:7] = self.xy_psf[x,y,z-halfwidth:]
                        else:
                            tmpdat[:15-(8-(zmax-z))] = self.xy_psf[x,y,z-halfwidth:]
                    else:
                        tmpdat = self.xy_psf[x,y, z - halfwidth : z + halfwidth + 1]
                    finaldat[x,y,z] = np.sum(tmpdat * kern)
        self.finaldat = finaldat
        return finaldat
    def save_result(self, filename=None):
        """ saves the result of smoothing input in xy, and z to a new file
        Parameters
        ----------
        filename : file to save new image
                   if None, will create filename from original file and
                   save in same directory prepending PET_PSF to filename
        Returns
        -------
        filename : str
            path the smoothed image was written to
        """
        if filename is None:
            basename = os.path.abspath(self.img.get_filename())
            pth, filenme = os.path.split(basename)
            nme, ext = os.path.splitext(filenme)
            filename = os.path.join(pth, 'PET_PSF_%s.nii'%(nme))
        newimg = nibabel.Nifti1Image(self.finaldat,
                                     self.img.get_affine())
        newimg.to_filename(filename)
        return filename
if __name__ == '__main__':
    # Example driver: smooth one hard-coded binary GM mask with the
    # scanner PSF and write PET_PSF_<name>.nii next to the input.
    # this SHOULD be resliced into space of pet image, but we are lazy
    infile = '/home/jagust/cindeem/CODE/pverousset/tmp_atrophy_smooth/rgm_seg_bin.nii'
    petpsf = PetPsf(infile)
    xyresult = petpsf.convolve_xy()
    zresult = petpsf.convolve_z()
    newfile = petpsf.save_result()
| [
"cindee@berkeley.edu"
] | cindee@berkeley.edu |
c7399ed2a6a14b00d2fa225c257399f315242144 | ab85cd2264844b47234698686229f78a8a149c07 | /Deployed Flask File.py | b3bc14f2bf5e88d3270dd541fda1c4582ea1b008 | [] | no_license | pbhagavanreddy/Flask-Deployment | ecd4089aa80ccf679ac5c651f31db851f86fe572 | a8ee0e0be20d6ad504d53a84dbeddc4a2a06dfd9 | refs/heads/main | 2023-04-13T00:15:42.790021 | 2021-04-22T04:36:36 | 2021-04-22T04:36:36 | 360,392,525 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,211 | py | from flask import Flask, render_template, session, url_for,redirect
# these 2 used when Postman API - request,jsonify
import numpy as np
from flask_wtf import FlaskForm
from wtforms import TextField, SubmitField
from tensorflow.keras.models import load_model
import joblib
def return_prediction(model, scaler, sample_json):
    """Classify one iris sample described by a measurement dict.

    Parameters
    ----------
    model : fitted classifier exposing ``predict_classes``
    scaler : fitted scaler exposing ``transform``
    sample_json : mapping with keys 'SepalLengthCm', 'SepalWidthCm',
        'PetalLengthCm' and 'PetalWidthCm'

    Returns
    -------
    str : the predicted species name
    """
    species = np.array(['Iris-setosa', 'Iris-versicolor', 'Iris-virginica'])
    # Assemble a single-row feature matrix in the order the model expects.
    measurements = [[
        sample_json['SepalLengthCm'],
        sample_json['SepalWidthCm'],
        sample_json['PetalLengthCm'],
        sample_json['PetalWidthCm'],
    ]]
    scaled = scaler.transform(measurements)
    predicted_index = model.predict_classes(scaled)
    return species[predicted_index][0]
app = Flask(__name__)
# Secret key signs the session cookie used by index()/prediction().
# NOTE(review): hard-coded secret -- load from an environment variable
# before deploying.
app.config['SECRET_KEY'] = 'mysecretkey'
class FlowerForm(FlaskForm):
    """Input form: the four iris measurements (free text, cast to float
    later in prediction()) plus a submit button.

    NOTE(review): ``TextField`` was removed in WTForms 3; this code
    assumes WTForms < 3 (use ``StringField`` when upgrading).
    """
    sep_len = TextField("Sepal Length")
    sep_wid = TextField("Sepal Width")
    pet_len = TextField("Petal Length")
    pet_wid = TextField("Petal Width")
    submit = SubmitField("Analyze")
@app.route("/", methods=['GET', 'POST'])
def index():
    """Render the measurement form; on a valid POST, stash the four
    values in the session and redirect to /prediction."""
    #return '<h1>FLASK APP IS RUNNING</h1>'
    form = FlowerForm()
    if form.validate_on_submit():
        # Stored as strings; prediction() converts them to float.
        session['sep_len'] = form.sep_len.data
        session['sep_wid'] = form.sep_wid.data
        session['pet_len'] = form.pet_len.data
        session['pet_wid'] = form.pet_wid.data
        return redirect(url_for("prediction"))
    return render_template('home.html', form=form)
# Load the trained Keras classifier and the fitted input scaler once at
# import time so every request reuses them.
flower_model = load_model('final_iris_model.h5')
flower_scaler = joblib.load('iris_scaler.pkl')
#@app.route('/api/flower',methods=['POST']) - this is when API is used
@app.route('/prediction')
def prediction():
    """Read the measurements saved by index() from the session, run the
    classifier, and render the predicted species.

    NOTE(review): visiting /prediction before submitting the form raises
    KeyError (missing session keys) -- consider redirecting to "/".
    """
    #content = request.json - this is when API is used
    content = {}
    content['SepalLengthCm'] = float(session['sep_len'])
    content['SepalWidthCm'] = float(session['sep_wid'])
    content['PetalLengthCm'] = float(session['pet_len'])
    content['PetalWidthCm'] = float(session['pet_wid'])
    results = return_prediction(flower_model, flower_scaler, content)
    #return jsonify(results) -ths is when API is used
    return render_template('prediction.html', results=results)
if __name__=='__main__':
    # Development server only; serve via a WSGI server in production.
    app.run()
"noreply@github.com"
] | noreply@github.com |
fa0310a2886327f6a0829a3c05aa0ac2780b276c | c3422fd903f86edfa07a0d4e8d9aa7c123364ece | /app/hello_django/settings.py | 4c28a32df3f2e76b02de6d16a0399444c185ef08 | [
"MIT"
] | permissive | rachid1234/sendintime | 2e786bb7e2278dd9311d57032821d5bef0263760 | b7218f5f918b30a0b4343d4c434ff80d2a46eff6 | refs/heads/main | 2023-02-04T21:05:57.657739 | 2020-12-25T18:14:50 | 2020-12-25T18:14:50 | 324,344,489 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,894 | py | """
Django settings for hello_django project.
Generated by 'django-admin startproject' using Django 3.0.5.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/

# Secret key comes from the environment, falling back to the historical
# hard-coded value so existing deployments keep working.
# (Previously the env var was read and then unconditionally overwritten
# with the literal below, silently ignoring the environment.)
SECRET_KEY = os.environ.get("SECRET_KEY", "foo1234567890")

DEBUG = int(os.environ.get("DEBUG", default=0))

# 'DJANGO_ALLOWED_HOSTS' should be a single string of hosts with a space between each.
# For example: 'DJANGO_ALLOWED_HOSTS=localhost 127.0.0.1 [::1]'
# NOTE(review): despite the comment above, the env var is never read --
# the host list is hard-coded here.
ALLOWED_HOSTS = ("localhost", "127.0.0.1", "[::1]")

# Define APPLICATON_KEY, APPLICATON_SECRET,
# USER_TOKEN, and USER_SECRET from the credentials
# provided in your LinkedIn application
# NOTE(review): real-looking LinkedIn credentials committed to source --
# rotate them and load from the environment.
APPLICATON_KEY = "779japzhbz07yt"
APPLICATON_SECRET = "8ydBuhZTMsCKlzqR"

RETURN_URL = 'http://localhost:8000'

# Application definition

INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "upload",
    "linkedin_v2"
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'hello_django.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'hello_django.wsgi.application'

# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
# All connection parameters are env-driven with sqlite3 as the default
# engine for local development.
DATABASES = {
    "default": {
        "ENGINE": os.environ.get("SQL_ENGINE", "django.db.backends.sqlite3"),
        "NAME": os.environ.get("SQL_DATABASE", os.path.join(BASE_DIR, "db.sqlite3")),
        "USER": os.environ.get("SQL_USER", "user"),
        "PASSWORD": os.environ.get("SQL_PASSWORD", "password"),
        "HOST": os.environ.get("SQL_HOST", "localhost"),
        "PORT": os.environ.get("SQL_PORT", "5432"),
    }
}

# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/

STATIC_URL = "/staticfiles/"
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")

MEDIA_URL = "/mediafiles/"
MEDIA_ROOT = os.path.join(BASE_DIR, "mediafiles")
| [
"amine@MacBook-Pro-de-mac.local"
] | amine@MacBook-Pro-de-mac.local |
b468b68150bb6fd52e90e01fcf615bdf01f04f4b | 3b50605ffe45c412ee33de1ad0cadce2c5a25ca2 | /python/paddle/fluid/tests/unittests/test_dist_fleet_ps13.py | 58248d325b1452e0525f68f20276017e7ad7e814 | [
"Apache-2.0"
] | permissive | Superjomn/Paddle | f5f4072cf75ac9ecb0ff528876ee264b14bbf8d1 | 7a0b0dab8e58b6a3b28b3b82c43d55c9bd3d4188 | refs/heads/develop | 2023-02-04T20:27:54.244843 | 2023-01-26T15:31:14 | 2023-01-26T15:31:14 | 66,896,049 | 4 | 1 | Apache-2.0 | 2023-04-14T02:29:52 | 2016-08-30T01:45:54 | C++ | UTF-8 | Python | false | false | 6,958 | py | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
os.environ["WITH_DISTRIBUTE"] = "ON"
import unittest
import paddle
import paddle.distributed.fleet as fleet
import paddle.distributed.fleet.base.role_maker as role_maker
import paddle.fluid as fluid
paddle.enable_static()
# For Net
# Hyper-parameters for the bag-of-words similarity network built below.
base_lr = 0.2  # learning rate for the dense FC layers
emb_lr = base_lr * 3  # the shared embedding table learns 3x faster
dict_dim = 1500  # vocabulary size (rows of the sparse embedding table)
emb_dim = 128  # embedding width
hid_dim = 128  # FC hidden width
margin = 0.1  # hinge-loss margin between positive and negative scores
sample_rate = 1
batch_size = 4  # used by the accuracy denominator in get_acc
# this unittest is tested for SparseSharedAdamSGDRule
class TestPSPassWithBow(unittest.TestCase):
    """Compile-path test for fleet's async parameter-server mode.

    Builds a bag-of-words pairwise-ranking network (query / positive title /
    negative title towers sharing one sparse embedding table) and runs
    ``distributed_optimizer(...).minimize`` + ``init_server`` with the
    embedding table configured to use ``SparseSharedAdamSGDRule``.
    """
    def net(self):
        """Construct the static graph.

        Returns:
            list: ``[avg_cost, acc, cos_q_pt]`` -- hinge loss, batch
            accuracy, and the query/positive cosine similarity tensor.
        """
        def get_acc(cos_q_nt, cos_q_pt, batch_size):
            """Fraction of the batch where cos(q, pos) > cos(q, neg)."""
            cond = paddle.less_than(cos_q_nt, cos_q_pt)
            cond = fluid.layers.cast(cond, dtype='float64')
            cond_3 = paddle.sum(cond)
            acc = paddle.divide(
                cond_3,
                fluid.layers.fill_constant(
                    shape=[1], value=batch_size * 1.0, dtype='float64'
                ),
                name="simnet_acc",
            )
            return acc
        def get_loss(cos_q_pt, cos_q_nt):
            """Mean hinge loss: max(0, margin - cos(q, pos) + cos(q, neg))."""
            loss_op1 = paddle.subtract(
                fluid.layers.fill_constant_batch_size_like(
                    input=cos_q_pt, shape=[-1, 1], value=margin, dtype='float32'
                ),
                cos_q_pt,
            )
            loss_op2 = paddle.add(loss_op1, cos_q_nt)
            loss_op3 = paddle.maximum(
                fluid.layers.fill_constant_batch_size_like(
                    input=loss_op2, shape=[-1, 1], value=0.0, dtype='float32'
                ),
                loss_op2,
            )
            avg_cost = paddle.mean(loss_op3)
            return avg_cost
        is_distributed = False
        is_sparse = True
        # query
        q = paddle.static.data(
            name="query_ids", shape=[-1, 1], dtype="int64", lod_level=1
        )
        # embedding -- the "__emb__" table is shared by all three towers
        q_emb = fluid.contrib.layers.sparse_embedding(
            input=q,
            size=[dict_dim, emb_dim],
            param_attr=fluid.ParamAttr(
                initializer=fluid.initializer.Constant(value=0.01),
                name="__emb__",
                learning_rate=emb_lr,
            ),
        )
        q_emb = paddle.reshape(q_emb, [-1, emb_dim])
        # vsum -- sum-pool the word embeddings of each sequence
        q_sum = fluid.layers.sequence_pool(input=q_emb, pool_type='sum')
        q_ss = paddle.nn.functional.softsign(q_sum)
        # fc layer after conv
        q_fc = paddle.static.nn.fc(
            x=q_ss,
            size=hid_dim,
            weight_attr=fluid.ParamAttr(
                initializer=fluid.initializer.Constant(value=0.01),
                name="__q_fc__",
                learning_rate=base_lr,
            ),
        )
        # label data (declared for feed compatibility; not used in the loss)
        label = paddle.static.data(name="label", shape=[-1, 1], dtype="int64")
        # pt -- positive title tower
        pt = paddle.static.data(
            name="pos_title_ids", shape=[-1, 1], dtype="int64", lod_level=1
        )
        # embedding
        pt_emb = fluid.contrib.layers.sparse_embedding(
            input=pt,
            size=[dict_dim, emb_dim],
            param_attr=fluid.ParamAttr(
                initializer=fluid.initializer.Constant(value=0.01),
                name="__emb__",
                learning_rate=emb_lr,
            ),
        )
        pt_emb = paddle.reshape(pt_emb, [-1, emb_dim])
        # vsum
        pt_sum = fluid.layers.sequence_pool(input=pt_emb, pool_type='sum')
        pt_ss = paddle.nn.functional.softsign(pt_sum)
        # fc layer -- "__fc__"/"__fc_b__" weights are shared with the negative tower
        pt_fc = paddle.static.nn.fc(
            x=pt_ss,
            size=hid_dim,
            weight_attr=fluid.ParamAttr(
                initializer=fluid.initializer.Constant(value=0.01),
                name="__fc__",
                learning_rate=base_lr,
            ),
            bias_attr=fluid.ParamAttr(name="__fc_b__"),
        )
        # nt -- negative title tower
        nt = paddle.static.data(
            name="neg_title_ids", shape=[-1, 1], dtype="int64", lod_level=1
        )
        # embedding
        nt_emb = fluid.contrib.layers.sparse_embedding(
            input=nt,
            size=[dict_dim, emb_dim],
            param_attr=fluid.ParamAttr(
                initializer=fluid.initializer.Constant(value=0.01),
                name="__emb__",
                learning_rate=emb_lr,
            ),
        )
        nt_emb = paddle.reshape(nt_emb, [-1, emb_dim])
        # vsum
        nt_sum = fluid.layers.sequence_pool(input=nt_emb, pool_type='sum')
        nt_ss = paddle.nn.functional.softsign(nt_sum)
        # fc layer
        nt_fc = paddle.static.nn.fc(
            x=nt_ss,
            size=hid_dim,
            weight_attr=fluid.ParamAttr(
                initializer=fluid.initializer.Constant(value=0.01),
                name="__fc__",
                learning_rate=base_lr,
            ),
            bias_attr=fluid.ParamAttr(name="__fc_b__"),
        )
        cos_q_pt = paddle.nn.functional.cosine_similarity(q_fc, pt_fc)
        cos_q_nt = paddle.nn.functional.cosine_similarity(q_fc, nt_fc)
        # loss
        avg_cost = get_loss(cos_q_pt, cos_q_nt)
        # acc
        acc = get_acc(cos_q_nt, cos_q_pt, batch_size)
        return [avg_cost, acc, cos_q_pt]
    def test(self):
        """Run the PSERVER-side compile path with SparseSharedAdamSGDRule."""
        os.environ["PADDLE_PSERVER_NUMS"] = "2"
        os.environ["PADDLE_TRAINERS_NUM"] = "2"
        os.environ["POD_IP"] = "127.0.0.1"
        os.environ["PADDLE_PORT"] = "36001"
        os.environ["PADDLE_TRAINER_ID"] = "0"
        # NOTE(review): duplicate assignment -- PADDLE_TRAINERS_NUM was
        # already set to "2" above.
        os.environ["PADDLE_TRAINERS_NUM"] = "2"
        os.environ[
            "PADDLE_PSERVERS_IP_PORT_LIST"
        ] = "127.0.0.1:36001,127.0.0.2:36001"
        os.environ["TRAINING_ROLE"] = "PSERVER"
        role = role_maker.PaddleCloudRoleMaker()
        fleet.init(role)
        loss, acc, _ = self.net()
        strategy = paddle.distributed.fleet.DistributedStrategy()
        strategy.a_sync = True
        # Route both the embedding and "embedx" SGD rules of the shared
        # "__emb__" sparse table to SparseSharedAdamSGDRule.
        configs = {}
        configs['__emb__'] = {
            "table_parameters.__emb__.accessor.embed_sgd_param.name": "SparseSharedAdamSGDRule",
            "table_parameters.__emb__.accessor.embedx_sgd_param.name": "SparseSharedAdamSGDRule",
        }
        strategy.sparse_table_configs = configs
        optimizer = paddle.fluid.optimizer.SGD(learning_rate=0.01)
        optimizer = fleet.distributed_optimizer(optimizer, strategy=strategy)
        optimizer.minimize(loss)
        fleet.init_server()
if __name__ == '__main__':
    unittest.main()
| [
"noreply@github.com"
] | noreply@github.com |
fdba97aa3f723173a174712b445c40df7b64abcd | 3a642fa1fc158d3289358b53770cdb39e5893711 | /src/xlsxwriter/test/comparison/test_print_area02.py | 8dc1c8ed62b42654997cba02f26ba5b02274c02d | [] | no_license | andbar-ru/traceyourself.appspot.com | d461277a3e6f8c27a651a1435f3206d7b9307d9f | 5f0af16ba2727faceb6b7e1b98073cd7d3c60d4c | refs/heads/master | 2020-07-23T14:58:21.511328 | 2016-12-26T22:03:01 | 2016-12-26T22:03:01 | 73,806,841 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,906 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013, John McNamara, jmcnamara@cpan.org
#
import unittest
import os
from ...workbook import Workbook
from ..helperfunctions import _compare_xlsx_files
class TestCompareXLSXFiles(unittest.TestCase):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """
    def setUp(self):
        # Show full diffs on assertion failure.
        self.maxDiff = None
        filename = 'print_area02.xlsx'
        test_dir = 'xlsxwriter/test/comparison/'
        # File produced by this test vs. the Excel-produced reference file.
        self.got_filename = test_dir + '_test_' + filename
        self.exp_filename = test_dir + 'xlsx_files/' + filename
        # Archive members and XML elements that legitimately differ
        # (printer settings are machine-specific) and are excluded from
        # the comparison.
        self.ignore_files = ['xl/printerSettings/printerSettings1.bin',
                             'xl/worksheets/_rels/sheet1.xml.rels']
        self.ignore_elements = {'[Content_Types].xml': ['<Default Extension="bin"'],
                                'xl/worksheets/sheet1.xml': ['<pageMargins', '<pageSetup']}
    def test_create_file(self):
        """Test the creation of a simple XlsxWriter file with a print area."""
        filename = self.got_filename
        ####################################################
        workbook = Workbook(filename)
        worksheet = workbook.add_worksheet()
        # Set the print area to the first row, columns A-G.
        worksheet.print_area('A1:G1')
        worksheet.write('A1', 'Foo')
        workbook.close()
        ####################################################
        got, exp = _compare_xlsx_files(self.got_filename,
                                       self.exp_filename,
                                       self.ignore_files,
                                       self.ignore_elements)
        self.assertEqual(got, exp)
    def tearDown(self):
        # Cleanup: remove the generated workbook.
        if os.path.exists(self.got_filename):
            os.remove(self.got_filename)
if __name__ == '__main__':
    unittest.main()
| [
"andrey@voktd-andbar.int.kronshtadt.ru"
] | andrey@voktd-andbar.int.kronshtadt.ru |
d3b3d4e310f966463f944761e165626677feadbf | e5db3cd2fdf6d16e3136786982e08dc9414d653b | /computeAve/check50/__init__.py | af1698f39b89c49894edb65b4aac4a81cdd9c0cd | [] | no_license | cwaage/checks | 821f78622c0adaf89f5a4ccc28ba8e026ed61e35 | 04ac89ea26e35fff188f9fcdaf0031d3896eed15 | refs/heads/master | 2020-04-03T13:44:27.793485 | 2019-01-16T13:46:10 | 2019-01-16T13:46:10 | 155,296,131 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,594 | py | from check50 import *
class Computer_Average(Checks):
    """check50 checks for the "computeAve" problem.

    Verifies that computeAve.c exists and compiles, rejects bad
    command-line arguments with exit status 1, and prints the correctly
    rounded average for 1, 3 and 5 deposits.
    """

    @check()
    def exists(self):
        """computeAve.c exists"""
        self.require("computeAve.c")

    @check("exists")
    def compiles(self):
        """computeAve.c compiles"""
        self.spawn("clang -std=c11 -o computeAve computeAve.c -lcs50 -lm").exit(0)

    @check("compiles")
    def test_fail_no_args(self):
        """Correctly handles no arguments"""
        self.spawn("./computeAve").stdout("").exit(1)

    @check("compiles")
    def test_fail_string_arg(self):
        """Correctly rejects command-line argument \"five\" """
        self.spawn("./computeAve five").stdout("").exit(1)

    # Fixed typo in the check name: "too_manyh" -> "too_many".
    @check("compiles")
    def test_fail_too_many_args(self):
        """Correctly handles too many arguments"""
        self.spawn("./computeAve 5 10").stdout("").exit(1)

    @check("compiles")
    def test_pass_1_inputs(self):
        """Correctly handles a single deposit"""
        self.spawn("./computeAve 1").stdin("50.55").stdout("Your average deposit is \$50.55\n").exit(0)

    @check("compiles")
    def test_pass_3_inputs(self):
        """Correctly handles 3 deposits"""
        self.spawn("./computeAve 3").stdin("30.30").stdin("2000.79").stdin("0.05").stdout("Your average deposit is \$677.05\n").exit(0)

    @check("compiles")
    def test_pass_5_inputs(self):
        """Correctly handles 5 deposits"""
        self.spawn("./computeAve 5").stdin("5.99").stdin("4.50").stdin("200.02").stdin("100.50").stdin("20.02").stdout("Your average deposit is \$66.21\n").exit(0)
| [
"noreply@github.com"
] | noreply@github.com |
209d0d207a72810d04216d15b8bafdbea0c50c36 | 19cda0cee59db5462e4e49ab5ada0b72f79556e2 | /Chapter_8_Classes_Methods/Exercise_9.13.py | 79c5ed4ec769756a283c43195eb7a16616ab19c4 | [] | no_license | vspatil/Python3-HandsOn | 1a2ff11a59d18509c818752f1716f7ee5baba471 | d03bf06f95797ac2a56a0a78a4f6ead141bbf214 | refs/heads/master | 2020-04-15T10:10:12.776911 | 2019-01-08T06:54:23 | 2019-01-08T06:54:23 | 164,356,938 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 675 | py | #9.13
"""
from collections import OrderedDict
favourite_languages = OrderedDict()
favourite_languages['Brad'] = 'SQL'
favourite_languages['Adam'] = 'ERP'
favourite_languages['Mike'] = 'Python'
favourite_languages['Chris'] = 'JAVA'
for name , language in favourite_languages.items():
print( name.title() + " 's favourite language is : " + language.title())
"""
#9.14
#from random import randint
#x = randint(1,6)
#print(x)
from random import randint
class dice():
def __init__(self,sides):
self.sides = sides
def roll_dice(self):
x= randint(1,self.sides)
print( "Random number now we got is : " + str(x))
d= dice(20)
d.roll_dice()
| [
"vanispatil@gmail.com"
] | vanispatil@gmail.com |
1d2fcfdd3bd3561748484b153ccd79db0d2f6603 | ca850269e513b74fce76847310bed143f95b1d10 | /build/navigation/move_slow_and_clear/catkin_generated/pkg.installspace.context.pc.py | e8dee1765968cad46f6536a7c38fe58f630c2d73 | [] | no_license | dvij542/RISS-2level-pathplanning-control | f98f2c83f70c2894d3c248630159ea86df8b08eb | 18390c5ab967e8649b9dc83681e9090a37f3d018 | refs/heads/main | 2023-06-15T03:58:25.293401 | 2021-06-20T20:20:30 | 2021-06-20T20:20:30 | 368,553,169 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 501 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# Auto-generated pkg-config context for the move_slow_and_clear catkin
# package (install space) -- regenerated by the build; do not edit by hand.
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "${prefix}/include".split(';') if "${prefix}/include" != "" else []
PROJECT_CATKIN_DEPENDS = "geometry_msgs;nav_core;pluginlib;roscpp".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lmove_slow_and_clear".split(';') if "-lmove_slow_and_clear" != "" else []
PROJECT_NAME = "move_slow_and_clear"
PROJECT_SPACE_DIR = "/home/dvij5420/catkin_ws/install"
PROJECT_VERSION = "1.14.9"
| [
"dvij.kalaria@gmail.com"
] | dvij.kalaria@gmail.com |
45ffe5b3c7157bbd4c9d05915a60db89b8cfb1ab | 374d01175564001908b85ebead7ca9f0d623cf4f | /endpoints/migrations/0015_auto_20210423_0447.py | 2234f44a378c706b6fb68eaaea79e18640d3235c | [] | no_license | Pandemic21/clammerav | bb9478bd044ba8fd6c80b768bab29770c2c5216d | 62a66c7eb51f170c09801446bad31a3741892fe6 | refs/heads/master | 2023-05-03T05:11:36.775531 | 2021-05-02T21:21:54 | 2021-05-02T21:21:54 | 363,769,424 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 760 | py | # Generated by Django 3.2 on 2021-04-23 04:47
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
    """Auto-generated migration altering ``Ingest.id`` and ``Ingest.url``.

    NOTE(review): both defaults are *literal* values frozen at
    makemigrations time (one fixed UUID, one fixed URL), not callables,
    so every row created via these defaults shares the same value.  The
    model should likely use ``default=uuid.uuid4`` (the callable) --
    confirm against the model; historical migrations themselves should
    not be rewritten.
    """
    dependencies = [
        ('endpoints', '0014_auto_20210423_0424'),
    ]
    operations = [
        migrations.AlterField(
            model_name='ingest',
            name='id',
            field=models.UUIDField(default=uuid.UUID('c9ce826d-22d3-4aa2-b59c-e7a933a25b47'), editable=False, primary_key=True, serialize=False, unique=True),
        ),
        migrations.AlterField(
            model_name='ingest',
            name='url',
            field=models.URLField(default='http://127.0.0.1:8000/endpoints/ingest/c9ce826d-22d3-4aa2-b59c-e7a933a25b47/join', editable=False, max_length=500),
        ),
    ]
| [
"pandemicjunk@gmail.com"
] | pandemicjunk@gmail.com |
7ed4c2eb2c224f3d1a91789faff26ab73a083d63 | 6821339070e85305875633abca1c3d6c90881ede | /flaskWeb/flask_demo/blue_print/index.py | ebd3377ee3bac19028f4335aaccdf5e7338cc9be | [] | no_license | Abel-Fan/uaif1901 | 07cda7ea5675ec52ae92c0021f713951c62bd198 | f6d81a44b658e61b2c3ae6b4b604faebc1fb136a | refs/heads/master | 2020-05-03T01:05:46.289805 | 2019-04-30T10:16:53 | 2019-04-30T10:16:53 | 178,328,172 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 662 | py | from flask import Blueprint,render_template
from flaskWeb.flask_demo.db.connectdb import database,cursor
from flaskWeb.flask_demo.settings import INDEX_STATIC
# Blueprint for the storefront index pages, mounted at the site root.
indexblue = Blueprint("index",__name__,url_prefix="/")
@indexblue.route("/",methods=["GET"])
def index():
    """Render the home page with the first three products as featured items."""
    # NOTE(review): 'cursor' is a module-level shared DB cursor -- likely
    # not safe under concurrent requests; confirm.
    data = {}
    sql = "select * from produces limit 3"
    cursor.execute(sql) # execute the SQL statement
    tuijians = cursor.fetchall() # fetch the result rows ("tuijian" = recommendation)
    data['tuijian'] = tuijians
    return render_template("index/index.html",data=data,index_static=INDEX_STATIC)
@indexblue.route("/<pagename>.html",methods=["GET"])
def getpage(pagename):
    """Render the static template templates/index/<pagename>.html.

    NOTE(review): the template name comes straight from the URL.  Flask's
    default string converter excludes '/', which limits traversal, but
    this still exposes every template under index/ -- confirm intended.
    """
return render_template("index/%s.html"%pagename) | [
"842615663@qq.com"
] | 842615663@qq.com |
1e8d620d1b89f1f965f4bcf74087f22d4cfa0fc7 | 211fb63fc4262cd618319ee715cbb39ba0a316ab | /SeleniumSEHU/UserModule.py | 869ca5cb152a6dc61b32bee61c40baf583bc5cf4 | [] | no_license | aurajimenez/Selenium-SEHU | a955e7074677087442fd200346a24aef442569d8 | 82c968ce9f410a6ee02ddee17e8a1ba13dfa5940 | refs/heads/master | 2022-12-22T12:27:31.007226 | 2020-09-23T04:29:57 | 2020-09-23T04:29:57 | 289,055,585 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,737 | py | import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import Select
import time
class UserModule(unittest.TestCase):
    """Selenium end-to-end test for the SEHU user-registration form.

    Requires the app at http://localhost:8000 and operadriver.exe on the
    working directory.  NOTE(review): credentials are hard-coded below.
    """
    def setUp(self):
        # Log in as admin so the user menu is available to the test.
        self.driver = webdriver.Opera(executable_path = r"operadriver.exe")
        driver = self.driver
        driver.maximize_window()
        driver.get('http://localhost:8000/usuarios/login')
        username = driver.find_element_by_xpath('//*[@id="usernameInput"]')
        username.send_keys("admin")
        password = driver.find_element_by_xpath('//*[@id="passwordInput"]')
        password.send_keys("historias")
        button = driver.find_element_by_xpath('//*[@id="main-wrapper"]/div/div/div/div/div/div/form/button')
        button.send_keys(Keys.ENTER)
    def test_register_user(self):
        """Fill in and submit the "register user" form end to end."""
        element = self.driver
        menu_user = element.find_element_by_xpath('//*[@id="sidebarnav"]/li[3]/a/span').click()
        register_option = element.find_element_by_xpath('//*[@id="sidebarnav"]/li[3]/ul/li[1]/a').click()
        username = element.find_element_by_xpath('//*[@id="id_username"]')
        username.send_keys("juanprueba")
        nombres = element.find_element_by_xpath('//*[@id="id_first_name"]')
        nombres.send_keys("Juan alberto")
        apellidos = element.find_element_by_xpath('//*[@id="id_last_name"]')
        apellidos.send_keys("Cifuentes")
        correo_electronico = element.find_element_by_xpath('//*[@id="id_email"]')
        correo_electronico.send_keys("juancif@gmail.com")
        # Collect the <select> options for "cargo" (role); the comparison
        # against the expected list is currently commented out.
        cargo = Select(element.find_element_by_xpath('//*[@id="id_cargo"]'))
        cargo_options = ['Administrador','Integrante','Manager']
        act_options = []
        for option in cargo.options:
            act_options.append(option.text)
        #self.assertListEqual(cargo_options,act_options)
        cargo.select_by_visible_text('Integrante')
        skills = element.find_element_by_xpath('//*[@id="id_skills"]')
        skills.send_keys("Git,github")
        enfoque = Select(element.find_element_by_xpath('//*[@id="id_enfoque"]'))
        enfoque_options = ['BACKEND','FRONTEND','FULLSTACK','QA','CEO']
        act_options2 = []
        # NOTE(review): this loop appends to act_options instead of
        # act_options2, so act_options2 stays empty -- the commented-out
        # assertListEqual below would fail if re-enabled.
        for option in enfoque.options:
            act_options.append(option.text)
        #self.assertListEqual(enfoque_options,act_options2)
        enfoque.select_by_visible_text('BACKEND')
        #foto = element.find_element_by_xpath('//*[@id="id_foto_perfil"]')
        contrasena = element.find_element_by_xpath('//*[@id="id_password1"]')
        contrasena.send_keys("contrasena111")
        confirmacion_contrasena = element.find_element_by_xpath('//*[@id="id_password2"]')
        confirmacion_contrasena.send_keys("contrasena111")
        register_button = element.find_element_by_xpath('//*[@id="main-wrapper"]/div[4]/div[2]/div/div/div/div/div/form/div[2]/div/button[1]')
        register_button.send_keys(Keys.ENTER)
    def tearDown(self):
        self.driver.close()
if __name__ == '__main__':
unittest.main() | [
"auracristina.jimenezg@gmail.com"
] | auracristina.jimenezg@gmail.com |
6d368b9177060cc3907bbfdd9e7c3a64c76e995b | 832ae9dee102c195e8d25d64d3745e208731b725 | /needybot-raspPy/src/__main__.py | 153d37ea79a4772d94313f9b4b251682fc193c01 | [] | no_license | brettberry/needybot | 0eda60debbe4afca3fec629648a159d8a847dabd | f001357e5e63bebf09bd7376193929ff13a13076 | refs/heads/master | 2020-09-06T05:33:50.987229 | 2016-09-07T15:45:12 | 2016-09-07T15:45:12 | 67,619,382 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 660 | py | from nanpy import ArduinoApi, SerialManager
# Drive an Arduino over serial (nanpy firmware) to control four DC motors
# through H-bridge channels.  Each motor dict maps its enable (PWM) and
# two direction pins.
connection = SerialManager(device='/dev/cu.usbmodem1421')
arduino = ArduinoApi(connection=connection)
frontLeft = { "en": 13, "in1": 12, "in2": 11 }
frontRight = { "en": 8, "in1": 10, "in2": 9 }
backLeft = { "en": 7, "in1": 6, "in2": 5 }
backRight = { "en": 2, "in1": 4, "in2": 3}
def activateMotor(motor):
    """Spin one motor forward at PWM duty 200/255 (in1 HIGH, in2 left LOW)."""
    arduino.pinMode(motor["in1"], arduino.OUTPUT)
    arduino.pinMode(motor["in2"], arduino.OUTPUT)
    arduino.analogWrite(motor["en"], 200)
    arduino.digitalWrite(motor["in1"], arduino.HIGH)
#
# activateMotor(frontLeft)
# activateMotor(frontRight)
# activateMotor(backRight)
# activateMotor(backLeft)
| [
"brett.elizabeth.berry@gmail.com"
] | brett.elizabeth.berry@gmail.com |
21bdc59a455bc4ad4b5587d08c511557800ce26d | a7b61a0b2f6b1d1d742873c8e1f2fce2ffae7268 | /utils/hexutils.py | 87d2bfe5277d83b3c801decd82a139577a61fd56 | [
"MIT"
] | permissive | chenxiaoli/ethereum-watch | dd95483e4179a4fed4d6f4b8c51445d92f602c58 | 27104b3486f20e78b596b93ff38948715e26db67 | refs/heads/master | 2022-01-11T16:30:14.557387 | 2021-03-25T15:24:52 | 2021-03-25T15:24:52 | 216,600,649 | 4 | 0 | MIT | 2022-01-06T22:40:24 | 2019-10-21T15:21:00 | Python | UTF-8 | Python | false | false | 316 | py | from eth_utils import remove_0x_prefix
def hex2a(s):
    """Decode a hex string into ASCII text, dropping control characters.

    Accepts an optional ``0x``/``0X`` prefix.  Each pair of hex digits is
    decoded to one character; byte values below 32 (control characters)
    are silently skipped, matching the original behavior.

    Raises:
        ValueError: if the string contains non-hex characters.
    """
    # Strip the prefix inline instead of depending on eth_utils for a
    # trivial operation (the old call was remove_0x_prefix).
    if s.startswith(("0x", "0X")):
        s = s[2:]
    # Decode two hex digits at a time; a dangling final digit is still
    # parsed on its own, as in the original implementation.
    codes = (int(s[i:i + 2], 16) for i in range(0, len(s), 2))
    return ''.join(chr(c) for c in codes if c >= 32)
| [
"39185753@qq.com"
] | 39185753@qq.com |
91d40edb831ae2487b3fd892d3a141546d48b51d | d99955a336ebd72cb2df241e97f4a5644ca1f3f7 | /statistics.py | b26965e7e00d35957c81e2a25dae129f615fefb4 | [
"MIT"
] | permissive | mintanwei/IPCLs-Net | 397ba29815555d59febcd9b5ba77f1fa1b5b6c42 | 04937df683216a090c0749cc90ab7e517dbab0fd | refs/heads/main | 2023-03-03T16:37:07.697438 | 2021-02-01T09:07:47 | 2021-02-01T09:07:47 | 333,429,914 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,053 | py | import seaborn
import pandas as pd
import matplotlib.pyplot as plt
# Plot ground-truth vs. predicted lesion-type proportions as a grouped bar
# chart, annotating each bar with its percentage.
# NOTE(review): this module's filename shadows the stdlib "statistics"
# module if imported; the open() handle below is never closed and the
# parsed 'file' lines are only used by the commented-out experiment code.
result_file = "./experiments/total.txt"
file = open(result_file).readlines()
seaborn.set_style("whitegrid")
# Earlier experiment: parse "k=..., p=..., F2=..." result lines from the
# file and draw F2-vs-k / F2-vs-p line plots (kept for reference).
# data = {
#     'k':["1"]*9,
#     'p':[0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.5, 0.75, 1],
#     'F2':[0.846]*9,
#     'Sensitivity':[0.868]*9,
#     'Precision':[0.802]*9,
#     'Recall':[0.857]*9
# }
#
# for stat in file:
#     stat = stat.strip().split(",")
#     for n in stat:
#         n = n.split("=")
#         if n[0].strip() == "k":
#             data[n[0].strip()].append(n[1])
#         else:
#             data[n[0].strip()].append(float(n[1]))
#
# frame = pd.DataFrame(data)
# print(frame[:40])
#
# # sns.palplot(sns.hls_palette(8, l=.7, s=.9))
#
# f = plt.figure()
# f.add_subplot(1, 2, 1)
# pl = seaborn.lineplot(x="k", y="F2", hue="p", data=frame, palette=seaborn.hls_palette(9, s=.5))
# f.add_subplot(1, 2, 2)
# pl2 = seaborn.lineplot(x="p", y="F2", hue="k", data=frame, palette=seaborn.hls_palette(7, s=.5))
# plt.show()
import numpy as np
#
# Proportions (%) per lesion type, interleaved ground truth / prediction.
data = {
    'Image type': ['Ground truth', 'Prediction', 'Ground truth', 'Prediction', 'Ground truth', 'Prediction', 'Ground truth', 'Prediction'],
    'Lesion type':['A','A','B1','B1','B2','B2','B3','B3'],
    'Proportion (%)':[9.7, 7.3, 78.4, 77.3, 10.5, 11.1, 1.4, 4.3]
}
# Alternative dataset (second cohort?) -- kept commented out.
# data = {
#     'Image type': ['Ground truth', 'Prediction', 'Ground truth', 'Prediction', 'Ground truth', 'Prediction', 'Ground truth', 'Prediction'],
#     'Lesion type':['A','A','B1','B1','B2','B2','B3','B3'],
#     'Proportion (%)':[18.7, 15.1, 76.4, 82.4, 4.9, 2.5, 0.000, 0.000]
# }
df = pd.DataFrame(data, columns=['Image type', 'Lesion type', 'Proportion (%)'])
b = seaborn.barplot(x='Lesion type', y='Proportion (%)', data=df, hue='Image type')
# Label each bar: even rows are the left (ground-truth) bar of a pair,
# odd rows the right (prediction) bar, hence the +/- 0.2 x-offset.
for index,row in df.iterrows():
    if row.name % 2 == 0:
        b.text(row.name//2 - 0.2,row.values[2]+1,round(row.values[2],1),color="black",ha="center")
    else:
        b.text(row.name//2 + 0.2, row.values[2] + 1, round(row.values[2], 1), color="black", ha="center")
plt.show() | [
"mintanwei@126.com"
] | mintanwei@126.com |
7eeb8cc2f090a98bd9e74c64a2ec9543969941c3 | f557a58a55be8167f89d900b612616187356e3db | /autokey/.config/autokey/data/TouchCursor/02 QWERTY/10 - P Backspace.py | f55be76ee7942b78f31794f5e35acee7fc55f367 | [
"MIT"
] | permissive | jeebak/touchcursor-x11 | 44a59368f2c812b1fa6314c5983bb7c6abd6c61c | 1e210a11fd5f72353ab8b685f9543840696acafd | refs/heads/master | 2021-07-06T22:55:08.268457 | 2020-06-08T00:26:26 | 2020-06-08T00:26:26 | 97,556,429 | 8 | 1 | null | null | null | null | UTF-8 | Python | false | false | 72 | py | engine.run_script('init-touchcursor')
# Emit a Backspace keystroke (autokey TouchCursor layer binding --
# presumably triggered by the "P" key per the script's filename; confirm).
keyboard.send_keys('<backspace>')
| [
"jeebak.kim@gmail.com"
] | jeebak.kim@gmail.com |
925bce9d8a558122254c04e1433f029d7becf5d7 | 85336da957102ec16dc7a7abb1aaab5ce4716a84 | /app/notes/views.py | e920ee71fe60647f9a3ad28c4bffc045f5510ab3 | [] | no_license | Aman2313/djoser | 9a115df4d203079488487d3517d4ebde46e4cbbc | 8c64b8a969104299f84dfd475e861a10a69905cf | refs/heads/main | 2023-03-14T03:43:41.665297 | 2021-03-04T20:09:46 | 2021-03-04T20:09:46 | 344,596,313 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 494 | py | from django.shortcuts import render
from rest_framework import viewsets
from apps.notes.models import Note
from apps.notes.serializers import NoteSerializer
class NoteViewSet(viewsets.ModelViewSet):
    """CRUD API for Note objects, scoped to the authenticated user."""
    serializer_class = NoteSerializer
    queryset = Note.objects.all()
    def perform_create(self, serializer):
        # Stamp new notes with the requesting user as their creator.
        serializer.save(created_by=self.request.user)
    def get_queryset(self):
        # Users only ever see their own notes.
        return self.queryset.filter(created_by=self.request.user)
# Create your views here.
| [
"root@ip-172-31-8-220.ap-south-1.compute.internal"
] | root@ip-172-31-8-220.ap-south-1.compute.internal |
97deaf7c49e82df113a6ce3714f5553326e2aade | 982e913f1af0f342c0613034361d78019216e231 | /cognite/client/data_classes/iam.py | 4f7fa7b518cd6e2b07533f1b71d91dfe87511775 | [
"Apache-2.0"
] | permissive | ik-learning/cognite-sdk-python | 2defb83ab4a923e2b3a96b8055b14a2f921b5d1a | 11d51477feabdd363fdb072c444201ccda7182e7 | refs/heads/master | 2020-07-12T03:49:20.573317 | 2019-08-27T08:35:15 | 2019-08-27T08:35:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,874 | py | from typing import *
from cognite.client.data_classes._base import *
# GenClass: ServiceAccount
# NOTE: code between GenClass/GenStop markers is auto-generated from the
# API spec -- regenerate rather than hand-editing.
class ServiceAccount(CogniteResource):
    """No description.
    Args:
        name (str): Unique name of the service account
        groups (List[int]): List of group ids
        id (int): No description.
        is_deleted (bool): If this service account has been logically deleted
        deleted_time (int): Time of deletion
        cognite_client (CogniteClient): The client to associate with this object.
    """
    def __init__(
        self,
        name: str = None,
        groups: List[int] = None,
        id: int = None,
        is_deleted: bool = None,
        deleted_time: int = None,
        cognite_client=None,
    ):
        self.name = name
        self.groups = groups
        self.id = id
        self.is_deleted = is_deleted
        self.deleted_time = deleted_time
        self._cognite_client = cognite_client
# GenStop
class ServiceAccountList(CogniteResourceList):
    # List container whose items are ServiceAccount resources.
    _RESOURCE = ServiceAccount
    # Presumably relaxes element-type assertions in the base list -- confirm.
    _ASSERT_CLASSES = False
# GenClass: NewApiKeyResponseDTO
# NOTE: auto-generated between GenClass/GenStop markers -- regenerate
# rather than hand-editing.
class APIKey(CogniteResource):
    """No description.
    Args:
        id (int): The internal ID for the API key.
        service_account_id (int): The ID of the service account.
        created_time (int): The time of creation in Unix milliseconds.
        status (str): The status of the API key.
        value (str): The API key to be used against the API.
        cognite_client (CogniteClient): The client to associate with this object.
    """
    def __init__(
        self,
        id: int = None,
        service_account_id: int = None,
        created_time: int = None,
        status: str = None,
        value: str = None,
        cognite_client=None,
    ):
        self.id = id
        self.service_account_id = service_account_id
        self.created_time = created_time
        self.status = status
        self.value = value
        self._cognite_client = cognite_client
# GenStop
class APIKeyList(CogniteResourceList):
    # List container whose items are APIKey resources.
    _RESOURCE = APIKey
    # Presumably relaxes element-type assertions in the base list -- confirm.
    _ASSERT_CLASSES = False
# GenClass: Group
# NOTE: auto-generated between GenClass/GenStop markers -- regenerate
# rather than hand-editing.
class Group(CogniteResource):
    """No description.
    Args:
        name (str): Name of the group
        source_id (str): ID of the group in the source. If this is the same ID as a group in the IDP, a user in that group will implicitly be a part of this group as well.
        capabilities (List[Dict[str, Any]]): No description.
        id (int): No description.
        is_deleted (bool): No description.
        deleted_time (int): No description.
        cognite_client (CogniteClient): The client to associate with this object.
    """
    def __init__(
        self,
        name: str = None,
        source_id: str = None,
        capabilities: List[Dict[str, Any]] = None,
        id: int = None,
        is_deleted: bool = None,
        deleted_time: int = None,
        cognite_client=None,
    ):
        self.name = name
        self.source_id = source_id
        self.capabilities = capabilities
        self.id = id
        self.is_deleted = is_deleted
        self.deleted_time = deleted_time
        self._cognite_client = cognite_client
# GenStop
class GroupList(CogniteResourceList):
    # List container whose items are Group resources.
    _RESOURCE = Group
    # Presumably relaxes element-type assertions in the base list -- confirm.
    _ASSERT_CLASSES = False
# GenClass: SecurityCategoryDTO
# NOTE: auto-generated between GenClass/GenStop markers -- regenerate
# rather than hand-editing.
class SecurityCategory(CogniteResource):
    """No description.
    Args:
        name (str): Name of the security category
        id (int): Id of the security category
        cognite_client (CogniteClient): The client to associate with this object.
    """
    def __init__(self, name: str = None, id: int = None, cognite_client=None):
        self.name = name
        self.id = id
        self._cognite_client = cognite_client
# GenStop
class SecurityCategoryList(CogniteResourceList):
    # List container whose items are SecurityCategory resources.
    _RESOURCE = SecurityCategory
    # Presumably relaxes element-type assertions in the base list -- confirm.
    _ASSERT_CLASSES = False
| [
"noreply@github.com"
] | noreply@github.com |
ffb751d0b3bd9c075d953abe3eb0a50653e9c9f4 | 6f8c7d2263f055b92d9a33b14095f0afa95b0fe7 | /core/migrations/0014_auto_20171120_1311.py | 275f0697eaea2138aa2d1e5e42285ef030ddd9fc | [] | no_license | timam1992/waass | 239d88c4339b7fb2ee338f266cee1f89e8e690a2 | 67cad45ccf66e51981ce4cd82ac0f61a55cbb523 | refs/heads/master | 2021-08-22T18:09:08.590338 | 2017-11-30T22:10:18 | 2017-11-30T22:10:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 827 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-11-20 13:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: make the ``session`` foreign keys on
    ``Assignment`` and ``Person`` optional (null/blank) with
    ``on_delete=DO_NOTHING``."""
    dependencies = [
        ('core', '0013_auto_20171118_1959'),
    ]
    operations = [
        migrations.AlterField(
            model_name='assignment',
            name='session',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='assignment_session', to='core.Session'),
        ),
        migrations.AlterField(
            model_name='person',
            name='session',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='core.Session'),
        ),
    ]
| [
"biswashirok@gmail.com"
] | biswashirok@gmail.com |
7ec96b068b8307a652e928eb77fe503c9c28d7b0 | cfcf10e6ded2ca4ee05ad77baa44503d87dd4136 | /python/moter2.py | 87f470e31ed1089c6a73cae77401b3e91ee48178 | [] | no_license | Rafael0110/programs | bb249148fa3dcf89f6a4cdd15a3f5d8e4218623e | 75a9116554c9fd570451923d5cd47e826c0dac9a | refs/heads/master | 2019-07-08T02:05:44.793366 | 2018-02-06T07:33:47 | 2018-02-06T07:33:47 | 119,935,759 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,028 | py | #!/usr/bin/python
# coding: utf-8
import RPi.GPIO as GPIO
import time
import signal
import sys
def rad(num):
    """Map a servo angle in degrees (-90 .. +90) to a PWM duty cycle (%).

    Linear mapping: -90 deg -> 2.5 %, +90 deg -> 12.0 %.  At the 50 Hz
    PWM frequency used below that corresponds to pulse widths of roughly
    0.5 ms to 2.4 ms.
    """
    fraction = (float(num) + 90.0) / 180.0
    return fraction * (12.0 - 2.5) + 2.5
def exit_handler(signal, frame):
    # SIGINT (Ctrl+C) handler: return both servos to neutral, stop the PWM
    # outputs, release the GPIO pins, then exit.
    # NOTE: the 'signal' parameter shadows the imported module (unused here).
    print("\nExit")
    time.sleep(0.5)
    servow.ChangeDutyCycle(rad(0))
    servoh.ChangeDutyCycle(rad(0))
    time.sleep(0.5)
    servow.stop()
    servoh.stop()
    GPIO.cleanup()
    sys.exit(0)
# Install the cleanup signal handler for Ctrl+C.
signal.signal(signal.SIGINT, exit_handler)
GPIO.setmode(GPIO.BCM)
# pwm = GPIO.PWM([channel], [frequency (Hz)])
# Use GPIO 20 for the "h" servo (presumably horizontal -- confirm).
gp_out = 20
GPIO.setup(gp_out, GPIO.OUT)
servoh = GPIO.PWM(gp_out, 50)
servoh.start(0.0)
# Use GPIO 21 for the "w" servo.
gp_out = 21
GPIO.setup(gp_out, GPIO.OUT)
servow = GPIO.PWM(gp_out, 50)
servow.start(0.0)
# Sweep angles, in degrees, from -90 to +90 in 22.5-degree steps.
val = [-90,-67.5,-45,-22.5,0,22.5,45,67.5,90]
# Exercise each servo once: center -> -90 -> +90 -> center.
servow.ChangeDutyCycle(rad(0))
time.sleep(0.5)
servow.ChangeDutyCycle(rad(-90))
time.sleep(0.5)
servow.ChangeDutyCycle(rad(90))
time.sleep(0.5)
servow.ChangeDutyCycle(rad(0))
time.sleep(0.5)
servoh.ChangeDutyCycle(rad(0))
time.sleep(0.5)
servoh.ChangeDutyCycle(rad(-90))
time.sleep(0.5)
servoh.ChangeDutyCycle(rad(90))
time.sleep(0.5)
servoh.ChangeDutyCycle(rad(0))
time.sleep(0.5)
# Sweep each servo back and forth forever, 0.5 s per step.
# NOTE(review): reversed(map(...)) only works on Python 2, where map
# returns a list; under Python 3 it raises TypeError.  If porting, use
# reversed(list(map(rad, val))).
while True:
    for i, dc in enumerate(map(rad,val)):
        servow.ChangeDutyCycle(dc)
        time.sleep(0.5)
    for i, dc in enumerate( reversed(map(rad,val)) ):
        servow.ChangeDutyCycle(dc)
        time.sleep(0.5)
    for i, dc in enumerate(map(rad,val)):
        servoh.ChangeDutyCycle(dc)
        time.sleep(0.5)
    for i, dc in enumerate( reversed(map(rad,val)) ):
        servoh.ChangeDutyCycle(dc)
        time.sleep(0.5)
# while True:
#     for i, dc in enumerate(val):
#         servo.ChangeDutyCycle(dc)
#         print("Angle:" + str(i*22.5)+" dc = %.4f" % dc)
#         time.sleep(0.5)
#     for i, dc in enumerate( reversed(val) ):
#         servo.ChangeDutyCycle(dc)
#         print("Angle:" + str(180 - i*22.5)+" dc = %.4f" % dc)
# time.sleep(0.5) | [
"ichirosasaki@sf.cs.it-chiba.ac.jp"
] | ichirosasaki@sf.cs.it-chiba.ac.jp |
2bce411c35e912e6ed7c250789f2f2259956fe8f | 6679fd1102802bf190294ef43c434b6047840dc2 | /openconfig_bindings/bgp/global_/afi_safis/afi_safi/l2vpn_vpls/prefix_limit/__init__.py | 912ccb145ff12843af0245b01ed67e1ee0f21e7d | [] | no_license | robshakir/pyangbind-openconfig-napalm | d49a26fc7e38bbdb0419c7ad1fbc590b8e4b633e | 907979dc14f1578f4bbfb1c1fb80a2facf03773c | refs/heads/master | 2023-06-13T17:17:27.612248 | 2016-05-10T16:46:58 | 2016-05-10T16:46:58 | 58,091,515 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,217 | py |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import config
import state
# NOTE: auto-generated by pyangbind from the openconfig-bgp YANG model.
# Prefer regenerating from the YANG source over hand-editing; the comments
# below are for readers only.
class prefix_limit(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-bgp - based on the path /bgp/global/afi-safis/afi-safi/l2vpn-vpls/prefix-limit. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
  """
  __slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__config','__state',)
  _yang_name = 'prefix-limit'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    # Resolve the XPath helper: explicit kwarg wins, then the parent's helper
    # (when this container is attached to a parent), otherwise disabled.
    helper = kwargs.pop("path_helper", None)
    if helper is False:
      self._path_helper = False
    elif helper is not None and isinstance(helper, xpathhelper.YANGPathHelper):
      self._path_helper = helper
    elif hasattr(self, "_parent"):
      helper = getattr(self._parent, "_path_helper", False)
      self._path_helper = helper
    else:
      self._path_helper = False
    self._extmethods = False
    # Child YANG containers (the usual OpenConfig config/state split).
    self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
    self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
    load = kwargs.pop("load", None)
    if args:
      # Copy-constructor path: accept one object exposing the same elements
      # and copy over only the values that were actually changed.
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Path of this container within the YANG tree: derived from the parent
    # when attached, otherwise the absolute model path.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return [u'bgp', u'global', u'afi-safis', u'afi-safi', u'l2vpn-vpls', u'prefix-limit']
  def _get_config(self):
    """
    Getter method for config, mapped from YANG variable /bgp/global/afi_safis/afi_safi/l2vpn_vpls/prefix_limit/config (container)
    YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
    """
    return self.__config
  def _set_config(self, v, load=False):
    """
    Setter method for config, mapped from YANG variable /bgp/global/afi_safis/afi_safi/l2vpn_vpls/prefix_limit/config (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config() directly.
    YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
    """
    # Coerce the supplied value into the generated 'config' container type;
    # incompatible input is reported together with the expected YANG type.
    try:
      t = YANGDynClass(v,base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config must be of a type compatible with container""",
          'defined-type': "container",
          'generated-type': """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)""",
        })
    self.__config = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_config(self):
    # Reset 'config' to a fresh, empty container instance.
    self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
  def _get_state(self):
    """
    Getter method for state, mapped from YANG variable /bgp/global/afi_safis/afi_safi/l2vpn_vpls/prefix_limit/state (container)
    YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
    """
    return self.__state
  def _set_state(self, v, load=False):
    """
    Setter method for state, mapped from YANG variable /bgp/global/afi_safis/afi_safi/l2vpn_vpls/prefix_limit/state (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_state is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_state() directly.
    YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
    """
    # Same coercion pattern as _set_config, for the 'state' container.
    try:
      t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """state must be of a type compatible with container""",
          'defined-type': "container",
          'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)""",
        })
    self.__state = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_state(self):
    # Reset 'state' to a fresh, empty container instance.
    self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
  config = property(_get_config, _set_config)
  state = property(_get_state, _set_state)
  _pyangbind_elements = {'config': config, 'state': state, }
| [
"rjs@jive.com"
] | rjs@jive.com |
778b5ec5834ff92e0543076176b311dae05ffe3b | e5cc31d7a1cd2bf662e792cf55325f9b93b0405c | /ip_canny_edge.py | 28102b0adb865b84009c49c6c01ffb4b6a19947d | [] | no_license | ericrom1228/Canny-edge-image-maker | b0fa8f9ce3173a3999f000c2cd792b2526cdcc89 | 05a3623a65c12d75b3ef76b10062ea63cecd32c6 | refs/heads/master | 2020-09-17T08:33:41.393772 | 2019-11-25T22:42:01 | 2019-11-25T22:42:01 | 224,055,238 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 909 | py | import cv2
import numpy as np
from matplotlib import pyplot as plt
def nothing(x):
    """No-op trackbar callback (OpenCV requires one; values are polled instead)."""
    return None
# Interactive demo: show an image next to its Canny edge map, with two
# trackbars controlling the hysteresis thresholds.
img = cv2.imread('Eric_Romano.jpg',0)  # flag 0: presumably grayscale load — TODO confirm
window_title = 'edges with trackbar'
cv2.namedWindow(window_title)
# Trackbars for the lower/upper Canny thresholds; 'nothing' is a no-op
# callback because the values are polled inside the loop below.
cv2.createTrackbar('min', window_title, 0, 200, nothing)
cv2.createTrackbar('max', window_title, 300, 500, nothing)
while True:
    minval = cv2.getTrackbarPos('min', window_title)
    maxval = cv2.getTrackbarPos('max', window_title)
    edges = cv2.Canny(img,minval,maxval)
    # Display the original and the edge map side by side in one window.
    numpy_horizontal = np.hstack((img,edges))
    cv2.imshow(window_title, numpy_horizontal)
    if cv2.waitKey(1) == 27: #27 is the value of the ESC key
        break
cv2.destroyAllWindows()
'''
plt.subplot(1,2,1), plt.imshow(img, cmap='gray'), plt.title('Original')
plt.xticks([]), plt.yticks([])
plt.subplot(1,2,2), plt.imshow(edges, cmap='gray'), plt.title('Edges')
plt.xticks([]), plt.yticks([])
plt.show()
'''
| [
"noreply@github.com"
] | noreply@github.com |
3dc16aba361b3ae8a677b763d965456e88721d4b | 29d982d3358b91bc120a7d4d2c3b65fe3058e2b0 | /15.2.py | ae5318e23af4909d1ad2d0ce257271e322a543fb | [] | no_license | JHanek3/Coursera | 46153f3abda91fc86b59aac2d313f1c9a48c6d77 | 5846f00a08752493d91a31e4992a1613ba762567 | refs/heads/master | 2020-06-19T16:24:29.279198 | 2019-07-14T01:57:26 | 2019-07-14T01:57:26 | 196,782,780 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,252 | py | import urllib.request, urllib.parse, urllib.error
import json
import ssl
# Geocoding lookup loop (py4e-style exercise).  Without a real Google API
# key the script falls back to the course mirror with the dummy key 42.
api_key = False
# If you have a Google Places API key, enter it here
# api_key = 'AIzaSy___IDByT70'
# https://developers.google.com/maps/documentation/geocoding/intro
if api_key is False:
    api_key = 42
    serviceurl = 'http://py4e-data.dr-chuck.net/json?'
else :
    serviceurl = 'https://maps.googleapis.com/maps/api/geocode/json?'
# Ignore SSL certificate errors
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
while True:
    address = input('Enter location: ')
    # Blank input ends the loop.
    if len(address) < 1: break
    parms = dict()
    parms['address'] = address
    if api_key is not False: parms['key'] = api_key
    url = serviceurl + urllib.parse.urlencode(parms)
    print('Retrieving', url)
    uh = urllib.request.urlopen(url, context=ctx)
    data = uh.read().decode()
    print('Retrieved', len(data), 'characters')
    # Any JSON parse failure is treated as "no data" (bare except kept as-is).
    try:
        js = json.loads(data)
    except:
        js = None
    # Reject missing/failed responses before touching 'results'.
    if not js or 'status' not in js or js['status'] != 'OK':
        print('==== Failure To Retrieve ====')
        print(data)
        continue
    print ("Place id", js["results"][0]["place_id"])
"noreply@github.com"
] | noreply@github.com |
a868f06ffc94c8e8f5374027fa9157e9edf75fed | 9d5ae8cc5f53f5aee7247be69142d9118769d395 | /582. Kill Process.py | f6d2712a589e4d1bded42a8fccb55a00c2de168e | [] | no_license | BITMystery/leetcode-journey | d4c93319bb555a7e47e62b8b974a2f77578bc760 | 616939d1599b5a135747b0c4dd1f989974835f40 | refs/heads/master | 2020-05-24T08:15:30.207996 | 2017-10-21T06:33:17 | 2017-10-21T06:33:17 | 84,839,304 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 627 | py | class Solution(object):
def killProcess(self, pid, ppid, kill):
"""
:type pid: List[int]
:type ppid: List[int]
:type kill: int
:rtype: List[int]
"""
d = {}
for i in xrange(len(ppid)):
if ppid[i] in d:
d[ppid[i]] += [pid[i]]
else:
d[ppid[i]] = [pid[i]]
res = []
stack = [kill]
while stack:
k = stack.pop()
res += [k]
if k in d:
stack += d[k]
return res
s = Solution()
print s.killProcess([1, 3, 10, 5], [3, 0, 5, 3], 5) | [
"noreply@github.com"
] | noreply@github.com |
7111ddfb6acf2732a7fac3581369ead18f23ff53 | 109ac2988a85c85ce0d734b788caca1c3177413b | /senlin/tests/__init__.py | 1634fd8f1ae8335f9341c3e1fcb454027b088cb8 | [
"Apache-2.0"
] | permissive | tengqm/senlin | 481c16e19bc13911625d44819c6461a7c72e41cd | aa59c55c098abb13590bc4308c753338ce4a70f4 | refs/heads/master | 2021-01-19T04:51:17.010414 | 2015-03-16T10:06:09 | 2015-03-16T10:06:09 | 28,478,662 | 2 | 5 | null | 2015-03-04T07:05:00 | 2014-12-25T10:22:18 | Python | UTF-8 | Python | false | false | 912 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import oslo_i18n
def fake_translate_msgid(msgid, domain, desired_locale=None):
return msgid
oslo_i18n.enable_lazy()
#To ensure messages don't really get translated while running tests.
#As there are lots of places where matching is expected when comparing
#exception message(translated) with raw message.
oslo_i18n._translate_msgid = fake_translate_msgid
| [
"tengqim@cn.ibm.com"
] | tengqim@cn.ibm.com |
062ecdd265d1297a42ff5c0a78c923af59c683dd | 60d84625e0d033748400b0bf0089248c3d83d9e1 | /hacker_news/main.py | 8c35f865565b3a74e1b84749d6a890cce1903edb | [] | no_license | bokotomo/feed_app | c4fb045a3340c5e0e49223e25323299a9775c3ec | c1c089faba23a1f58232fd0bd5cdc3c82fabe0dc | refs/heads/master | 2021-06-28T22:45:06.812241 | 2017-09-15T04:43:42 | 2017-09-15T04:43:42 | 103,614,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | from message_manager import MessageManager
# Script entry point.
if __name__ == "__main__":
    # NOTE(review): 'zc' is an undefined name — this raises NameError when
    # run.  Presumably MessageManager was meant to be used here; TODO confirm.
    zc
| [
"bokotomo@me.com"
] | bokotomo@me.com |
417001e885e8d1bbd4496011fe2f1681b399931a | 8a0e26f5823b7c40410ef48a577a82be9aa0d6bd | /fun2.py | d972f51825dd5aa267f9442a241cf3b9a566c145 | [] | no_license | yanashurpik/Andersen-Trainee-Shurpik | 40c209f9f73451dc1d2a9353d1423b7e3a39723e | 1bca51f8d1e42bec4833770ae8687d0298413ae0 | refs/heads/main | 2023-05-07T17:06:32.258501 | 2021-05-15T13:07:50 | 2021-05-15T13:07:50 | 367,345,280 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | #Яна Шурпик, Минск, AQA, python
def collecting_data():
    """Prompt the user for a name on stdin and return it unchanged."""
    return input("Введите имя: ")
def second_function(second_data):
    """Check the collected name against the expected one.

    Prints a greeting and returns True for the expected name;
    prints a "no such name" message and returns False otherwise.
    """
    matched = second_data == 'Вячеслав'
    print('Привет, Вячеслав' if matched else "Нет такого имени")
    return matched
#second_function(collecting_data()) | [
"yana_shurpik@imap.by"
] | yana_shurpik@imap.by |
e03e985e0c2f4e2e0cf120daa75deb2ef3afd435 | 09b40c90f5848b284c5a0bc626fc144c1dcda8c0 | /node_modules/uws/build/config.gypi | 58687fdd4f26aa2379fa56eefd518b18c60967d2 | [
"Zlib"
] | permissive | jboudouris/gifMatcherPersonal | 48a9b9478fcaa44760ce17065bacb55449d75991 | 4c07803adb09b9e998b87cf21dcf8327cdfe61d9 | refs/heads/master | 2022-12-11T19:10:46.377675 | 2019-07-10T00:44:42 | 2019-07-10T00:44:42 | 133,701,249 | 1 | 0 | null | 2022-12-02T03:38:38 | 2018-05-16T17:25:55 | JavaScript | UTF-8 | Python | false | false | 5,234 | gypi | # Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"asan": 0,
"coverage": "false",
"debug_devtools": "node",
"debug_http2": "false",
"debug_nghttp2": "false",
"force_dynamic_crt": 0,
"host_arch": "x64",
"icu_data_file": "icudt60l.dat",
"icu_data_in": "../../deps/icu-small/source/data/in/icudt60l.dat",
"icu_endianness": "l",
"icu_gyp_path": "tools/icu/icu-generic.gyp",
"icu_locales": "en,root",
"icu_path": "deps/icu-small",
"icu_small": "true",
"icu_ver_major": "60",
"llvm_version": 0,
"node_byteorder": "little",
"node_enable_d8": "false",
"node_enable_v8_vtunejit": "false",
"node_install_npm": "true",
"node_module_version": 57,
"node_no_browser_globals": "false",
"node_prefix": "/usr/local",
"node_release_urlbase": "https://nodejs.org/download/release/",
"node_shared": "false",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_nghttp2": "false",
"node_shared_openssl": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_use_bundled_v8": "true",
"node_use_dtrace": "true",
"node_use_etw": "false",
"node_use_lttng": "false",
"node_use_openssl": "true",
"node_use_perfctr": "false",
"node_use_v8_platform": "true",
"node_without_node_options": "false",
"openssl_fips": "",
"openssl_no_asm": 0,
"shlib_suffix": "57.dylib",
"target_arch": "x64",
"uv_parent_path": "/deps/uv/",
"uv_use_dtrace": "true",
"v8_enable_gdbjit": 0,
"v8_enable_i18n_support": 1,
"v8_enable_inspector": 1,
"v8_no_strict_aliasing": 1,
"v8_optimized_debug": 0,
"v8_promise_internal_field_count": 1,
"v8_random_seed": 0,
"v8_trace_maps": 0,
"v8_use_snapshot": "true",
"want_separate_host_toolset": 0,
"xcode_version": "7.0",
"nodedir": "/Users/JamesBoudouris/.node-gyp/8.11.1",
"standalone_static_library": 1,
"save_dev": "true",
"dry_run": "",
"legacy_bundling": "",
"browser": "",
"commit_hooks": "true",
"only": "",
"viewer": "man",
"also": "",
"rollback": "true",
"audit": "true",
"usage": "",
"globalignorefile": "/usr/local/etc/npmignore",
"init_author_url": "",
"maxsockets": "50",
"shell": "/bin/bash",
"metrics_registry": "https://registry.npmjs.org/",
"parseable": "",
"shrinkwrap": "true",
"init_license": "ISC",
"timing": "",
"if_present": "",
"cache_max": "Infinity",
"init_author_email": "",
"sign_git_tag": "",
"cert": "",
"git_tag_version": "true",
"local_address": "",
"long": "",
"fetch_retries": "2",
"registry": "https://registry.npmjs.org/",
"no_proxy": "",
"key": "",
"message": "%s",
"versions": "",
"globalconfig": "/usr/local/etc/npmrc",
"always_auth": "",
"logs_max": "10",
"prefer_online": "",
"cache_lock_retries": "10",
"global_style": "",
"heading": "npm",
"fetch_retry_mintimeout": "10000",
"offline": "",
"read_only": "",
"searchlimit": "20",
"access": "",
"json": "",
"allow_same_version": "",
"description": "true",
"engine_strict": "",
"https_proxy": "",
"init_module": "/Users/JamesBoudouris/.npm-init.js",
"userconfig": "/Users/JamesBoudouris/.npmrc",
"cidr": "",
"node_version": "8.11.1",
"user": "",
"auth_type": "legacy",
"editor": "vi",
"ignore_prepublish": "",
"save": "true",
"script_shell": "",
"tag": "latest",
"global": "",
"progress": "true",
"ham_it_up": "",
"optional": "true",
"searchstaleness": "900",
"bin_links": "true",
"force": "",
"save_prod": "",
"searchopts": "",
"depth": "Infinity",
"node_gyp": "/usr/local/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js",
"rebuild_bundle": "true",
"sso_poll_frequency": "500",
"unicode": "true",
"fetch_retry_maxtimeout": "60000",
"ca": "",
"save_prefix": "^",
"scripts_prepend_node_path": "warn-only",
"sso_type": "oauth",
"strict_ssl": "true",
"tag_version_prefix": "v",
"dev": "",
"fetch_retry_factor": "10",
"group": "20",
"save_exact": "",
"cache_lock_stale": "60000",
"prefer_offline": "",
"version": "",
"cache_min": "10",
"otp": "",
"cache": "/Users/JamesBoudouris/.npm",
"searchexclude": "",
"color": "true",
"package_lock": "true",
"package_lock_only": "",
"save_optional": "",
"user_agent": "npm/6.0.1 node/v8.11.1 darwin x64",
"ignore_scripts": "",
"cache_lock_wait": "10000",
"production": "",
"save_bundle": "",
"send_metrics": "",
"init_version": "1.0.0",
"node_options": "",
"umask": "0022",
"scope": "",
"git": "git",
"init_author_name": "",
"onload_script": "",
"tmp": "/var/folders/vf/_n2s_v4j4974jftdhl6wxk9r0000gn/T",
"unsafe_perm": "true",
"link": "",
"prefix": "/usr/local"
}
}
| [
"JamesBoudouris@Georges-MacBook-Pro.local"
] | JamesBoudouris@Georges-MacBook-Pro.local |
5093968bda5c65cae1a81296e7048c66d8ce4653 | 3b109c04041f581a4c16e99e67c9cd6a20c6bea6 | /cfgov/v1/tests/migrations/test_0142_migrate_pas_link_data_to_pagechooserblock.py | 8890e2064c8973e4acef332b676a6f9cb018d132 | [
"CC0-1.0"
] | permissive | adrukh/cfgov-refresh | 49270630740ef7f95a66db253be28cde91c29b82 | 2209b1e9c74e3a2db74c97603d54274722ad8cec | refs/heads/master | 2023-07-08T17:26:32.855003 | 2019-03-07T17:25:37 | 2019-03-07T17:25:37 | 174,398,822 | 0 | 0 | CC0-1.0 | 2023-09-05T14:53:11 | 2019-03-07T18:28:30 | Python | UTF-8 | Python | false | false | 10,262 | py | from importlib import import_module
from unittest import TestCase
from v1.models.base import CFGOVPage
from v1.tests.wagtail_pages.helpers import save_new_page
class TestMigration0142(TestCase):
    """Tests for migration 0142's forward_mapper, which adds a
    'disclaimer_page' page id to EmailSignup block data, derived from a
    Privacy Act statement (PAS) link in the form field's 'info' rich text
    (falling back to a generic page id, 1189, when none is found).
    """

    @classmethod
    def setUpClass(cls):
        # Load the migration module once for all tests; it is looked up by
        # name because migration modules start with a digit.
        super(TestMigration0142, cls).setUpClass()
        cls.migration = import_module(
            'v1.migrations.0142_migrate_pas_link_data_to_pagechooserblock'
        )

    def test_forward_mapper_internal_pas_link(self):
        # An internal Wagtail page link (<a id="..." linktype="page">) should
        # become the disclaimer_page id.
        # data comes from EmailSignup block inside FullWidthText on page 11358
        data = {
            'text': u'Our email newsletter has tips and info to help you ...',
            'gd_code': u'USCFPB_127',
            'heading': u'Buying a home?',
            'form_field': [{
                'info': u'<p><a id="3775" linktype="page">PAS</a></p>',
                'inline_info': True,
                'required': True,
                'label': u'Email address',
                'btn_text': u'Sign up',
                'placeholder': u'example@mail.com',
                'type': u'email'
            }],
            'default_heading': False
        }
        migrated = self.migration.forward_mapper(
            'unused param',
            data
        )
        self.assertEqual(migrated, {
            'heading': u'Buying a home?',
            'default_heading': False,
            'text': u'Our email newsletter has tips and info to help you ...',
            'gd_code': u'USCFPB_127',
            'disclaimer_page': 3775,
            'form_field': [{
                'type': u'email',
                'inline_info': True,
                'btn_text': u'Sign up',
                'info': u'<p><a id="3775" linktype="page">PAS</a></p>',
                'label': u'Email address',
                'required': True,
                'placeholder': u'example@mail.com'
            }]
        })

    def test_forward_mapper_explicit_path_pas_link(self):
        # An explicit href path should be resolved to the matching page's pk.
        page = CFGOVPage(
            title='Privacy Act statement',
            slug='privacy-act-statement',
        )
        save_new_page(page)
        data = {
            'text': u'Our email newsletter has tips and info to help you ...',
            'gd_code': u'USCFPB_127',
            'heading': u'Buying a home?',
            'form_field': [{
                'info': u'<p><a href="/privacy-act-statement/">PAS</a></p>',
                'inline_info': True,
                'required': True,
                'label': u'Email address',
                'btn_text': u'Sign up',
                'placeholder': u'example@mail.com',
                'type': u'email'
            }],
            'default_heading': False
        }
        migrated = self.migration.forward_mapper(
            'unused param',
            data
        )
        self.assertEqual(migrated, {
            'heading': u'Buying a home?',
            'default_heading': False,
            'text': u'Our email newsletter has tips and info to help you ...',
            'gd_code': u'USCFPB_127',
            'disclaimer_page': page.pk,
            'form_field': [{
                'type': u'email',
                'inline_info': True,
                'btn_text': u'Sign up',
                'info': u'<p><a href="/privacy-act-statement/">PAS</a></p>',
                'label': u'Email address',
                'required': True,
                'placeholder': u'example@mail.com'
            }]
        })

    def test_forward_mapper_no_pas_link_gets_generic(self):
        # 'info' text with no link at all should fall back to the generic
        # disclaimer page (1189).
        data = {
            'text': u'Our email newsletter has tips and info to help you ...',
            'gd_code': u'USCFPB_127',
            'heading': u'Buying a home?',
            'form_field': [{
                'info': u'<p>Who needs a disclaimer?</p>',
                'inline_info': True,
                'required': True,
                'label': u'Email address',
                'btn_text': u'Sign up',
                'placeholder': u'example@mail.com',
                'type': u'email'
            }],
            'default_heading': False
        }
        migrated = self.migration.forward_mapper(
            'unused param',
            data
        )
        self.assertEqual(migrated, {
            'heading': u'Buying a home?',
            'default_heading': False,
            'text': u'Our email newsletter has tips and info to help you ...',
            'gd_code': u'USCFPB_127',
            'disclaimer_page': 1189,
            'form_field': [{
                'type': u'email',
                'inline_info': True,
                'btn_text': u'Sign up',
                'info': u'<p>Who needs a disclaimer?</p>',
                'label': u'Email address',
                'required': True,
                'placeholder': u'example@mail.com'
            }]
        })

    def test_forward_mapper_no_info_gets_generic(self):
        # A form field without an 'info' key should also get the generic page.
        data = {
            'text': u'Our email newsletter has tips and info to help you ...',
            'gd_code': u'USCFPB_127',
            'heading': u'Buying a home?',
            'form_field': [{
                'inline_info': True,
                'required': True,
                'label': u'Email address',
                'btn_text': u'Sign up',
                'placeholder': u'example@mail.com',
                'type': u'email'
            }],
            'default_heading': False
        }
        migrated = self.migration.forward_mapper(
            'unused param',
            data
        )
        self.assertEqual(migrated, {
            'heading': u'Buying a home?',
            'default_heading': False,
            'text': u'Our email newsletter has tips and info to help you ...',
            'gd_code': u'USCFPB_127',
            'disclaimer_page': 1189,
            'form_field': [{
                'type': u'email',
                'inline_info': True,
                'btn_text': u'Sign up',
                'label': u'Email address',
                'required': True,
                'placeholder': u'example@mail.com'
            }]
        })

    def test_forward_mapper_empty_form_field_gets_generic(self):
        # An empty form_field list still gets a generic disclaimer_page.
        data = {
            'text': u'Our email newsletter has tips and info to help you ...',
            'gd_code': u'USCFPB_127',
            'heading': u'Buying a home?',
            'form_field': [],
            'default_heading': False
        }
        migrated = self.migration.forward_mapper(
            'unused param',
            data
        )
        self.assertEqual(migrated, {
            'heading': u'Buying a home?',
            'default_heading': False,
            'text': u'Our email newsletter has tips and info to help you ...',
            'gd_code': u'USCFPB_127',
            'disclaimer_page': 1189,
            'form_field': []
        })

    def test_forward_mapper_no_form_field_gets_generic(self):
        # Entirely missing form_field key: generic disclaimer_page is added.
        data = {
            'text': u'Our email newsletter has tips and info to help you ...',
            'gd_code': u'USCFPB_127',
            'heading': u'Buying a home?',
            'default_heading': False
        }
        migrated = self.migration.forward_mapper(
            'unused param',
            data
        )
        self.assertEqual(migrated, {
            'heading': u'Buying a home?',
            'default_heading': False,
            'text': u'Our email newsletter has tips and info to help you ...',
            'gd_code': u'USCFPB_127',
            'disclaimer_page': 1189,
        })

    def test_forward_mapper_wrong_generic_links_get_correct_generic_link(self):
        # Links to the old privacy-policy pages (ids 558 and 571) are not PAS
        # links; both should map to the correct generic page (1189).
        self.maxDiff = None
        data = {
            'text': u'Our email newsletter has tips and info to help you ...',
            'gd_code': u'USCFPB_127',
            'heading': u'Buying a home?',
            'form_field': [{
                'info': u'<a id="558" linktype="page">Privacy Policy</a>',
                'inline_info': True,
                'required': True,
                'label': u'Email address',
                'btn_text': u'Sign up',
                'placeholder': u'example@mail.com',
                'type': u'email'
            }],
            'default_heading': False
        }
        migrated = self.migration.forward_mapper(
            'unused param',
            data
        )
        self.assertEqual(migrated, {
            'heading': u'Buying a home?',
            'default_heading': False,
            'text': u'Our email newsletter has tips and info to help you ...',
            'gd_code': u'USCFPB_127',
            'disclaimer_page': 1189,
            'form_field': [{
                'type': u'email',
                'inline_info': True,
                'btn_text': u'Sign up',
                'info': u'<a id="558" linktype="page">Privacy Policy</a>',
                'label': u'Email address',
                'required': True,
                'placeholder': u'example@mail.com'
            }]
        })
        data = {
            'text': u'Our email newsletter has tips and info to help you ...',
            'gd_code': u'USCFPB_127',
            'heading': u'Buying a home?',
            'form_field': [{
                'info': u'<a id="571" linktype="page">'
                        'Website Privacy Policy</a>',
                'inline_info': True,
                'required': True,
                'label': u'Email address',
                'btn_text': u'Sign up',
                'placeholder': u'example@mail.com',
                'type': u'email'
            }],
            'default_heading': False
        }
        migrated = self.migration.forward_mapper(
            'unused param',
            data
        )
        self.assertEqual(migrated, {
            'heading': u'Buying a home?',
            'default_heading': False,
            'text': u'Our email newsletter has tips and info to help you ...',
            'gd_code': u'USCFPB_127',
            'disclaimer_page': 1189,
            'form_field': [{
                'type': u'email',
                'inline_info': True,
                'btn_text': u'Sign up',
                'info': u'<a id="571" linktype="page">'
                        'Website Privacy Policy</a>',
                'label': u'Email address',
                'required': True,
                'placeholder': u'example@mail.com'
            }]
        })
| [
"scott.cranfill@cfpb.gov"
] | scott.cranfill@cfpb.gov |
4ad44bcde9b6556481cdb983363a5b9757ecef01 | e1b09ae83920656b20cad0e84f21b741752e926d | /sams/check_dupl_def2.py | 29943740c0b63b607eb174d6f368341eced7c57f | [] | no_license | yeongsun/cute | 5c46729d43f13967cdf4bda0edd100362de90c70 | 3150d7387c04c15e3569dc821562564cd8f9d87c | refs/heads/master | 2020-04-25T10:38:41.833479 | 2018-11-29T05:42:46 | 2018-11-29T05:42:46 | 156,344,910 | 0 | 0 | null | 2018-11-06T07:41:03 | 2018-11-06T07:41:03 | null | UTF-8 | Python | false | false | 2,231 | py | import os, sys
import logging
import concurrent.futures
import ys_logger
# Extend the import search path to the parent directory.
sys.path.append(os.path.abspath('..'))
# Module-wide logger: INFO level, routed through the project's custom handler.
logger = logging.getLogger('root')
logger.setLevel("INFO")
logger.addHandler(ys_logger.MyHandler())
logger.info("Finish setting logger")
class check_dupl_conc():
    """Split the rows of select3.txt into duplicates and non-duplicates.

    Reference triples (question1, question2, answer) are loaded from
    delivered_data/sum.tsv; rows of select3.txt whose triple is already
    present go to dup_head_conc.txt, the rest to not_dup_head_conc.txt.
    Question text is compared with '?' characters removed.
    """
    def __init__(self):
        # Previously delivered data (tab-separated; 9- or 5-column rows).
        self.f1 = open("delivered_data/sum.tsv", "r")
        # Original column annotations (reviewer names, kept from the source):
        # 박영선 a / 이원문 b / 카카오 c / 박영선 d / 이원문 e
        self.f2 = open("not_dup_head_conc.txt", "w")  # rows not seen before
        self.f3 = open("dup_head_conc.txt", "w")      # rows already delivered
        self.lst = list()  # (q1, q2, answer) reference triples

    def preproc(self):
        """Load (q1, q2, answer) triples from self.f1 into self.lst.

        Rows with 9 columns use fields 4/5/6; rows with 5 columns use
        fields 1/2/3.  Any other width is skipped.  (The original code
        initialized the accumulator to the builtin ``list`` type and
        appended a stale value for rows of any other width — fixed here
        by appending only when a row actually parses.)
        """
        for raw in self.f1:
            fields = raw.replace("\n", "").split("\t")
            if len(fields) == 9:
                q1_i, q2_i, ans_i = 4, 5, 6
            elif len(fields) == 5:
                q1_i, q2_i, ans_i = 1, 2, 3
            else:
                continue  # malformed row: nothing to record
            self.lst.append((
                fields[q1_i].strip().replace("?", ""),
                fields[q2_i].strip().replace("?", ""),
                fields[ans_i].strip(),
            ))
        self.f1.close()
        logger.info("Finish load f1")

    def comp(self, f):
        """Route each tab-separated line of ``f``: known triples to
        self.f3 (duplicates), everything else to self.f2.

        Lines carry q1 at index 5, the answer at index 6 and q2 at
        index 13.
        """
        known = set(self.lst)  # O(1) membership instead of a scan per line
        for line in f:
            item = line.split("\t")
            triple = (
                item[5].strip().replace("?", ""),
                item[13].strip().replace("?", ""),
                item[6].strip(),
            )
            if triple in known:
                self.f3.write(line)
            else:
                self.f2.write(line)

    def main(self):
        """Classify every row of select3.txt and close the output files."""
        with open("select3.txt", "r") as f:
            # Original column annotations (reviewer names, kept from the
            # source): 박영선 parkys a / 이원문 moon b / 카카오 kakao c /
            # 박영선 ylunar x / 이원문 moon y
            self.comp(f)
        logger.info("Finish All")
        self.f2.close()
        self.f3.close()
# Script entry point: load the reference triples, then classify select3.txt.
if __name__ == "__main__":
    a = check_dupl_conc()
    a.preproc()
    a.main()
"ylunar@naver.com"
] | ylunar@naver.com |
88df44e91d318c2d1910ffe75e04bff27aebd197 | c087b91f6b8258b96be12388ea2b066b346f05ce | /roapy/core/domain.py | 615f9ce815fbb1c4c508689a19545bc855c00cb6 | [
"MIT"
] | permissive | kirknorth/roapy | 7d3e602655b8e8d28d1a400ecd02b6c38ed9720f | 9779f683e2eb1afc550a88a488c5c0849d7f25f8 | refs/heads/master | 2021-01-10T23:15:18.999992 | 2016-11-01T21:54:43 | 2016-11-01T21:54:43 | 70,624,678 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,046 | py | """
grid.core.domain
================
"""
import pyproj
import numpy as np
# NOTE(review): this module uses Python 2 print statements; it targets Py2.
class Domain(object):
    """
    Attributes
    ----------
    x, y, z : ndarray
        Axes in meters defining the rectilinear grid.
    coordinates : tuple
        The (z, y, x) coordinates of each vertex in the rectilinear grid.
    lat_0, lon_0, alt_0 : float
        Latitude, longitude, and altitude of the grid origin. Latitude and
        longitude should be in decimal degrees, and altitude should be meters
        above mean sea level.
    proj : str, optional
        Projection used to transform geographic coordinates (latitude and
        longitude) to planar coordinates. The default projection is Lambert
        conformal conic.
    datum : str, optional
        A datum defines the shape, size, and orientation of the Earth. The
        default datum is the World Geodetic System 1984.
    ellps : str, optional
        The default ellipsoid is the World Geodetic System 1984.
    dem : gdal.Dataset, optional
        A digital elevation model (DEM).
    """
    def __init__(self, axes, origin, proj='lcca', datum='WGS84', ellps='WGS84',
                 dem=None):
        """ Initialize.

        axes is an iterable of (z, y, x) axis arrays in meters;
        origin is a (lat_0, lon_0, alt_0) tuple for the grid origin.
        """
        # Grid axes attributes
        self.z, self.y, self.x = [np.asarray(axis) for axis in axes]
        self.nz, self.ny, self.nx = self.z.size, self.y.size, self.x.size
        self.shape = (self.nz, self.ny, self.nx)
        # Grid origin attributes
        self.lat_0, self.lon_0, self.alt_0 = origin
        # Grid coordinates attribute
        self._add_grid_coordinates()
        # Projection and geod attributes
        self.proj = pyproj.Proj(
            proj=proj, ellps=ellps, datum=datum, lat_0=self.lat_0,
            lon_0=self.lon_0, x_0=0.0, y_0=0.0)
        self.geod = pyproj.Geod(ellps=ellps, datum=datum)
        # GDAL dataset attribute
        self.dem = dem
        # Default radar offset attribute; set by
        # compute_radar_offset_from_origin().
        self.radar_offset = None
    def compute_radar_offset_from_origin(self, radar, debug=False):
        """ Compute radar (z, y, x) offset from grid origin.

        NOTE(review): assumes radar exposes Py-ART-style latitude/
        longitude/altitude dicts with a 'data' array — TODO confirm.
        """
        # Parse radar latitude, longitude, and altitude
        radar_lat = radar.latitude['data'][0]
        radar_lon = radar.longitude['data'][0]
        radar_alt = radar.altitude['data'][0]
        # Default the grid origin altitude to the radar altitude when unset.
        if self.alt_0 is None:
            self.alt_0 = radar_alt
        # Planar (x, y) offset via the grid projection; z is a plain
        # altitude difference.
        radar_x, radar_y = self.proj(radar_lon, radar_lat)
        radar_z = radar_alt - self.alt_0
        if debug:
            print 'Radar x offset from origin: {:.2f} m'.format(radar_x)
            print 'Radar y offset from origin: {:.2f} m'.format(radar_y)
            print 'Radar z offset from origin: {:.2f} m'.format(radar_z)
        self.radar_offset = (radar_z, radar_y, radar_x)
        return
    def _add_grid_coordinates(self):
        """ Add (z, y, x) coordinates of each grid point.

        Stores flattened per-vertex coordinate arrays in self.coordinates.
        """
        Z, Y, X = np.meshgrid(self.z, self.y, self.x, indexing='ij')
        self.coordinates = (Z.flatten(), Y.flatten(), X.flatten())
        return
| [
"kirk.w.north@gmail.com"
] | kirk.w.north@gmail.com |
1528c695e6d9d88403930b883ec3be56309b17c3 | 86f99a5835a7dda2f80d951bfee77000686274df | /lib/python/invenio/bibauthorid_webapi.py | 59fbbf98fb5984957e1c50042b2dc0af6412eeaf | [] | no_license | tomas44444/augustsedlacek | e8e99ac591f3d85d7bb5c20d08686682d6e1a859 | a47084a0ef7aa6c5dd8d34a9322b2b97528db904 | refs/heads/master | 2021-01-10T02:39:26.089821 | 2013-02-26T13:07:16 | 2013-02-26T13:07:16 | 8,431,012 | 0 | 1 | null | 2020-07-26T09:14:50 | 2013-02-26T11:19:50 | Python | UTF-8 | Python | false | false | 37,101 | py | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
'''
Bibauthorid_webapi
Point of access to the documents clustering facility.
Provides utilities to safely interact with stored data.
'''
import invenio.bibauthorid_config as bconfig
import invenio.bibauthorid_frontinterface as dbapi
import invenio.bibauthorid_name_utils as nameapi
import invenio.webauthorprofile_interface as webauthorapi
import invenio.search_engine as search_engine
from search_engine import perform_request_search
from cgi import escape
from time import gmtime, strftime, ctime
from invenio.access_control_admin import acc_find_user_role_actions
from invenio.webuser import collect_user_info, get_session, getUid
from invenio.webuser import isUserSuperAdmin
from invenio.access_control_engine import acc_authorize_action
from invenio.access_control_admin import acc_get_role_id, acc_get_user_roles
from invenio.external_authentication_robot import ExternalAuthRobot
from invenio.external_authentication_robot import load_robot_keys
from invenio.config import CFG_BIBAUTHORID_AUTHOR_TICKET_ADMIN_EMAIL
from invenio.config import CFG_SITE_URL
from invenio.mailutils import send_email
from operator import add
from invenio.bibauthorid_dbinterface import get_personiID_external_ids #export #pylint: disable-msg=W0614
def get_person_redirect_link(pid):
    '''
    Resolve the canonical name for a person ID.

    @param pid: person identifier
    @type pid: int
    @return: canonical name if one exists, otherwise the pid as a string
    @rtype: string
    '''
    canonical = dbapi.get_canonical_id_from_personid(pid)
    if canonical:
        return str(canonical[0][0])
    return str(pid)
def update_person_canonical_name(person_id, canonical_name, userinfo=''):
    '''
    Store a manually chosen canonical name for a person and log the change.

    @param person_id: person id
    @param canonical_name: canonical name to set
    @param userinfo: '<uid>||<ip>' style string identifying the acting user
    '''
    # The acting uid is encoded before the '||' separator, when present.
    uid = userinfo.split('||')[0] if userinfo.count('||') else ''
    dbapi.update_personID_canonical_names([person_id], overwrite=True,
                                          suggested=canonical_name)
    dbapi.insert_user_log(userinfo, person_id, 'data_update',
                          'CMPUI_changecanonicalname', '',
                          'Canonical name manually updated.', userid=uid)
def get_canonical_id_from_person_id(person_id):
    '''
    Finds the person canonical name from personid (e.g. 1)

    @param person_id: the person ID
    @type person_id: string or int
    @return: the canonical id, or person_id itself when none is found
    '''
    if not person_id or not isinstance(person_id, (str, int)):
        return person_id
    try:
        return dbapi.get_canonical_id_from_personid(person_id)[0][0]
    except IndexError:
        # No canonical id recorded — fall back to the raw person id.
        return person_id
def get_person_id_from_canonical_id(canonical_id):
    '''
    Finds the person id from a canonical name (e.g. Ellis_J_R_1)

    @param canonical_id: the canonical ID
    @type canonical_id: string
    @return: the person ID, or -1 on invalid input / lookup failure
    @rtype: int
    '''
    if not canonical_id or not isinstance(canonical_id, str):
        return -1
    try:
        return dbapi.get_person_id_from_canonical_id(canonical_id)[0][0]
    except IndexError:
        return -1
def get_bibrefs_from_bibrecs(bibreclist):
    '''
    Retrieve all bibrefs for all the recids in the list.

    @param bibreclist: list of record IDs
    @type bibreclist: list of int
    @return: one [recid, bibrefs] pair per input record
    @rtype: list of lists
    '''
    result = []
    for recid in bibreclist:
        refs = dbapi.get_possible_bibrecref([''], recid, always_match=True)
        result.append([recid, refs])
    return result
def get_possible_bibrefs_from_pid_bibrec(pid, bibreclist, always_match=False, additional_names=None):
    '''
    Returns for each bibrec a list of bibrefs for which the surname matches.

    @param pid: person id to gather the names strings from
    @param bibreclist: list of bibrecs on which to search
    @param always_match: match all bibrefs no matter the name
    @param additional_names: [n1,...,nn] names to match other then the one from personid
    '''
    pid = wash_integer_id(pid)
    # Copy into a list so it can be safely extended below: the db layer may
    # return a tuple or set, on which '+=' with a list/zip would raise
    # TypeError (the original code did `pid_names += zip(additional_names)`).
    pid_names = list(dbapi.get_person_db_names_set(pid))
    if additional_names:
        # Wrap each extra name in a 1-tuple so it matches the (name, ...)
        # row shape coming from the database (consumed as n[0] below).
        pid_names.extend((name,) for name in additional_names)
    lists = []
    for bibrec in bibreclist:
        lists.append([bibrec,
                      dbapi.get_possible_bibrecref([n[0] for n in pid_names],
                                                   bibrec, always_match)])
    return lists
def get_pid_from_uid(uid):
    '''
    Return the PID associated with the uid.

    @param uid: the internal ID of a user
    @type uid: int
    @return: the Person ID attached to the user or -1 if none found
    '''
    if isinstance(uid, tuple):
        return dbapi.get_personid_from_uid(uid)
    # The db layer expects a nested tuple structure; wrap a bare uid.
    return dbapi.get_personid_from_uid(((uid,),))
def get_user_level(uid):
    '''
    Finds and returns the aid-universe-internal numeric user level.

    @param uid: the user's id
    @type uid: int
    @return: A numerical representation of the maximum access level of a user
    @rtype: int
    '''
    actions = [row[1] for row in acc_find_user_role_actions({'uid': uid})]
    # max() on an empty sequence raises ValueError; a user with no role
    # actions simply gets the lowest level (0 — presumably 'guest' level,
    # consistent with _resolve_maximum_acces_rights's default).
    if not actions:
        return 0
    return max(dbapi.resolve_paper_access_right(acc) for acc in actions)
def get_person_id_from_paper(bibref=None):
    '''
    Returns the id of the person who wrote the paper.

    @param bibref: the bibref,bibrec pair that identifies the person
    @type bibref: str
    @return: the person id, or -1 if the bibref is invalid / unknown
    @rtype: int
    '''
    if not is_valid_bibref(bibref):
        return -1
    db_data = dbapi.get_papers_status(bibref)
    try:
        return db_data[0][1]
    except IndexError:
        return -1
def get_papers_by_person_id(person_id= -1, rec_status= -2, ext_out=False):
    '''
    Returns all the papers written by the person.

    @param person_id: identifier of the person to retrieve papers from
    @type person_id: int
    @param rec_status: minimal flag status a record must have to be displayed
    @type rec_status: int
    @param ext_out: Extended output (w/ author aff and date)
    @type ext_out: boolean
    @return: without ext_out: [recid, bibref, flag, authorname] per row;
        with ext_out: [recid, bibref, flag, authorname, affiliation, date,
        rt_status, experiment] per row
    @rtype: list of lists
    '''
    # Coerce and validate the person id; any non-numeric input yields [].
    if not isinstance(person_id, int):
        try:
            person_id = int(person_id)
        except (ValueError, TypeError):
            return []
    if person_id < 0:
        return []
    if not isinstance(rec_status, int):
        return []
    records = []
    # The extended columns (affiliation/date/experiment) are only fetched
    # when ext_out is requested, to keep the cheap call cheap.
    db_data = dbapi.get_person_papers(person_id,
                                      rec_status,
                                      show_author_name=True,
                                      show_title=False,
                                      show_rt_status=True,
                                      show_affiliations=ext_out,
                                      show_date=ext_out,
                                      show_experiment=ext_out)
    if not ext_out:
        # row["data"] is a "table:ref,recid" string; split(",")[1] is the recid.
        records = [[row["data"].split(",")[1], row["data"], row["flag"],
                    row["authorname"]] for row in db_data]
    else:
        for row in db_data:
            recid = row["data"].split(",")[1]
            bibref = row["data"]
            flag = row["flag"]
            authorname = row["authorname"]
            rt_status = row['rt_status']
            authoraff = ", ".join(row['affiliation'])
            try:
                # Picks the *shortest* date string of the candidates —
                # presumably the least specific/most canonical form.
                # TODO(review): confirm this ordering is intentional.
                date = sorted(row['date'], key=len)[0]
            except IndexError:
                date = "Not available"
            exp = ", ".join(row['experiment'])
            #date = ""
            records.append([recid, bibref, flag, authorname,
                            authoraff, date, rt_status, exp])
    return records
def get_papers_cluster(bibref):
    '''
    Returns the cluster of documents connected with this one.

    @param bibref: the table:bibref,bibrec pair to look for
    @type bibref: str
    @return: a list of record IDs (empty when the bibref is unknown)
    @rtype: list of int
    '''
    person_id = get_person_id_from_paper(bibref)
    if person_id > -1:
        return get_papers_by_person_id(person_id)
    return []
def get_person_request_ticket(pid=-1, tid=None):
    '''
    Returns the list of request tickets associated to a person.

    @param pid: person id
    @param tid: ticket id, to retrieve only one particular ticket
    @return: tickets [[],[]]
    '''
    if pid < 0:
        return []
    return dbapi.get_request_ticket(pid, ticket_id=tid)
def get_persons_with_open_tickets_list():
    '''
    Find every person with open tickets together with their ticket count.

    @return: [[pid, ticket_count]]
    '''
    return dbapi.get_persons_with_open_tickets_list()
def get_person_names_from_id(person_id=-1):
    '''
    Finds and returns the names associated with this person along with the
    frequency of occurrence (i.e. the number of papers).

    @param person_id: an id to find the names for
    @type person_id: int
    @return: name and number of occurrences of the name
    @rtype: tuple of tuple
    '''
    if not isinstance(person_id, int) or person_id < 0:
        return []
    return dbapi.get_person_names_count(person_id)
def get_person_db_names_from_id(person_id=-1):
    '''
    Finds and returns the names associated with this person as stored in
    the metadata of the underlying data set, along with the frequency of
    occurrence (i.e. the number of papers).

    @param person_id: an id to find the names for
    @type person_id: int
    @return: name and number of occurrences of the name
    @rtype: tuple of tuple
    '''
    if not isinstance(person_id, int) or person_id < 0:
        return []
    return dbapi.get_person_db_names_count(person_id)
def get_longest_name_from_pid(person_id=-1):
    '''
    Finds the longest name of a person to be representative for this person.

    @param person_id: the person ID to look at
    @type person_id: int
    @return: the longest normalized name of a person, or an error string
    @rtype: string
    '''
    if not isinstance(person_id, int) or person_id < 0:
        return "This doesn't look like a person ID!"
    longest = ""
    for row in dbapi.get_person_names_count(person_id):
        if row and len(row[0]) > len(longest):
            longest = row[0]
    return longest or "This person does not seem to have a name!"
def get_most_frequent_name_from_pid(person_id=-1, allow_none=False):
    '''
    Finds the most frequent name of a person to be representative
    for this person.

    @param person_id: the person ID to look at
    @type person_id: int
    @param allow_none: return None instead of an error string on failure
    @type allow_none: boolean
    @return: the most frequent normalized name of a person
    @rtype: string (or None when allow_none is set)
    '''
    pid = wash_integer_id(person_id)
    if not isinstance(pid, int) or pid < 0:
        if allow_none:
            return None
        return "'%s' doesn't look like a person ID!" % person_id
    mf_name = ""
    try:
        name_counts = dbapi.get_person_names_count(pid)
        # Highest occurrence count wins; max() keeps the first of any ties,
        # matching the original stable reverse-sort behaviour.
        mf_name = max(name_counts, key=lambda row: row[1])[0]
    except (IndexError, ValueError):
        pass
    if mf_name:
        return mf_name
    if allow_none:
        return None
    return "This person does not seem to have a name!"
def get_paper_status(bibref):
    '''
    Finds and returns the status of a bibrec-to-person assignment.

    @param bibref: the bibref-bibrec pair that unambiguously identifies a paper
    @type bibref: string
    @return: the assignment flag, or -10 when the paper is unknown
    @rtype: int
    '''
    db_data = dbapi.get_papers_status(bibref)
    # Row layout: (data, PersonID, flag)
    try:
        status = db_data[0][2]
    except IndexError:
        status = -10
    return wash_integer_id(status)
def wash_integer_id(param_id):
    '''
    Coerce the given value to an int.

    @param param_id: the number to be washed
    @type param_id: int or string
    @return: the int value, or -1 when conversion is impossible
    @rtype: int
    '''
    try:
        return int(param_id)
    except (ValueError, TypeError):
        return -1
def is_valid_bibref(bibref):
    '''
    Determines if the provided string is a valid bibref-bibrec pair.

    A valid pair looks like "<table>:<ref>,<recid>" where all three
    components are integers, e.g. "100:1234,5678".

    @param bibref: the bibref-bibrec pair that unambiguously identifies a paper
    @type bibref: string
    @return: True if it is a bibref-bibrec pair and False if it's not
    @rtype: boolean
    '''
    if not bibref or not isinstance(bibref, str):
        return False
    if ":" not in bibref or "," not in bibref:
        return False
    try:
        table, rest = bibref.split(":")[0], bibref.split(":")[1]
        ref = rest.split(",")[0]
        recid = rest.split(",")[1]
        int(table)
        int(ref)
        int(recid)
    except (IndexError, ValueError, TypeError):
        return False
    return True
def is_valid_canonical_id(cid):
    '''
    Checks if the presented canonical ID is valid in structure.
    Must be of structure: ([Initial|Name]\.)*Lastname\.Number
    Example of valid cid: J.Ellis.1

    @param cid: The canonical ID to check
    @type cid: string
    @return: Is it valid?
    @rtype: boolean
    '''
    if "." not in cid:
        return False
    parts = cid.split(".")
    if len(parts) < 2 or not parts[-1]:
        return False
    try:
        number = int(parts[-1])
    except (ValueError, TypeError, IndexError):
        return False
    # Zero is rejected here (falsy), matching the historical behaviour.
    return bool(number and number > -1)
def add_person_comment(person_id, message):
    '''
    Adds a comment to a person after enriching it with meta-data (date+time).

    @param person_id: person id to assign the comment to
    @type person_id: int
    @param message: the comment to set
    @type message: string
    @return: the stored message incl. metadata, or False on invalid input
    @rtype: string or boolean
    '''
    try:
        msg = str(message)
        pid = int(person_id)
    except (ValueError, TypeError):
        return False
    timestamp = strftime("%Y-%m-%d %H:%M:%S", gmtime())
    # Comments are HTML-escaped before storage; ';;;' separates the
    # timestamp from the body.
    dbmsg = "%s;;;%s" % (timestamp, escape(msg, quote=True))
    dbapi.set_person_data(pid, "comment", dbmsg)
    return dbmsg
def get_person_comments(person_id):
    '''
    Get all comments from a person.

    @param person_id: person id to get the comments from
    @type person_id: int
    @return: list of stored comments, or False on invalid input
    @rtype: list or boolean
    '''
    try:
        pid = int(person_id)
    except (ValueError, TypeError):
        return False
    return [row[1] for row in dbapi.get_person_data(pid, "comment")]
def search_person_ids_by_name(namequery):
    '''
    Prepares the search query and runs it against the database.

    @param namequery: the search query the user entered
    @type namequery: string
    @return: information about the result w/ probability and occurrence
    @rtype: tuple of tuple
    '''
    try:
        query = str(namequery)
    except (ValueError, TypeError):
        return []
    if not query:
        return []
    # Escape the user-supplied string before it reaches the db layer.
    return dbapi.find_personIDs_by_name_string(escape(query, quote=True))
def insert_log(userinfo, personid, action, tag, value, comment='', transactionid=0):
    '''
    Log an action performed by a user.

    Examples (in the DB):
        1 2010-09-30 19:30  admin||10.0.0.1  1  assign  paper  1133:4442 'from 23'
        2 2010-09-30 19:35  admin||10.0.0.1  1  reject  paper  72:4442

    @param userinfo: information about the user [UID|IP]
    @type userinfo: string
    @param personid: ID of the person this action is targeting
    @type personid: int
    @param action: intended action
    @type action: string
    @param tag: a tag to describe the data entered
    @type tag: string
    @param value: the value of the action described by the tag
    @type value: string
    @param comment: optional comment to describe the transaction
    @type comment: string
    @param transactionid: may group bulk operations together
    @type transactionid: int
    @return: the current transactionid, or -1 on invalid numeric input
    @rtype: int
    '''
    userinfo = escape(str(userinfo))
    action = escape(str(action))
    tag = escape(str(tag))
    value = escape(str(value))
    comment = escape(str(comment))
    # int() is the identity on ints, so coercion is safe either way.
    try:
        personid = int(personid)
        transactionid = int(transactionid)
    except (ValueError, TypeError):
        return -1
    uid = userinfo.split('||')[0] if userinfo.count('||') else ''
    return dbapi.insert_user_log(userinfo, personid, action, tag,
                                 value, comment, transactionid, userid=uid)
def user_can_modify_data(uid, pid):
    '''
    Determines if a user may modify the data of a person.

    @param uid: the id of a user (invenio user id)
    @type uid: int
    @param pid: the id of a person
    @type pid: int
    @return: True if the user may modify data, False if not
    @rtype: boolean
    @raise ValueError: if the supplied parameters are invalid
    '''
    try:
        uid = int(uid)
    except (ValueError, TypeError):
        raise ValueError("User ID has to be a number!")
    try:
        pid = int(pid)
    except (ValueError, TypeError):
        raise ValueError("Person ID has to be a number!")
    return dbapi.user_can_modify_data(uid, pid)
def user_can_modify_paper(uid, paper):
    '''
    Determines if a user may modify the record assignments of a person.

    @param uid: the id of a user (invenio user id)
    @type uid: int
    @param paper: the bibref of the paper
    @type paper: string
    @return: True if the user may modify data, False if not
    @rtype: boolean
    @raise ValueError: if the supplied parameters are invalid
    '''
    try:
        uid = int(uid)
    except (ValueError, TypeError):
        raise ValueError("User ID has to be a number!")
    if not paper:
        raise ValueError("A bibref is expected!")
    return dbapi.user_can_modify_paper(uid, paper)
def person_bibref_is_touched_old(pid, bibref):
    '''
    Determines if an assignment has been touched by a user (i.e. check for
    the flag of an assignment being 2 or -2).

    @param pid: the id of the person to check against
    @type pid: int
    @param bibref: the bibref-bibrec pair that unambiguously identifies a paper
    @type bibref: string
    @raise ValueError: if the supplied parameters are invalid
    '''
    try:
        pid = int(pid)
    except (ValueError, TypeError):
        raise ValueError("Person ID has to be a number!")
    if not bibref:
        raise ValueError("A bibref is expected!")
    return dbapi.person_bibref_is_touched_old(pid, bibref)
def get_review_needing_records(pid):
    '''
    Returns the list of records associated to pid which are in need of
    review (only a bibrec, no bibref selected yet).

    @param pid: person id
    @return: list of record IDs
    @rtype: list of int
    '''
    pid = wash_integer_id(pid)
    rows = dbapi.get_person_papers_to_be_manually_reviewed(pid)
    return [int(row[1]) for row in rows if row[1]]
def add_review_needing_record(pid, bibrec_id):
    '''
    Mark a record as needing manual review for a person.

    @param pid: person id
    @param bibrec_id: record id
    '''
    dbapi.add_person_paper_needs_manual_review(wash_integer_id(pid),
                                               wash_integer_id(bibrec_id))
def del_review_needing_record(pid, bibrec_id):
    '''
    Remove a record from a person's manual-review list.

    @param pid: person id
    @param bibrec_id: record id
    '''
    dbapi.del_person_papers_needs_manual_review(wash_integer_id(pid),
                                                wash_integer_id(bibrec_id))
def get_processed_external_recids(pid):
    '''
    Get the list of records that have been processed from external
    identifiers.

    @param pid: Person ID to look up the info for
    @type pid: int
    @return: list of record IDs
    @rtype: list of strings
    '''
    # Stored as a single ';'-separated string in the database.
    return dbapi.get_processed_external_recids(pid).split(";")
def set_processed_external_recids(pid, recid_list):
    '''
    Set the list of records that have been processed from external
    identifiers.

    @param pid: Person ID to set the info for
    @type pid: int
    @param recid_list: list of recids
    @type recid_list: list of strings
    '''
    # Silently ignore non-list input, as before.
    if isinstance(recid_list, list):
        dbapi.set_processed_external_recids(pid, ";".join(recid_list))
def arxiv_login(req):
    '''
    Log in through arXiv. If the user is already associated to a personid,
    returns that personid. If the user has no pid, tries to guess which
    personid to associate based on surname and papers from arXiv. If no
    compatible person is found, creates a new person. At the end of the
    process opens a ticket for the user claiming the papers from arXiv.

    !!! the user will find the open ticket, which will require him to go
    through the final review before getting committed.

    @param req: Apache request object
    @type req: Apache request object
    @return: Returns the pid resulting in the process
    @rtype: int
    '''
    def session_bareinit(req):
        # Ensure session['personinfo']['ticket'] exists before use.
        session = get_session(req)
        try:
            pinfo = session["personinfo"]
            if 'ticket' not in pinfo:
                pinfo["ticket"] = []
        except KeyError:
            pinfo = dict()
            session['personinfo'] = pinfo
            pinfo["ticket"] = []
        session.dirty = True
    session_bareinit(req)
    session = get_session(req)
    pinfo = session['personinfo']
    ticket = session['personinfo']['ticket']
    uinfo = collect_user_info(req)
    pinfo['external_first_entry'] = False
    # Name parts as delivered by the external (arXiv) auth plugin; either
    # may be missing.
    try:
        name = uinfo['external_firstname']
    except KeyError:
        name = ''
    try:
        surname = uinfo['external_familyname']
    except KeyError:
        surname = ''
    if surname:
        session['personinfo']['arxiv_name'] = nameapi.create_normalized_name(
            nameapi.split_name_parts(surname + ', ' + name))
    else:
        session['personinfo']['arxiv_name'] = ''
    session.dirty = True
    try:
        arxiv_p_ids = uinfo['external_arxivids'].split(';')
    except KeyError:
        arxiv_p_ids = []
    #'external_arxivids': 'hep-th/0112017;hep-th/0112020',
    #'external_familyname': 'Weiler',
    #'external_firstname': 'Henning',
    # Resolve each arXiv id to local record ids via a 037 (report number)
    # search; reduce(add, ...) raises TypeError on an empty list, which is
    # caught and mapped to "no records found".
    try:
        found_bibrecs = set(reduce(add, [perform_request_search(p='037:' + str(arx), of='id', rg=0)for arx in arxiv_p_ids]))
    except (IndexError, TypeError):
        found_bibrecs = set()
    #found_bibrecs = [567700, 567744]
    uid = getUid(req)
    pid, pid_found = dbapi.get_personid_from_uid([[uid]])
    if not pid_found:
        # First visit: try to reclaim an existing person (or create one)
        # matching the arXiv name and papers, and bind it to this uid.
        pid = dbapi.reclaim_personid_for_new_arXiv_user(found_bibrecs,
            nameapi.create_normalized_name(nameapi.split_name_parts(surname + ', ' + name)), uid)
    else:
        pid = pid[0]
    pid_bibrecs = set([i[0] for i in dbapi.get_all_personids_recs(pid, claimed_only=True)])
    missing_bibrecs = found_bibrecs - pid_bibrecs
    #present_bibrecs = found_bibrecs.intersection(pid_bibrecs)
    #assert len(found_bibrecs) == len(missing_bibrecs) + len(present_bibrecs)
    tempticket = []
    #now we have to open the tickets...
    #person_papers contains the papers which are already assigned to the person and came from arxive,
    #they can be claimed regardless
    for bibrec in missing_bibrecs:
        tempticket.append({'pid':pid, 'bibref':str(bibrec), 'action':'confirm'})
    #check if ticket targets (bibref for pid) are already in ticket
    # Deduplicate: any existing session-ticket entry for the same
    # (pid, bibref) is replaced by the freshly built one.
    for t in list(tempticket):
        for e in list(ticket):
            if e['pid'] == t['pid'] and e['bibref'] == t['bibref']:
                ticket.remove(e)
        ticket.append(t)
    session.dirty = True
    return pid
def external_user_can_perform_action(uid):
    '''
    Check for SSO user and if external claims will affect the decision
    whether or not the user may use the Invenio claiming platform.

    @param uid: the user ID to check permissions for
    @type uid: int
    @return: is user allowed to perform actions?
    @rtype: boolean
    '''
    # If no EXTERNAL_CLAIMED_RECORDS_KEY is configured, the check is bypassed.
    if not bconfig.EXTERNAL_CLAIMED_RECORDS_KEY:
        return True
    uinfo = collect_user_info(uid)
    # Allowed as soon as any configured key carries a truthy value.
    return any(uinfo.get(key) for key in bconfig.EXTERNAL_CLAIMED_RECORDS_KEY)
def is_external_user(uid):
    '''
    Check for SSO user and if external claims will affect the decision
    whether or not the user may use the Invenio claiming platform.

    @param uid: the user ID to check permissions for
    @type uid: int
    @return: is the user an external (SSO) user?
    @rtype: boolean
    '''
    # Without a configured EXTERNAL_CLAIMED_RECORDS_KEY nobody counts as
    # external.
    if not bconfig.EXTERNAL_CLAIMED_RECORDS_KEY:
        return False
    uinfo = collect_user_info(uid)
    return any(uinfo.get(key) for key in bconfig.EXTERNAL_CLAIMED_RECORDS_KEY)
def check_transaction_permissions(uid, bibref, pid, action):
    '''
    Check if the user can perform the given action on the given
    pid,bibrefrec pair. Returns one of: granted, denied, warning_granted,
    warning_denied ("warning_*" means an already human-touched assignment
    would be overridden).

    @param uid: The internal ID of a user
    @type uid: int
    @param bibref: the bibref pair to check permissions for
    @type bibref: string
    @param pid: the Person ID to check on
    @type pid: int
    @param action: the action that is to be performed
    @type action: string
    @return: granted, denied, warning_granted xor warning_denied
    @rtype: string
    '''
    c_own = True
    c_override = False
    is_superadmin = isUserSuperAdmin({'uid': uid})
    access_right = _resolve_maximum_acces_rights(uid)
    bibref_status = dbapi.get_bibref_modification_status(bibref)
    old_flag = bibref_status[0]
    # Flags +-2 mean a human already confirmed/rejected this assignment;
    # changing it counts as an override.
    if old_flag == 2 or old_flag == -2:
        # NOTE(review): if action is not one of confirm/assign/repeal/reset,
        # new_flag stays unbound and the comparison below raises NameError.
        # Callers appear to validate the action beforehand — confirm.
        if action in ['confirm', 'assign']:
            new_flag = 2
        elif action in ['repeal']:
            new_flag = -2
        elif action in ['reset']:
            new_flag = 0
        if old_flag != new_flag:
            c_override = True
    # The claim is "own" only when the acting uid maps to the target pid.
    uid_pid = dbapi.get_personid_from_uid([[uid]])
    if not uid_pid[1] or pid != uid_pid[0][0]:
        c_own = False
    #if we cannot override an already touched bibref, no need to go on checking
    if c_override:
        if is_superadmin:
            return 'warning_granted'
        if access_right[1] < bibref_status[1]:
            return "warning_denied"
    else:
        if is_superadmin:
            return 'granted'
    #let's check if invenio is allowing us the action we want to perform
    if c_own:
        action = bconfig.CLAIMPAPER_CLAIM_OWN_PAPERS
    else:
        action = bconfig.CLAIMPAPER_CLAIM_OTHERS_PAPERS
    auth = acc_authorize_action(uid, action)
    if auth[0] != 0:
        return "denied"
    #now we know if claiming for ourselfs, we can ask for external ideas
    if c_own:
        action = 'claim_own_paper'
    else:
        action = 'claim_other_paper'
    ext_permission = external_user_can_perform_action(uid)
    #if we are here invenio is allowing the thing and we are not overwriting a
    #user with higher privileges, if externals are ok we go on!
    if ext_permission:
        if not c_override:
            return "granted"
        else:
            return "warning_granted"
    return "denied"
def delete_request_ticket(pid, ticket):
    '''
    Delete a request ticket associated to a person.

    @param pid: person id (int)
    @param ticket: ticket id (int)
    '''
    dbapi.delete_request_ticket(pid, ticket)
def delete_transaction_from_request_ticket(pid, tid, action, bibref):
    '''
    Deletes a transaction from a ticket. If no confirm/repeal actions
    remain afterwards, the whole ticket is deleted.

    @param pid: person id
    @param tid: ticket id
    @param action: action
    @param bibref: bibref
    '''
    tickets = get_person_request_ticket(pid, tid)
    if not tickets:
        return
    rt = tickets[0][0]
    # Drop every entry matching the (action, bibref) pair.
    for entry in list(rt):
        if str(entry[0]) == str(action) and str(entry[1]) == str(bibref):
            rt.remove(entry)
    has_action = any(str(entry[0]) in ('confirm', 'repeal') for entry in rt)
    if not has_action:
        delete_request_ticket(pid, tid)
        return
    dbapi.update_request_ticket(pid, rt, tid)
def create_request_ticket(userinfo, ticket):
    '''
    Creates a request ticket: validates all transactions, stores one
    db ticket per target person and notifies the RT system by email.

    @param userinfo: dictionary of info about the user
    @param ticket: list of transaction dicts with 'pid', 'bibref', 'action'
    @return: False when any transaction is invalid, True otherwise
    '''
    # write ticket to DB
    # send eMail to RT
    udata = []
    mailcontent = []
    m = mailcontent.append
    m("A user sent a change request through the web interface.")
    m("User Information:")
    for k, v in userinfo.iteritems():
        if v:
            m(" %s: %s" % (k, v))
    m("\nLinks to all issued Person-based requests:\n")
    for i in userinfo:
        udata.append([i, userinfo[i]])
    # Group transactions by target pid; reject the whole request on the
    # first invalid transaction.
    tic = {}
    for t in ticket:
        if not t['action'] in ['confirm', 'assign', 'repeal', 'reset']:
            return False
        elif t['pid'] < 0:
            return False
        elif not is_valid_bibref(t['bibref']):
            return False
        if t['action'] == 'reset':
            #we ignore reset tickets
            continue
        else:
            if t['pid'] not in tic:
                tic[t['pid']] = []
            if t['action'] == 'assign':
                # 'assign' is stored as 'confirm' in the db ticket.
                t['action'] = 'confirm'
            tic[t['pid']].append([t['action'], t['bibref']])
    # One db ticket per person: user data + date + the person's transactions.
    for pid in tic:
        data = []
        for i in udata:
            data.append(i)
        data.append(['date', ctime()])
        for i in tic[pid]:
            data.append(i)
        dbapi.update_request_ticket(pid, data)
        pidlink = get_person_redirect_link(pid)
        m("%s/person/%s?open_claim=True#tabTickets" % (CFG_SITE_URL, pidlink))
    m("\nPlease remember that you have to be logged in "
      "in order to see the ticket of a person.\n")
    if ticket and tic and mailcontent:
        sender = CFG_BIBAUTHORID_AUTHOR_TICKET_ADMIN_EMAIL
        if bconfig.TICKET_SENDING_FROM_USER_EMAIL and userinfo['email']:
            sender = userinfo['email']
        send_email(sender,
                   CFG_BIBAUTHORID_AUTHOR_TICKET_ADMIN_EMAIL,
                   subject="[Author] Change Request",
                   content="\n".join(mailcontent))
    return True
def send_user_commit_notification_email(userinfo, ticket):
    '''
    Sends a commit notification email to the RT system describing the
    changes a (SSO) user committed directly.

    @param userinfo: dictionary of info about the user
    @param ticket: list of committed transaction dicts
    @return: True
    '''
    lines = []
    lines.append("A user committed a change through the web interface.")
    lines.append("User Information:")
    for key, value in userinfo.iteritems():
        if value:
            lines.append(" %s: %s" % (key, value))
    lines.append("\nChanges:\n")
    for transaction in ticket:
        lines.append(" --- <start> --- \n")
        for key, value in transaction.iteritems():
            lines.append(" %s: %s \n" % (str(key), str(value)))
            if key == 'bibref':
                # Resolve the recid part of the bibref to a title, best-effort.
                try:
                    recid = int(value.split(',')[1])
                    lines.append(" Title: %s\n" % search_engine.get_fieldvalues(recid, "245__a"))
                except (TypeError, ValueError, IndexError):
                    pass
        lines.append(" --- <end> --- \n")
    if ticket and lines:
        send_email(CFG_BIBAUTHORID_AUTHOR_TICKET_ADMIN_EMAIL,
                   CFG_BIBAUTHORID_AUTHOR_TICKET_ADMIN_EMAIL,
                   subject="[Author] NO ACTIONS NEEDED. Changes performed by SSO user.",
                   content="\n".join(lines))
    return True
def user_can_view_CMP(uid):
    '''
    Check whether the user may view the claim-paper person universe.

    @param uid: the user ID to check
    @type uid: int
    @return: True when access is authorized
    @rtype: boolean
    '''
    auth = acc_authorize_action(uid, bconfig.CLAIMPAPER_VIEW_PID_UNIVERSE)
    return auth[0] == 0
def _resolve_maximum_acces_rights(uid):
    '''
    Returns [max_role, lcul] for use in execute_action and
    check_transaction_permissions. Defaults to ['guest', 0] when the user
    has no relevant roles assigned. Always returns the maximum privilege.
    '''
    role_ids = {bconfig.CLAIMPAPER_ADMIN_ROLE: acc_get_role_id(bconfig.CLAIMPAPER_ADMIN_ROLE),
                bconfig.CLAIMPAPER_USER_ROLE: acc_get_role_id(bconfig.CLAIMPAPER_USER_ROLE)}
    user_roles = acc_get_user_roles(uid)
    max_role = ['guest', 0]
    for role_name in role_ids:
        if role_ids[role_name] in user_roles:
            lcul = bconfig.CMPROLESLCUL[role_name]
            if lcul >= max_role[1]:
                max_role = [role_name, lcul]
    return max_role
def create_new_person(uid, uid_is_owner=False):
    '''
    Create a new person.

    @param uid: User ID to attach to the person
    @type uid: int
    @param uid_is_owner: Is the provided uid the owner of the new person?
    @type uid_is_owner: bool
    @return: the resulting person ID of the new person
    @rtype: int
    '''
    return dbapi.create_new_person(uid, uid_is_owner=uid_is_owner)
def execute_action(action, pid, bibref, uid, userinfo='', comment=''):
    '''
    Executes the action, setting the last user right according to uid.

    @param action: the action to perform ('confirm', 'assign', 'repeal', 'reset')
    @type action: string
    @param pid: the Person ID to perform the action on; -3 is the sentinel
        meaning "create a brand new person first"
    @type pid: int
    @param bibref: the bibref pair to perform the action for
    @type bibref: string
    @param uid: the internal user ID of the currently logged in user
    @type uid: int
    @return: success of the process
    @rtype: boolean
    '''
    pid = wash_integer_id(pid)
    if not action in ['confirm', 'assign', 'repeal', 'reset']:
        return False
    elif pid == -3:
        # BUGFIX: this sentinel must be tested before the generic pid < 0
        # rejection — in the original ordering the branch was unreachable
        # (-3 < 0 returned False first), so new persons were never created.
        pid = dbapi.create_new_person(uid, uid_is_owner=False)
    elif pid < 0:
        return False
    if not is_valid_bibref(bibref):
        return False
    if userinfo.count('||'):
        uid = userinfo.split('||')[0]
    else:
        uid = ''
    user_level = _resolve_maximum_acces_rights(uid)[1]
    if action in ['confirm', 'assign']:
        dbapi.insert_user_log(userinfo, pid, 'assign', 'CMPUI_ticketcommit', bibref, comment, userid=uid)
        dbapi.confirm_papers_to_person(pid, [bibref], user_level)
    elif action in ['repeal']:
        dbapi.insert_user_log(userinfo, pid, 'repeal', 'CMPUI_ticketcommit', bibref, comment, userid=uid)
        dbapi.reject_papers_from_person(pid, [bibref], user_level)
    elif action in ['reset']:
        dbapi.insert_user_log(userinfo, pid, 'reset', 'CMPUI_ticketcommit', bibref, comment, userid=uid)
        dbapi.reset_papers_flag(pid, [bibref])
    else:
        return False
    # This is the only point which modifies a person, so this can trigger
    # the deletion of a cached page.
    webauthorapi.expire_all_cache_for_personid(pid)
    return True
def sign_assertion(robotname, assertion):
    '''
    Sign an assertion for the export of IDs.

    @param robotname: name of the robot. E.g. 'arxivz'
    @type robotname: string
    @param assertion: JSONized object to sign
    @type assertion: string
    @return: The signature
    @rtype: string
    '''
    # NOTE(review): the parameter is clobbered here, so the early return
    # below always fires and the function yields "". This looks like a
    # deliberate kill-switch for the signing feature — confirm intent
    # before "fixing" it.
    robotname = ""
    secr = ""
    if not robotname:
        return ""
    robot = ExternalAuthRobot()
    keys = load_robot_keys()
    # NOTE(review): bare except maps any lookup failure to an empty secret;
    # narrowing to KeyError would be safer.
    try:
        secr = keys["Robot"][robotname]
    except:
        secr = ""
    return robot.sign(secr, assertion)
| [
"root@tomasp-AMILO.(none)"
] | root@tomasp-AMILO.(none) |
e4133ca7cab9d2cfbcfdb3bd426bd7881b929df7 | f82757475ea13965581c2147ff57123b361c5d62 | /gi-stubs/repository/Soup/HSTSEnforcer.py | d992a49c116ab36e63951f4d43c0915e61e2f82c | [] | no_license | ttys3/pygobject-stubs | 9b15d1b473db06f47e5ffba5ad0a31d6d1becb57 | d0e6e93399212aada4386d2ce80344eb9a31db48 | refs/heads/master | 2022-09-23T12:58:44.526554 | 2020-06-06T04:15:00 | 2020-06-06T04:15:00 | 269,693,287 | 8 | 2 | null | 2020-06-05T15:57:54 | 2020-06-05T15:57:54 | null | UTF-8 | Python | false | false | 17,580 | py | # encoding: utf-8
# module gi.repository.Soup
# from /usr/lib64/girepository-1.0/Soup-2.4.typelib
# by generator 1.147
"""
An object which wraps an introspection typelib.
This wrapping creates a python module like representation of the typelib
using gi repository as a foundation. Accessing attributes of the module
will dynamically pull them in and create wrappers for the members.
These members are then cached on this introspection module.
"""
# imports
import gi as __gi
import gi.overrides.GObject as __gi_overrides_GObject
import gi.repository.Gio as __gi_repository_Gio
import gobject as __gobject
from .SessionFeature import SessionFeature
class HSTSEnforcer(__gi_overrides_GObject.Object, SessionFeature):
"""
:Constructors:
::
HSTSEnforcer(**properties)
new() -> Soup.HSTSEnforcer
"""
    # Generated introspection stub; the real implementation is bound from
    # the libsoup/GObject C library at runtime.
    def add_feature(self, type): # real signature unknown; restored from __doc__
        """ add_feature(self, type:GType) -> bool """
        return False
    # Generated stub; real behavior provided by libsoup at runtime.
    def attach(self, session): # real signature unknown; restored from __doc__
        """ attach(self, session:Soup.Session) """
        pass
    # Generated stub for GObject.Object.bind_property; bound at runtime.
    def bind_property(self, *args, **kwargs): # real signature unknown
        pass
    # Generated stub for GObject.Object.bind_property_full; bound at runtime.
    def bind_property_full(self, *args, **kargs): # reliably restored by inspect
        # no doc
        pass
    # Generated stub; bound from the GObject machinery at runtime.
    def chain(self, *args, **kwargs): # real signature unknown
        pass
    # Generated stub; bound from the GObject machinery at runtime.
    def compat_control(self, *args, **kargs): # reliably restored by inspect
        # no doc
        pass
    # Generated stub for GObject signal connection; bound at runtime.
    def connect(self, *args, **kwargs): # real signature unknown
        pass
def connect_after(self, *args, **kwargs): # real signature unknown
pass
def connect_data(self, detailed_signal, handler, *data, **kwargs): # reliably restored by inspect
"""
Connect a callback to the given signal with optional user data.
:param str detailed_signal:
A detailed signal to connect to.
:param callable handler:
Callback handler to connect to the signal.
:param *data:
Variable data which is passed through to the signal handler.
:param GObject.ConnectFlags connect_flags:
Flags used for connection options.
:returns:
A signal id which can be used with disconnect.
"""
pass
def connect_object(self, *args, **kwargs): # real signature unknown
pass
def connect_object_after(self, *args, **kwargs): # real signature unknown
pass
def detach(self, session): # real signature unknown; restored from __doc__
""" detach(self, session:Soup.Session) """
pass
def disconnect(*args, **kwargs): # reliably restored by inspect
""" signal_handler_disconnect(instance:GObject.Object, handler_id:int) """
pass
def disconnect_by_func(self, *args, **kwargs): # real signature unknown
pass
def do_changed(self, *args, **kwargs): # real signature unknown
""" changed(self, old_policy:Soup.HSTSPolicy, new_policy:Soup.HSTSPolicy) """
pass
def do_has_valid_policy(self, *args, **kwargs): # real signature unknown
""" has_valid_policy(self, domain:str) -> bool """
pass
def do_hsts_enforced(self, *args, **kwargs): # real signature unknown
""" hsts_enforced(self, message:Soup.Message) """
pass
def do_is_persistent(self, *args, **kwargs): # real signature unknown
""" is_persistent(self) -> bool """
pass
def emit(self, *args, **kwargs): # real signature unknown
pass
def emit_stop_by_name(self, detailed_signal): # reliably restored by inspect
""" Deprecated, please use stop_emission_by_name. """
pass
def find_property(self, property_name): # real signature unknown; restored from __doc__
""" find_property(self, property_name:str) -> GObject.ParamSpec """
pass
def force_floating(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def freeze_notify(self): # reliably restored by inspect
"""
Freezes the object's property-changed notification queue.
:returns:
A context manager which optionally can be used to
automatically thaw notifications.
This will freeze the object so that "notify" signals are blocked until
the thaw_notify() method is called.
.. code-block:: python
with obj.freeze_notify():
pass
"""
pass
def getv(self, names, values): # real signature unknown; restored from __doc__
""" getv(self, names:list, values:list) """
pass
def get_data(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def get_domains(self, session_policies): # real signature unknown; restored from __doc__
""" get_domains(self, session_policies:bool) -> list """
return []
def get_policies(self, session_policies): # real signature unknown; restored from __doc__
""" get_policies(self, session_policies:bool) -> list """
return []
def get_properties(self, *args, **kwargs): # real signature unknown
pass
def get_property(self, *args, **kwargs): # real signature unknown
pass
def get_qdata(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def handler_block(obj, handler_id): # reliably restored by inspect
"""
Blocks the signal handler from being invoked until
handler_unblock() is called.
:param GObject.Object obj:
Object instance to block handlers for.
:param int handler_id:
Id of signal to block.
:returns:
A context manager which optionally can be used to
automatically unblock the handler:
.. code-block:: python
with GObject.signal_handler_block(obj, id):
pass
"""
pass
def handler_block_by_func(self, *args, **kwargs): # real signature unknown
pass
def handler_disconnect(*args, **kwargs): # reliably restored by inspect
""" signal_handler_disconnect(instance:GObject.Object, handler_id:int) """
pass
def handler_is_connected(*args, **kwargs): # reliably restored by inspect
""" signal_handler_is_connected(instance:GObject.Object, handler_id:int) -> bool """
pass
def handler_unblock(*args, **kwargs): # reliably restored by inspect
""" signal_handler_unblock(instance:GObject.Object, handler_id:int) """
pass
def handler_unblock_by_func(self, *args, **kwargs): # real signature unknown
pass
def has_feature(self, type): # real signature unknown; restored from __doc__
""" has_feature(self, type:GType) -> bool """
return False
def has_valid_policy(self, domain): # real signature unknown; restored from __doc__
""" has_valid_policy(self, domain:str) -> bool """
return False
def install_properties(self, pspecs): # real signature unknown; restored from __doc__
""" install_properties(self, pspecs:list) """
pass
def install_property(self, property_id, pspec): # real signature unknown; restored from __doc__
""" install_property(self, property_id:int, pspec:GObject.ParamSpec) """
pass
def interface_find_property(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def interface_install_property(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def interface_list_properties(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def is_floating(self): # real signature unknown; restored from __doc__
""" is_floating(self) -> bool """
return False
def is_persistent(self): # real signature unknown; restored from __doc__
""" is_persistent(self) -> bool """
return False
def list_properties(self): # real signature unknown; restored from __doc__
""" list_properties(self) -> list, n_properties:int """
return []
def new(self): # real signature unknown; restored from __doc__
""" new() -> Soup.HSTSEnforcer """
pass
def newv(self, object_type, parameters): # real signature unknown; restored from __doc__
""" newv(object_type:GType, parameters:list) -> GObject.Object """
pass
def notify(self, property_name): # real signature unknown; restored from __doc__
""" notify(self, property_name:str) """
pass
def notify_by_pspec(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def override_property(self, property_id, name): # real signature unknown; restored from __doc__
""" override_property(self, property_id:int, name:str) """
pass
def ref(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def ref_sink(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def remove_feature(self, type): # real signature unknown; restored from __doc__
""" remove_feature(self, type:GType) -> bool """
return False
def replace_data(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def replace_qdata(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def run_dispose(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def set_data(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def set_policy(self, policy): # real signature unknown; restored from __doc__
""" set_policy(self, policy:Soup.HSTSPolicy) """
pass
def set_properties(self, *args, **kwargs): # real signature unknown
pass
def set_property(self, *args, **kwargs): # real signature unknown
pass
def set_session_policy(self, domain, include_subdomains): # real signature unknown; restored from __doc__
""" set_session_policy(self, domain:str, include_subdomains:bool) """
pass
def steal_data(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def steal_qdata(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def stop_emission(self, detailed_signal): # reliably restored by inspect
""" Deprecated, please use stop_emission_by_name. """
pass
def stop_emission_by_name(*args, **kwargs): # reliably restored by inspect
""" signal_stop_emission_by_name(instance:GObject.Object, detailed_signal:str) """
pass
def thaw_notify(self): # real signature unknown; restored from __doc__
""" thaw_notify(self) """
pass
def unref(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def watch_closure(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def weak_ref(self, *args, **kwargs): # real signature unknown
pass
def _force_floating(self, *args, **kwargs): # real signature unknown
""" force_floating(self) """
pass
def _ref(self, *args, **kwargs): # real signature unknown
""" ref(self) -> GObject.Object """
pass
def _ref_sink(self, *args, **kwargs): # real signature unknown
""" ref_sink(self) -> GObject.Object """
pass
def _unref(self, *args, **kwargs): # real signature unknown
""" unref(self) """
pass
def _unsupported_data_method(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def _unsupported_method(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def __copy__(self, *args, **kwargs): # real signature unknown
pass
def __deepcopy__(self, *args, **kwargs): # real signature unknown
pass
def __delattr__(self, *args, **kwargs): # real signature unknown
""" Implement delattr(self, name). """
pass
def __dir__(self, *args, **kwargs): # real signature unknown
""" Default dir() implementation. """
pass
def __eq__(self, *args, **kwargs): # real signature unknown
""" Return self==value. """
pass
def __format__(self, *args, **kwargs): # real signature unknown
""" Default object formatter. """
pass
def __getattribute__(self, *args, **kwargs): # real signature unknown
""" Return getattr(self, name). """
pass
def __ge__(self, *args, **kwargs): # real signature unknown
""" Return self>=value. """
pass
def __gt__(self, *args, **kwargs): # real signature unknown
""" Return self>value. """
pass
def __hash__(self, *args, **kwargs): # real signature unknown
""" Return hash(self). """
pass
def __init_subclass__(self, *args, **kwargs): # real signature unknown
"""
This method is called when a class is subclassed.
The default implementation does nothing. It may be
overridden to extend subclasses.
"""
pass
def __init__(self, **properties): # real signature unknown; restored from __doc__
pass
def __le__(self, *args, **kwargs): # real signature unknown
""" Return self<=value. """
pass
def __lt__(self, *args, **kwargs): # real signature unknown
""" Return self<value. """
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
def __ne__(self, *args, **kwargs): # real signature unknown
""" Return self!=value. """
pass
def __reduce_ex__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __repr__(self, *args, **kwargs): # real signature unknown
""" Return repr(self). """
pass
def __setattr__(self, *args, **kwargs): # real signature unknown
""" Implement setattr(self, name, value). """
pass
def __sizeof__(self, *args, **kwargs): # real signature unknown
""" Size of object in memory, in bytes. """
pass
def __str__(self, *args, **kwargs): # real signature unknown
""" Return str(self). """
pass
def __subclasshook__(self, *args, **kwargs): # real signature unknown
"""
Abstract classes can override this to customize issubclass().
This is invoked early on by abc.ABCMeta.__subclasscheck__().
It should return True, False or NotImplemented. If it returns
NotImplemented, the normal algorithm is used. Otherwise, it
overrides the normal algorithm (and the outcome is cached).
"""
pass
def __weakref__(self, *args, **kwargs): # real signature unknown
pass
g_type_instance = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
parent = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
priv = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
qdata = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
ref_count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__gpointer__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__grefcount__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
props = None # (!) real value is '<gi._gi.GProps object at 0x7f8e47db9a60>'
__class__ = None # (!) real value is "<class 'gi.types.GObjectMeta'>"
__dict__ = None # (!) real value is "mappingproxy({'__info__': ObjectInfo(HSTSEnforcer), '__module__': 'gi.repository.Soup', '__gtype__': <GType SoupHSTSEnforcer (94750594763632)>, '__doc__': None, '__gsignals__': {}, 'new': gi.FunctionInfo(new), 'get_domains': gi.FunctionInfo(get_domains), 'get_policies': gi.FunctionInfo(get_policies), 'has_valid_policy': gi.FunctionInfo(has_valid_policy), 'is_persistent': gi.FunctionInfo(is_persistent), 'set_policy': gi.FunctionInfo(set_policy), 'set_session_policy': gi.FunctionInfo(set_session_policy), 'do_changed': gi.VFuncInfo(changed), 'do_has_valid_policy': gi.VFuncInfo(has_valid_policy), 'do_hsts_enforced': gi.VFuncInfo(hsts_enforced), 'do_is_persistent': gi.VFuncInfo(is_persistent), 'parent': <property object at 0x7f8e47ed6180>, 'priv': <property object at 0x7f8e47ed6270>})"
__gdoc__ = 'Object SoupHSTSEnforcer\n\nSignals from SoupHSTSEnforcer:\n changed (SoupHSTSPolicy, SoupHSTSPolicy)\n hsts-enforced (SoupMessage)\n\nSignals from GObject:\n notify (GParam)\n\n'
__gsignals__ = {}
__gtype__ = None # (!) real value is '<GType SoupHSTSEnforcer (94750594763632)>'
__info__ = ObjectInfo(HSTSEnforcer)
| [
"ttys3@outlook.com"
] | ttys3@outlook.com |
57fbff89c04d7446cff424581cf47b00d74c7634 | 29c1600023fac94f1fc14c580e84ad603e37d12f | /gm_work/gm_work/settings.py | 4847fba777a9f1e27e964e9c871015dd7443d6a3 | [] | no_license | kingking888/scrapy_spider-1 | 33fb084b46617bd750273c471ed851cc5cf39c4a | 348402430d116eff53eb77761f618654f74a287f | refs/heads/master | 2022-07-04T11:04:15.564883 | 2020-05-16T14:47:58 | 2020-05-16T14:47:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,369 | py | # -*- coding: utf-8 -*-
# Scrapy settings for gm_work project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# https://docs.scrapy.org/en/latest/topics/settings.html
# https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
import time
BOT_NAME = 'gm_work'
SPIDER_MODULES = ['gm_work.spiders']
NEWSPIDER_MODULE = 'gm_work.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'gm_work (+http://www.yourdomain.com)'
# Obey robots.txt rules
ROBOTSTXT_OBEY = False
# Configure maximum concurrent requests performed by Scrapy (default: 16)
CONCURRENT_REQUESTS = 4 #concurrency limit
# Configure a delay for requests for the same website (default: 0)
# See https://doc.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
# DOWNLOAD_DELAY = 0.5 # per-request download delay, in seconds
RANDOMIZE_DOWNLOAD_DELAY = True
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
COOKIES_ENABLED = False
REDIRECT_MAX_TIMES = 10
# REDIRECT_ENALBED = False
# NOTE(review): "ENALBED" above is a typo for "ENABLED"; the setting that is
# actually in effect is REDIRECT_ENABLED further down (False), which also
# makes REDIRECT_MAX_TIMES above moot.
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
# DEFAULT_REQUEST_HEADERS = {
# "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
# "Accept-Encoding": "gzip, deflate, br",
# "Accept-Language": "zh-CN,zh;q=0.9",
# "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36",
# }
# ----------- selenium parameter configuration -------------
SELENIUM_TIMEOUT = 25 # selenium browser timeout, in seconds
#LOAD_IMAGE = True # whether to download images
WINDOW_HEIGHT = 900 # browser window size
WINDOW_WIDTH = 900
# Enable or disable spider middlewares
# See https://doc.scrapy.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'gm_work.middlewares.AntAppSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
MY_USER_AGENT = [
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"
]
DOWNLOADER_MIDDLEWARES = {
# 'gm_work.middlewares.AntAppDownloaderMiddleware': 543,
# 'gm_work.middlewares.RandomUserAgentMiddleware':543,
# 'gm_work.middlewares.ProxyDownloaderMiddleware': 400
# 'gm_work.middlewares.SeleniumMiddleware': 10
# 'gm_work.middlewares.HostDownloaderMiddleware': 30,
# 'gm_work.middlewares.SmtPrameDownloaderMiddleware': 21,
# 'gm_work.middlewares.IpChangeDownloaderMiddleware': 20,
'gm_work.middlewares.ProcessAllExceptionMiddleware': 20,
# 'gm_work.middlewares.TaobaoZhiboDownloaderMiddleware': 22,
# 'gm_work.middlewares.UpdatetimeMiddleware': 23,
}
# Enable or disable extensions
# See https://doc.scrapy.org/en/latest/topics/extensions.html
EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
# 'gm_work.extension.redisSpiderSmartIdleCloseExensions': 500,## auto-close the spider when redis is idle
# 'gm_work.extension.Spider1OpenCloseLogging': 500,## seed start urls
# 'gm_work.extension.SpiderOpenCloseLogging': 500,## request queue
}
# 'gm_work.middlewares.HostDownloaderMiddleware': 500,
# Configure item pipelines
# See https://doc.scrapy.org/en/latest/topics/item-pipeline.html
ITEM_PIPELINES = {# priorities run from low to high
'gm_work.pipelines.CodeWriterPipeline': 290,
'gm_work.pipelines.JsonWriterPipeline': 300,
'gm_work.pipelines.errorWriterPipeline': 310,
# 'gm_work.pipelines.MysqlPipeline': 300,
# 'scrapy_redis.pipelines.RedisPipeline': 290
}
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []# HTTP status codes the cache should skip
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
RETRY_ENABLED = True# enable retrying of failed requests
RETRY_TIMES = 3
#RETRY_HTTP_CODES=# status codes that trigger a retry; default [500, 502, 503, 504, 522, 524, 408]
HTTPERROR_ALLOWED_CODES=[301,302,307,403,404,408,429,500, 502, 503, 504, 522, 524] # non-200 responses with these status codes are passed to the spider instead of being dropped
REDIRECT_ENABLED = False## redirect handling disabled (3xx codes are whitelisted above instead)
#DOWNLOAD_TIMEOUT - how long to wait before a download times out
#DOWNLOAD_MAXSIZE - maximum response size to download
#DOWNLOAD_WARNSIZE - response size that triggers a warning
# logging
LOG_LEVEL = "INFO"
to_day = time.localtime()
log_file_path = 'log/scrapy_{}_{}_{}.log'.format(to_day.tm_year, to_day.tm_mon, to_day.tm_mday)# the spider name gets appended inside each spider
#LOG_FILE = log_file_path
# COMMANDS_MODULE = "gm_work.commands"# registers custom commands with scrapy
#SPIDER_LOADER_CLASS = ""# (purpose was unclear in the original notes)
# redis
# use the scrapy-redis scheduler
SCHEDULER = "scrapy_redis.scheduler.Scheduler"
DUPEFILTER_CLASS = "scrapy_redis.dupefilter.RFPDupeFilter"# use scrapy-redis request de-duplication
# Queue class used to order the pending crawl URLs.
# The default orders by priority (Scrapy's default) — a non-FIFO/LIFO scheme backed by a redis sorted set.
# breadth-first: scrapy_redis.queue.FifoQueue; depth-first: LifoQueue; by priority: PriorityQueue
SCHEDULER_QUEUE_CLASS = 'scrapy_redis.queue.PriorityQueue'
REDIS_START_URLS_AS_SET = True
REDIS_HOST = '127.0.0.1'
REDIS_PORT = 6379
REDIS_PARAMS = {'password': 'nriat.123456',}
SCHEDULER_PERSIST = True# keep the scheduler queue and dupefilter records on close; True=keep, False=clear
# password login:
# REDIS_URL="redis://[user]:password@localhost:port"
# MySQL connection
# NOTE(review): database/redis credentials are committed here in plain text —
# consider loading them from environment variables instead.
MYSQL_HOST = '192.168.0.227'
MYSQL_PORT = 3306
MYSQL_DBNAME = 'ec_cross_border'
MYSQL_USER = 'dev'
MYSQL_PASSWD = 'Data227or8Dev715#'
# crawl order
# DEPTH_PRIORITY = 1 # a positive value favours breadth-first; with the two queue settings below it becomes fully breadth-first
# SCHEDULER_DISK_QUEUE = 'scrapy.squeues.PickleFifoDiskQueue'
# SCHEDULER_MEMORY_QUEUE = 'scrapy.squeues.FifoMemoryQueue'
# extension-related settings
MYEXT_ENABLED=True # enable the redis idle-close extension
IDLE_NUMBER=60 # idle duration, counted in 5 s units (the original note mentioned 360 units — verify the intended value)
#pipeline
SAVE_PATH = r"D:\spider_data"
LIMIT_NUM_DATA = 10000
LIMIT_NUM_ERROR = 100000
LIMIT_NUM_CODE = 10000
#extension:
MYEXT_ITEMCOUNT = 1000# check interval (items)
SEED_FILE_PATH = r"D:\spider_seed"
SPLIT_NUM = 10000
CHANGE_IP_NUM = 50
"xuchao0802@aliyun.com"
] | xuchao0802@aliyun.com |
0938c39b0ade04e2c3360559d93681b6c6a32640 | 5f7ad776002f81da978d180e0cbea4588e6b8d70 | /manage/resources.py | 3e42553c0f588efa84bde8f978bf49fb12fad394 | [] | no_license | thesteamnetworkcom/ToastBackendR1 | 3b812779ccf779f0dd360c7165727a1dfbd643cb | cb7b412c7d6c9d78dcfbaf238a12e03055999293 | refs/heads/master | 2020-06-20T18:18:21.843322 | 2019-07-16T14:06:14 | 2019-07-16T14:06:14 | 197,205,517 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 792 | py |
from import_export import resources
from .models import Assignment
from .models import Complex
from .models import Crosswalk
from .models import PMTL
from .models import Detail
from .models import Equipment
from .models import Facility
class ComplexResource(resources.ModelResource):
    """django-import-export resource mapping the Complex model for bulk import/export."""
    class Meta:
        model = Complex
class CrosswalkResource(resources.ModelResource):
    """django-import-export resource mapping the Crosswalk model for bulk import/export."""
    class Meta:
        model = Crosswalk
class PMTLResource(resources.ModelResource):
    """django-import-export resource mapping the PMTL model for bulk import/export."""
    class Meta:
        model = PMTL
        # Imported rows are matched to existing records by the ComplexID
        # column rather than the default primary-key id.
        import_id_fields=('ComplexID',)
class DetailResource(resources.ModelResource):
    """django-import-export resource mapping the Detail model for bulk import/export."""
    class Meta:
        model = Detail
class Equipmentresource(resources.ModelResource):
    """django-import-export resource mapping the Equipment model for bulk import/export.

    NOTE(review): the class name breaks the ``*Resource`` CamelCase convention
    of its siblings (it should be ``EquipmentResource``); renaming would break
    any existing references, so the inconsistency is only flagged here.
    """
    class Meta:
        model = Equipment
class FacilityResource(resources.ModelResource):
    """django-import-export resource mapping the Facility model for bulk import/export."""
    class Meta:
        # Bug fix: ``model`` was previously assigned as a plain class
        # attribute on FacilityResource itself instead of on an inner
        # ``Meta`` class (as every sibling resource in this module does),
        # leaving the resource unconfigured for django-import-export.
        model = Facility
| [
"scottiemlewis1991@gmail.com"
] | scottiemlewis1991@gmail.com |
c462013ed3ab5ba561d890a7be8d9df5ed9bdf6f | c362623e7bd0d656ad3a5a87cff8c2f2f4d64c30 | /example/wikidocs_exam_11_20.py | b96e7d53b878744a881b52ea3ed6b05932a6a7b8 | [] | no_license | bbster/PracticeAlgorithm | 92ce418e974e4be8e95b0878b2e349bf8438de5f | 171fa1880fb2635c5bac55c18a6981a656470292 | refs/heads/master | 2021-07-10T16:17:24.088996 | 2020-12-09T10:47:46 | 2020-12-09T10:47:46 | 222,721,632 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,257 | py | # https://wikidocs.net/7014
# Solutions to wikidocs beginner Python exercises 011-020 (https://wikidocs.net/7014).
# 011
삼성전자 = 50000
print("평가금액", 삼성전자 * 10)
# 012
시가총액 = 298000000000
현재가 = 50000
PER = 15.79
print("시가총액:", 시가총액, "현재가:", 현재가, "PER:", PER)
# Model answer from the answer sheet:
# 시가총액 = 298000000000000
# 현재가 = 5000
# PER = 15.79
# print(시가총액, type(시가총액))
# print(현재가, type(현재가))
# print(PER, type(PER))
# type(variable) - reveals the variable's data type, e.g. whether it is int or float.
# NOTE(review): the answer sheet uses 298000000000000 / 5000 while the code
# above uses 298000000000 / 50000 — confirm which figures the exercise intended.
# 013
s = "hello"
t = "python"
print(s, end="! ");print(t)
# Model answer from the answer sheet:
# s = "hello"
# t = "python"
# print(s+"!", t)
# 014
print(2+2*3)
# 015
a = "128"
print(type(a))
# class 'str'
# 016
num_str = "720"
num_int_casting = int("720")
print(num_str, type(num_str))
print(num_int_casting, type(num_int_casting))
# 017
num = 100
str_casting = str(100)
str_casting2 = str(num)
print(str_casting, type(str_casting))
print(str_casting2, type(str_casting2))
# 018
str_a = "15.79"
float_casting = float(str_a)
print(float_casting, type(float_casting))
# 019
year = "2020"
print(year, type(year))
year_int_casting = int(year)
print(year_int_casting, type(year_int_casting))
# 020
# Monthly installment price times the number of installments = total price.
air_conditioner = 48584
term = 36
print(air_conditioner * term)
| [
"bbster12@naver.com"
] | bbster12@naver.com |
af7ae0fac74f267efbcb2be1b212c04444ecf130 | 9ac15348f627be602826e3699d2260ba76af712a | /pagerank/lib.py | 591065bb105953ea2dd3c24940ccf936291a61f7 | [] | no_license | mrasap/MMDS-algorithms | aab30f6d93380f74411bcf5802b4667354921845 | 7d745936df6f935ec715b7e478b7cd868804a8f8 | refs/heads/master | 2020-04-27T19:20:26.140807 | 2019-03-18T20:36:58 | 2019-03-18T20:36:58 | 174,613,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,800 | py | from collections import OrderedDict
import csv
import itertools
import numpy as np
def import_csv_as_adjacency_dict(path: str) -> OrderedDict:
    """
    Read an edge-list csv file and build an adjacency dict of the graph.

    Each csv row is expected to look like 'node_from,node_to,weight';
    the weight column is ignored (currently always 1 in the data files).

    :param path: the path to the data file
    :return: OrderedDict: maps every node name (in sorted order) to the set
        of nodes reachable over that node's outgoing edges.
    """
    with open(path) as handle:
        # Keep only the first two columns of every row: (from, to).
        edges = [row[0:2] for row in csv.reader(handle, delimiter=',')]

    # Collect every distinct node that appears at either end of an edge.
    nodes = set()
    for edge in edges:
        for node in edge:
            nodes.add(node)

    adjacency = OrderedDict((node, set()) for node in sorted(nodes))
    for edge in edges:
        adjacency[edge[0]].add(edge[1])
    return adjacency
def construct_adjacency_matrix_from_dict(data: OrderedDict) -> np.ndarray:
    """
    Build the column-stochastic adjacency (transition) matrix of the graph.

    Entry [j][i] is 1 / out-degree(node_i) when node_i has an outgoing edge
    to node_j, and 0 otherwise, so every column sums to 1 — except for a
    dangling node (no outgoing edges), whose column is all zeros.

    :param data: OrderedDict: adjacency dict mapping each node to the set of
        nodes reached by its outgoing edges; key order fixes the row/column
        order of the matrix.
    :return: np.ndarray: adjacency matrix of shape (len(data), len(data))
    """
    arr = np.zeros((len(data), len(data)))
    # Map each node to its row/column index once, instead of calling
    # list.index() inside the inner loop (which made the build O(V * E)).
    index = {node: i for i, node in enumerate(data)}
    for i, (_, targets) in enumerate(data.items()):
        if not targets:  # dangling node: leave its column all-zero
            continue
        weight = 1 / len(targets)
        for node_to in targets:
            arr[index[node_to]][i] = weight
    return arr
def import_csv_as_adjacency_matrix(path: str) -> np.ndarray:
    """Convenience wrapper: load a csv edge list and return its adjacency matrix."""
    adjacency = import_csv_as_adjacency_dict(path)
    return construct_adjacency_matrix_from_dict(adjacency)
if __name__ == '__main__':
    # Quick manual check: build the adjacency dict for the bundled example
    # graph and print it next to the derived adjacency matrix.
    # NOTE(review): the relative path assumes the script is run from this
    # package directory, not the repository root.
    lst = import_csv_as_adjacency_dict('../data/pagerank/example1.csv')
    print(lst)
    matrix = construct_adjacency_matrix_from_dict(lst)
    print(matrix)
| [
"michael.kemna@gmail.com"
] | michael.kemna@gmail.com |
fa215df08f999e6b643dc6ae38cde42f29bad61f | cf6f79ec1a7cf2cbcf5b9dde3012b475203fcf08 | /Dota2/hero_type.py | ddec8990120292613a29c00466bf40d98f0e25ea | [] | no_license | BligJason/CS4373_Weka_Project | 1bbc4299d141893279c77d63b934ca8fcb08031d | 98b5f9eb8fb232d705b96391424665df5585c99e | refs/heads/master | 2021-01-23T07:55:24.617427 | 2017-04-21T02:04:09 | 2017-04-21T02:04:09 | 86,464,185 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,697 | py | #This dictionary maps each of the 113 heroes to their attack type, ranged or melee.
#Attack types were determined by reading hero description on Dota2 web site.
# Keys are hero names in CamelCase with spaces and apostrophes removed
# (e.g. "NaturesProphet", "KeeperoftheLight"); hyphens are kept
# ("Anti-Mage").  Values are always exactly "Melee" or "Ranged".
AttackType = {"Abaddon" : "Melee",
"Alchemist" : "Melee",
"AncientApparition" : "Ranged",
"Anti-Mage" : "Melee",
"ArcWarden" : "Ranged",
"Axe" : "Melee",
"Bane" : "Ranged",
"Batrider" : "Ranged",
"Beastmaster" : "Melee",
"Bloodseeker" : "Melee",
"BountyHunter" : "Melee",
"Brewmaster" : "Melee",
"Bristleback" : "Melee",
"Broodmother" : "Melee",
"CentaurWarrunner" : "Melee",
"ChaosKnight" : "Melee",
"Chen" : "Ranged",
"Clinkz" : "Ranged",
"Clockwerk" : "Melee",
"CrystalMaiden" : "Ranged",
"DarkSeer" : "Melee",
"Dazzle" : "Ranged",
"DeathProphet" :"Ranged",
"Disruptor" : "Ranged",
"Doom" : "Melee",
"DragonKnight" : "Melee",
"DrowRanger" : "Ranged",
"EarthSpirit" : "Melee",
"Earthshaker" : "Melee",
"ElderTitan" : "Melee",
"EmberSpirit" : "Melee",
"Enchantress" : "Ranged",
"Enigma" : "Ranged",
"FacelessVoid" : "Melee",
"Gyrocopter" : "Ranged",
"Huskar" : "Ranged",
"Invoker" : "Ranged",
"Io" : "Ranged",
"Jakiro" : "Ranged",
"Juggernaut" : "Melee",
"KeeperoftheLight" : "Ranged",
"Kunkka" : "Melee",
"LegionCommander" : "Melee",
"Leshrac" : "Ranged",
"Lich" : "Ranged",
"Lifestealer" : "Melee",
"Lina" : "Ranged",
"Lion" : "Ranged",
"LoneDruid" : "Ranged",
"Luna" : "Ranged",
"Lycan" : "Melee",
"Magnus" : "Melee",
"Medusa" : "Ranged",
"Meepo" : "Melee",
"Mirana" : "Ranged",
"MonkeyKing" : "Melee",
"Morphling" : "Ranged",
"NagaSiren" : "Melee",
"NaturesProphet" : "Ranged",
"Necrophos" : "Ranged",
"NightStalker" : "Melee",
"NyxAssassin" : "Melee",
"OgreMagi" : "Melee",
"Omniknight" : "Melee",
"Oracle" : "Ranged",
"OutworldDevourer" : "Ranged",
"PhantomAssassin" : "Melee",
"PhantomLancer" : "Melee",
"Phoenix" : "Ranged",
"Puck" : "Ranged",
"Pudge" : "Melee",
"Pugna" : "Ranged",
"QueenofPain" : "Ranged",
"Razor" : "Ranged",
"Riki" : "Melee",
"Rubick" : "Ranged",
"SandKing" : "Melee",
"ShadowDemon" : "Ranged",
"ShadowFiend" : "Ranged",
"ShadowShaman" : "Ranged",
"Silencer" : "Ranged",
"SkywrathMage" : "Ranged",
"Slardar" : "Melee",
"Slark" : "Melee",
"Sniper" : "Ranged",
"Spectre" : "Melee",
"SpiritBreaker" : "Melee",
"StormSpirit" : "Ranged",
"Sven" : "Melee",
"Techies" : "Ranged",
"TemplarAssassin" : "Ranged",
"Terrorblade" : "Melee",
"Tidehunter" : "Melee",
"Timbersaw" : "Melee",
"Tinker" : "Ranged",
"Tiny" : "Melee",
"TreantProtector" : "Melee",
"TrollWarlord" : "Ranged",
"Tusk" : "Melee",
"Underlord" : "Melee",
"Undying" : "Melee",
"Ursa" : "Melee",
"VengefulSpirit" : "Ranged",
"Venomancer" : "Ranged",
"Viper" : "Ranged",
"Visage" : "Ranged",
"Warlock" : "Ranged",
"Weaver" : "Ranged",
"Windranger" : "Ranged",
"WinterWyvern" : "Ranged",
"WitchDoctor" : "Ranged",
"WraithKing" : "Melee",
"Zeus" : "Ranged"}
| [
"dha861@fox01.cs.utsa.edu"
] | dha861@fox01.cs.utsa.edu |
e0a83c4a6640aa9ae36b4004cd85e1a20fd7a84b | 28729bdabcb1c83429752bc15b14f2ac1950028f | /firmware/python_modules/newline2020/dashboard/launcher.py | e8effa148ddfacb867e2bcf5a634b515fa1095af | [] | no_license | badgeteam/ESP32-platform-firmware | 434020769b36df164fd1719b3bcf996851d55294 | 04282f7fe84ddd0f0c3887fa948da68a9ade8126 | refs/heads/master | 2023-08-17T07:07:51.048777 | 2023-08-14T20:53:37 | 2023-08-14T20:53:37 | 194,534,857 | 31 | 49 | null | 2023-08-15T21:00:09 | 2019-06-30T15:59:30 | C | UTF-8 | Python | false | false | 20 | py | terminal/launcher.py | [
"renze@rnplus.nl"
] | renze@rnplus.nl |
fc97a0d7aa4c2fab939444e80fd0c1e8be10e8ee | 3a7bcd709a5ca3b387cbf1d25f21f41c1a3ece33 | /Homework3.py | ecaf2a7b2b68f9ddd389f6f5fbd919707c6693fb | [] | no_license | Rafokost/homework | cad9d0a686290814882619d98f4a0f03155e59bb | 3d0508dd3f8229b5d233c0a16b8cff8170330737 | refs/heads/master | 2020-07-25T13:51:14.980975 | 2019-10-22T17:45:13 | 2019-10-22T17:45:13 | 208,312,642 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 345 | py | import datetime
# Reads inner energy U, pressure P and volume V from stdin, prints a few
# boolean sanity checks, then computes the enthalpy H = U + P*V.
print(datetime.date.today())
U = input('U is inner energy = ')
U = int(U)
# Bug fix: the original line read "U not=5", which is a SyntaxError in
# Python; the not-equal operator is "!=".
print(U < 10 and U != 5)
P = input('P is gas pressure = ')
P = int(P)
print(P <= 10 or U != -9)
V = input('V is volume of gas = ')
V = int(V)
print(V == 8 and V != 9 or V < 3)
print('H=U+P*V')
H = U + P * V
# Round-trip through str only to demonstrate casting; the printed text is
# identical either way.
H = str(H)
print('the value of H is', H)
H = int(H)
# True iff H is non-zero AND non-negative, i.e. H > 0.
print(bool(H) and H >= 0)
| [
"rafokost@mail.ru"
] | rafokost@mail.ru |
4eb5ec29014a4988620c2a2929b6d12c4505a545 | adb4e6b82e5f969fc46f7c58e70e49c5d53a6fe3 | /exotica_examples/scripts/example_ik_joint_torque_minimization_proxy | 0901046a933acbf7f9646c953cfa6c056823cbc9 | [
"BSD-3-Clause"
] | permissive | ipab-slmc/exotica | 8d9b531916f1e7422f85854597aa925091a7b7c4 | be580d162c5798d976f138cc1cd99474aef9c6fe | refs/heads/master | 2021-12-15T15:19:26.471745 | 2021-12-08T03:43:44 | 2021-12-08T03:43:44 | 44,607,894 | 144 | 55 | NOASSERTION | 2021-07-23T10:51:43 | 2015-10-20T13:24:57 | C++ | UTF-8 | Python | false | false | 2,359 | #!/usr/bin/env python
from __future__ import print_function
import pyexotica as exo
import numpy as np
import math
from pyexotica.publish_trajectory import *
import exotica_core_task_maps_py
import exotica_scipy_solver
from time import sleep, time
import signal
def figure_eight(t):
    """Return the target end-effector position (x, y, z) at time ``t``.

    Traces a figure-of-eight around (0.6, -0.1, 0.5): x is fixed while
    y and z oscillate sinusoidally at a 2:1 frequency ratio.
    """
    x = 0.6
    y = -0.1 + 0.1 * math.sin(t * 2.0 * math.pi * 0.5)
    z = 0.5 + 0.2 * math.sin(t * math.pi * 0.5)
    return np.array([x, y, z])
# Toggle between the SciPy end-pose solver (True) and a C++ solver
# configured in the XML (False).
use_scipy_solver = True

exo.Setup.init_ros()
problem = None
solver = None
if use_scipy_solver:
    problem = exo.Setup.load_problem('{exotica_examples}/resources/configs/example_ik_joint_torque_minimization_proxy.xml')
    solver = exotica_scipy_solver.SciPyEndPoseSolver(problem, method='trust-constr')
    solver.max_iterations = 10
    solver.debug = False
else:
    # Requires to uncomment one of the C++ solvers in the XML
    solver = exo.Setup.load_solver(
        '{exotica_examples}/resources/configs/example_ik_joint_torque_minimization_proxy.xml')
    problem = solver.get_problem()

# Get the task so we can modify the h
# NOTE(review): h looks like a 6-element wrench selector (only the first three
# components enabled below) -- confirm against the task-map documentation.
joint_torque_minimization_proxy_task = problem.get_task_maps()["JointTorqueMinimizationProxy"]

dt = 0.002  # control-loop period in seconds
t = 0.0     # elapsed time, drives the figure_eight target
q = np.array([0.2]*7)  # initial joint configuration for the 7-DoF arm

print('Publishing IK')
signal.signal(signal.SIGINT, sig_int_handler)
# Real-time IK loop: update target, warm-start from the last solution,
# solve, print link positions, publish to RViz, then pace to dt.
while True:
    try:
        start_time = time()
        # Update h
        joint_torque_minimization_proxy_task.h = np.array([1,1,1,0,0,0])
        # Update goal position for end-effector
        problem.set_goal_eq('Position', figure_eight(t))
        # Set last solution as initialisation for optimisation problem
        problem.start_state = q
        # Solve the problem
        s = time()
        q = solver.solve()[0]
        e = time()
        # Extract the positions of the individual links
        problem.update(q)
        for i in range(7):
            name = "lwr_arm_" + str(i) + "_link"
            position = problem.get_scene().fk(name).get_translation()
            print(name, position)
        # Publish to RViz (visualisation)
        publish_pose(q, problem)
        # Sleep if there is remaining time in the control loop
        end_time = time()
        if dt > end_time - start_time:
            sleep(dt - (end_time - start_time))
        print("Optimisation took", e-s, "\tTotal loop time:", end_time - start_time)
        t = t + dt
    except KeyboardInterrupt:
        break
| [
"w.merkt@gmail.com"
] | w.merkt@gmail.com | |
d78f0ff1ab306c956ad4bbd555dafdf870f3ac0c | 360f08a6381e5ab1b2552303e0b96826d43b7de7 | /photo/urls.py | d4d2434397ccb16dc4d3e8de9cfc27c462278fee | [
"MIT"
] | permissive | Mukantwarivirginie/Instagram | abc11d603ed56a957f926dcc477c252fdbd57c46 | 91cf561e696638ec0ef410cc6410b844cf9bc2e7 | refs/heads/master | 2020-05-01T00:52:55.502953 | 2019-03-29T10:52:29 | 2019-03-29T10:52:29 | 177,173,304 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,317 | py |
"""instagram URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from . import views
from django.conf.urls.static import static
from django.conf import settings
# URL routes for the photo app; the view callables live in photo/views.py.
urlpatterns = [
    url(r'^$', views.instagram, name = 'instagram'),                # landing feed
    url(r'^search/', views.search_results, name='search_results'),  # user search
    url(r'^profile/(\d+)',views.profile,name ='profile'),           # profile by numeric id
    url(r'^new/profile$', views.new_profile, name='new-profile'),
    url(r'^new/view_profile$', views.view_profile, name='view_profile'),
    url(r'^new/addimage$', views.postimage, name='postimage'),
]
# Serve user-uploaded media through Django itself in development only.
if settings.DEBUG:
    urlpatterns+= static(settings.MEDIA_URL, document_root = settings.MEDIA_ROOT)
| [
"rwjpyy@gmail.com"
] | rwjpyy@gmail.com |
31e556001e587936a0ac18a2f7b0b4c686c3554e | f4713505e4647059cdfce9c1a7958ca823c763e8 | /Evaluacion/settings.py | 4dd3da306597120783da47d8b166ec623a367a39 | [] | no_license | iv-castrom/mi-primer-blog | 8591e38c6cf85f3a90dd5ae46c63959b2e29948e | d07bb0f55e5c529b9b2ce00e2808512875e14163 | refs/heads/main | 2023-02-03T04:38:17.820041 | 2020-12-10T21:04:03 | 2020-12-10T21:04:03 | 320,385,476 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,930 | py | """
Django settings for Evaluacion project.
Generated by 'django-admin startproject' using Django 3.1.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from the environment before any deployment.
SECRET_KEY = '^9$n^_6-l%x0f(f&479)pf!hol2p+)#2nl84)9k*tzg+d_&46z'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# NOTE(review): '*' accepts any Host header -- restrict for production.
ALLOWED_HOSTS = ['*']

# Accept cross-origin requests from any origin (django-cors-headers).
CORS_ORIGIN_ALLOW_ALL = True

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.sites',
    'socios.apps.SociosConfig',
    'rest_framework',
    'api.apps.ApiConfig',
    'corsheaders',
    'pwa',
    'allauth',
    'allauth.account',
    'allauth.socialaccount',
    'allauth.socialaccount.providers.facebook',
]

# NOTE(review): django-cors-headers documents that CorsMiddleware should be
# placed as high as possible, before CommonMiddleware; here it is last --
# verify the CORS headers are actually applied to responses.
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'corsheaders.middleware.CorsMiddleware',
]

ROOT_URLCONF = 'Evaluacion.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'Evaluacion.wsgi.application'

# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}

# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

AUTHENTICATION_BACKENDS = (
    # Needed to log in by username in the Django admin, regardless of allauth
    'django.contrib.auth.backends.ModelBackend',
    # allauth-specific authentication method, e.g. login by e-mail
    'allauth.account.auth_backends.AuthenticationBackend',
)

# Site id required by django.contrib.sites / allauth.
SITE_ID=1

# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/

LANGUAGE_CODE = 'es-cl'

TIME_ZONE = 'America/Santiago'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/

STATIC_URL = '/static/'
STATIC_ROOT = BASE_DIR / 'static'
STATICFILES_DIRS = (
    BASE_DIR / "common_static",
)

# File uploads (user-submitted media)
MEDIA_ROOT = BASE_DIR / 'media'
MEDIA_URL = '/media/'

# REST FRAMEWORK PLUGIN
REST_FRAMEWORK = {
    'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
    'PAGE_SIZE': 10
}

# Location of the PWA service worker
PWA_SERVICE_WORKER_PATH = os.path.join(BASE_DIR, 'serviceworker.js')

# PWA manifest configuration
PWA_APP_NAME = 'Among Us'
PWA_APP_DESCRIPTION = "App de among us"
PWA_APP_THEME_COLOR = '#85D4B7'
PWA_APP_BACKGROUND_COLOR = '#ffffff'
PWA_APP_DISPLAY = 'standalone'
PWA_APP_SCOPE = '/'
PWA_APP_ORIENTATION = 'any'
PWA_APP_START_URL = '/'
PWA_APP_STATUS_BAR_COLOR = 'default'
PWA_APP_ICONS = [
    {
        "src": "static/images/icons/icon-144x144.png",
        "sizes": "144x144"
    }
]
PWA_APP_SPLASH_SCREEN = [
    {
        "src": "static/imagenes/splash-art.png",
        "media": "(device-width: 320px) and (device-height: 568px) and (-webkit-device-pixel-ratio: 2)"
    }
]
PWA_APP_DIR = 'ltr'
PWA_APP_LANG = 'es-cl'
PWA_APP_DEBUG_MODE = True
# REDIRECCIONAR A PAGINA PRINCIPAL DESPUES DE LOGIN CON SOCIAL ACCOUNT
LOGIN_REDIRECT_URL = '/' | [
"iv.castrom@alumnos.duoc.cl"
] | iv.castrom@alumnos.duoc.cl |
2ed301967dcb7f052a8c51f56ef1b0bdc1ca357e | fa54359c670fd9d4db543505819ce26481dbcad8 | /setup.py | 4d01cb7c22b59ecad2520a5c62baf9bba188d3c2 | [
"MIT"
] | permissive | masasin/decorating | 4b961e7b2201b84a1cf0553c65e4d0c0768723d5 | c19bc19b30eea751409f727b03e156123df704e1 | refs/heads/master | 2021-01-20T16:35:43.333543 | 2016-05-18T08:22:48 | 2016-05-18T08:22:48 | 59,138,136 | 0 | 0 | null | 2016-05-18T17:43:23 | 2016-05-18T17:43:23 | null | UTF-8 | Python | false | false | 2,158 | py | #!/usr/bin/env python
# coding=utf-8
#
# Python Script
#
# Copyright © Manoel Vilela
#
#
from setuptools import setup, find_packages
from codecs import open # To use a consistent encoding
from os import path
from warnings import warn
import decorating
# Optional developer dependency: pypandoc converts the Markdown README to
# reStructuredText so the PyPI project page renders correctly.
try:
    import pypandoc
except ImportError:
    warn("Only-for-developers: you need pypandoc for upload "
         "correct reStructuredText into PyPI home page")

here = path.abspath(path.dirname(__file__))
readme = path.join(here, 'README.md')

if 'pypandoc' in globals():
    # pypandoc imported successfully above: convert README.md to reST.
    long_description = pypandoc.convert(readme, 'rst', format='markdown')
else:
    # Get the long description from the relevant file
    with open(readme, encoding='utf-8') as f:
        long_description = f.read()

setup(
    name='decorating',
    version=decorating.__version__,
    description="A useful collection of decorators (focused in animation)",
    long_description=long_description,
    classifiers=[
        "Environment :: Console",
        "Development Status :: 3 - Alpha",
        "Topic :: Utilities",
        "Operating System :: Unix",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
    ],
    # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
    keywords='decorating animation decorators decorator',
    author=decorating.__author__,
    author_email=decorating.__email__,
    url=decorating.__url__,
    download_url="{u}/archive/v{v}.tar.gz".format(u=decorating.__url__,
                                                  v=decorating.__version__),
    zip_safe=False,
    license='MIT',
    packages=find_packages(exclude=['ez_setup', 'examples',
                                    'tests', 'docs', '__pycache__']),
    platforms='unix',
    # NOTE(review): requirements.txt is opened without being closed; harmless
    # in a short-lived setup script, but a `with` block would be cleaner.
    install_requires=[
        x.strip() for x in open('requirements.txt').readlines()
    ],
    entry_points={  # no entry-points yet
        # 'console_scripts': [
        #     'decorating = decorating.cli:main'
        # ]
    }
)
| [
"manoel_vilela@engineer.com"
] | manoel_vilela@engineer.com |
57d5f77871d2e59fdda4f2f31e1e2a4423ec1a1a | 8e24e8bba2dd476f9fe612226d24891ef81429b7 | /geeksforgeeks/algorithm/expert_algo/2_6.py | 48d002b4b59871316db762455451642b74ab27c3 | [] | no_license | qmnguyenw/python_py4e | fb56c6dc91c49149031a11ca52c9037dc80d5dcf | 84f37412bd43a3b357a17df9ff8811eba16bba6e | refs/heads/master | 2023-06-01T07:58:13.996965 | 2021-06-15T08:39:26 | 2021-06-15T08:39:26 | 349,059,725 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 9,332 | py | Minimum Bipartite Groups
Given Adjacency List representation of graph of **N** vertices from **1 to N**
, the task is to count the minimum bipartite groups of the given graph.
**Examples:**
> **Input:** N = 5
> Below is the given graph with 5 nodes:
>
>
> 
>
> **Output:** 3
> **Explanation:**
> Possible groups satisfying the Bipartite property: [2, 5], [1, 3], [4]
> Below are the bipartite groups that can be formed:
>
>
>
>
>
>
>
>
> 
Recommended: Please try your approach on _**_{IDE}_**_ first, before moving on
to the solution.
**Approach:**
The idea is to find the maximum height of all the Connected Components in the
given graph of **N** nodes to find the minimum bipartite groups. Below are the
steps:
1. For all the non-visited vertex in the given graph, find the height of the current Connected Components starting from the current vertex.
2. Start DFS Traversal to find the height of all the Connected Components.
3. The maximum of the heights calculated for all the Connected Components gives the minimum bipartite groups required.
Below is the implementation of the above approach:
## C++
__
__
__
__
__
__
__
#include<bits/stdc++.h>
using namespace std;
// Function to find the height sizeof
// the current component with vertex s
int height(int s, vector<int> adj[],
int* visited)
{
// Visit the current Node
visited[s] = 1;
int h = 0;
// Call DFS recursively to find the
// maximum height of current CC
for (auto& child : adj[s]) {
// If the node is not visited
// then the height recursively
// for next element
if (visited[child] == 0) {
h = max(h, 1 + height(child, adj,
visited));
}
}
return h;
}
// Function to find the minimum Groups
int minimumGroups(vector<int> adj[], int N)
{
// Intialise with visited array
int visited[N + 1] = { 0 };
// To find the minimum groups
int groups = INT_MIN;
// Traverse all the non visited Node
// and calculate the height of the
// tree with current node as a head
for (int i = 1; i <= N; i++) {
// If the current is not visited
// therefore, we get another CC
if (visited[i] == 0) {
int comHeight;
comHeight = height(i, adj, visited);
groups = max(groups, comHeight);
}
}
// Return the minimum bipartite matching
return groups;
}
// Function that adds the current edges
// in the given graph
void addEdge(vector<int> adj[], int u, int v)
{
adj[u].push_back(v);
adj[v].push_back(u);
}
// Drivers Code
int main()
{
int N = 5;
// Adjacency List
vector<int> adj[N + 1];
// Adding edges to List
addEdge(adj, 1, 2);
addEdge(adj, 3, 2);
addEdge(adj, 4, 3);
cout << minimumGroups(adj, N);
}
---
__
__
## Java
__
__
__
__
__
__
__
import java.util.*;
class GFG{
// Function to find the height sizeof
// the current component with vertex s
static int height(int s, Vector<Integer> adj[],
int []visited)
{
// Visit the current Node
visited[s] = 1;
int h = 0;
// Call DFS recursively to find the
// maximum height of current CC
for (int child : adj[s]) {
// If the node is not visited
// then the height recursively
// for next element
if (visited[child] == 0) {
h = Math.max(h, 1 + height(child, adj,
visited));
}
}
return h;
}
// Function to find the minimum Groups
static int minimumGroups(Vector<Integer> adj[], int N)
{
// Intialise with visited array
int []visited= new int[N + 1];
// To find the minimum groups
int groups = Integer.MIN_VALUE;
// Traverse all the non visited Node
// and calculate the height of the
// tree with current node as a head
for (int i = 1; i <= N; i++) {
// If the current is not visited
// therefore, we get another CC
if (visited[i] == 0) {
int comHeight;
comHeight = height(i, adj, visited);
groups = Math.max(groups, comHeight);
}
}
// Return the minimum bipartite matching
return groups;
}
// Function that adds the current edges
// in the given graph
static void addEdge(Vector<Integer> adj[], int u, int v)
{
adj[u].add(v);
adj[v].add(u);
}
// Drivers Code
public static void main(String[] args)
{
int N = 5;
// Adjacency List
Vector<Integer> []adj = new Vector[N + 1];
for (int i = 0 ; i < N + 1; i++)
adj[i] = new Vector<Integer>();
// Adding edges to List
addEdge(adj, 1, 2);
addEdge(adj, 3, 2);
addEdge(adj, 4, 3);
System.out.print(minimumGroups(adj, N));
}
}
// This code is contributed by 29AjayKumar
---
__
__
## Python3
__
__
__
__
__
__
__
import sys
# Function to find the height sizeof
# the current component with vertex s
def height(s, adj, visited):
    """Return the height of the DFS tree of the connected component rooted at ``s``.

    ``adj`` is an adjacency list; ``visited`` is a 0/1 list that is updated
    in place, so every vertex reachable from ``s`` is marked visited.
    (Reconstructed: the published listing had its indentation destroyed.)
    """
    # Mark the current node as visited.
    visited[s] = 1
    h = 0
    # Recurse into every unvisited neighbour and keep the deepest subtree.
    for child in adj[s]:
        if visited[child] == 0:
            h = max(h, 1 + height(child, adj, visited))
    return h
# Function to find the minimum Groups
def minimumGroups(adj, N):
    """Return the minimum number of bipartite groups for the graph.

    Computed as the maximum DFS-tree height over all connected components;
    vertices are numbered 1..N (index 0 of ``adj`` is unused).
    (Reconstructed: the published listing had its indentation destroyed.)
    """
    # Initialise the visited array (shared across all component DFS runs).
    visited = [0 for i in range(N + 1)]
    groups = -sys.maxsize
    # Start a DFS from every still-unvisited vertex: each start discovers a
    # new connected component whose height bounds the groups it needs.
    for i in range(1, N + 1):
        if visited[i] == 0:
            comHeight = height(i, adj, visited)
            groups = max(groups, comHeight)
    # Return the minimum bipartite grouping count.
    return groups
# Function that adds the current edges
# in the given graph
def addEdge(adj, u, v):
    """Add the undirected edge (u, v) to the adjacency list ``adj`` in place.

    (Reconstructed: the published listing had its indentation destroyed.)
    """
    adj[u].append(v)
    adj[v].append(u)
# Driver code
# Driver code: build the sample graph (edges 1-2, 3-2, 4-3; vertex 5
# isolated) and print the expected answer, 3.
# (Reconstructed: the published listing had its indentation destroyed.)
if __name__ == "__main__":
    N = 5
    # Adjacency list for vertices 1..N (index 0 unused).
    adj = [[] for i in range(N + 1)]
    # Adding edges to the list.
    addEdge(adj, 1, 2)
    addEdge(adj, 3, 2)
    addEdge(adj, 4, 3)
    print(minimumGroups(adj, N))
# This code is contributed by rutvik_56
---
__
__
## C#
__
__
__
__
__
__
__
using System;
using System.Collections.Generic;
class GFG{
// Function to find the height sizeof
// the current component with vertex s
static int height(int s, List<int> []adj,
int []visited)
{
// Visit the current Node
visited[s] = 1;
int h = 0;
// Call DFS recursively to find the
// maximum height of current CC
foreach (int child in adj[s]) {
// If the node is not visited
// then the height recursively
// for next element
if (visited[child] == 0) {
h = Math.Max(h, 1 + height(child, adj,
visited));
}
}
return h;
}
// Function to find the minimum Groups
static int minimumGroups(List<int> []adj, int N)
{
// Intialise with visited array
int []visited= new int[N + 1];
// To find the minimum groups
int groups = int.MinValue;
// Traverse all the non visited Node
// and calculate the height of the
// tree with current node as a head
for (int i = 1; i <= N; i++) {
// If the current is not visited
// therefore, we get another CC
if (visited[i] == 0) {
int comHeight;
comHeight = height(i, adj, visited);
groups = Math.Max(groups, comHeight);
}
}
// Return the minimum bipartite matching
return groups;
}
// Function that adds the current edges
// in the given graph
static void addEdge(List<int> []adj, int u, int v)
{
adj[u].Add(v);
adj[v].Add(u);
}
// Drivers Code
public static void Main(String[] args)
{
int N = 5;
// Adjacency List
List<int> []adj = new List<int>[N + 1];
for (int i = 0 ; i < N + 1; i++)
adj[i] = new List<int>();
// Adding edges to List
addEdge(adj, 1, 2);
addEdge(adj, 3, 2);
addEdge(adj, 4, 3);
Console.Write(minimumGroups(adj, N));
}
}
// This code is contributed by Rajput-Ji
---
__
__
**Output:**
3
**Time Complexity:** O(V + E), where V is the
number of vertices and E is the number of edges.
Attention reader! Don’t stop learning now. Get hold of all the important DSA
concepts with the **DSA Self Paced Course** at a student-friendly price and
become industry ready. To complete your preparation from learning a language
to DS Algo and many more, please refer **Complete Interview Preparation
Course** **.**
My Personal Notes _arrow_drop_up_
Save
| [
"qmnguyenw@gmail.com"
] | qmnguyenw@gmail.com |
411ad6583c126a7921c2c44b6ea8f5cd407c757b | 2b106f5b8fca4ad998de1e2e7bc4d9f4b37a52cf | /models/payment.py | 4e21385b68306fd0db84887b22ac87e411432348 | [] | no_license | vimit/unknown_test | 8ee89e679edbc85bf999f05a03c2d286043b9dd1 | 519fb4e7df1f42ca3d6f759651dd5cff8380b2e6 | refs/heads/master | 2020-04-03T15:48:15.720976 | 2018-10-30T12:02:56 | 2018-10-30T12:02:56 | 155,378,145 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,595 | py | # coding: utf-8
import logging
import json
import pprint
import stripe
from odoo.tools import float_compare
from odoo import api, fields, models, _
from odoo.addons.payment.models.payment_acquirer import ValidationError
from odoo.tools.float_utils import float_round
_logger = logging.getLogger(__name__)
# Force the API version to avoid breaking in case of update on Stripe side
# cf https://stripe.com/docs/api#versioning
# changelog https://stripe.com/docs/upgrades#api-changelog
STRIPE_HEADERS = {'Stripe-Version': '2016-03-07'}
# The following currencies are integer only, see https://stripe.com/docs/currencies#zero-decimal
INT_CURRENCIES = [
u'BIF', u'XAF', u'XPF', u'CLP', u'KMF', u'DJF', u'GNF', u'JPY', u'MGA', u'PYG', u'RWF', u'KRW',
u'VUV', u'VND', u'XOF'
]
class PaymentAcquirerStripe(models.Model):
    """Stripe payment acquirer: configuration fields plus the hooks that
    build checkout form values and process/validate s2s token forms."""

    _inherit = 'payment.acquirer'

    provider = fields.Selection(selection_add=[('stripe', 'Stripe')])
    stripe_secret_key = fields.Char(required_if_provider='stripe', groups='base.group_user')
    stripe_publishable_key = fields.Char(required_if_provider='stripe', groups='base.group_user')
    stripe_image_url = fields.Char(
        "Checkout Image URL", groups='base.group_user',
        help="A relative or absolute URL pointing to a square image of your "
             "brand or product. As defined in your Stripe profile. See: "
             "https://stripe.com/docs/checkout")

    @api.multi
    def stripe_form_generate_values(self, tx_values):
        """Build the rendering values for the Stripe checkout form.

        Partner details are optional; amount and currency are mandatory.
        The caller's ``return_url`` is forwarded as ``returndata``.
        """
        self.ensure_one()
        stripe_tx_values = dict(tx_values)
        temp_stripe_tx_values = {
            'company': self.company_id.name,
            'amount': tx_values['amount'],  # Mandatory
            'currency': tx_values['currency'].name,  # Mandatory anyway
            'currency_id': tx_values['currency'].id,  # same here
            'address_line1': tx_values.get('partner_address'),  # Any info of the partner is not mandatory
            'address_city': tx_values.get('partner_city'),
            'address_country': tx_values.get('partner_country') and tx_values.get('partner_country').name or '',
            'email': tx_values.get('partner_email'),
            'address_zip': tx_values.get('partner_zip'),
            'name': tx_values.get('partner_name'),
            'phone': tx_values.get('partner_phone'),
        }

        temp_stripe_tx_values['returndata'] = stripe_tx_values.pop('return_url', '')
        stripe_tx_values.update(temp_stripe_tx_values)
        return stripe_tx_values

    @api.model
    def _get_stripe_api_url(self):
        """Return the Stripe REST API host/path (no scheme)."""
        return 'api.stripe.com/v1'

    @api.model
    def stripe_s2s_form_process(self, data):
        """Create a payment.token record from the posted s2s form data."""
        payment_token = self.env['payment.token'].sudo().create({
            'iban': data['iban'],
            'acquirer_id': int(data['acquirer_id']),
            'partner_id': int(data['partner_id'])
        })
        return payment_token

    @api.multi
    def stripe_s2s_form_validate(self, data):
        """Return True when all mandatory s2s form fields are present."""
        self.ensure_one()

        # mandatory fields
        for field_name in ["iban", ]:
            if not data.get(field_name):
                return False
        return True

    def _get_feature_support(self):
        """Get advanced feature support by provider.

        Each provider should add its technical in the corresponding
        key for the following features:
            * fees: support payment fees computations
            * authorize: support authorizing payment (separates
                         authorization and capture)
            * tokenize: support saving payment data in a payment.tokenize
                        object
        """
        res = super(PaymentAcquirerStripe, self)._get_feature_support()
        res['tokenize'].append('stripe')
        return res
class PaymentTransactionStripe(models.Model):
    """Stripe transaction handling: charge creation, feedback validation,
    invoice confirmation and polling of Stripe charge events."""

    _inherit = 'payment.transaction'

    def _create_stripe_charge(self, acquirer_ref=None, tokenid=None, email=None):
        """Create a Stripe Charge for this transaction and return it.

        Amounts are sent in the smallest currency unit except for
        zero-decimal currencies (INT_CURRENCIES).
        NOTE(review): server-side Charge creation normally requires the
        *secret* key, but the publishable key is assigned here -- verify.
        NOTE(review): currency is hard-coded to 'eur' instead of using
        self.currency_id -- confirm this is intentional.
        """
        payment_acquirer = self.env['payment.acquirer'].browse(self.acquirer_id.id)
        stripe.api_key = payment_acquirer.stripe_publishable_key
        charge = stripe.Charge.create(
            amount=int(self.amount if self.currency_id.name in INT_CURRENCIES else float_round(self.amount * 100, 2)),
            currency='eur',
            customer=acquirer_ref,
            source=str(tokenid),
        )
        return charge

    @api.multi
    def stripe_s2s_do_transaction(self, **kwargs):
        """Charge the stored payment token and validate the Stripe reply."""
        self.ensure_one()
        result = self._create_stripe_charge(acquirer_ref=self.payment_token_id.acquirer_ref, tokenid=self.payment_token_id.name, email=self.partner_email)
        return self._stripe_s2s_validate_tree(result)

    # def _create_stripe_refund(self):
    #     api_url_refund = 'https://%s/refunds' % (self.acquirer_id._get_stripe_api_url())
    #
    #     refund_params = {
    #         'charge': self.acquirer_reference,
    #         'amount': int(float_round(self.amount * 100, 2)), # by default, stripe refund the full amount (we don't really need to specify the value)
    #         'metadata[reference]': self.reference,
    #     }
    #
    #     r = requests.post(api_url_refund,
    #                       auth=(self.acquirer_id.stripe_secret_key, ''),
    #                       params=refund_params,
    #                       headers=STRIPE_HEADERS)
    #     return r.json()

    # @api.multi
    # def stripe_s2s_do_refund(self, **kwargs):
    #     self.ensure_one()
    #     self.state = 'refunding'
    #     result = self._create_stripe_refund()
    #     return self._stripe_s2s_validate_tree(result)

    @api.model
    def _stripe_form_get_tx_from_data(self, data):
        """ Given a data dict coming from stripe, verify it and find the related
        transaction record. """
        reference = data.get('metadata', {}).get('reference')
        if not reference:
            # No reference at all: surface Stripe's own error message if any.
            stripe_error = data.get('error', {}).get('message', '')
            _logger.error('Stripe: invalid reply received from stripe API, looks like '
                          'the transaction failed. (error: %s)', stripe_error or 'n/a')
            error_msg = _("We're sorry to report that the transaction has failed.")
            if stripe_error:
                error_msg += " " + (_("Stripe gave us the following info about the problem: '%s'") %
                                    stripe_error)
            error_msg += " " + _("Perhaps the problem can be solved by double-checking your "
                                 "credit card details, or contacting your bank?")
            raise ValidationError(error_msg)

        # The reference must match exactly one transaction.
        tx = self.search([('reference', '=', reference)])
        if not tx:
            error_msg = (_('Stripe: no order found for reference %s') % reference)
            _logger.error(error_msg)
            raise ValidationError(error_msg)
        elif len(tx) > 1:
            error_msg = (_('Stripe: %s orders found for reference %s') % (len(tx), reference))
            _logger.error(error_msg)
            raise ValidationError(error_msg)
        return tx[0]

    @api.multi
    def _stripe_s2s_validate_tree(self, tree):
        """Update this transaction from a Stripe charge payload.

        Returns True when the charge is pending (SEPA flow), False on error.
        """
        self.ensure_one()
        if self.state not in ('draft', 'pending', 'refunding'):
            _logger.info('Stripe: trying to validate an already validated tx (ref %s)', self.reference)
            return True

        status = tree.get('status')
        if status == 'pending':
            new_state = 'pending'
            self.write({
                'state': new_state,
                'date_validate': fields.datetime.now(),
                'acquirer_reference': tree.get('id'),
            })
            self.execute_callback()
            if self.payment_token_id:
                self.payment_token_id.verified = True
            _logger.warning('Waiting For Confirmation')
            return True
        else:
            error = tree['error']['message']
            _logger.warn(error)
            self.sudo().write({
                'state': 'error',
                'state_message': error,
                'acquirer_reference': tree.get('id'),
                'date_validate': fields.datetime.now(),
            })
            return False

    @api.multi
    def _stripe_form_get_invalid_parameters(self, data):
        """Return [(name, received, expected)] for mismatched form params."""
        invalid_parameters = []
        reference = data['metadata']['reference']
        if reference != self.reference:
            invalid_parameters.append(('Reference', reference, self.reference))
        return invalid_parameters

    @api.multi
    def _stripe_form_validate(self, data):
        return self._stripe_s2s_validate_tree(data)

    def confirm_invoice_token(self):
        """ Confirm a transaction token and call SO confirmation if it is a success.
        :return: True if success; error string otherwise """
        self.ensure_one()
        if self.payment_token_id and self.partner_id == self.account_invoice_id.partner_id:
            try:
                s2s_result = self.stripe_s2s_do_transaction()
            except Exception as e:
                _logger.warning(
                    _("<%s> transaction (%s) failed : <%s>") %
                    (self.acquirer_id.provider, self.id, str(e)))
                return 'pay_invoice_tx_fail'

            # SEPA charges come back as 'pending', not 'done'.
            valid_state = 'pending'
            if not s2s_result or self.state != valid_state:
                print('problem is here 1')
                _logger.warning(
                    _("<%s> transaction (%s) invalid state : %s") %
                    (self.acquirer_id.provider, self.id, self.state_message))
                return 'pay_invoice_tx_state'

            try:
                # Auto-confirm SO if necessary
                return self._confirm_invoice()
            except Exception as e:
                print('problem is here 2')
                _logger.warning(
                    _("<%s> transaction (%s) invoice confirmation failed : <%s>") %
                    (self.acquirer_id.provider, self.id, str(e)))
                return 'pay_invoice_tx_confirm'
        return 'pay_invoice_tx_token'

    def _confirm_invoice(self):
        """ Check tx state, confirm and pay potential invoice """
        self.ensure_one()
        # check tx state, confirm the potential SO
        if self.account_invoice_id.state != 'open':
            _logger.warning('<%s> transaction STATE INCORRECT for invoice %s (ID %s, state %s)', self.acquirer_id.provider, self.account_invoice_id.number, self.account_invoice_id.id, self.account_invoice_id.state)
            return 'pay_invoice_invalid_doc_state'
        if not float_compare(self.amount, self.account_invoice_id.amount_total, 2) == 0:
            _logger.warning(
                '<%s> transaction AMOUNT MISMATCH for invoice %s (ID %s): expected %r, got %r',
                self.acquirer_id.provider, self.account_invoice_id.number, self.account_invoice_id.id,
                self.account_invoice_id.amount_total, self.amount,
            )
            self.account_invoice_id.message_post(
                subject=_("Amount Mismatch (%s)") % self.acquirer_id.provider,
                body=_("The invoice was not confirmed despite response from the acquirer (%s): invoice amount is %r but acquirer replied with %r.") % (
                    self.acquirer_id.provider,
                    self.account_invoice_id.amount_total,
                    self.amount,
                )
            )
            return 'pay_invoice_tx_amount'

        if self.state == 'authorized' and self.acquirer_id.capture_manually:
            _logger.info('<%s> transaction authorized, nothing to do with invoice %s (ID %s)', self.acquirer_id.provider, self.account_invoice_id.number, self.account_invoice_id.id)
        elif self.state == 'pending':
            _logger.info('<%s> transaction pending, paying invoice %s (ID %s) in few days,', self.acquirer_id.provider, self.account_invoice_id.number, self.account_invoice_id.id)
        else:
            _logger.warning('<%s> transaction MISMATCH for invoice %s (ID %s)', self.acquirer_id.provider, self.account_invoice_id.number, self.account_invoice_id.id)
            return 'pay_invoice_tx_state'
        return True

    @api.model
    def transaction_status(self):
        # Entry point (e.g. for a scheduled action) that polls Stripe events.
        self.transaction_status_event_listener()

    def transaction_status_event_listener(self):
        """Poll Stripe's event list and react to charge.* events that match
        this transaction's acquirer reference.

        NOTE(review): uses the publishable key for a server-side Event.list
        call, and webhooks would avoid polling entirely -- verify.
        """
        payment_acquirer = self.env['payment.acquirer'].browse(self.acquirer_id.id)
        stripe.api_key = payment_acquirer.stripe_publishable_key
        events = stripe.Event.list()
        for event in events:
            if 'charge' in event.get('type'):
                charge = event.get('data')['object']
                if charge.get('id') == self.acquirer_reference:
                    if event.get('type') == 'charge.succeeded':
                        self._pay_invoice()
                        print('success')
                    if event.get('type') == 'charge.failed':
                        # send mail to user
                        print('failed')
                    if event.get('type') == 'charge.expired':
                        # send mail to user
                        print('expired')
                    if event.get('type') == 'charge.pending':
                        print('pending')
class PaymentTokenStripe(models.Model):
    """Stripe payment tokens backed by SEPA debit Sources attached to a
    Stripe Customer."""

    _inherit = 'payment.token'

    @api.model
    def stripe_create(self, values):
        """Create the Stripe-side objects for a new payment token.

        When an IBAN is posted, a SEPA-debit Source is created (with an
        e-mail mandate notification if the partner has an e-mail address);
        otherwise an already-created ``stripe_token`` is expected in values.
        NOTE(review): the Source is created with the *publishable* key and
        currency hard-coded to 'eur' -- verify both.
        """
        token = values.get('stripe_token')
        payment_acquirer = self.env['payment.acquirer'].browse(values.get('acquirer_id'))
        stripe.api_key = payment_acquirer.stripe_publishable_key
        partner = self.env['res.partner'].browse(values['partner_id'])
        # when asking to create a token on Stripe servers
        if values.get('iban'):
            if partner.email:
                source = stripe.Source.create(
                    type='sepa_debit',
                    sepa_debit={'iban': values['iban']},
                    currency='eur',
                    owner={
                        'name': partner.name,
                        'email': partner.email,
                    },
                    mandate={
                        'notification_method': 'email',
                    }
                )
            else:
                source = stripe.Source.create(
                    type='sepa_debit',
                    sepa_debit={'iban': values['iban']},
                    currency='eur',
                    owner={
                        'name': partner.name,
                    }
                )
            token = source
            description = values['iban']
        else:
            partner_id = self.env['res.partner'].browse(values['partner_id'])
            description = 'Partner: %s (id: %s)' % (partner_id.name, partner_id.id)

        if not token:
            raise Exception('stripe_create: No token provided!')

        res = self._stripe_create_customer(token, description, payment_acquirer.id)

        # pop credit card info to info sent to create
        for field_name in ["iban",]:
            values.pop(field_name, None)
        return res

    def _stripe_create_customer(self, token, description=None, acquirer_id=None):
        """Attach the SEPA Source to a new Stripe Customer and return the
        payment.token field values (acquirer_ref, name, masked short_name).

        Raises on Stripe errors and on non-SEPA source payloads.
        """
        if token.get('error'):
            _logger.error('payment.token.stripe_create_customer: Token error:\n%s', pprint.pformat(token['error']))
            raise Exception(token['error']['message'])

        # Only freshly created sepa_debit Sources are accepted here.
        if token['object'] != 'source':
            _logger.error('payment.token.stripe_create_customer: Cannot create a customer for object type "%s"', token.get('object'))
            raise Exception('We are unable to process your credit card information.')

        if token['type'] != 'sepa_debit':
            _logger.error('payment.token.stripe_create_customer: Cannot create a customer for token type "%s"', token.get('type'))
            raise Exception('We are unable to process your credit card information.')

        payment_acquirer = self.env['payment.acquirer'].browse(acquirer_id or self.acquirer_id.id)
        stripe.api_key = payment_acquirer.stripe_publishable_key
        customer = stripe.Customer.create(
            email=token.get('owner')['email'],
            source=token['id'],
        )

        if customer.get('error'):
            _logger.error('payment.token.stripe_create_customer: Customer error:\n%s', pprint.pformat(customer['error']))
            raise Exception(customer['error']['message'])

        values = {
            'acquirer_ref': customer['id'],
            'name': token['id'],
            'short_name': 'XXXXXXXXXXXX%s ' % (token['sepa_debit']['last4'],)
        }

        return values
| [
"imen1aissaoui@gmail.com"
] | imen1aissaoui@gmail.com |
bbabbdcef1783b6b7165e01d98fb992458d41adc | 8667486b32cba50c348c3e8985191753776e16a2 | /Lesson-02 Assignment/subway_search.py | 9e587d112e6c7d8e0a8cb389febb94c79d6a8059 | [] | no_license | feiYang2008/NLP_Assignment | 93c39c0dd698e0bcac49f647d91150f949d4e1af | 6a0db31ec171e28bc56847b6904506933178363f | refs/heads/master | 2021-01-06T22:37:57.325409 | 2019-08-13T10:57:26 | 2019-08-13T10:57:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,126 | py | #! /usr/bin/python
# -*- coding: utf-8 -*-
import os
from collections.abc import Iterable
from collections import defaultdict
import collections
import math
import requests
from urllib import parse
import networkx as nx
from bs4 import *
import re
import matplotlib as mpl
mpl.use('TkAgg')
import matplotlib.pyplot as plt
plt.rcParams['font.sans-serif']=['SimHei'] # render Chinese (CJK) labels correctly
plt.rcParams['axes.unicode_minus']=False # render minus signs correctly with the CJK font
def flatten(lst):
    """Recursively flatten arbitrarily nested iterables, yielding leaf items.

    Strings and bytes are treated as atomic leaves rather than iterables.
    """
    for item in lst:
        # Fix: ``collections.Iterable`` was deprecated and removed in
        # Python 3.10; use the ``collections.abc.Iterable`` name already
        # imported at the top of this file.
        if isinstance(item, Iterable) and not isinstance(item, (str, bytes)):
            yield from flatten(item)
        else:
            yield item
class crawl_subway_geo:
    """Geocode station names via the AMap (Gaode) web API and persist them."""
    def __init__(self):
        # AMap web-service API key used by every request.
        # NOTE(review): hard-coded credential — consider moving to config.
        self.key = "e86595afe8494e43ac6252b6330636d0"
    def get_url_by_gaode(self,address):
        """Build the geocode endpoint URL and query params for *address* (Beijing)."""
        url = "https://restapi.amap.com/v3/geocode/geo"
        params = {"key": self.key,
                  "address": address,
                  "city": "北京"}
        return url, params
    def get_location_by_gaode(self,address):
        """Return (longitude, latitude) strings for *address*, or ("", "") on failure.

        Only the first geocode result is used (the loop returns immediately).
        """
        url, params = self.get_url_by_gaode(address)
        response = requests.get(url, params).json()
        if response["status"] == "1" and len(response["geocodes"]) != 0:
            for item in response['geocodes']:
                location = item["location"].split(",")
                longitude = location[0]
                latitude = location[1]
                return longitude, latitude
        else:
            return "", ""
    def main(self,line_stations):
        """Geocode every station in *line_stations* and write 'name:lon,lat' lines."""
        with open("beijing_subway_geo.txt", "w", encoding="utf-8") as beijing_subway_geo_file:
            stations = list(flatten(list(line_stations.values())))
            for name in stations:
                long,lati = self.get_location_by_gaode(name)
                if name and long and lati:
                    beijing_subway_geo_file.write("{}:{},{}\n".format(name, long,lati))
class crawl_subway_line:
    """Scrape Beijing subway lines and their station lists from Baidu Baike."""
    def __init__(self):
        # Browser-like user agent so Baidu Baike serves normal pages.
        user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko)" \
                     " Chrome/75.0.3770.142 Safari/537.36"
        self.headers = {'user-agent': user_agent}
    def get_request_soup(self,url):
        """Fetch *url* (no redirects) and return a BeautifulSoup of the page."""
        page = requests.get(url, headers=self.headers, allow_redirects=False)
        soup = BeautifulSoup(page.content, 'html.parser', from_encoding='utf-8')
        return soup
    def get_line_url(self):
        """Return {line name: Baike URL} scraped from the Beijing Subway page.

        NOTE(review): relies on the page's third table holding the line links —
        fragile against layout changes.
        """
        url_init = "https://baike.baidu.com/item/%E5%8C%97%E4%BA%AC%E5%9C%B0%E9%93%81/408485"
        init_soup = self.get_request_soup(url_init)
        line_url = dict()
        for line in init_soup.find_all("table")[2].find_all("a")[:-1]:
            line_url[line.get_text().strip()] = "https://baike.baidu.com{}".format(line.get("href").strip())
        return line_url
    def get_line_station(self):
        """Return {line name: [raw station names]} scraped from each line's page.

        Station tables are located by caption/header text matching the
        station_re pattern; Line 16's page has a different layout and is
        special-cased by header cells instead of captions.
        """
        station_re = re.compile(r"车站列表|车站信息|车站名称")
        station_name_re = re.compile(r"站")
        station_info_dict = defaultdict(list)
        lines_url = self.get_line_url()
        for line_name,url in lines_url.items():
            line_soup = self.get_request_soup(url)
            if line_name == "北京地铁16号线":
                for i, l in enumerate(line_soup.find_all("table")):
                    th_str = " ".join([item.get_text() for item in l.find_all("th")])
                    if th_str and station_re.findall(th_str):
                        a_slice = l.find_all("a")
                        for s in a_slice:
                            s_str = s.get_text()
                            if station_name_re.findall(s_str):
                                station_info_dict[line_name].append(s_str)
                        break
            else:
                for i, l in enumerate(line_soup.find_all("table")):
                    if l.caption and station_re.findall(l.caption.get_text()):
                        for s in l.find_all("a"):
                            s_str = s.get_text()
                            if station_name_re.findall(s_str):
                                station_info_dict[line_name].append(s_str)
                        break
                    elif l.b and station_re.findall(l.b.get_text()):
                        a_slice = l.find_all("a")
                        for s in l.find_all("a"):
                            s_str = s.get_text()
                            if station_name_re.findall(s_str):
                                station_info_dict[line_name].append(s_str)
                        break
        return station_info_dict
    def clean_station(self):
        """Clean station names: drop non-station entries (e.g. platform links)
        and normalize every name to end with '地铁站' (subway station)."""
        get_re = re.compile(r"(?P<station>[A-Za-z0-9\u4e00-\u9fa5]+).*")
        del_re = re.compile(r"站台")
        line_station = self.get_line_station()
        line_station_clean = defaultdict(list)
        for name,station_list in line_station.items():
            station_clean = []
            station_clean_temp = list(map(lambda x:get_re.match(x).group("station"),station_list))
            assert len(station_list) == len(station_clean_temp)
            for s in station_clean_temp:
                if not del_re.findall(s):
                    if s[-1] == "站":
                        s_new = s[:-1] + "地铁站"
                    else:
                        s_new = s + "地铁站"
                    line_station_clean[name].append(s_new)
        return line_station_clean
    def main(self):
        """Scrape + clean all lines, write 'line:stations,...' file, return dict."""
        with open("beijing_subway_line.txt", "w",encoding="utf-8") as beijing_subway_line_file:
            beijing_subway_line = self.clean_station()
            for title, line in beijing_subway_line.items():
                beijing_subway_line_file.write("{}:{}\n".format(title, ",".join(line)))
        return beijing_subway_line
class search_subway_line:
    """Route search over the scraped Beijing subway network.

    Loads line/station data and station coordinates from the *_true.txt
    files and supports plotting plus BFS-style route search with either a
    minimum-transfer or shortest-distance candidate ordering.
    """
    def __init__(self):
        path_subway_line = "beijing_subway_line_true.txt"
        path_subway_geo = "beijing_subway_geo_true.txt"
        self.subway_line = self.load_line_data(path_subway_line)
        self.subway_geo = self.load_geo_data(path_subway_geo)
    def load_line_data(self,path):
        """Parse 'line:stat1,stat2,...' lines into {line: [stations]}."""
        subway_line = dict()
        with open(path, "r", encoding="utf-8") as beijing_subway_line_fin:
            for line in beijing_subway_line_fin.readlines():
                name, stations = line.strip().split(":")
                if name.strip() and stations.strip():
                    subway_line[name.strip()] = [item.strip() for item in stations.strip().split(",")]
        return subway_line
    def load_geo_data(self,path):
        """Parse 'station:lon,lat' lines into {station: [lon, lat] floats}."""
        subway_geo = dict()
        with open(path, "r", encoding="utf-8") as beijing_subway_geo_fin:
            for line in beijing_subway_geo_fin.readlines():
                station, location = line.strip().split(":")
                if station.strip() and location.strip():
                    subway_geo[station.strip()] = list(map(lambda x: float(x), location.strip().split(",")))
        return subway_geo
    def get_station_connection(self):
        """Build an adjacency map {station: [neighbouring stations]} from all lines."""
        station_connection = defaultdict(list)
        for line_name, line_stat in self.subway_line.items():
            for idx, stat in enumerate(line_stat):
                if idx == 0:
                    station_connection[stat].append(line_stat[idx + 1])
                elif idx == len(line_stat) - 1:
                    station_connection[stat].append(line_stat[idx - 1])
                else:
                    station_connection[stat].extend([line_stat[idx - 1], line_stat[idx + 1]])
        for stat, stat_adja in station_connection.items():
            # dedupe neighbours (a station may appear on several lines)
            station_connection[stat] = list(set(stat_adja))
        return station_connection
    def plt_subway_lines(self):
        """Draw the network with networkx using the station coordinates."""
        station_connection = self.get_station_connection()
        # nx.draw(city_with_road, city_location, with_labels=True, node_size=30)
        nx.draw(nx.Graph(station_connection), self.subway_geo, with_labels=False, node_size=3,
                node_color='red', edge_color='blue', font_size=9)
        plt.show()
    def geo_distance(self,origin, destination):
        """Haversine great-circle distance in km between two (lat, lon) pairs.

        NOTE(review): subway_geo stores [lon, lat] but this unpacks
        (lat, lon) — the coordinates appear swapped; distances are still a
        usable heuristic but not exact. Confirm the intended ordering.
        """
        lat1, lon1 = origin
        lat2, lon2 = destination
        radius = 6371 # km
        dlat = math.radians(lat2 - lat1)
        dlon = math.radians(lon2 - lon1)
        a = (math.sin(dlat / 2) * math.sin(dlat / 2) +
             math.cos(math.radians(lat1)) * math.cos(math.radians(lat2)) *
             math.sin(dlon / 2) * math.sin(dlon / 2))
        c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
        d = radius * c
        return d
    def get_geo_distance(self,city1, city2):
        """Distance in km between two stations looked up by name."""
        return self.geo_distance(self.subway_geo[city1], self.subway_geo[city2])
    def get_line_distance(self,station_line):
        """Total distance of a path: sum of consecutive station distances."""
        d = 0
        for idx,item in enumerate(station_line[:-1]):
            d += self.get_geo_distance(station_line[idx],station_line[idx+1])
        return d
    def search(self,start,destination,sort_candidate):
        """Find a route from *start* to *destination*.

        Frontier paths are re-sorted each step by the chosen strategy
        (candidates are sorted descending because ``pathes.pop()`` takes
        the last element). Returns the first path reaching *destination*,
        or None if the network is exhausted.
        """
        station_connection = self.get_station_connection()
        pathes = [[start]]
        visited = set()
        while pathes:
            path = pathes.pop()
            frontier = path[-1]
            if frontier in visited: continue
            successors = station_connection[frontier]
            for city in successors:
                if city in visited: continue
                new_path = path + [city]
                pathes = [new_path] + pathes
                # pathes.append(new_path)
                if city == destination :
                    return new_path
            visited.add(frontier)
            if sort_candidate == "minimum_transfer_priority":
                pathes = self.minimum_transfer_priority(pathes)
            elif sort_candidate == "shortest_path_priority":
                pathes = self.shortest_path_priority(pathes)
    def shortest_path_priority(self,pathes):
        """Order candidates so the geographically shortest path is popped first."""
        return sorted(pathes, key=self.get_line_distance, reverse=True)
    def minimum_transfer_priority(self,pathes):
        """Order candidates so the path with the fewest stations is popped first."""
        return sorted(pathes,key =len,reverse=True)
    def search_by_way(self,start,destination,by_way,sort_candidate):
        """Like search(), but only accept paths containing every *by_way* station."""
        station_connection = self.get_station_connection()
        pathes = [[start]]
        visited = set()
        while pathes:
            path = pathes.pop()
            frontier = path[-1]
            if frontier in visited: continue
            successors = station_connection[frontier]
            for city in successors:
                if city in visited: continue
                new_path = path + [city]
                # pathes = [new_path] + pathes
                pathes.append(new_path)
                # count how many required via-stations the candidate already contains
                flag = sum([1 if item in new_path else 0 for item in by_way ])
                # print(flag)
                if city == destination and flag == len(by_way):
                    return new_path
            visited.add(frontier)
            if sort_candidate == "minimum_transfer_priority":
                pathes = self.minimum_transfer_priority(pathes)
            elif sort_candidate == "shortest_path_priority":
                pathes = self.shortest_path_priority(pathes)
if __name__ == '__main__':
    """Assignment: Finish the search problem"""
    # Crawl the subway line list (disabled; cached in *_true.txt files)
    # subway_line = crawl_subway_line()
    # line_stations = subway_line.main()
    # Crawl station coordinates (disabled)
    # subway_geo = crawl_subway_geo()
    # subway_geo.main(line_stations)
    # Search strategies
    search_line = search_subway_line()
    # search_line.plt_subway_lines() # plot the subway network map
    # serch_line = search_line.search("奥体中心","天安门东","shortest_path_priority")
    search_line_by_way = search_line.search_by_way("奥体中心","天安门东",["安华桥","东四"],"shortest_path_priority")
    print(search_line_by_way) # via-station search cannot avoid taking detours
    # fetch
    pass
"noreply@github.com"
] | noreply@github.com |
3b0d6a8455a25f85ab87e64585230366a5e647bc | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5744014401732608_0/Python/veluca/sol.py | bd10179fcb95ce05a54755b6ee878bca104f9dda | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 581 | py | #!/usr/bin/env pypy3
import sys
def solve():
    """Read one case 'B M' from stdin and return the answer string.

    Builds a B x B upper-triangular 0/1 adjacency matrix: nodes 0..B-2 form
    a fully-connected DAG, and M is encoded in binary by the edges going
    into the last node (``M & 2**i`` sets edge 1+i -> B-1). When M equals
    the maximum 2**(B-2), a direct edge 0 -> B-1 covers the extra path.
    Returns "IMPOSSIBLE" when M exceeds 2**(B-2).
    """
    B, M = map(int, input().split())
    if M > 2**(B-2):
        return "IMPOSSIBLE"
    sol = [['0' for i in range(B)] for i in range(B)]
    # Upper-triangular: every earlier node links to every later node < B-1.
    for i in range(B-1):
        for j in range(0, i):
            sol[j][i] = '1'
    if M == 2**(B-2):
        sol[0][B-1] = '1'
        M -= 1
    # Binary encoding of M on the edges into node B-1.
    for i in range(B-2):
        if M & (2**i):
            sol[1+i][B-1] = '1'
    return "POSSIBLE\n" + "\n".join("".join(sol[i]) for i in range(B))
# Read the number of test cases and emit "Case #k: <answer>" per case.
T = int(input())
for l in range(1, T+1):
    print("Case #%d:" % l, end=" ")
    print(solve())
| [
"alexandra1.back@gmail.com"
] | alexandra1.back@gmail.com |
2b4c4bd221c3436ef813abb5f350b2c980230d2a | 626abe2cccd31c1560234be9eb142f3c710c9980 | /server/taskbot/views.py | 1bd308887288678131e189326f598b1a06abe700 | [] | no_license | arkross/lingtelli-console3 | 7a8fd74052030cffd8ce694c9dab14b28f2ebc6c | 37f30fc5d8675d4d0878934b3b5c78317f759740 | refs/heads/master | 2022-12-10T11:22:40.994730 | 2019-06-19T04:46:34 | 2019-06-19T04:46:34 | 192,843,924 | 0 | 0 | null | 2022-12-03T14:26:40 | 2019-06-20T03:43:26 | JavaScript | UTF-8 | Python | false | false | 7,341 | py | import json
from django.shortcuts import render
from django.utils.translation import gettext as _
from django.contrib.auth.models import User
from rest_framework import viewsets
from rest_framework.authentication import TokenAuthentication
from rest_framework.permissions import IsAdminUser, IsAuthenticated
from rest_framework.response import Response
from rest_framework.decorators import action
from rest_framework.status import (
HTTP_200_OK,
HTTP_201_CREATED,
HTTP_204_NO_CONTENT,
HTTP_400_BAD_REQUEST,
HTTP_403_FORBIDDEN,
HTTP_404_NOT_FOUND
)
from chat_console_3 import utils, nlumodel
from chatbot.serilalizers import ChatbotSerializer
from chatbot.models import Chatbot, Line, Facebook, BotThirdPartyGroup
from account.models import AccountInfo
class TaskbotViewset(viewsets.ModelViewSet):
    '''Taskbot
    Only agent can create taskbot and asign to users
    '''
    authentication_classes = (TokenAuthentication,)
    permission_classes = (IsAuthenticated, IsAdminUser,)
    queryset = Chatbot.objects.filter(bot_type='TASK')
    serializer_class = ChatbotSerializer
    def get_queryset(self):
        """Restrict the TASK-bot queryset to bots owned by the requester."""
        user_obj = self.request.user
        return self.queryset.filter(user=user_obj)
    def create(self, request):
        """Create a task bot plus its NLU model and third-party bindings.

        Validates required keys, creates the Chatbot, attaches the paid
        type's third parties, Line/Facebook rows and the initial QA, then
        builds the NLU model. The bot is rolled back if NLU creation fails.
        """
        if request.body:
            user_obj = request.user
            acc_obj = AccountInfo.objects.filter(user=user_obj).first()
            bot_data = json.loads(request.body)
            bot_keys = ['robot_name', 'greeting_msg', 'failed_msg',
                        'postback_title', 'language']
            err_msg, key_status = utils.key_validator(bot_keys, bot_data)
            if not key_status:
                return Response({'errors':_(err_msg)},
                                status=HTTP_403_FORBIDDEN)
            bot_data['user_id'] = user_obj.id
            bot_data['bot_type'] = 'TASK'
            bot_obj = Chatbot.objects.create(**bot_data)
            if bot_obj:
                bot_obj.vendor_id = utils.generate_uuid(str(bot_obj.id),
                                                        bot_obj.robot_name)
                paid_type = acc_obj.paid_type
                for party in paid_type.third_party.all():
                    BotThirdPartyGroup.objects.create(chatbot=bot_obj,
                                                      third_party=party)
                bot_obj.save()
                Line.objects.create(chatbot=bot_obj)
                Facebook.objects.create(chatbot=bot_obj)
                nlumodel.initial_question_answer(bot_obj)
                # TODO: Remove this when NLU is working fine
                nlu_create_status, err_msg = nlumodel.create_model(bot_obj)
                # create_bot_obj = bot_obj
                create_bot_obj = \
                    utils.delete_create_failed_model(nlu_create_status,
                    bot_obj)
                if not create_bot_obj:
                    return Response({'errors':_('Create bot failed. '+\
                        'Cause by NLU error.' + err_msg)},
                        status=HTTP_400_BAD_REQUEST)
                res = {}
                res['id'] = create_bot_obj.id
                res['robot_name'] = create_bot_obj.robot_name
                return Response(res, status=HTTP_201_CREATED)
            return Response({'errors':_('Create bot failed')},
                            status=HTTP_400_BAD_REQUEST)
        return Response({'errors':_('No content')},
                        status=HTTP_400_BAD_REQUEST)
    def update(self, request, pk=None):
        """Update whitelisted bot fields; robot_name is required non-empty.

        'assign_user' is resolved to a User instance (may end up None when
        the id does not exist); other keys are set only when truthy.
        """
        if request.body:
            user_obj = request.user
            bot_obj = \
                Chatbot.objects.filter(id=pk, user=user_obj).first()
            if not bot_obj:
                return Response({'errors':_('Not found')},
                    status=HTTP_404_NOT_FOUND)
            update_data = json.loads(request.body)
            valid_update_key = ['robot_name', 'greeting_msg', 'failed_msg',
                                'postback_title', 'postback_activate',
                                'assign_user', 'activate']
            not_null = 'robot_name'
            if not update_data.get(not_null):
                return \
                    Response({'errors':_('Key missing or empty: robot_name')},
                        status=HTTP_403_FORBIDDEN)
            for k in valid_update_key:
                if k == 'assign_user':
                    mem_obj = \
                        User.objects.filter(id=update_data.get(k)).first()
                    bot_obj.assign_user = mem_obj
                    continue
                if update_data.get(k, None):
                    setattr(bot_obj, k, update_data.get(k))
            bot_obj.save()
            return Response({'success':_('Update succeeded')},
                status=HTTP_200_OK)
        return Response({'errors':_('No content')},
                status=HTTP_400_BAD_REQUEST)
    def destroy(self, request, pk=None):
        '''Delete taskbot
        Need to check if delete_confirm has become True first
        '''
        user_obj = request.user
        bot_obj = Chatbot.objects.filter(id=pk, user=user_obj).first()
        if not bot_obj:
            return Response({'errors':_('Not found')},
                status=HTTP_404_NOT_FOUND)
        if not bot_obj.delete_confirm:
            return Response({'errors':_('Please confirm the deletion first')},
                status=HTTP_403_FORBIDDEN)
        nlumodel.delete_model(bot_obj)
        bot_obj.delete()
        # If the row still exists the delete failed; reset the confirm flag.
        check_bot_delete = Chatbot.objects.filter(id=pk, user=user_obj).first()
        if check_bot_delete:
            check_bot_delete.delete_confirm = False
            check_bot_delete.save()
            return Response({'errors':_('Deleting bot failed')},
                status=HTTP_400_BAD_REQUEST)
        return Response(status=HTTP_204_NO_CONTENT)
    @action(methods=['put'], detail=True,
            permission_classes=[IsAuthenticated, IsAdminUser])
    def delete_confirm(self, request, pk=None):
        '''Chatbot delete confirmation
        Request format example:
        PUT:
        {
            "password": "thisisyourpassword"
        }
        '''
        if request.body:
            user_obj = request.user
            bot_obj = Chatbot.objects.filter(id=pk, user=user_obj).first()
            if not bot_obj:
                return Response({'errors':_('Not found')},
                    status=HTTP_404_NOT_FOUND)
            request_data = json.loads(request.body)
            if not request_data.get('password'):
                return Response({'errors':_('Please enter the password')},
                    status=HTTP_400_BAD_REQUEST)
            if user_obj.check_password(request_data.get('password')):
                bot_obj.delete_confirm = True
                bot_obj.save()
                return Response({'success':_('Delete confirmed')},
                    status=HTTP_200_OK)
            return Response({'errors':_('Password is not correct')},
                status=HTTP_403_FORBIDDEN)
        return Response({'errors':_('No content')},
                status=HTTP_400_BAD_REQUEST)
| [
"cosmo.hu@lingtelli.com"
] | cosmo.hu@lingtelli.com |
48e1529066f8dbe0f7cb66b9ea9b63486d1af790 | 1350e9518c6f86f38b926482524f4d841cbff9cb | /src/train.py | 0a760468cb83c24af424875a2792494e7610849f | [
"MIT"
] | permissive | popcornell/SLOCount | 1f6d66a457c8aad25c857350a9b95821ba889076 | 62d52c9141ba8def92408d54ba6a644b9df7f910 | refs/heads/master | 2020-09-12T13:38:56.819755 | 2019-11-18T12:23:22 | 2019-11-18T12:23:22 | 216,560,127 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,651 | py | import os
import math
import numpy as np
import torch
import traceback
from .assets import Binarymetrics
from .eval import validate
from .architecture import get_SLOCountNet
from torch.nn.parallel.data_parallel import DataParallel
from tqdm import tqdm
def train(args, pt_dir, chkpt_path, trainloader, devloader, writer, logger, hp, hp_str):
    """Train the SLOCountNet model, logging VAD/VOD/count metrics per epoch.

    Resumes from *chkpt_path* when given, trains for hp.train.n_epochs,
    validates after every epoch and checkpoints to *pt_dir* (always, or
    only on best validation loss when hp.train.save_best != 0).
    Returns the best validation loss, or None when an exception aborts
    training.
    """
    model = get_SLOCountNet(hp).cuda()
    # Fix: the original passed the format string and value as two print
    # arguments ("FOV: {}", value) instead of formatting the string.
    print("FOV: {}".format(model.get_fov(hp.features.n_fft)))
    model_parameters = filter(lambda p: p.requires_grad, model.parameters())
    params = sum([np.prod(p.size()) for p in model_parameters])
    print("N_parameters : {}".format(params))
    model = DataParallel(model)
    if hp.train.optimizer == 'adam':
        optimizer = torch.optim.Adam(model.parameters(),
                                     lr=hp.train.adam)
    else:
        raise Exception("%s optimizer not supported" % hp.train.optimizer)
    epoch = 0
    best_loss = np.inf
    if chkpt_path is not None:
        logger.info("Resuming from checkpoint: %s" % chkpt_path)
        checkpoint = torch.load(chkpt_path)
        model.load_state_dict(checkpoint['model'])
        optimizer.load_state_dict(checkpoint['optimizer'])
        epoch = checkpoint['step']
        # will use new given hparams.
        if hp_str != checkpoint['hp_str']:
            logger.warning("New hparams is different from checkpoint.")
    else:
        logger.info("Starting new training run")
    try:
        for epoch in range(epoch, hp.train.n_epochs):
            vad_scores = Binarymetrics.BinaryMeter() # activity scores
            vod_scores = Binarymetrics.BinaryMeter() # overlap scores
            count_scores = Binarymetrics.MultiMeter() # Countnet scores
            model.train()
            tot_loss = 0
            with tqdm(trainloader) as t:
                t.set_description("Epoch: {}".format(epoch))
                for count, batch in enumerate(trainloader):
                    features, labels = batch
                    features = features.cuda()
                    labels = labels.cuda()
                    preds = model(features)
                    loss = criterion(preds, labels)
                    optimizer.zero_grad()
                    loss.backward()
                    optimizer.step()
                    # compute proper metrics for VAD
                    loss = loss.item()
                    if loss > 1e8 or math.isnan(loss): # check if exploded
                        logger.error("Loss exploded to %.02f at step %d!" % (loss, epoch))
                        raise Exception("Loss exploded")
                    # VAD = any speaker active: sum class probabilities 1..4
                    VADpreds = torch.sum(torch.exp(preds[:, 1:5, :]), dim=1).unsqueeze(1)
                    VADlabels = torch.sum(labels[:, 1:5, :], dim=1).unsqueeze(1)
                    vad_scores.update(VADpreds, VADlabels)
                    # VOD = overlapped speech: classes 2..4 (>= 2 speakers)
                    VODpreds = torch.sum(torch.exp(preds[:, 2:5, :]), dim=1).unsqueeze(1)
                    VODlabels = torch.sum(labels[:, 2:5, :], dim=1).unsqueeze(1)
                    vod_scores.update(VODpreds, VODlabels)
                    count_scores.update(torch.argmax(torch.exp(preds), 1).unsqueeze(1),
                                        torch.argmax(labels, 1).unsqueeze(1))
                    tot_loss += loss
                    vad_fa = vad_scores.get_fa().item()
                    vad_miss = vad_scores.get_miss().item()
                    vad_precision = vad_scores.get_precision().item()
                    vad_recall = vad_scores.get_recall().item()
                    vad_matt = vad_scores.get_matt().item()
                    vad_f1 = vad_scores.get_f1().item()
                    vad_tp = vad_scores.tp.item()
                    vad_tn = vad_scores.tn.item()
                    vad_fp = vad_scores.fp.item()
                    vad_fn = vad_scores.fn.item()
                    vod_fa = vod_scores.get_fa().item()
                    vod_miss = vod_scores.get_miss().item()
                    vod_precision = vod_scores.get_precision().item()
                    vod_recall = vod_scores.get_recall().item()
                    vod_matt = vod_scores.get_matt().item()
                    vod_f1 = vod_scores.get_f1().item()
                    vod_tp = vod_scores.tp.item()
                    vod_tn = vod_scores.tn.item()
                    vod_fp = vod_scores.fp.item()
                    vod_fn = vod_scores.fn.item()
                    count_fa = count_scores.get_accuracy().item()
                    count_miss = count_scores.get_miss().item()
                    count_precision = count_scores.get_precision().item()
                    count_recall = count_scores.get_recall().item()
                    count_matt = count_scores.get_matt().item()
                    count_f1 = count_scores.get_f1().item()
                    count_tp = count_scores.get_tp().item()
                    count_tn = count_scores.get_tn().item()
                    count_fp = count_scores.get_fp().item()
                    count_fn = count_scores.get_fn().item()
                    t.set_postfix(loss=tot_loss / (count + 1), vad_miss=vad_miss, vad_fa=vad_fa, vad_prec=vad_precision,
                                  vad_recall=vad_recall, vad_matt=vad_matt, vad_f1=vad_f1,
                                  vod_miss=vod_miss, vod_fa=vod_fa, vod_prec=vod_precision,
                                  vod_recall=vod_recall, vod_matt=vod_matt, vod_f1 = vod_f1,
                                  count_miss=count_miss, count_fa=count_fa, count_prec=count_precision,
                                  count_recall=count_recall, count_matt=count_matt, count_f1= count_f1
                                  )
                    t.update()
            writer.log_metrics("train_vad", loss, vad_fa, vad_miss, vad_recall, vad_precision, vad_f1,
                               vad_matt, vad_tp, vad_tn, vad_fp, vad_fn, epoch)
            writer.log_metrics("train_vod", loss, vod_fa, vod_miss, vod_recall, vod_precision, vod_f1,
                               vod_matt,vod_tp, vod_tn, vod_fp, vod_fn, epoch)
            writer.log_metrics("train_count", loss, count_fa, count_miss, count_recall, count_precision, count_f1,
                               count_matt, count_tp, count_tn, count_fp, count_fn, epoch)
            # end epoch save model and validate it
            val_loss = validate(hp, model, devloader, writer, epoch)
            if hp.train.save_best == 0:
                save_path = os.path.join(pt_dir, 'chkpt_%d.pt' % epoch)
                torch.save({
                    'model': model.state_dict(),
                    'optimizer': optimizer.state_dict(),
                    'step': epoch,
                    'hp_str': hp_str,
                }, save_path)
                logger.info("Saved checkpoint to: %s" % save_path)
            else:
                if val_loss < best_loss: # save only when best
                    best_loss = val_loss
                    save_path = os.path.join(pt_dir, 'chkpt_%d.pt' % epoch)
                    torch.save({
                        'model': model.state_dict(),
                        'optimizer': optimizer.state_dict(),
                        'step': epoch,
                        'hp_str': hp_str,
                    }, save_path)
                    logger.info("Saved checkpoint to: %s" % save_path)
        return best_loss
    except Exception as e:
        logger.info("Exiting due to exception: %s" % e)
        traceback.print_exc()
def criterion(preds, labels):
    """Soft-target cross-entropy loss.

    preds  -- predicted log-probabilities, shape (batch, classes, ...)
    labels -- ground-truth probabilities with the same shape

    Returns the batch mean of -sum_over_classes(labels * log_probs).
    """
    per_sample = (labels * preds).sum(dim=1)
    return -per_sample.mean()
| [
"cornellsamuele@gmail.com"
] | cornellsamuele@gmail.com |
b738f11079341122d83b2768d0c53eee79af3968 | 93df2cc5bb5ecc1a36bc6c900c9d9e4850833d54 | /cbrGui4.py | 3776d2976217fa715ff6831e1f26b5b3739055e8 | [] | no_license | Ramonahydoin/cbrPethon | 699e1fa5fddc43dc8e950a8e8aedcdfb1ebf5feb | 501f53940615b204794e68cbfb403f0df9e5fe51 | refs/heads/master | 2021-05-27T14:35:14.880925 | 2014-05-28T00:24:43 | 2014-05-28T00:24:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,775 | py | import fileOps,math
from Tkinter import *
from simMetrics import *
from collections import defaultdict
# Module-level state shared by the GUI callbacks below.
products = fileOps.openCsv("products.csv")
newOrderDict = {}          # suggested/new order being built: {custKey: [[prod, amount], ...]}
customerProducts = []      # products already ordered by the selected customer
prodArr = []               # scratch list of another customer's products
amountArr = []             # scratch list of suggested amounts
orderSimilarity = {}       # {customer key: similarity score}
similarityThreshold = 0.3  # minimum category similarity for suggesting a product
#similarityThreshold = raw_input('Enter threshold:')
state = 1                  # 0 = adding a new order, 1 = suggesting an order
#Iterate over the CSV.
def main():
orders = fileOps.openCsvDebugAmount("casesDebug.csv")
forgetFrames()
global newOrderDict
newOrderDict = {}
customerProducts[:] = []
customerProductsSet = set()
orderSimilarity = {}
custID = custEntry.get()
custType = fileOps.getStockistType("c"+custID)
print custType
print '++=========++'
for key, prods in orders.iteritems():
if "c" + custID == key:
for prod in prods:
print prod[0]
#Once we've find the customer, we've got our list of products and can break the loop.
customerProducts.append(prod[0])
print "+++"+str(customerProducts)
print '----------'
for key, prods in orders.iteritems():
if custID != key:
prodArr[:] = []
customerProductsSet = set(customerProducts)
for prod in prods:
prodArr.append(prod[0])
prodSet = set(prodArr)
extraProducts = customerProductsSet ^ prodSet #Xor
extraProducts = prodSet & extraProducts
similarProducts = prodSet & customerProductsSet #interseciton
#Output
print ""
print "Checking customer " + str(key)
print " Number of extra products: " + str(len(extraProducts))
print " Number of similar products: " + str(len(similarProducts))
if(len(similarProducts)!= 0 and len(extraProducts)!=0):
temp1 = len(similarProducts)*len(similarProducts)
temp2 = len(prodSet)*len(prodSet)
sim = math.sqrt(float(temp1)/float(temp2))
orderSimilarity[key] = sim
print "-----"
summaryLabelVar.set("Summary for customer c" + custID)
summaryLabelVar2.set("Number of similar orders: " + str(len(orderSimilarity)))
mostSimOrder = 0
for key, sim in orderSimilarity.iteritems():
if sim > mostSimOrder:
mostSimOrder = sim
keyToSearch = key
summaryLabelVar3.set("The most Similar order is ["+keyToSearch+"] = " + str(mostSimOrder))
#Stockist Type sim
mostSimType = fileOps.getStockistType(keyToSearch)
stockistSim = stockist[custType][mostSimType]
#clear arrays
prodArr[:] = []
amountArr[:] = []
#print "probability of wanting the extra item:"
for key, prods in orders.iteritems():
if key == keyToSearch:
for prod in prods:
prodArr.append(prod[0])
prodSet = set(prodArr)
extraProducts = prodSet ^ customerProductsSet
extraProducts = prodSet & extraProducts
print "Extra Products " + str(extraProducts)
similarProducts = prodSet & customerProductsSet
if len(extraProducts) != 0:
if v.get() == 0:
#get amounts
for extraProd in extraProducts:
extraProdCat = fileOps.getProductCat(extraProd)
extraProdSim = 0
for simProd in similarProducts:
simProdCat = fileOps.getProductCat(simProd)
extraProdSim += category[simProdCat][extraProdCat]
extraProdSim = extraProdSim / len(similarProducts)
print "sim for :" + extraProd + ":"+ str(extraProdSim)
if extraProdSim > similarityThreshold:
newOrderDict.setdefault("c"+custID, [])
for prod in prods:
if prod[0] == extraProd:
amountArr.append(int(float(prod[1])*stockistSim))
newOrderDict["c"+custID].append([extraProd,int(float(prod[1])*stockistSim)])
#print "_______________"
#print newOrderDict
else:
for extraProd in extraProducts:
newOrderDict.setdefault("c"+custID, [])
for prod in prods:
if prod[0] == extraProd:
amountArr.append(int(float(prod[1])*stockistSim))
newOrderDict["c"+custID].append([extraProd,int(float(prod[1])*stockistSim)])
print "========="
print str(amountArr)
suggestLabelVar.set("Suggesting new order of:"+str(newOrderDict))
suggestLabelVar2.set("Order Ok ?:")
summaryFrame.pack()
ordOptionsFrame.pack()
# suggest the sim products
# save to new order
def forgetFrames():
    """Hide the summary, order-options and edit frames."""
    for frame in (summaryFrame, ordOptionsFrame, editFrame):
        frame.pack_forget()
def writeOrd():
    """Persist the current newOrderDict as a new order record.

    In add-new-order mode (state 0) the stockist type comes from the
    entry field; in suggestion mode it is looked up from the order key.
    Afterwards the relevant view is re-shown.
    """
    if state == 0:
        customerType = custTypeVar.get()
    else:
        # Grab the (single) customer key of the suggested order.
        for key in newOrderDict:
            custKey = key
        customerType = fileOps.getStockistType(custKey)
    print "Writing new order to file"
    print "-------------------------"
    print newOrderDict
    # Next free order id = highest existing id + 1.
    temp = int(fileOps.getNextOrderID())
    temp = str(temp+1)
    fileOps.writeRecordQuant(newOrderDict,temp,customerType)
    summaryLabelVar.set("New Order Saved")
    newCustProductsVar.set("")
    if state == 0:
        addOrder()
    if state == 1:
        showMain()
def addProd():
    """Add the product typed as 'name,amount' in eVar to the pending order.

    The entry is only appended when no product with the same name is
    already present; the GUI labels are refreshed afterwards.
    """
    global newOrderDict
    print str(newOrderDict)
    present = False
    updatedOrder = {}
    add = eVar.get()
    splitted = add.split(",")  # [product name, amount]
    for key, values in newOrderDict.items():
        updatedOrder.setdefault(key, [])
        for value in values:
            updatedOrder[key].append(value)
            if value[0] == splitted[0]:
                present = True
        if present != True:
            updatedOrder[key].append(splitted)
    newOrderDict = updatedOrder
    suggestLabelVar.set("Suggesting new order of:"+str(newOrderDict))
    newCustProductsVar.set(str(newOrderDict))
    print newOrderDict
def changeProd():
    """Replace an existing order entry with the 'name,amount' typed in eVar.

    Entries whose product name matches are substituted; all others are
    kept unchanged.
    """
    global newOrderDict
    print "newOrder"+str(newOrderDict)
    remove = eVar.get()
    splitted = remove.split(",")  # [product name, new amount]
    updatedOrder = {}
    for key, values in newOrderDict.items():
        updatedOrder.setdefault(key, [])
        for value in values:
            if value[0] == splitted[0]:
                updatedOrder[key].append(splitted)
            else:
                updatedOrder[key].append(value)
    newOrderDict = updatedOrder
    suggestLabelVar.set("Suggesting new order of:"+str(newOrderDict))
    newCustProductsVar.set(str(newOrderDict))
    print "-----"
def removeProd():
    """Remove the product whose name is typed in eVar from the pending order."""
    global newOrderDict
    print "newOrder"+str(newOrderDict)
    remove = eVar.get()
    updatedOrder = {}
    for key, values in newOrderDict.items():
        updatedOrder.setdefault(key, [])
        for value in values:
            # keep every entry except the one being removed
            if value[0] != remove:
                print value
                updatedOrder[key].append(value)
    newOrderDict = updatedOrder
    suggestLabelVar.set("Suggesting new order of:"+str(newOrderDict))
    newCustProductsVar.set(str(newOrderDict))
    print "-----"
def editOrd():
    """Show the order-editing frame with its prompt label."""
    removeLabelVar.set("Enter products to edit")
    editFrame.pack()
def setSim():
    """Apply the user-entered minimum-similarity threshold.

    Fix: the original assigned a function-local variable, so the
    module-level ``similarityThreshold`` read by main() never changed,
    and the value stayed a string. Declare it global and coerce to
    float; bad input leaves the previous threshold in place.
    """
    global similarityThreshold
    try:
        similarityThreshold = float(similarityOptionVar.get())
    except ValueError:
        # keep the previous threshold when the entry is not a number
        return
    print("sim changed " + str(similarityThreshold))
def getNextCustID():
    """Return the next free customer key ('c<N>') from casesDebug.csv.

    Scans all existing keys (format 'c<number>') and returns the highest
    number plus one, with the 'c' prefix re-attached.
    """
    orders = fileOps.openCsvDebug("casesDebug.csv")
    temp = 0
    for key, prods in orders.iteritems():
        if int(key[1:]) > temp:
            temp = int(key[1:])
    temp = temp + 1
    temp = 'c' + str(temp)
    return temp
def lookUpProduct():
    """Look up the product id typed in prodLvar and fill the detail labels.

    NOTE(review): no handling for unknown ids — fileOps.getProductInfo is
    assumed to always return a dict with Name/Category/Price/Size keys;
    confirm.
    """
    productInfo = fileOps.getProductInfo(prodLvar.get())
    print productInfo
    prodName.set("Name:"+productInfo['Name'])
    prodCat.set("Category:"+productInfo['Category'])
    prodPrice.set("Price:"+productInfo['Price'])
    prodSize.set("Size:"+productInfo['Size'])
#states
def addOrder():
    """Switch the GUI to add-new-order mode (state 0).

    Hides the suggestion/config frames, shows the new-order and product
    lookup frames, and resets newOrderDict with the next free customer key.
    """
    global newOrderDict
    global state
    state = 0
    newOrderFrame.pack()
    configureFrame.pack_forget()
    radioFrame.pack_forget()
    mainFrame.pack_forget()
    summaryFrame.pack_forget()
    ordOptionsFrame.pack_forget()
    editFrame.pack_forget()
    productLookupFrame.pack(side=BOTTOM)
    productLookupResults.pack(side=BOTTOM)
    newCustIdVar.set("New Customer Order :"+getNextCustID())
    key = getNextCustID()
    newOrderDict = {}
    newOrderDict.setdefault(key, [])
    print str(newOrderDict)
def showOptions():
    """Switch the GUI to the configuration view."""
    print(v.get())
    hidden = (newOrderFrame, mainFrame, summaryFrame, ordOptionsFrame,
              editFrame, productLookupFrame, productLookupResults)
    for frame in hidden:
        frame.pack_forget()
    configureFrame.pack()
    radioFrame.pack()
def showMain():
    """Switch the GUI to the main suggestion view (state 1)."""
    global state
    state = 1
    hidden = (newOrderFrame, configureFrame, radioFrame,
              summaryFrame, ordOptionsFrame, editFrame)
    for frame in hidden:
        frame.pack_forget()
    mainFrame.pack()
    productLookupFrame.pack(side=BOTTOM)
    productLookupResults.pack(side=BOTTOM)
#Setup Gui
cbrGui = Tk()
cbrGui.title("Python CBR")
cbrGui.geometry("500x500")
#Setup Menu
menubar = Menu(cbrGui)
cbrGui.config(menu=menubar)
optionsMenu=Menu(menubar,tearoff=0)
menubar.add_cascade(label="Options", menu = optionsMenu)
optionsMenu.add_command(label = "Add new order",command=addOrder)
optionsMenu.add_command(label = "Suggest new order",command=showMain)
optionsMenu.add_command(label = "View orders",command=forgetFrames)
optionsMenu.add_separator()
optionsMenu.add_command(label = "Configure",command=showOptions)
#Add order frame
newCustIdVar = StringVar() #get the last var
newCustProductsVar = StringVar()
custTypeVar = StringVar()
#Configure
v = IntVar()
v1 = IntVar()
similarityOptionVar = StringVar()
#Main Var
custEntry = StringVar()
summaryLabelVar = StringVar()
summaryLabelVar2 = StringVar()
summaryLabelVar3 = StringVar()
suggestLabelVar = StringVar()
suggestLabelVar2 = StringVar()
orderOk = StringVar()
output = StringVar()
#Edit Var
removeLabelVar = StringVar()
eVar = StringVar()
#productLookup
prodLvar = StringVar()
prodName = StringVar()
prodCat = StringVar()
prodPrice = StringVar()
prodSize = StringVar()
#Frames
#------
#Add order frame
# NOTE(review): Widget(...).pack() returns None, so the names assigned
# below hold None, not the widgets — intentional only if the widgets are
# never referenced again; confirm.
newOrderFrame = Frame(cbrGui)
newOrderFrameLabel = Label(newOrderFrame, textvariable = newCustIdVar).pack()
newOrderFrameLabel1 = Label(newOrderFrame, text="Products:").pack()
newOrderFrameLabel2 = Label(newOrderFrame, textvariable = newCustProductsVar).pack()
newOrderFrameLabel1 = Label(newOrderFrame, text="Stockist Type:").pack(side=LEFT)
addOrderProd = Entry(newOrderFrame,textvariable = custTypeVar).pack(side=LEFT)
newOrderFrameLabel1 = Label(newOrderFrame, text="Products to add:").pack(side=LEFT)
addOrderProd = Entry(newOrderFrame,textvariable = eVar).pack(side=LEFT)
addOrderBut = Button(newOrderFrame,text = "Remove", command = removeProd).pack(side=LEFT)
addOrderBut2 = Button(newOrderFrame,text = "Add", command = addProd).pack(side=LEFT)
addOrderBut3 = Button(newOrderFrame,text ='Ok',command = writeOrd).pack(side=BOTTOM)
#Configure Frame
configureFrame = Frame(cbrGui)
similarityLabel = Label(configureFrame, text = "Set the minimum similarity (Def = 0.3): ").pack(side=LEFT)
similarityOption = Entry(configureFrame,textvariable = similarityOptionVar).pack(side=LEFT)
setSimButton = Button(configureFrame,text = "Set", command = setSim).pack(side=LEFT)
radioFrame = Frame(cbrGui)
test4=Radiobutton(radioFrame, text="Quantitive", variable=v, value=1).pack(anchor=W)
test5=Radiobutton(radioFrame, text="Qualitative + Quantitive", variable=v, value=0).pack(anchor=W)
#Main Frame
mainFrame = Frame(cbrGui)
mainFrame.pack()
custIdLab = Label(mainFrame,text = 'Enter Customer ID:').pack()
custE = Entry(mainFrame,textvariable = custEntry).pack()
custIdBut = Button(mainFrame,text ='Go',command = main).pack()
output = Text(mainFrame)
summaryLabel = Label(mainFrame, textvariable = summaryLabelVar).pack() #mainframe to show finished
#Summary Frame
summaryFrame = Frame(cbrGui)
summaryLabel2 = Label(summaryFrame, textvariable = summaryLabelVar2).pack()
summaryLabel3 = Label(summaryFrame, textvariable = summaryLabelVar3).pack()
suggestLabel = Label(summaryFrame, textvariable = suggestLabelVar).pack()
suggestLabel2 = Label(summaryFrame, textvariable = suggestLabelVar2).pack()
#Options Frame
ordOptionsFrame = Frame(cbrGui)
ordEditBut = Button(ordOptionsFrame,text ='Edit',command = editOrd).pack(side=LEFT)
ordOkBut = Button(ordOptionsFrame,text ='Ok',command = writeOrd).pack(side=LEFT)
#Edit Frame
editFrame = Frame(cbrGui)
removeLabel = Label(editFrame, textvariable = removeLabelVar).pack()
removeE = Entry(editFrame,textvariable = eVar).pack(side=LEFT)
removeButton = Button(editFrame,text = "Remove", command = removeProd).pack(side=LEFT)
removeButton = Button(editFrame,text = "Change Quantity", command = changeProd).pack(side=LEFT)
addButton = Button(editFrame,text = "Add", command = addProd).pack(side=LEFT)
#ProductLookup
productLookupFrame = Frame(cbrGui)
productLookupFrame.pack(side=BOTTOM)
productLookupLabel = Label(productLookupFrame, text="Enter product to lookup").pack()
productLookupE = Entry(productLookupFrame,textvariable = prodLvar).pack(side=LEFT)
productLookupButton = Button(productLookupFrame,text = "Lookup", command = lookUpProduct).pack(side=LEFT)
#lookupResults
productLookupResults = Frame(cbrGui)
productLookupResults.pack(side=BOTTOM)
productLookupName = Label(productLookupResults, textvariable = prodName).pack()
productLookupCat = Label(productLookupResults, textvariable = prodCat).pack()
productLookupPrice = Label(productLookupResults, textvariable = prodPrice).pack()
productLookupSize= Label(productLookupResults, textvariable = prodSize).pack()
cbrGui.mainloop()
| [
"jtaylor1205@hotmail.co.uk"
] | jtaylor1205@hotmail.co.uk |
38d9677de9ff7235b683e4a76b6e63e57c6f331d | 6ec824faaf74373f659f6066855780ae875b6e43 | /cs234/assignment3/tests/test_basic.py | 54ce6d641eb803b11168e96a2e89a41d98d17017 | [] | no_license | Felipebpm/stanford | 3a0c5c29010bf8e2fe7ef177eac13f8fdd010e69 | 50063ed6cb81633b9f7eaba815f445c4c5581550 | refs/heads/master | 2023-04-06T04:33:14.984324 | 2020-02-25T23:31:00 | 2020-02-25T23:31:00 | 232,892,974 | 0 | 0 | null | 2023-03-25T00:07:34 | 2020-01-09T19:54:32 | JavaScript | UTF-8 | Python | false | false | 6,605 | py | import unittest
import code
from code.baseline_network import BaselineNetwork
from code.policy_network import PG, build_mlp
from code.config import get_config
import gym
import tensorflow as tf
import numpy as np
import builtins
# Suppress unnecessary logging
# gym.logging.disable(gym.logging.FATAL)
builtins.config = None
class TestBasic(unittest.TestCase):
def setUp(self):
tf.reset_default_graph()
self.policy_model = None
builtins.config = None
def setUpEnv(self, env_name):
config = get_config(env_name, True)
env = gym.make(config.env_name)
builtins.config = config
self.policy_model = PG(env, config, r_seed=15)
self.baseline_network = BaselineNetwork(env, config, self.policy_model.observation_placeholder)
###### Tests for add_placeholders_op ######
def test_observation_placeholder_dtype(self):
self.setUpEnv('cartpole')
self.assertEqual(self.policy_model.observation_placeholder.dtype, tf.float32)
def test_observation_placeholder_shape(self):
self.setUpEnv('cartpole')
self.assertEqual(self.policy_model.observation_placeholder.shape.as_list(), [None, 4])
def test_discrete_action_placeholder_dtype(self):
self.setUpEnv('cartpole')
self.assertTrue(self.policy_model.action_placeholder.dtype
in (tf.uint8, tf.int32, tf.uint32, tf.int64, tf.uint64))
def test_continuous_action_placeholder_dtype(self):
self.setUpEnv('pendulum')
self.assertEqual(self.policy_model.action_placeholder.dtype, tf.float32)
def test_pendulum_continuous_action_placeholder_shape(self):
self.setUpEnv('pendulum')
self.assertEqual(self.policy_model.action_placeholder.shape.as_list(), [None, 1])
def test_cheetah_continuous_action_placeholder_shape(self):
self.setUpEnv('cheetah')
self.assertEqual(self.policy_model.action_placeholder.shape.as_list(), [None, 6])
def test_advantage_placeholder_dtype(self):
self.setUpEnv('cartpole')
self.assertEqual(self.policy_model.advantage_placeholder.dtype, tf.float32)
def test_advantage_placeholder_shape(self):
self.setUpEnv('cartpole')
#self.assertEqual(self.policy_model.advantage_placeholder.shape.as_list(), [None])
###### Tests for get_returns ######
def test_get_returns_zero(self):
self.setUpEnv('cartpole')
paths = [{'reward': np.zeros(11)}]
returns = self.policy_model.get_returns(paths)
expected = np.zeros(11)
self.assertEqual(returns.shape, (11,))
diff = np.sum((returns - expected)**2)
self.assertAlmostEqual(diff, 0, delta=0.01)
###### Tests for build_policy_network_op ######
def test_policy_network_cartpole_sampled_action(self):
self.setUpEnv('cartpole')
self.assertEqual(self.policy_model.sampled_action.shape.as_list(), [None])
def test_policy_network_cartpole_logprob(self):
self.setUpEnv('cartpole')
self.assertEqual(self.policy_model.logprob.shape.as_list(), [None])
def test_policy_network_cartpole_logprob_value(self):
self.setUpEnv('cartpole')
tf.set_random_seed(234)
self.policy_model.initialize()
np.random.seed(234)
ob = np.random.rand(11, 4)
ac = np.random.randint(2, size=[11])
values = self.policy_model.sess.run(
self.policy_model.logprob,
feed_dict={self.policy_model.observation_placeholder: ob,
self.policy_model.action_placeholder: ac})
self.assertTrue((values < 0).all())
def test_policy_network_pendulum_sampled_action(self):
self.setUpEnv('pendulum')
self.assertEqual(self.policy_model.sampled_action.shape.as_list(), [None, 1])
def test_policy_network_pendulum_logprob(self):
self.setUpEnv('pendulum')
self.assertEqual(self.policy_model.logprob.shape.as_list(), [None])
def test_policy_network_cheetah_sampled_action(self):
self.setUpEnv('cheetah')
self.assertEqual(self.policy_model.sampled_action.shape.as_list(), [None, 6])
def test_policy_network_cheetah_logprob(self):
self.setUpEnv('cheetah')
self.assertEqual(self.policy_model.logprob.shape.as_list(), [None])
###### Other tests ######
def test_loss_op(self):
self.setUpEnv('cartpole')
self.policy_model.logprob = tf.placeholder(shape=[None], dtype=tf.float32)
self.policy_model.advantage_placeholder = tf.placeholder(shape=[None], dtype=tf.float32)
self.policy_model.add_loss_op()
logprob = np.random.randn(10)
adv = np.random.randn(10)
with tf.Session() as sess:
res = sess.run(self.policy_model.loss, feed_dict={
self.policy_model.logprob: logprob,
self.policy_model.advantage_placeholder: adv,
})
self.assertAlmostEqual(res, -np.mean(adv*logprob), delta=0.001)
def test_optimizer_op(self):
self.setUpEnv('cartpole')
self.policy_model.lr = 0.01
self.policy_model.loss = tf.square(tf.get_variable(name='loss', shape=[], dtype=tf.float32))
self.policy_model.add_optimizer_op()
self.policy_model.initialize()
for i in range(1000):
self.policy_model.sess.run(self.policy_model.train_op)
loss = self.policy_model.sess.run(self.policy_model.loss)
self.assertAlmostEqual(loss, 0.0, delta=0.001)
def test_baseline_op(self):
tf.set_random_seed(234)
self.setUpEnv('cartpole')
# make sure we can overfit!
np.random.seed(234)
returns = np.random.randn(5)
observations = np.random.randn(5,4)
self.policy_model.initialize()
for i in range(3000):
self.policy_model.baseline_network.update_baseline(returns, observations)
res = self.policy_model.sess.run(self.policy_model.baseline_network.baseline, feed_dict={
self.policy_model.baseline_network.observation_placeholder: observations
})
self.assertAlmostEqual(np.sum(res), np.sum(returns), delta=0.05)
def test_adv_basic(self):
self.setUpEnv('cartpole')
returns = np.random.randn(5)
observations = np.random.randn(5,4)
self.policy_model.config.use_baseline = False
self.policy_model.config.normalize_advantage = False
res = self.policy_model.calculate_advantage(returns, observations)
self.assertAlmostEqual(np.sum(res), np.sum(returns), delta=0.001)
| [
"fbommfim@gmail.com"
] | fbommfim@gmail.com |
da9eb822f0f6cf4952572b117f55847e834f20fe | 20c3dd44dd972ee30b497f56cad323149a9d858e | /app/migrations/0001_initial.py | ce6a4c502b9182a080d8e7daa5aab0e7da409481 | [] | no_license | DiegoIDSUP/EcommerceBack | 7e05b0620edfb9d255db423a048c1f17b3eb459c | e13866cedb5242c115032cdb9ad5abb89a2ce1f3 | refs/heads/master | 2023-01-31T16:38:06.703807 | 2020-12-07T14:36:05 | 2020-12-07T14:36:05 | 319,347,224 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,385 | py | # Generated by Django 3.1.1 on 2020-12-04 10:40
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Domicilio',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('calle', models.CharField(max_length=200)),
('num', models.CharField(max_length=200)),
('numInt', models.CharField(max_length=200)),
('referencia', models.CharField(max_length=200)),
('colonia', models.CharField(max_length=200)),
('cp', models.CharField(max_length=200)),
('ciudad', models.CharField(max_length=200)),
('estado', models.CharField(max_length=200)),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
options={
'db_table': 'domicilios',
},
),
migrations.CreateModel(
name='Producto',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombre', models.CharField(max_length=200)),
('descripcion', models.CharField(max_length=200)),
('stock', models.IntegerField()),
('precio', models.FloatField()),
('show', models.BooleanField(blank=True)),
('imagen', models.ImageField(upload_to='Productos')),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
options={
'db_table': 'productos',
},
),
migrations.CreateModel(
name='Sessionslog',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('email', models.CharField(max_length=200)),
('action', models.CharField(max_length=200)),
('intentos', models.IntegerField()),
('release_date', models.DateTimeField()),
],
options={
'db_table': 'sessionslog',
},
),
migrations.CreateModel(
name='Vendedores',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('verified', models.BooleanField(blank=True)),
('nombre', models.CharField(max_length=200)),
('ciudad', models.CharField(max_length=200)),
('estado', models.CharField(max_length=200)),
('rfc', models.CharField(blank=True, max_length=200)),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
options={
'db_table': 'vendedores',
},
),
migrations.CreateModel(
name='Reporte',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('comentario', models.CharField(max_length=200)),
('producto', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='app.producto')),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
options={
'db_table': 'reportes',
},
),
migrations.CreateModel(
name='Pedido',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('stock', models.IntegerField()),
('release_date', models.DateTimeField()),
('domicilio', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='app.domicilio')),
('producto', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='app.producto')),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
options={
'db_table': 'pedidos',
},
),
migrations.CreateModel(
name='Comentario',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('comentario', models.CharField(max_length=200)),
('producto', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='app.producto')),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
options={
'db_table': 'comentarios',
},
),
]
| [
"153241@ids.upchiapas.edu.mx"
] | 153241@ids.upchiapas.edu.mx |
86a9f5d001803a950a30cb45d165d8bdf3b2c778 | 4b5cbd729da144a8160819c7f1462d8fd2669278 | /next_higher_number.py | a943da59d5f7197709d098ef599da91e569457cf | [
"MIT"
] | permissive | DavidEnciso/tests | 0f15fc3115bb4374745a247479cbbb728a92e328 | 414c0dd2dec8ace46dc50f07eccf8ebde0ee6be0 | refs/heads/master | 2021-01-01T18:01:04.991360 | 2017-07-24T19:36:52 | 2017-07-24T19:36:52 | 98,227,446 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,097 | py | a='12345'
def flip(s, p1, p2):
    """Return a copy of string *s* with the characters at p1 and p2 swapped.

    The original built an index->char dict and relied on dict insertion
    order to reassemble the string; a plain list swap is the idiomatic
    (and order-guaranteed) way to do the same thing.
    """
    chars = list(s)
    chars[p1], chars[p2] = chars[p2], chars[p1]
    return ''.join(chars)
def nx(r):
    """Search for a higher number made from the digits of string *r*.

    Runs a backward swap scan followed by a forward swap scan (see scan)
    and returns the improved digit string if the forward scan reports
    success, otherwise the original string.

    NOTE(review): scan's only return statement hard-codes its flag to
    False, so the `if st` branch can never fire and this always returns
    o -- confirm against the intended next-permutation algorithm.
    """
    #flip first
    o = r
    l = len(r)-1
    print("back")
    m, st = scan(r, o, l, di=-1)
    #print(m, st)
    #if st:
    #return m
    print("fwd")
    n, st = scan(m, o, l, di=1)
    #print(n, st)
    if st:
        return n
    return o
def scan(r, o, l, di=1):
    """Repeatedly swap each digit with the last one, keeping improvements.

    r  -- current digit string being mutated
    o  -- original string used as the baseline to beat
    l  -- index of the last digit (len(r) - 1)
    di -- scan direction: 1 walks indices left-to-right, -1 right-to-left

    Returns (best string found, False).

    NOTE(review): the success flag is hard-coded to False, and both
    branches of the if/else perform the identical flip(r, i, l) -- the
    direction only changes iteration order, so the branch looks like
    dead code left over from debugging.
    """
    bgn, end = (l, 0) if di == -1 else (0, l)
    #print(list(range(bgn, end, di)))
    prv = o
    for j in list(range(l)):
        for i in list(range(bgn, end, di)):
            #print(i)
            #print(i, i+di)
            if di == 1:
                sd = flip(r, i, l)
            else:
                sd = flip(r, i, l)
            #print(sd)
            if int(sd) > int(prv):
                #return sd, True
                prv = sd
                print(sd)
                r = sd
    return r, False
print(a)
#print(flip(a, len(a)-1, len(a)-2))
print(nx(a))
| [
"noreply@github.com"
] | noreply@github.com |
a2d87a579f235a85e00ad7c8ef8cd4e1a5debb94 | 240c851de3b7d6a2901795c7f5416f36f8dba693 | /config.py | f1d9d36c0716835a0f807e45d0bff36576245b1c | [] | no_license | gdao-research/2R_Manipulator_Learner | 4c27c39cd6507409de97099b1b5ee370c34a930f | 352451560bcfda757926d1cc52c2b5b0746f96fc | refs/heads/master | 2020-05-04T10:41:09.074975 | 2019-05-03T18:47:07 | 2019-05-03T18:47:07 | 179,093,108 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 278 | py | from collections import namedtuple
import numpy as np
# Central hyper-parameter table; edit values here and read them via CONFIG.
_CONFIG = {
    'PI': np.pi,
    'tau': 1e-3,
    'clip_norm': None,
    'critic_l2_reg': 0,
    'discount_factor': 0.9
}

# Immutable, attribute-style view of _CONFIG (e.g. CONFIG.tau).
Params = namedtuple(typename='Params', field_names=list(_CONFIG.keys()))
CONFIG = Params(**_CONFIG)
| [
"gdao.research@gmail.com"
] | gdao.research@gmail.com |
8939aa5cea12440890c866f83eaff3e3468a5fb9 | 9c79c683196e0d42b41a831a6e37bb520a75e269 | /bin/read_csv.py | cd747d2de7527220c0d51ccbc09642e1e551c460 | [] | no_license | YutingYao/crater_lakes | 7714cf64cd3649bd93b2c3cafcc8c73b4a3ff05b | b57ac0c18ce37b0f71f59fc8d254fa12890090ee | refs/heads/master | 2023-05-14T08:45:02.290369 | 2017-05-13T00:55:48 | 2017-05-13T00:55:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 710 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
read_csv.py
Created on Fri Feb 10 08:48:07 2017
@author: sam
"""
import os
import pandas as pd
import numpy as np
import datetime
def read_csv(target):
    """Load <target>.csv from the atmcorr results tree into plain arrays.

    Side effect: chdir()s into the target's results directory (kept from
    the original implementation in case later relative I/O relies on it).

    Returns a dict with clipped r/g/b reflectances, the brightness-
    temperature difference, raw timestamps, parsed datetimes and the
    satellite ids -- or None (after printing a message) when the
    directory or file cannot be read.
    """
    try:
        os.chdir('/home/sam/git/crater_lakes/atmcorr/results/'+target)
        df = pd.read_csv(target+'.csv')
        return {
            'r': np.clip(df.red.values, 0, 1),
            'g': np.clip(df.green.values, 0, 1),
            'b': np.clip(df.blue.values, 0, 1),
            'dT': df.dBT.values,
            'timestamps': df.timestamp.values,
            'datetimes': [datetime.datetime.fromtimestamp(t) for t in df.timestamp.values],
            'satellites': df.satellite.values
        }
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; behavior for ordinary errors is unchanged.
        print('File IO error for :'+target)
"samsammurphy@gmail.com"
] | samsammurphy@gmail.com |
57c5c07684f1156339e34ede349eefac95e8db9e | 88ed4cedb817f94a7ae907e6a671befc89de4bdc | /nbr/views.py | 24e59a5f6cecf6850bc1e7b1dfff3481307c80ed | [
"MIT"
] | permissive | johnmwangi/Nbr_Hood | 0f2ac0a3177b6164c06d29694774a5743a68a6c9 | 7a4be73db001560a2a17f37166a544381fe07581 | refs/heads/master | 2022-12-03T07:27:40.465021 | 2019-06-03T07:07:29 | 2019-06-03T07:07:29 | 189,603,564 | 0 | 0 | MIT | 2022-11-22T02:56:59 | 2019-05-31T14:02:48 | Python | UTF-8 | Python | false | false | 7,355 | py | from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponse, Http404, HttpResponseRedirect, JsonResponse
from django.contrib.auth.decorators import login_required
from .models import *
from .forms import *
from django.contrib.auth import login, authenticate
from django.contrib.sites.shortcuts import get_current_site
from django.utils.encoding import force_bytes, force_text
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.template.loader import render_to_string
# from .tokens import account_activation_token
from django.contrib.auth.models import User
from django.core.mail import EmailMessage
from django.contrib import messages
# Create your views here.
def home(request):
if request.user.is_authenticated:
if Join.objects.filter(user_id=request.user).exists():
hood = Hood.objects.get(pk=request.user.join.hood_id.id)
posts = Posts.objects.filter(hood=request.user.join.hood_id.id)
businesses = Business.objects.filter(
hood=request.user.join.hood_id.id)
return render(request, 'hoods/hood.html', {"hood": hood, "businesses": businesses, "posts": posts})
else:
neighbourhoods = Hood.objects.all()
return render(request, 'index.html', {"neighbourhoods": neighbourhoods})
else:
neighbourhoods = Hood.objects.all()
return render(request, 'index.html', {"neighbourhoods": neighbourhoods})
def new_business(request):
current_user = request.user
if request.method == 'POST':
form = BusinessForm(request.POST, request.FILES)
if form.is_valid():
business = form.save(commit=False)
business.user = current_user
business.hood = request.user.join.hood_id
business.save()
return redirect('home')
else:
form = BusinessForm()
return render(request, 'business.html', {"form": form})
@login_required(login_url='/accounts/login/')
def profile(request):
profile = Profile.objects.get(user=request.user)
hood = Hood.objects.filter(user=request.user).all()
business = Business.objects.filter(user=request.user).all()
return render(request, 'profiles/profile.html', {"profile": profile, "hoods": hood, "business": business})
@login_required(login_url='/accounts/login/')
def edit_profile(request):
current_user = request.user
profile = Profile.objects.get(user=request.user)
if request.method == 'POST':
form = EditProfileForm(request.POST, request.FILES, instance=profile)
if form.is_valid():
profile = form.save(commit=False)
profile.user = current_user
profile.email = current_user.email
profile.save()
return redirect('profile')
else:
form = EditProfileForm(instance=profile)
return render(request, 'profiles/edit_profile.html', {"form": form})
def hoods(request):
hood = Hood.objects.filter(user=request.user)
return render(request, 'hood/hood.html', {"hood": hood})
@login_required(login_url='/accounts/login/')
def join(request, hoodId):
hood = Hood.objects.get(pk=hoodId)
if Join.objects.filter(user_id=request.user).exists():
Join.objects.filter(user_id=request.user).update(hood_id=hood)
else:
Join(user_id=request.user, hood_id=hood).save()
messages.success(
request, 'Success! You have succesfully joined this Neighbourhood ')
return redirect('home')
@login_required(login_url='/accounts/login/')
def exitHood(request, hoodId):
if Join.objects.filter(user_id=request.user).exists():
Join.objects.get(user_id=request.user).delete()
messages.error(
request, 'You have succesfully exited this Neighbourhood.')
return redirect('home')
def search(request):
    """Search hoods by the ?search= query string.

    Renders the matching hoods, or a "nothing searched" message when the
    field is empty.

    NOTE(review): the two branches render different template paths
    ('hood/search.html' vs 'hoods/search.html') -- one is almost
    certainly a typo; confirm which template directory exists.
    """
    if request.GET['search']:
        hood_search = request.GET.get("search")
        hood = Hood.search_hood(hood_search)
        message = f"{hood_search}"
        return render(request, 'hood/search.html', {"message": message, "hood": hood})
    else:
        message = "You Haven't searched for any hood"
        return render(request, 'hoods/search.html', {"message": message})
@login_required(login_url='/accounts/login/')
def create_post(request):
if Join.objects.filter(user_id=request.user).exists():
if request.method == 'POST':
form = PostForm(request.POST)
if form.is_valid():
post = form.save(commit=False)
post.posted_by = request.user
post.hood = request.user.join.hood_id
post.save()
messages.success(
request, 'You have succesfully created a Post')
return redirect('home')
else:
form = PostForm()
return render(request, 'posts/createpost.html', {"form": form})
@login_required(login_url='/accounts/login/')
def add_comment(request, pk):
post = get_object_or_404(Post, pk=pk)
current_user = request.user
if request.method == 'POST':
form = CommentForm(request.POST)
if form.is_valid():
comment = form.save(commit=False)
comment.post = post
comment.poster = current_user
comment.save()
return redirect('home')
else:
form = CommentForm()
return render(request, 'comment.html', {"user": current_user, "comment_form": form})
def delete_post(request, postId):
Posts.objects.filter(pk=postId).delete()
messages.error(request, 'Succesfully Deleted a Post')
return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
@login_required(login_url='/accounts/login/')
def create_hood(request):
current_user = request.user
if request.method == 'POST':
form = CreateHoodForm(request.POST, request.FILES)
if form.is_valid():
hood = form.save(commit=False)
hood.user = current_user
hood.save()
messages.success(
request, 'You Have succesfully created a hood.Now proceed and join a hood')
return redirect('home')
else:
form = CreateHoodForm()
return render(request, 'hoods/create_hood.html', {"form": form})
@login_required(login_url='/accounts/login/')
def update_hood(request, id):
current_user = request.user
hood = get_object_or_404(Hood, pk=id)
if request.method == 'POST':
form = CreateHoodForm(request.POST, request.FILES, instance=hood)
if form.is_valid():
hood = form.save(commit=False)
hood.user = current_user
hood.save()
messages.success(
request, 'You Have succesfully Edited Hood Details.')
return redirect('home')
else:
form = CreateHoodForm(instance=hood)
return render(request, 'hood/create_hood.html', {"form": form})
@login_required(login_url='/accounts/login/')
def delete_hood(request, id):
Hood.objects.filter(user=request.user, pk=id).delete()
messages.error(request, 'Succesfully deleted your hood')
return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
def occupants(request, id):
    """Count the Join rows for the given hood, then redirect home.

    Bug fix: the original filtered on ``id=hood_id`` where ``hood_id``
    was never defined (a guaranteed NameError at runtime); the view
    parameter is ``id`` and Join's FK field is ``hood_id``, so filter on
    ``hood_id=id`` instead.
    """
    # NOTE(review): the count is computed but never used in the response;
    # confirm whether it was meant to be passed to a template.
    count = Join.objects.filter(hood_id=id).count()
    return redirect('home')
| [
"jonesmwas356@gmail.com"
] | jonesmwas356@gmail.com |
51b714d6e10f7256f6b381c9426b24b34e89579b | 9214105eb9e1622f49093b48400955e8cda7bee8 | /assignment1/cs231n/classifiers/softmax.py | d17ec60b37f45a7a1a9c34b07a7e1668ab14e98a | [] | no_license | beefinbj/CS231n | 5ea5798bf5b9e9e202e4ec09e2f3098ec389a7cf | e133f27ff114d7a2da3298c242a5a47e71a76c88 | refs/heads/master | 2021-08-29T04:41:45.691756 | 2017-12-13T12:07:11 | 2017-12-13T12:07:11 | 114,067,653 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,206 | py | import numpy as np
from random import shuffle
def softmax_loss_naive(W, X, y, reg):
    """
    Softmax loss function, naive implementation (with loops)

    Inputs have dimension D, there are C classes, and we operate on minibatches
    of N examples.

    Inputs:
    - W: A numpy array of shape (D, C) containing weights.
    - X: A numpy array of shape (N, D) containing a minibatch of data.
    - y: A numpy array of shape (N,) containing training labels; y[i] = c means
      that X[i] has label c, where 0 <= c < C.
    - reg: (float) regularization strength

    Returns a tuple of:
    - loss as single float
    - gradient with respect to weights W; an array of same shape as W
    """
    loss = 0.0
    dW = np.zeros_like(W)
    num_train = X.shape[0]
    num_classes = W.shape[1]
    # range (not Python-2-only xrange) iterates identically on Python 2 and 3.
    for i in range(num_train):
        f = X[i].dot(W)
        correct_f = f[y[i]]
        # Shift scores by -max(f) before exponentiating for numerical stability.
        stability = -np.max(f)
        # Hoist the loop-invariant softmax normalizer out of the inner loop
        # (it was recomputed for every class in the original).
        sum_exp = np.sum(np.exp(f + stability))
        log_term = 0.0
        for j in range(num_classes):
            term = np.exp(f[j] + stability)
            log_term += term
            # Gradient of log-sum-exp: softmax probability of class j times x_i.
            dW[:, j] += term / sum_exp * X[i].T
            if j == y[i]:
                # Indicator term for the correct class.
                dW[:, j] -= X[i].T
        # Cross-entropy: log(sum_j exp(f_j + s)) - (f_{y_i} + s).
        loss += np.log(log_term)
        loss -= correct_f + stability
    loss /= num_train
    loss += 0.5 * reg * np.sum(W * W)
    dW /= num_train
    dW += reg * W
    return loss, dW
def softmax_loss_vectorized(W, X, y, reg):
    """
    Softmax loss function, vectorized version.

    Inputs and outputs are the same as softmax_loss_naive.
    """
    n = X.shape[0]

    # Stabilized class scores: subtract each row's max before exponentiating.
    scores = X.dot(W)
    scores -= scores.max(axis=1, keepdims=True)

    # Softmax probabilities, one row per training example.
    exp_scores = np.exp(scores)
    probs = exp_scores / exp_scores.sum(axis=1, keepdims=True)

    # Average negative log-likelihood of the correct classes + L2 penalty.
    correct_logprobs = -np.log(probs[np.arange(n), y])
    loss = correct_logprobs.sum() / n + 0.5 * reg * np.sum(W * W)

    # Gradient: softmax probabilities minus the one-hot indicator,
    # back-propagated through the linear layer.
    dscores = probs.copy()
    dscores[np.arange(n), y] -= 1
    dW = X.T.dot(dscores) / n + reg * W

    return loss, dW
return loss, dW
| [
"angus.ning@yahoo.com"
] | angus.ning@yahoo.com |
cec7c19376dd2dd84269594743f4ce3fc83a2cda | 45712328b046b5c5ea8a28e76d77ce353d43b94e | /in_Python/02-week/Efficient Algos/08-Last digit of sum of squares of fibonacci number-Efficient Algo.py | e5b745c223dfdeef19e417c19d71dcc0c850daee | [] | no_license | abdulwahid40/Data-Structures-and-Algorithms | 6259a67811da51c0691e3b0d7e2b678f636ab059 | 5823ac2c4018960ccf4b00ced4a92b35838017ee | refs/heads/master | 2022-11-25T23:26:34.594058 | 2022-11-14T16:48:27 | 2022-11-14T16:48:27 | 251,221,340 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 741 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Apr 6 00:46:56 2020
@author: Abdul Wahid
"""
def pisano_period(m):
previous = 0
current = 1
for counter in range(0, m*m):
previous, current = current, (previous + current) % m
if previous == 0 and current == 1:
return counter + 1
def fab_num_remainder(n, m):
pisano = pisano_period(m)
n = n % pisano
if n == 0:
return n
previous = 0
current = 1
total = 0
for _ in range(n):
total += current ** 2
previous, current = current, previous + current
return total % m
if __name__ == '__main__':
num = int(input())
mod = 10
print(fab_num_remainder(num, mod)) | [
"abdul.wahid1257@gmail.com"
] | abdul.wahid1257@gmail.com |
cabbb422a3ca7bc2796e7304b97b475d7a2b5d6b | a8a8d889e640c16eaccca3f813d9600c678fec50 | /tail_weight_comp.py | 408f398114b1499777b2996e43e8f97de16f6a29 | [] | no_license | nmohdkha/triton_eye | dee1370e294ff71f83689f9989cfe87fc33d5a85 | 79daba711db834c2261e5d03c891f889c8cea9c2 | refs/heads/master | 2020-05-23T09:04:07.071119 | 2019-05-25T20:19:31 | 2019-05-25T20:19:31 | 186,701,286 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 464 | py | import numpy as np
from openmdao.api import ExplicitComponent
class TailWeightComp(ExplicitComponent):
    """OpenMDAO explicit component: tail weight as a linear function of tail area."""

    def setup(self):
        # Declare the I/O: tail planform area in, tail weight out.
        self.add_input('S_t')
        self.add_output('W_Tail')
        #self.declare_partials('W_LG','W_0)',method='cs')

    def compute(self, inputs, outputs):
        # W_Tail = 3 * S_t.  The constant 3 is presumably a weight-per-
        # unit-area coefficient; units/derivation not given -- TODO confirm.
        S_t = inputs['S_t']
        outputs['W_Tail'] = 3*S_t

    # def compute_partials(self, inputs, partials):
    #     partials['W_tail', 'S_t'] = 3
"noreply@github.com"
] | noreply@github.com |
a9eca2d20dd542ef6aa215bdf112c85e1995dacc | 604402249f2ceb44153492e39959afc6dcd1e5d3 | /TextEdit.py | 888da59938d21aa50a3a0db4fd01f52bbde17c3a | [] | no_license | Sreeram2006/My-Python-Projects | 512ed03b8f9c2bbf24329394b053266d78d6c45a | 5b3365004597809e1319c1b30dc5d1f82d266bda | refs/heads/main | 2023-05-22T15:23:58.343964 | 2021-05-26T12:52:58 | 2021-05-26T12:52:58 | 371,031,855 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,653 | py | from tkinter import *
from tkinter import filedialog
from tkinter import font
root = Tk()
root.title("TextEdit")
root.geometry("1200x660+0+0")
def new_file():
    """Clear the textbox and reset the window title and status bar."""
    my_text.delete("1.0", END)
    root.title('New File- TextEdit')
    status_bar.config(text="New File ")
def open_file():
    """Prompt for a file and load its contents into the textbox.

    Updates the status bar with the full path and the window title with
    the path relative to the F:\\HTML base directory.
    """
    my_text.delete("1.0", END)  # drop whatever is currently displayed
    text_file = filedialog.askopenfilename(initialdir="F:\HTML", title="Open File", filetypes=(
        ("All Files", "*.*"), ("Text Files", "*.txt"), ("HTML Files", "*.html"), ("Python Files", "*.py"), ("PDF Files", "*.pdf")))
    name = text_file
    status_bar.config(text=f'{name} ')
    name = name.replace("F:\HTML", "")
    root.title(f'{name}- TextEdit')
    text_file = open(text_file, 'r')
    stuff = text_file.read()
    my_text.insert(END, stuff) # Add file to textbox
    text_file.close() # Close the text file
def save_as_file():
    """Prompt for a destination path and save the textbox contents to it."""
    text_file = filedialog.asksaveasfilename(defaultextension=".*", initialdir="F:\HTML", title="Save File", filetypes=(
        ("All Files", "*.*"), ("Text Files", "*.txt"), ("HTML Files", "*.html"), ("Python Files", "*.py"), ("PDF Files", "*.pdf")))
    if text_file:
        name = text_file
        status_bar.config(text=f'{name} ')
        # Strip the same "F:\HTML" prefix used elsewhere (the original
        # said "F\HTML", which never matches the chosen path).
        name = name.replace("F:\HTML", "")
        text_file = open(text_file, "w")
        # Bug fix: file.write() takes the text to write; the original
        # called text_file.write(1.0, END), a guaranteed TypeError.
        # Fetch the widget contents instead.
        text_file.write(my_text.get(1.0, END))
        text_file.close()
f1 = Frame(root)
f1.pack(pady=5)
text_scroll = Scrollbar(f1)
text_scroll.pack(side=RIGHT, fill=Y)
my_text = Text(f1, width=97, height=25, font=("Lucida Typewriter", 16), selectbackground="yellow",
selectforeground="black", undo="True", yscrollcommand=text_scroll.set)
my_text.pack()
text_scroll.config(command=my_text.yview)
my_menu = Menu(root)
root.config(menu=my_menu)
# File Menu
file_menu = Menu(my_menu, tearoff=False)
my_menu.add_cascade(label="File", menu=file_menu)
file_menu.add_command(label="New", command=new_file)
file_menu.add_command(label="Open", command=open_file)
file_menu.add_command(label="Save")
file_menu.add_command(label="Save As", command=save_as_file)
file_menu.add_separator()
file_menu.add_command(label="Exit", command=root.quit)
# Edit Menu
edit_menu = Menu(my_menu, tearoff=False)
my_menu.add_cascade(label="Edit", menu=edit_menu)
edit_menu.add_command(label="Cut")
edit_menu.add_command(label="Copy")
edit_menu.add_command(label="Paste")
edit_menu.add_command(label="Undo")
edit_menu.add_command(label="Redo")
# Status Bar
status_bar = Label(root, text="Ready ", anchor=E)
status_bar.pack(fill=X, side=BOTTOM, ipady=5)
root.mainloop()
| [
"noreply@github.com"
] | noreply@github.com |
7fe6f3939974f57cf54ee1ec2edd52ceee019a19 | 1a559ace834d300aca6cde608ac9351032d4cd2e | /populate_rango.py | b3a91a7ecc126bedb52b90155a01ca50008142ec | [] | no_license | akilud/TangoTest | d9af5976c47e18056588c641c1aba8d249118a52 | 759cf7567318645f42e07c13c4e52874a7c164cd | refs/heads/master | 2021-01-01T05:05:22.712421 | 2016-04-22T03:12:40 | 2016-04-22T03:12:40 | 56,822,525 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,913 | py | import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Tango.settings')
import django
django.setup()
from rango.models import Category, Page
def populate():
python_cat = add_cat('Python',views=128,likes=64)
add_page(cat=python_cat,
title="Official Python Tutorial",
url="http://docs.python.org/2/tutorial/")
add_page(cat=python_cat,
title="How to Think like a Computer Scientist",
url="http://www.greenteapress.com/thinkpython/")
add_page(cat=python_cat,
title="Learn Python in 10 Minutes",
url="http://www.korokithakis.net/tutorials/python/")
django_cat = add_cat("Django",views=64,likes=32)
add_page(cat=django_cat,
title="Official Django Tutorial",
url="https://docs.djangoproject.com/en/1.5/intro/tutorial01/")
add_page(cat=django_cat,
title="Django Rocks",
url="http://www.djangorocks.com/")
add_page(cat=django_cat,
title="How to Tango with Django",
url="http://www.tangowithdjango.com/")
frame_cat = add_cat("Other Frameworks",views=32,likes=12)
add_page(cat=frame_cat,
title="Bottle",
url="http://bottlepy.org/docs/dev/")
add_page(cat=frame_cat,
title="Flask",
url="http://flask.pocoo.org")
# Print out what we have added to the user.
for c in Category.objects.all():
for p in Page.objects.filter(category=c):
print ("- {0} - {1}".format(str(c), str(p)))
def add_page(cat, title, url, views=0):
p = Page.objects.get_or_create(category=cat, title=title)[0]
p.url=url
p.views=views
p.save()
return p
def add_cat(name,views=0,likes=0):
c = Category.objects.get_or_create(name=name)[0]
c.views=views
c.likes=likes
c.save()
return c
# Start execution here!
if __name__ == '__main__':
print ("Starting Rango population script...")
populate() | [
"akilud@gmail.com"
] | akilud@gmail.com |
6484c408d3f3b2c47a03d92785378350e8552ecb | f8ac9081d4bfcb5ae0ed4d7e194a023805541d34 | /accounts/tests/test_models.py | 06adb66d899e641874b5a812eda1ff4ff76971e7 | [] | no_license | cmermingas/tddbook | 9327a2710c3d678bd1645b2f1d8cd711580cddd0 | a957c5be16a408ecba326550d4564b8987de6dbe | refs/heads/master | 2020-12-24T19:46:55.455074 | 2016-05-14T19:03:03 | 2016-05-14T19:03:03 | 58,326,932 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | from django.test import TestCase
from django.contrib.auth import get_user_model
User = get_user_model()
class UserModelTest(TestCase):
def test_user_is_valid_with_email_only(self):
user = User(email='a@b.com')
user.full_clean() # Should not raise
def test_email_is_primary_key(self):
user = User()
self.assertFalse(hasattr(user, 'id'))
def test_is_authenticated(self):
user = User()
self.assertTrue(user.is_authenticated()) | [
"cmermingas@gmail.com"
] | cmermingas@gmail.com |
76e33f62e127998784a07731521e6ef615bc9c7e | df432760949b3e64bd73f363b92f82871c51ab4f | /sem15-code_clube-porta_da_fortuna-desafio03-perdendo_o_jogo.py | 738ac379fdf4222413059a5979583f8ee2c79805 | [] | no_license | AzzyOxx/pec-atividades | d3a37ecc2156e8682f90941bdf89da8b5dda435c | 3a89c1135de3caf0a19cffbf6142ca28ca012295 | refs/heads/master | 2023-05-31T15:26:29.938246 | 2021-07-06T23:48:27 | 2021-07-06T23:48:27 | 370,512,103 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,762 | py | from random import *
def desafio_perdendo_o_jogo():
#imprime as três portas e as instruções do jogo
print('''Porta da Fortuna!
=========
Existe um super prêmio atrás de uma dessas 3 portas!
Adivinhe qual é a porta certa para ganhar o prémio!
_____ _____ _____
| | | | | |
| [1] | | [2] | | [3] |
| o| | o| | o|
|_____| |_____| |_____|
''')
score = 0
#o usuário muda esta variável para terminar o jogo
jogando = True
#repetir, enquanto a variável 'jogando' estiver com valor "True"
while jogando == True:
print('\nEscolha um porta (1, 2 ou 3):')
#get the chosen door and store it as an integer (whole number)
chosenDoor = input()
chosenDoor = int(chosenDoor)
#randomly choose the winning door number (between 1 and 3)
winningDoor = randint(1,3)
#show the player the winning and chosen door numbers
print("A porta escolhida foi a", chosenDoor)
print("A pota certa é a", winningDoor)
#player wins if the chosen door and winning door number are the same
if chosenDoor == winningDoor:
print("Parabéns!")
score += 1
else:
score = 0
print("Que peninha!\nSua pontuação foi zerada T.T")
#pergunte ao jogador se ele quer continuar jogando
print("\nVocê quer jogar de novo?(s/n)")
resposta = input()[0].lower()
#termina o jogo se o jogador digitar 'n'
if resposta == 'n':
jogando = False
print('Obrigado por jogar.')
print("Sua pontuação final é", score,'.')
def main():
desafio_perdendo_o_jogo()
if __name__ == '__main__':
main()
| [
"catce.2021111mtds0316@aluno.ifpi.edu.br"
] | catce.2021111mtds0316@aluno.ifpi.edu.br |
cd5826257d8650a010bad89f2914a65cba719f9e | 5c019f5d465436a752cfc29f33353453e79c7e76 | /backend/mall/goods/goods_model.py | 55016886eba8f0016ad3173cef6e0404ad42613e | [] | no_license | mminhou/mall | 262110bcd802cadaa234758f192bffd15dfdb5c8 | 6261a15efa9a1ecba70263e17991e1f30608257d | refs/heads/master | 2021-09-12T13:21:13.552284 | 2018-04-17T07:39:55 | 2018-04-17T07:39:55 | 100,087,966 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 842 | py | from django.db import models
from mall.category.category_model import HighCategory, LowCategory
class Goods(models.Model):
style_num = models.IntegerField(primary_key=True)
high_category = models.ForeignKey(HighCategory, related_name="Goods", on_delete=models.CASCADE)
low_category = models.ForeignKey(LowCategory, related_name="Goods", on_delete=models.CASCADE)
goods_name = models.CharField(max_length=100, unique=True)
goods_price = models.CharField(max_length=30)
goods_detail = models.TextField()
goods_created = models.DateField(auto_now_add=True)
goods_mainImage = models.ImageField('MainImage')
goods_subImage = models.ImageField('SubImage', null=True, blank=True)
goods_subImage2 = models.ImageField('SubImage2', null=True, blank=True)
def __str__(self):
return self.goods_name
| [
"alsgh1003@hanmail.net"
] | alsgh1003@hanmail.net |
c7f7dc9027e7c74dc467b0c29e884e7db7d62e4f | 51f887286aa3bd2c3dbe4c616ad306ce08976441 | /pybind/slxos/v17r_1_01a/brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/__init__.py | 96131278f35a517493f7e62b5cba8e2907096906 | [
"Apache-2.0"
] | permissive | b2220333/pybind | a8c06460fd66a97a78c243bf144488eb88d7732a | 44c467e71b2b425be63867aba6e6fa28b2cfe7fb | refs/heads/master | 2020-03-18T09:09:29.574226 | 2018-04-03T20:09:50 | 2018-04-03T20:09:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 38,501 | py |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
class hop(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-mpls - based on the path /brocade_mpls_rpc/show-mpls-lsp-name-detail/output/lsp/show-mpls-lsp-detail-info/show-mpls-lsp-instances-info/lsp-instances/lsp-rsvp-session-rro-hops/show-mpls-lsp-hop-list/hop. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__lsp_hop_address','__lsp_hop_strict_hop','__lsp_hop_loose_hop','__lsp_hop_is_router_id','__lsp_hop_has_protection','__lsp_hop_has_node_protection','__lsp_hop_has_bandwidth_protection','__lsp_hop_has_protection_in_use','__lsp_hop_avoid_node','__lsp_hop_avoid_local','__lsp_hop_avoid_remote',)
_yang_name = 'hop'
_rest_name = 'hop'
_pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    """Initialise the generated YANG container.

    Accepts at most one positional argument: an object carrying the same
    pyangbind elements, whose changed values are copied into this instance.
    Recognised keyword arguments (popped before use): path_helper,
    extmethods, load.
    """
    # Resolve the XPath helper: explicit False disables it, an explicit
    # YANGPathHelper instance is used directly, otherwise inherit from the
    # parent container when one exists.
    path_helper_ = kwargs.pop("path_helper", None)
    if path_helper_ is False:
      self._path_helper = False
    elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
      self._path_helper = path_helper_
    elif hasattr(self, "_parent"):
      path_helper_ = getattr(self._parent, "_path_helper", False)
      self._path_helper = path_helper_
    else:
      self._path_helper = False
    # Resolve extension methods the same way: explicit False, explicit dict,
    # else inherit from the parent.
    extmethods = kwargs.pop("extmethods", None)
    if extmethods is False:
      self._extmethods = False
    elif extmethods is not None and isinstance(extmethods, dict):
      self._extmethods = extmethods
    elif hasattr(self, "_parent"):
      extmethods = getattr(self._parent, "_extmethods", None)
      self._extmethods = extmethods
    else:
      self._extmethods = False
    # Instantiate every leaf with its generated default-initialised YANG type
    # wrapper (booleans, plus the IPv4-address list key lsp-hop-address).
    self.__lsp_hop_avoid_remote = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-avoid-remote", rest_name="lsp-hop-avoid-remote", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_hop_avoid_node = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-avoid-node", rest_name="lsp-hop-avoid-node", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_hop_has_protection_in_use = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-has-protection-in-use", rest_name="lsp-hop-has-protection-in-use", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_hop_has_protection = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-has-protection", rest_name="lsp-hop-has-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_hop_avoid_local = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-avoid-local", rest_name="lsp-hop-avoid-local", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_hop_has_bandwidth_protection = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-has-bandwidth-protection", rest_name="lsp-hop-has-bandwidth-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_hop_strict_hop = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-strict-hop", rest_name="lsp-hop-strict-hop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_hop_is_router_id = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-is-router-id", rest_name="lsp-hop-is-router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_hop_address = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="lsp-hop-address", rest_name="lsp-hop-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)
    self.__lsp_hop_has_node_protection = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-has-node-protection", rest_name="lsp-hop-has-node-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    self.__lsp_hop_loose_hop = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-loose-hop", rest_name="lsp-hop-loose-hop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    # Optional copy-construction from a compatible object: every pyangbind
    # element must exist on the source object; only changed values are copied.
    load = kwargs.pop("load", None)
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        # Forward load= only when explicitly supplied so key-leaf setters can
        # distinguish load-time population from direct user assignment.
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'brocade_mpls_rpc', u'show-mpls-lsp-name-detail', u'output', u'lsp', u'show-mpls-lsp-detail-info', u'show-mpls-lsp-instances-info', u'lsp-instances', u'lsp-rsvp-session-rro-hops', u'show-mpls-lsp-hop-list', u'hop']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'show-mpls-lsp-name-detail', u'output', u'lsp', u'lsp-instances', u'lsp-rsvp-session-rro-hops', u'hop']
  def _get_lsp_hop_address(self):
    """
    Getter method for lsp_hop_address, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_address (inet:ipv4-address)
    YANG Description: Hop IP address
    """
    # Returns the YANGDynClass-wrapped dotted-quad string held in the
    # name-mangled private slot.
    return self.__lsp_hop_address
  def _set_lsp_hop_address(self, v, load=False):
    """
    Setter method for lsp_hop_address, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_address (inet:ipv4-address)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_hop_address is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_hop_address() directly.
    YANG Description: Hop IP address
    """
    # This leaf is the list key (is_keyval=True): once the entry is part of
    # an instantiated list it may only be set during load, never directly.
    parent = getattr(self, "_parent", None)
    if parent is not None and load is False:
      raise AttributeError("Cannot set keys directly when" +
                             " within an instantiated list")

    # Unwrap an already-typed pyangbind value to its base type before
    # re-wrapping it below.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Coerce into the generated restricted IPv4-address type; a failed
      # coercion means the supplied value does not match the pattern.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="lsp-hop-address", rest_name="lsp-hop-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """lsp_hop_address must be of a type compatible with inet:ipv4-address""",
          'defined-type': "inet:ipv4-address",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="lsp-hop-address", rest_name="lsp-hop-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)""",
        })

    self.__lsp_hop_address = t
    # Notify the parent tree of the change when a _set hook is present.
    if hasattr(self, '_set'):
      self._set()
  def _unset_lsp_hop_address(self):
    # Reset the leaf to a freshly constructed, default YANG type instance.
    self.__lsp_hop_address = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="lsp-hop-address", rest_name="lsp-hop-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)
  def _get_lsp_hop_strict_hop(self):
    """
    Getter method for lsp_hop_strict_hop, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_strict_hop (boolean)
    YANG Description: CSPF path Strict hop
    """
    # Returns the YANGDynClass-wrapped boolean held in the name-mangled slot.
    return self.__lsp_hop_strict_hop
  def _set_lsp_hop_strict_hop(self, v, load=False):
    """
    Setter method for lsp_hop_strict_hop, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_strict_hop (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_hop_strict_hop is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_hop_strict_hop() directly.
    YANG Description: CSPF path Strict hop
    """
    # Unwrap an already-typed pyangbind value before re-wrapping it below.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Coerce into the generated YANG boolean wrapper; failure means the
      # supplied value is not a valid boolean.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-hop-strict-hop", rest_name="lsp-hop-strict-hop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """lsp_hop_strict_hop must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-strict-hop", rest_name="lsp-hop-strict-hop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })

    self.__lsp_hop_strict_hop = t
    # Notify the parent tree of the change when a _set hook is present.
    if hasattr(self, '_set'):
      self._set()
  def _unset_lsp_hop_strict_hop(self):
    # Reset the leaf to a freshly constructed, default YANG type instance.
    self.__lsp_hop_strict_hop = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-strict-hop", rest_name="lsp-hop-strict-hop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  def _get_lsp_hop_loose_hop(self):
    """
    Getter method for lsp_hop_loose_hop, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_loose_hop (boolean)
    YANG Description: CSPF path Loose hop
    """
    # Returns the YANGDynClass-wrapped boolean held in the name-mangled slot.
    return self.__lsp_hop_loose_hop
  def _set_lsp_hop_loose_hop(self, v, load=False):
    """
    Setter method for lsp_hop_loose_hop, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_loose_hop (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_hop_loose_hop is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_hop_loose_hop() directly.
    YANG Description: CSPF path Loose hop
    """
    # Unwrap an already-typed pyangbind value before re-wrapping it below.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Coerce into the generated YANG boolean wrapper; failure means the
      # supplied value is not a valid boolean.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-hop-loose-hop", rest_name="lsp-hop-loose-hop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """lsp_hop_loose_hop must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-loose-hop", rest_name="lsp-hop-loose-hop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })

    self.__lsp_hop_loose_hop = t
    # Notify the parent tree of the change when a _set hook is present.
    if hasattr(self, '_set'):
      self._set()
  def _unset_lsp_hop_loose_hop(self):
    # Reset the leaf to a freshly constructed, default YANG type instance.
    self.__lsp_hop_loose_hop = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-loose-hop", rest_name="lsp-hop-loose-hop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  def _get_lsp_hop_is_router_id(self):
    """
    Getter method for lsp_hop_is_router_id, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_is_router_id (boolean)
    YANG Description: Hop is a router id hop
    """
    # Returns the YANGDynClass-wrapped boolean held in the name-mangled slot.
    return self.__lsp_hop_is_router_id
  def _set_lsp_hop_is_router_id(self, v, load=False):
    """
    Setter method for lsp_hop_is_router_id, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_is_router_id (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_hop_is_router_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_hop_is_router_id() directly.
    YANG Description: Hop is a router id hop
    """
    # Unwrap an already-typed pyangbind value before re-wrapping it below.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Coerce into the generated YANG boolean wrapper; failure means the
      # supplied value is not a valid boolean.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-hop-is-router-id", rest_name="lsp-hop-is-router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """lsp_hop_is_router_id must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-is-router-id", rest_name="lsp-hop-is-router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })

    self.__lsp_hop_is_router_id = t
    # Notify the parent tree of the change when a _set hook is present.
    if hasattr(self, '_set'):
      self._set()
  def _unset_lsp_hop_is_router_id(self):
    # Reset the leaf to a freshly constructed, default YANG type instance.
    self.__lsp_hop_is_router_id = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-is-router-id", rest_name="lsp-hop-is-router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  def _get_lsp_hop_has_protection(self):
    """
    Getter method for lsp_hop_has_protection, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_has_protection (boolean)
    YANG Description: RRO hop Protection available
    """
    # Returns the YANGDynClass-wrapped boolean held in the name-mangled slot.
    return self.__lsp_hop_has_protection
  def _set_lsp_hop_has_protection(self, v, load=False):
    """
    Setter method for lsp_hop_has_protection, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_has_protection (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_hop_has_protection is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_hop_has_protection() directly.
    YANG Description: RRO hop Protection available
    """
    # Unwrap an already-typed pyangbind value before re-wrapping it below.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Coerce into the generated YANG boolean wrapper; failure means the
      # supplied value is not a valid boolean.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-hop-has-protection", rest_name="lsp-hop-has-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """lsp_hop_has_protection must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-has-protection", rest_name="lsp-hop-has-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })

    self.__lsp_hop_has_protection = t
    # Notify the parent tree of the change when a _set hook is present.
    if hasattr(self, '_set'):
      self._set()
  def _unset_lsp_hop_has_protection(self):
    # Reset the leaf to a freshly constructed, default YANG type instance.
    self.__lsp_hop_has_protection = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-has-protection", rest_name="lsp-hop-has-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  def _get_lsp_hop_has_node_protection(self):
    """
    Getter method for lsp_hop_has_node_protection, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_has_node_protection (boolean)
    YANG Description: RRO hop Node Protection available
    """
    # Returns the YANGDynClass-wrapped boolean held in the name-mangled slot.
    return self.__lsp_hop_has_node_protection
  def _set_lsp_hop_has_node_protection(self, v, load=False):
    """
    Setter method for lsp_hop_has_node_protection, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_has_node_protection (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_hop_has_node_protection is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_hop_has_node_protection() directly.
    YANG Description: RRO hop Node Protection available
    """
    # Unwrap an already-typed pyangbind value before re-wrapping it below.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Coerce into the generated YANG boolean wrapper; failure means the
      # supplied value is not a valid boolean.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-hop-has-node-protection", rest_name="lsp-hop-has-node-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """lsp_hop_has_node_protection must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-has-node-protection", rest_name="lsp-hop-has-node-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })

    self.__lsp_hop_has_node_protection = t
    # Notify the parent tree of the change when a _set hook is present.
    if hasattr(self, '_set'):
      self._set()
  def _unset_lsp_hop_has_node_protection(self):
    # Reset the leaf to a freshly constructed, default YANG type instance.
    self.__lsp_hop_has_node_protection = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-has-node-protection", rest_name="lsp-hop-has-node-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  def _get_lsp_hop_has_bandwidth_protection(self):
    """
    Getter method for lsp_hop_has_bandwidth_protection, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_has_bandwidth_protection (boolean)
    YANG Description: RRO hop bandwidth Protection available
    """
    # Returns the YANGDynClass-wrapped boolean held in the name-mangled slot.
    return self.__lsp_hop_has_bandwidth_protection
  def _set_lsp_hop_has_bandwidth_protection(self, v, load=False):
    """
    Setter method for lsp_hop_has_bandwidth_protection, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_has_bandwidth_protection (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_hop_has_bandwidth_protection is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_hop_has_bandwidth_protection() directly.
    YANG Description: RRO hop bandwidth Protection available
    """
    # Unwrap an already-typed pyangbind value before re-wrapping it below.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Coerce into the generated YANG boolean wrapper; failure means the
      # supplied value is not a valid boolean.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-hop-has-bandwidth-protection", rest_name="lsp-hop-has-bandwidth-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """lsp_hop_has_bandwidth_protection must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-has-bandwidth-protection", rest_name="lsp-hop-has-bandwidth-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
        })

    self.__lsp_hop_has_bandwidth_protection = t
    # Notify the parent tree of the change when a _set hook is present.
    if hasattr(self, '_set'):
      self._set()
  def _unset_lsp_hop_has_bandwidth_protection(self):
    # Reset the leaf to a freshly constructed, default YANG type instance.
    self.__lsp_hop_has_bandwidth_protection = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-has-bandwidth-protection", rest_name="lsp-hop-has-bandwidth-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
  def _get_lsp_hop_has_protection_in_use(self):
    """
    Getter method for lsp_hop_has_protection_in_use, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_has_protection_in_use (boolean)
    YANG Description: RRO hop protection is in use
    """
    # Returns the YANGDynClass-wrapped boolean held in the name-mangled slot.
    return self.__lsp_hop_has_protection_in_use
def _set_lsp_hop_has_protection_in_use(self, v, load=False):
"""
Setter method for lsp_hop_has_protection_in_use, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_has_protection_in_use (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_hop_has_protection_in_use is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_hop_has_protection_in_use() directly.
YANG Description: RRO hop protection is in use
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-hop-has-protection-in-use", rest_name="lsp-hop-has-protection-in-use", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_hop_has_protection_in_use must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-has-protection-in-use", rest_name="lsp-hop-has-protection-in-use", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_hop_has_protection_in_use = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_hop_has_protection_in_use(self):
self.__lsp_hop_has_protection_in_use = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-has-protection-in-use", rest_name="lsp-hop-has-protection-in-use", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_hop_avoid_node(self):
"""
Getter method for lsp_hop_avoid_node, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_avoid_node (boolean)
YANG Description: Avoid address type is node
"""
return self.__lsp_hop_avoid_node
def _set_lsp_hop_avoid_node(self, v, load=False):
"""
Setter method for lsp_hop_avoid_node, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_avoid_node (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_hop_avoid_node is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_hop_avoid_node() directly.
YANG Description: Avoid address type is node
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-hop-avoid-node", rest_name="lsp-hop-avoid-node", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_hop_avoid_node must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-avoid-node", rest_name="lsp-hop-avoid-node", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_hop_avoid_node = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_hop_avoid_node(self):
self.__lsp_hop_avoid_node = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-avoid-node", rest_name="lsp-hop-avoid-node", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_hop_avoid_local(self):
"""
Getter method for lsp_hop_avoid_local, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_avoid_local (boolean)
YANG Description: Avoid address type is local
"""
return self.__lsp_hop_avoid_local
def _set_lsp_hop_avoid_local(self, v, load=False):
"""
Setter method for lsp_hop_avoid_local, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_avoid_local (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_hop_avoid_local is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_hop_avoid_local() directly.
YANG Description: Avoid address type is local
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-hop-avoid-local", rest_name="lsp-hop-avoid-local", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_hop_avoid_local must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-avoid-local", rest_name="lsp-hop-avoid-local", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_hop_avoid_local = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_hop_avoid_local(self):
self.__lsp_hop_avoid_local = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-avoid-local", rest_name="lsp-hop-avoid-local", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
def _get_lsp_hop_avoid_remote(self):
"""
Getter method for lsp_hop_avoid_remote, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_avoid_remote (boolean)
YANG Description: Avoid address type is remote
"""
return self.__lsp_hop_avoid_remote
def _set_lsp_hop_avoid_remote(self, v, load=False):
"""
Setter method for lsp_hop_avoid_remote, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_name_detail/output/lsp/show_mpls_lsp_detail_info/show_mpls_lsp_instances_info/lsp_instances/lsp_rsvp_session_rro_hops/show_mpls_lsp_hop_list/hop/lsp_hop_avoid_remote (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_hop_avoid_remote is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_hop_avoid_remote() directly.
YANG Description: Avoid address type is remote
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-hop-avoid-remote", rest_name="lsp-hop-avoid-remote", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_hop_avoid_remote must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-avoid-remote", rest_name="lsp-hop-avoid-remote", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)""",
})
self.__lsp_hop_avoid_remote = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_hop_avoid_remote(self):
self.__lsp_hop_avoid_remote = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-hop-avoid-remote", rest_name="lsp-hop-avoid-remote", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)
lsp_hop_address = __builtin__.property(_get_lsp_hop_address, _set_lsp_hop_address)
lsp_hop_strict_hop = __builtin__.property(_get_lsp_hop_strict_hop, _set_lsp_hop_strict_hop)
lsp_hop_loose_hop = __builtin__.property(_get_lsp_hop_loose_hop, _set_lsp_hop_loose_hop)
lsp_hop_is_router_id = __builtin__.property(_get_lsp_hop_is_router_id, _set_lsp_hop_is_router_id)
lsp_hop_has_protection = __builtin__.property(_get_lsp_hop_has_protection, _set_lsp_hop_has_protection)
lsp_hop_has_node_protection = __builtin__.property(_get_lsp_hop_has_node_protection, _set_lsp_hop_has_node_protection)
lsp_hop_has_bandwidth_protection = __builtin__.property(_get_lsp_hop_has_bandwidth_protection, _set_lsp_hop_has_bandwidth_protection)
lsp_hop_has_protection_in_use = __builtin__.property(_get_lsp_hop_has_protection_in_use, _set_lsp_hop_has_protection_in_use)
lsp_hop_avoid_node = __builtin__.property(_get_lsp_hop_avoid_node, _set_lsp_hop_avoid_node)
lsp_hop_avoid_local = __builtin__.property(_get_lsp_hop_avoid_local, _set_lsp_hop_avoid_local)
lsp_hop_avoid_remote = __builtin__.property(_get_lsp_hop_avoid_remote, _set_lsp_hop_avoid_remote)
_pyangbind_elements = {'lsp_hop_address': lsp_hop_address, 'lsp_hop_strict_hop': lsp_hop_strict_hop, 'lsp_hop_loose_hop': lsp_hop_loose_hop, 'lsp_hop_is_router_id': lsp_hop_is_router_id, 'lsp_hop_has_protection': lsp_hop_has_protection, 'lsp_hop_has_node_protection': lsp_hop_has_node_protection, 'lsp_hop_has_bandwidth_protection': lsp_hop_has_bandwidth_protection, 'lsp_hop_has_protection_in_use': lsp_hop_has_protection_in_use, 'lsp_hop_avoid_node': lsp_hop_avoid_node, 'lsp_hop_avoid_local': lsp_hop_avoid_local, 'lsp_hop_avoid_remote': lsp_hop_avoid_remote, }
| [
"badaniya@brocade.com"
] | badaniya@brocade.com |
33fa1f4b99a1258ca7464dad27008d7d33f81f0c | 75d258d0cc8b07134a3db656a16e8c27557e3572 | /n42_m14/circuit_n42_m14_s1_e6_pEFGH.py | b795237a2e1f2c51ab0b0c43db3ac209a29f512b | [] | no_license | tonybruguier/martinis_et_al_data | 7c5acee8cb18586607c0ffdc25bc9b616e0847be | 1a35e6712c5bd4b48ef0027707b52dd81e5aa3f3 | refs/heads/master | 2023-02-23T09:36:24.179239 | 2021-01-24T20:23:04 | 2021-01-24T20:23:04 | 332,266,881 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 138,500 | py | import cirq
import numpy as np
# Fixed ordering of the 42 grid qubits used by this circuit, listed
# row-major over the (row, col) device coordinates below.
QUBIT_ORDER = [
    cirq.GridQubit(row, col)
    for row, col in [
        (0, 5), (0, 6),
        (1, 4), (1, 5), (1, 6), (1, 7),
        (2, 4), (2, 5), (2, 6), (2, 7), (2, 8),
        (3, 2), (3, 3), (3, 5), (3, 6), (3, 7), (3, 8),
        (4, 1), (4, 2), (4, 3), (4, 4), (4, 5), (4, 6), (4, 7),
        (5, 1), (5, 2), (5, 3), (5, 4), (5, 5), (5, 6), (5, 7),
        (6, 1), (6, 2), (6, 3), (6, 4), (6, 5), (6, 6),
        (7, 2), (7, 3), (7, 4), (7, 5),
        (8, 3),
    ]
]
CIRCUIT = cirq.Circuit(moments=[
cirq.Moment(operations=[
(cirq.Y**0.5).on(cirq.GridQubit(0, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(0, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 4)),
(cirq.X**0.5).on(cirq.GridQubit(1, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 5)),
(cirq.X**0.5).on(cirq.GridQubit(2, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 7)),
(cirq.X**0.5).on(cirq.GridQubit(2, 8)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 2)),
(cirq.X**0.5).on(cirq.GridQubit(3, 3)),
(cirq.X**0.5).on(cirq.GridQubit(3, 5)),
(cirq.X**0.5).on(cirq.GridQubit(3, 6)),
(cirq.X**0.5).on(cirq.GridQubit(3, 7)),
(cirq.X**0.5).on(cirq.GridQubit(3, 8)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 1)),
(cirq.X**0.5).on(cirq.GridQubit(4, 2)),
(cirq.X**0.5).on(cirq.GridQubit(4, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 4)),
(cirq.X**0.5).on(cirq.GridQubit(4, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 1)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 1)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 2)),
(cirq.X**0.5).on(cirq.GridQubit(6, 3)),
(cirq.X**0.5).on(cirq.GridQubit(6, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(7, 2)),
(cirq.X**0.5).on(cirq.GridQubit(7, 3)),
(cirq.X**0.5).on(cirq.GridQubit(7, 4)),
(cirq.X**0.5).on(cirq.GridQubit(7, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * -0.3448162225275961).on(cirq.GridQubit(1, 4)),
cirq.rz(np.pi * 0.24851733121171846).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * -2.079870303178702).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * 2.0436918407499873).on(cirq.GridQubit(1, 7)),
cirq.rz(np.pi * 1.2371391697444234).on(cirq.GridQubit(2, 4)),
cirq.rz(np.pi * -1.2825274365288457).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * -0.6529975013575373).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * 0.21248377848559125).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * -2.0638841157306445).on(cirq.GridQubit(3, 2)),
cirq.rz(np.pi * 2.10101302367136).on(cirq.GridQubit(3, 3)),
cirq.rz(np.pi * 0.02232591119805812).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * -0.030028573876142287).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * -0.8467509808142173).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * 0.8164932597686655).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * -0.16310561378711827).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * 0.1766183348870303).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * -0.22542387771877406).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * 0.2814659583608806).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * -0.33113463396189063).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * 0.40440704518468423).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * -0.254599699022151).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * 0.3888269305757545).on(cirq.GridQubit(5, 7)),
cirq.rz(np.pi * -0.4081262439699967).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * 0.3666829187201306).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * -0.3507308388473503).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * 0.37554649493270875).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * -1.4187954353764791).on(cirq.GridQubit(7, 2)),
cirq.rz(np.pi * 1.5102819373895253).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * 0.1516394851691686).on(cirq.GridQubit(7, 4)),
cirq.rz(np.pi * -0.23575835453119093).on(cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.FSimGate(theta=1.545844435173598, phi=0.5163254336997252).on(
cirq.GridQubit(1, 4), cirq.GridQubit(1, 5)),
cirq.FSimGate(theta=1.5033136051987404, phi=0.5501439149572028).on(
cirq.GridQubit(1, 6), cirq.GridQubit(1, 7)),
cirq.FSimGate(theta=1.5930079664614663, phi=0.5355369376884288).on(
cirq.GridQubit(2, 4), cirq.GridQubit(2, 5)),
cirq.FSimGate(theta=1.59182423935832, phi=-5.773664463980115).on(
cirq.GridQubit(2, 6), cirq.GridQubit(2, 7)),
cirq.FSimGate(theta=1.5886126292316385, phi=0.4838919055156303).on(
cirq.GridQubit(3, 2), cirq.GridQubit(3, 3)),
cirq.FSimGate(theta=1.5286450573669954, phi=0.5113953905811602).on(
cirq.GridQubit(3, 6), cirq.GridQubit(3, 7)),
cirq.FSimGate(theta=1.565622495548066, phi=0.5127256481964074).on(
cirq.GridQubit(4, 2), cirq.GridQubit(4, 3)),
cirq.FSimGate(theta=1.5384796865621224, phi=0.5293381306162406).on(
cirq.GridQubit(4, 6), cirq.GridQubit(4, 7)),
cirq.FSimGate(theta=1.4727562833004122, phi=0.4552443293379814).on(
cirq.GridQubit(5, 2), cirq.GridQubit(5, 3)),
cirq.FSimGate(theta=1.5346175385256955, phi=0.5131039467233695).on(
cirq.GridQubit(5, 4), cirq.GridQubit(5, 5)),
cirq.FSimGate(theta=1.558221035096814, phi=0.4293113178636455).on(
cirq.GridQubit(5, 6), cirq.GridQubit(5, 7)),
cirq.FSimGate(theta=1.5169062231051558, phi=0.46319906116805815).on(
cirq.GridQubit(6, 2), cirq.GridQubit(6, 3)),
cirq.FSimGate(theta=1.5705414623224259, phi=0.4791699064049766).on(
cirq.GridQubit(6, 4), cirq.GridQubit(6, 5)),
cirq.FSimGate(theta=1.5516764540193888, phi=0.505545707839895).on(
cirq.GridQubit(7, 2), cirq.GridQubit(7, 3)),
cirq.FSimGate(theta=1.5699606675525557, phi=0.48292170263262457).on(
cirq.GridQubit(7, 4), cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * 1.2570424650348733).on(cirq.GridQubit(1, 4)),
cirq.rz(np.pi * -1.3533413563507508).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * 1.3803105504474993).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * -1.4164890128762133).on(cirq.GridQubit(1, 7)),
cirq.rz(np.pi * -0.7660705551087533).on(cirq.GridQubit(2, 4)),
cirq.rz(np.pi * 0.7206822883243308).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * 1.3183560383893944).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * -1.7588697612613406).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * 0.9354142698937665).on(cirq.GridQubit(3, 2)),
cirq.rz(np.pi * -0.8982853619530515).on(cirq.GridQubit(3, 3)),
cirq.rz(np.pi * 0.5799079899133832).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * -0.5876106525914674).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * 1.0843371101222938).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * -1.1145948311678457).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * -1.6258237067659351).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * 1.6393364278658469).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * 0.7948295009385445).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * -0.7387874202964381).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * 0.049341949396894985).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * 0.02393046182589869).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * 0.07085461727529008).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * 0.06337261427831344).on(cirq.GridQubit(5, 7)),
cirq.rz(np.pi * 0.4710627118441926).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * -0.5125060370940587).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * 2.1645856475342256).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * -2.1397699914488673).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * 1.2773117920270392).on(cirq.GridQubit(7, 2)),
cirq.rz(np.pi * -1.1858252900139932).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * 0.5606941860998265).on(cirq.GridQubit(7, 4)),
cirq.rz(np.pi * -0.6448130554618487).on(cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(0, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(0, 6)),
(cirq.X**0.5).on(cirq.GridQubit(1, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 6)),
(cirq.X**0.5).on(cirq.GridQubit(1, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 4)),
(cirq.X**0.5).on(cirq.GridQubit(2, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 8)),
(cirq.X**0.5).on(cirq.GridQubit(3, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 8)),
(cirq.X**0.5).on(cirq.GridQubit(4, 1)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 3)),
(cirq.X**0.5).on(cirq.GridQubit(4, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 5)),
(cirq.X**0.5).on(cirq.GridQubit(4, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 1)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 6)),
(cirq.X**0.5).on(cirq.GridQubit(5, 7)),
(cirq.X**0.5).on(cirq.GridQubit(6, 1)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 5)),
(cirq.X**0.5).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * -2.0179756248661533).on(cirq.GridQubit(0, 5)),
cirq.rz(np.pi * 2.064958427369896).on(cirq.GridQubit(0, 6)),
cirq.rz(np.pi * -5.435868884042397).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * 5.438497289344933).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * -5.19048555249959).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * 5.170988862096221).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * 3.362366769065076).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * -3.655232369531361).on(cirq.GridQubit(2, 8)),
cirq.rz(np.pi * -4.480708067260001).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * 4.525888267898699).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * 2.763288476134621).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * -2.7382876075948173).on(cirq.GridQubit(3, 8)),
cirq.rz(np.pi * -4.882352366676035).on(cirq.GridQubit(4, 1)),
cirq.rz(np.pi * 4.924090864144291).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * 2.135954522972214).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * -2.1822665205802965).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * -3.7780476633662574).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * 3.817335880513747).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * -2.8819419896554686).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * 2.9028256034569604).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * 0.7811374803446167).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * -0.6780279413275597).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * 2.2532274955007456).on(cirq.GridQubit(6, 1)),
cirq.rz(np.pi * -2.5360843333016145).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * 2.3134893226730737).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * -2.238493420699622).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * -4.378582817568972).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * 4.459782783273393).on(cirq.GridQubit(6, 6)),
cirq.rz(np.pi * 1.42630741834175).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * -1.5270341780432073).on(cirq.GridQubit(7, 4)),
]),
cirq.Moment(operations=[
cirq.FSimGate(theta=1.5454967174552687, phi=0.5074540278986153).on(
cirq.GridQubit(0, 5), cirq.GridQubit(0, 6)),
cirq.FSimGate(theta=1.5233234922971755, phi=0.6681144400379464).on(
cirq.GridQubit(1, 5), cirq.GridQubit(1, 6)),
cirq.FSimGate(theta=1.5644541080112795, phi=0.5439498075085039).on(
cirq.GridQubit(2, 5), cirq.GridQubit(2, 6)),
cirq.FSimGate(theta=1.5866139110090092, phi=0.5693597810559818).on(
cirq.GridQubit(2, 7), cirq.GridQubit(2, 8)),
cirq.FSimGate(theta=1.541977006124425, phi=0.6073798124875975).on(
cirq.GridQubit(3, 5), cirq.GridQubit(3, 6)),
cirq.FSimGate(theta=1.5573072833358306, phi=0.5415514987622351).on(
cirq.GridQubit(3, 7), cirq.GridQubit(3, 8)),
cirq.FSimGate(theta=1.5345751514593928, phi=0.472462117170605).on(
cirq.GridQubit(4, 1), cirq.GridQubit(4, 2)),
cirq.FSimGate(theta=1.5138652502397498, phi=0.47710618607286504).on(
cirq.GridQubit(4, 3), cirq.GridQubit(4, 4)),
cirq.FSimGate(theta=1.5849169442855044, phi=0.54346233613361).on(
cirq.GridQubit(4, 5), cirq.GridQubit(4, 6)),
cirq.FSimGate(theta=1.4838884067961586, phi=0.5070681071136852).on(
cirq.GridQubit(5, 1), cirq.GridQubit(5, 2)),
cirq.FSimGate(theta=1.5398075246432927, phi=0.5174515645943538).on(
cirq.GridQubit(5, 3), cirq.GridQubit(5, 4)),
cirq.FSimGate(theta=1.4902099797510393, phi=0.4552057582549894).on(
cirq.GridQubit(6, 1), cirq.GridQubit(6, 2)),
cirq.FSimGate(theta=1.5376836849431186, phi=0.46265685930712236).on(
cirq.GridQubit(6, 3), cirq.GridQubit(6, 4)),
cirq.FSimGate(theta=1.555185434982808, phi=0.6056351386305033).on(
cirq.GridQubit(6, 5), cirq.GridQubit(6, 6)),
cirq.FSimGate(theta=1.4749003996237158, phi=0.4353609222411594).on(
cirq.GridQubit(7, 3), cirq.GridQubit(7, 4)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * 1.6292875119692507).on(cirq.GridQubit(0, 5)),
cirq.rz(np.pi * -1.5823047094655076).on(cirq.GridQubit(0, 6)),
cirq.rz(np.pi * 5.79385605258612).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * -5.791227647283584).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * 5.223139057027918).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * -5.242635747431287).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * -2.7477760804704774).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * 2.454910480004192).on(cirq.GridQubit(2, 8)),
cirq.rz(np.pi * 5.048199817882042).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * -5.0030196172433445).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * -2.578152260365417).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * 2.60315312890522).on(cirq.GridQubit(3, 8)),
cirq.rz(np.pi * 4.080045044703728).on(cirq.GridQubit(4, 1)),
cirq.rz(np.pi * -4.038306547235473).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * -2.6543362735839113).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * 2.6080242759758283).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * 3.9045088495271663).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * -3.8652206323796765).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * 1.9770644223044243).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * -1.9561808085029322).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * -1.5516585295358842).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * 1.6547680685529413).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * -0.5449135022758093).on(cirq.GridQubit(6, 1)),
cirq.rz(np.pi * 0.2620566644749405).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * -2.3490397609251703).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * 2.424035662898622).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * 5.25154083730089).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * -5.170340871596469).on(cirq.GridQubit(6, 6)),
cirq.rz(np.pi * -1.8655832225378013).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * 1.7648564628363437).on(cirq.GridQubit(7, 4)),
]),
cirq.Moment(operations=[
(cirq.X**0.5).on(cirq.GridQubit(0, 5)),
(cirq.X**0.5).on(cirq.GridQubit(0, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 4)),
(cirq.X**0.5).on(cirq.GridQubit(1, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 7)),
(cirq.X**0.5).on(cirq.GridQubit(2, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 5)),
(cirq.X**0.5).on(cirq.GridQubit(2, 6)),
(cirq.X**0.5).on(cirq.GridQubit(2, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 8)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 6)),
(cirq.X**0.5).on(cirq.GridQubit(3, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 8)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 1)),
(cirq.X**0.5).on(cirq.GridQubit(4, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 7)),
(cirq.X**0.5).on(cirq.GridQubit(5, 1)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 2)),
(cirq.X**0.5).on(cirq.GridQubit(5, 3)),
(cirq.X**0.5).on(cirq.GridQubit(5, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 5)),
(cirq.X**0.5).on(cirq.GridQubit(5, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 1)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 3)),
(cirq.X**0.5).on(cirq.GridQubit(6, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 5)),
(cirq.X**0.5).on(cirq.GridQubit(6, 6)),
(cirq.X**0.5).on(cirq.GridQubit(7, 2)),
(cirq.X**0.5).on(cirq.GridQubit(7, 3)),
(cirq.X**0.5).on(cirq.GridQubit(7, 4)),
(cirq.X**0.5).on(cirq.GridQubit(7, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * 2.8643854265554056).on(cirq.GridQubit(0, 5)),
cirq.rz(np.pi * -2.9033805954708463).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * -2.3793800740028206).on(cirq.GridQubit(0, 6)),
cirq.rz(np.pi * 2.142523606048688).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * -6.196295096608877).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * 6.191833422443152).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * -5.367868774756692).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * 5.257156584109544).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * -1.6118072404137829).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * 1.5665192386902935).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * -1.5736126437571512).on(cirq.GridQubit(2, 8)),
cirq.rz(np.pi * 1.5796534031340996).on(cirq.GridQubit(3, 8)),
cirq.rz(np.pi * -8.599392694559281).on(cirq.GridQubit(4, 1)),
cirq.rz(np.pi * 8.58638977635296).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * -5.408932498710608).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * 5.396221422935972).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * -3.2786928385561493).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * 3.339006443218924).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * -5.390755870544794).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * 5.4172568990486605).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * 4.367652291347506).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * -3.9105776028384707).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * 3.0814399461790716).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * -3.1208364909653903).on(cirq.GridQubit(5, 7)),
cirq.rz(np.pi * 7.0181466269225865).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * -7.000766026200176).on(cirq.GridQubit(7, 2)),
cirq.rz(np.pi * 5.700873278515409).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * -5.683378195921049).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * 4.586335789661189).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * -4.76537552715921).on(cirq.GridQubit(7, 4)),
cirq.rz(np.pi * 5.424178494472165).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * -5.503525609076518).on(cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.FSimGate(theta=1.4937034321050129, phi=0.5388459463555662).on(
cirq.GridQubit(0, 5), cirq.GridQubit(1, 5)),
cirq.FSimGate(theta=1.5015413274420961, phi=0.51076415920643).on(
cirq.GridQubit(0, 6), cirq.GridQubit(1, 6)),
cirq.FSimGate(theta=1.5588791081427968, phi=0.559649620487243).on(
cirq.GridQubit(2, 5), cirq.GridQubit(3, 5)),
cirq.FSimGate(theta=1.5907035825834708, phi=0.5678223287662552).on(
cirq.GridQubit(2, 6), cirq.GridQubit(3, 6)),
cirq.FSimGate(theta=1.5296321276792553, phi=0.537761951313038).on(
cirq.GridQubit(2, 7), cirq.GridQubit(3, 7)),
cirq.FSimGate(theta=1.619276265426104, phi=0.48310297196088736).on(
cirq.GridQubit(2, 8), cirq.GridQubit(3, 8)),
cirq.FSimGate(theta=1.6116663075637374, phi=0.5343172366969327).on(
cirq.GridQubit(4, 1), cirq.GridQubit(5, 1)),
cirq.FSimGate(theta=1.5306030283605572, phi=0.5257102080843467).on(
cirq.GridQubit(4, 2), cirq.GridQubit(5, 2)),
cirq.FSimGate(theta=1.589821065740506, phi=0.5045391214115686).on(
cirq.GridQubit(4, 3), cirq.GridQubit(5, 3)),
cirq.FSimGate(theta=1.5472406430590444, phi=0.5216932173558055).on(
cirq.GridQubit(4, 4), cirq.GridQubit(5, 4)),
cirq.FSimGate(theta=1.5707871303628709, phi=0.5176678491729374).on(
cirq.GridQubit(4, 6), cirq.GridQubit(5, 6)),
cirq.FSimGate(theta=1.5337916352034444, phi=0.5123546847230711).on(
cirq.GridQubit(4, 7), cirq.GridQubit(5, 7)),
cirq.FSimGate(theta=1.596346344028619, phi=0.5104319949477776).on(
cirq.GridQubit(6, 2), cirq.GridQubit(7, 2)),
cirq.FSimGate(theta=1.53597466118183, phi=0.5584919013659856).on(
cirq.GridQubit(6, 3), cirq.GridQubit(7, 3)),
cirq.FSimGate(theta=1.385350861888917, phi=0.5757363921651084).on(
cirq.GridQubit(6, 4), cirq.GridQubit(7, 4)),
cirq.FSimGate(theta=1.614843449053755, phi=0.5542252229839564).on(
cirq.GridQubit(6, 5), cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * -3.72824674565976).on(cirq.GridQubit(0, 5)),
cirq.rz(np.pi * 3.6892515767443195).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * 2.8795906763472114).on(cirq.GridQubit(0, 6)),
cirq.rz(np.pi * -3.116447144301344).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * 6.506615138479995).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * -6.511076812645719).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * 6.150506057270183).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * -6.2612182479173315).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * 2.4087294851133443).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * -2.4540174868368334).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * 2.8100043579049445).on(cirq.GridQubit(2, 8)),
cirq.rz(np.pi * -2.8039635985279965).on(cirq.GridQubit(3, 8)),
cirq.rz(np.pi * 9.032480388130898).on(cirq.GridQubit(4, 1)),
cirq.rz(np.pi * -9.04548330633722).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * 4.737705877923889).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * -4.750416953698525).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * 2.9425087256630427).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * -2.882195121000268).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * 4.466531408750767).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * -4.440030380246901).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * -4.89701654221443).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * 5.354091230723465).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * -3.0747241437239694).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * 3.0353275989376507).on(cirq.GridQubit(5, 7)),
cirq.rz(np.pi * -5.629287261948809).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * 5.646667862671219).on(cirq.GridQubit(7, 2)),
cirq.rz(np.pi * -5.760627714067928).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * 5.778122796662288).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * -3.985782702743221).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * 3.806742965245199).on(cirq.GridQubit(7, 4)),
cirq.rz(np.pi * -5.681609363423969).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * 5.602262248819616).on(cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(0, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(0, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 6)),
(cirq.X**0.5).on(cirq.GridQubit(1, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 4)),
(cirq.X**0.5).on(cirq.GridQubit(2, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 7)),
(cirq.X**0.5).on(cirq.GridQubit(2, 8)),
(cirq.X**0.5).on(cirq.GridQubit(3, 2)),
(cirq.X**0.5).on(cirq.GridQubit(3, 3)),
(cirq.X**0.5).on(cirq.GridQubit(3, 5)),
(cirq.X**0.5).on(cirq.GridQubit(3, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 7)),
(cirq.X**0.5).on(cirq.GridQubit(3, 8)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 1)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 2)),
(cirq.X**0.5).on(cirq.GridQubit(4, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 6)),
(cirq.X**0.5).on(cirq.GridQubit(4, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 1)),
(cirq.X**0.5).on(cirq.GridQubit(5, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 4)),
(cirq.X**0.5).on(cirq.GridQubit(5, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 7)),
(cirq.X**0.5).on(cirq.GridQubit(6, 1)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 4)),
(cirq.X**0.5).on(cirq.GridQubit(6, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 5)),
(cirq.X**0.5).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * -9.272134780175643).on(cirq.GridQubit(1, 4)),
cirq.rz(np.pi * 9.311987288909458).on(cirq.GridQubit(2, 4)),
cirq.rz(np.pi * -2.4865845873665364).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * 2.4890814068883764).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * -2.4240781150731663).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * 2.419398026235366).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * 2.3861256785493166).on(cirq.GridQubit(1, 7)),
cirq.rz(np.pi * -2.392456163642626).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * 10.821685325451792).on(cirq.GridQubit(3, 2)),
cirq.rz(np.pi * -10.785875071150537).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * 12.703597923836748).on(cirq.GridQubit(3, 3)),
cirq.rz(np.pi * -12.7869629079138).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * 3.782562501914174).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * -3.873596611893716).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * 4.772639843256901).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * -4.771314675186062).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * 8.49593730829863).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * -8.479908941862229).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * 1.639481743922408).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * -1.9319083897827265).on(cirq.GridQubit(6, 1)),
cirq.rz(np.pi * 9.60223181672896).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * -9.605639326034064).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * 6.330499004273446).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * -6.2177071019033425).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * 9.851852381617888).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * -9.926465199012979).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * 6.431104618355057).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * -6.38660616379351).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * -6.763306761471101).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * 6.721685791226169).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.FSimGate(theta=1.5423469235530667, phi=0.5388088498512879).on(
cirq.GridQubit(1, 4), cirq.GridQubit(2, 4)),
cirq.FSimGate(theta=1.5684106752459124, phi=0.5414007317481024).on(
cirq.GridQubit(1, 5), cirq.GridQubit(2, 5)),
cirq.FSimGate(theta=1.6152322695478165, phi=0.5160697976136035).on(
cirq.GridQubit(1, 6), cirq.GridQubit(2, 6)),
cirq.FSimGate(theta=1.5040835324508275, phi=0.6761565725975858).on(
cirq.GridQubit(1, 7), cirq.GridQubit(2, 7)),
cirq.FSimGate(theta=1.5144175462386844, phi=0.4680444728781228).on(
cirq.GridQubit(3, 2), cirq.GridQubit(4, 2)),
cirq.FSimGate(theta=1.4668587973263782, phi=0.4976074601121169).on(
cirq.GridQubit(3, 3), cirq.GridQubit(4, 3)),
cirq.FSimGate(theta=1.603651215218248, phi=0.46649538437100246).on(
cirq.GridQubit(3, 5), cirq.GridQubit(4, 5)),
cirq.FSimGate(theta=1.6160334279232749, phi=0.4353897326147861).on(
cirq.GridQubit(3, 6), cirq.GridQubit(4, 6)),
cirq.FSimGate(theta=1.5909523830878005, phi=0.5244700889486827).on(
cirq.GridQubit(3, 7), cirq.GridQubit(4, 7)),
cirq.FSimGate(theta=1.2635580943707443, phi=0.3315124918059815).on(
cirq.GridQubit(5, 1), cirq.GridQubit(6, 1)),
cirq.FSimGate(theta=1.5245711693927642, phi=0.4838906581970925).on(
cirq.GridQubit(5, 2), cirq.GridQubit(6, 2)),
cirq.FSimGate(theta=1.5542388360689805, phi=0.5186534637665338).on(
cirq.GridQubit(5, 3), cirq.GridQubit(6, 3)),
cirq.FSimGate(theta=1.5109427139358562, phi=0.4939388316289224).on(
cirq.GridQubit(5, 4), cirq.GridQubit(6, 4)),
cirq.FSimGate(theta=1.57896484905089, phi=0.5081656554152614).on(
cirq.GridQubit(5, 5), cirq.GridQubit(6, 5)),
cirq.FSimGate(theta=1.501781688539034, phi=0.46799927805932284).on(
cirq.GridQubit(7, 3), cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * 9.460207801277338).on(cirq.GridQubit(1, 4)),
cirq.rz(np.pi * -9.420355292543523).on(cirq.GridQubit(2, 4)),
cirq.rz(np.pi * 2.557874433792943).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * -2.555377614271102).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * 1.9789952328325573).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * -1.9836753216703575).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * -2.805807436079691).on(cirq.GridQubit(1, 7)),
cirq.rz(np.pi * 2.7994769509863815).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * -9.972491731044423).on(cirq.GridQubit(3, 2)),
cirq.rz(np.pi * 10.00830198534568).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * -12.477250219528523).on(cirq.GridQubit(3, 3)),
cirq.rz(np.pi * 12.39388523545147).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * -5.4898636407973544).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * 5.398829530817813).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * -5.863871460773714).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * 5.8651966288445525).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * -8.850693052252502).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * 8.866721418688904).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * -2.40381552479658).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * 2.1113888789362614).on(cirq.GridQubit(6, 1)),
cirq.rz(np.pi * -10.03456101076628).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * 10.031153501461176).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * -5.434421382024706).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * 5.54721328439481).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * -9.17988634353845).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * 9.10527352614336).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * -6.5670035038476025).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * 6.61150195840915).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * 7.956630846615096).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * -7.998251816860028).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
(cirq.X**0.5).on(cirq.GridQubit(0, 5)),
(cirq.X**0.5).on(cirq.GridQubit(0, 6)),
(cirq.X**0.5).on(cirq.GridQubit(1, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 6)),
(cirq.X**0.5).on(cirq.GridQubit(2, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 8)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 8)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 1)),
(cirq.X**0.5).on(cirq.GridQubit(4, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 4)),
(cirq.X**0.5).on(cirq.GridQubit(4, 5)),
(cirq.X**0.5).on(cirq.GridQubit(4, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 1)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 5)),
(cirq.X**0.5).on(cirq.GridQubit(5, 6)),
(cirq.X**0.5).on(cirq.GridQubit(5, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 1)),
(cirq.X**0.5).on(cirq.GridQubit(6, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 6)),
(cirq.X**0.5).on(cirq.GridQubit(7, 2)),
(cirq.X**0.5).on(cirq.GridQubit(7, 3)),
(cirq.X**0.5).on(cirq.GridQubit(7, 4)),
(cirq.X**0.5).on(cirq.GridQubit(7, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * -4.192816222527567).on(cirq.GridQubit(1, 4)),
cirq.rz(np.pi * 4.096517331211689).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * -13.031870303178678).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * 12.995691840749963).on(cirq.GridQubit(1, 7)),
cirq.rz(np.pi * 5.381139169744492).on(cirq.GridQubit(2, 4)),
cirq.rz(np.pi * -5.426527436528915).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * -6.86899750135751).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * 6.428483778485565).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * -8.1318841157307).on(cirq.GridQubit(3, 2)),
cirq.rz(np.pi * 8.169013023671415).on(cirq.GridQubit(3, 3)),
cirq.rz(np.pi * -0.7176740888019262).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * 0.7099714261238419).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * -4.694750980814187).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * 4.664493259768636).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * 3.5368943862129347).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * -3.523381665113022).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * -1.113423877718808).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * 1.1694659583609144).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * -3.587134633961795).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * 3.6604070451845887).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * 1.3734003009778666).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * -1.2391730694242633).on(cirq.GridQubit(5, 7)),
cirq.rz(np.pi * -5.2921262439699195).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * 5.250682918720053).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * -6.349327548997941).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * 6.3741432050833).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * -7.486795435376533).on(cirq.GridQubit(7, 2)),
cirq.rz(np.pi * 7.578281937389579).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * -3.5483605148308843).on(cirq.GridQubit(7, 4)),
cirq.rz(np.pi * 3.464241645468862).on(cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.FSimGate(theta=1.545844435173598, phi=0.5163254336997252).on(
cirq.GridQubit(1, 4), cirq.GridQubit(1, 5)),
cirq.FSimGate(theta=1.5033136051987404, phi=0.5501439149572028).on(
cirq.GridQubit(1, 6), cirq.GridQubit(1, 7)),
cirq.FSimGate(theta=1.5930079664614663, phi=0.5355369376884288).on(
cirq.GridQubit(2, 4), cirq.GridQubit(2, 5)),
cirq.FSimGate(theta=1.59182423935832, phi=-5.773664463980115).on(
cirq.GridQubit(2, 6), cirq.GridQubit(2, 7)),
cirq.FSimGate(theta=1.5886126292316385, phi=0.4838919055156303).on(
cirq.GridQubit(3, 2), cirq.GridQubit(3, 3)),
cirq.FSimGate(theta=1.5286450573669954, phi=0.5113953905811602).on(
cirq.GridQubit(3, 6), cirq.GridQubit(3, 7)),
cirq.FSimGate(theta=1.565622495548066, phi=0.5127256481964074).on(
cirq.GridQubit(4, 2), cirq.GridQubit(4, 3)),
cirq.FSimGate(theta=1.5384796865621224, phi=0.5293381306162406).on(
cirq.GridQubit(4, 6), cirq.GridQubit(4, 7)),
cirq.FSimGate(theta=1.4727562833004122, phi=0.4552443293379814).on(
cirq.GridQubit(5, 2), cirq.GridQubit(5, 3)),
cirq.FSimGate(theta=1.5346175385256955, phi=0.5131039467233695).on(
cirq.GridQubit(5, 4), cirq.GridQubit(5, 5)),
cirq.FSimGate(theta=1.558221035096814, phi=0.4293113178636455).on(
cirq.GridQubit(5, 6), cirq.GridQubit(5, 7)),
cirq.FSimGate(theta=1.5169062231051558, phi=0.46319906116805815).on(
cirq.GridQubit(6, 2), cirq.GridQubit(6, 3)),
cirq.FSimGate(theta=1.5705414623224259, phi=0.4791699064049766).on(
cirq.GridQubit(6, 4), cirq.GridQubit(6, 5)),
cirq.FSimGate(theta=1.5516764540193888, phi=0.505545707839895).on(
cirq.GridQubit(7, 2), cirq.GridQubit(7, 3)),
cirq.FSimGate(theta=1.5699606675525557, phi=0.48292170263262457).on(
cirq.GridQubit(7, 4), cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * 5.1050424650348445).on(cirq.GridQubit(1, 4)),
cirq.rz(np.pi * -5.201341356350722).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * 12.332310550447476).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * -12.36848901287619).on(cirq.GridQubit(1, 7)),
cirq.rz(np.pi * -4.910070555108823).on(cirq.GridQubit(2, 4)),
cirq.rz(np.pi * 4.864682288324399).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * 7.534356038389369).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * -7.974869761261314).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * 7.00341426989382).on(cirq.GridQubit(3, 2)),
cirq.rz(np.pi * -6.966285361953106).on(cirq.GridQubit(3, 3)),
cirq.rz(np.pi * 1.3199079899133674).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * -1.3276106525914517).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * 4.932337110122265).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * -4.9625948311678165).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * -5.325823706765988).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * 5.3393364278658995).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * 1.682829500938578).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * -1.6267874202964716).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * 3.305341949396799).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * -3.232069538174005).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * -1.5571453827247277).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * 1.691372614278331).on(cirq.GridQubit(5, 7)),
cirq.rz(np.pi * 5.3550627118441145).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * -5.39650603709398).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * 8.163182357684818).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * -8.138366701599459).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * 7.345311792027093).on(cirq.GridQubit(7, 2)),
cirq.rz(np.pi * -7.253825290014047).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * 4.260694186099879).on(cirq.GridQubit(7, 4)),
cirq.rz(np.pi * -4.344813055461901).on(cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
(cirq.Y**0.5).on(cirq.GridQubit(0, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(0, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 4)),
(cirq.X**0.5).on(cirq.GridQubit(1, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 7)),
(cirq.X**0.5).on(cirq.GridQubit(2, 4)),
(cirq.X**0.5).on(cirq.GridQubit(2, 5)),
(cirq.X**0.5).on(cirq.GridQubit(2, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 7)),
(cirq.X**0.5).on(cirq.GridQubit(2, 8)),
(cirq.X**0.5).on(cirq.GridQubit(3, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 5)),
(cirq.X**0.5).on(cirq.GridQubit(3, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 8)),
(cirq.X**0.5).on(cirq.GridQubit(4, 1)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 2)),
(cirq.X**0.5).on(cirq.GridQubit(4, 3)),
(cirq.X**0.5).on(cirq.GridQubit(4, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 1)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 2)),
(cirq.X**0.5).on(cirq.GridQubit(5, 3)),
(cirq.X**0.5).on(cirq.GridQubit(5, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 1)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 4)),
(cirq.X**0.5).on(cirq.GridQubit(6, 5)),
(cirq.X**0.5).on(cirq.GridQubit(6, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 5)),
(cirq.X**0.5).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * -5.865975624866123).on(cirq.GridQubit(0, 5)),
cirq.rz(np.pi * 5.912958427369866).on(cirq.GridQubit(0, 6)),
cirq.rz(np.pi * -17.867868884042345).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * 17.87049728934488).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * -17.622485552499665).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * 17.602988862096296).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * 11.206366769065067).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * -11.499232369531354).on(cirq.GridQubit(2, 8)),
cirq.rz(np.pi * -15.28470806725993).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * 15.329888267898626).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * 9.27528847613456).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * -9.250287607594759).on(cirq.GridQubit(3, 8)),
cirq.rz(np.pi * -14.50235236667596).on(cirq.GridQubit(4, 1)),
cirq.rz(np.pi * 14.544090864144218).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * 7.019954522972137).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * -7.066266520580219).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * -13.842047663366333).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * 13.881335880513822).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * -7.765941989655391).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * 7.786825603456883).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * 3.001137480344569).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * -2.8980279413275123).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * 5.509227495500649).on(cirq.GridQubit(6, 1)),
cirq.rz(np.pi * -5.792084333301517).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * 7.868086032823645).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * -7.793090130850194).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * -16.218582817568983).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * 16.299782783273404).on(cirq.GridQubit(6, 6)),
cirq.rz(np.pi * 4.3863074183418185).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * -4.487034178043276).on(cirq.GridQubit(7, 4)),
]),
cirq.Moment(operations=[
cirq.FSimGate(theta=1.5454967174552687, phi=0.5074540278986153).on(
cirq.GridQubit(0, 5), cirq.GridQubit(0, 6)),
cirq.FSimGate(theta=1.5233234922971755, phi=0.6681144400379464).on(
cirq.GridQubit(1, 5), cirq.GridQubit(1, 6)),
cirq.FSimGate(theta=1.5644541080112795, phi=0.5439498075085039).on(
cirq.GridQubit(2, 5), cirq.GridQubit(2, 6)),
cirq.FSimGate(theta=1.5866139110090092, phi=0.5693597810559818).on(
cirq.GridQubit(2, 7), cirq.GridQubit(2, 8)),
cirq.FSimGate(theta=1.541977006124425, phi=0.6073798124875975).on(
cirq.GridQubit(3, 5), cirq.GridQubit(3, 6)),
cirq.FSimGate(theta=1.5573072833358306, phi=0.5415514987622351).on(
cirq.GridQubit(3, 7), cirq.GridQubit(3, 8)),
cirq.FSimGate(theta=1.5345751514593928, phi=0.472462117170605).on(
cirq.GridQubit(4, 1), cirq.GridQubit(4, 2)),
cirq.FSimGate(theta=1.5138652502397498, phi=0.47710618607286504).on(
cirq.GridQubit(4, 3), cirq.GridQubit(4, 4)),
cirq.FSimGate(theta=1.5849169442855044, phi=0.54346233613361).on(
cirq.GridQubit(4, 5), cirq.GridQubit(4, 6)),
cirq.FSimGate(theta=1.4838884067961586, phi=0.5070681071136852).on(
cirq.GridQubit(5, 1), cirq.GridQubit(5, 2)),
cirq.FSimGate(theta=1.5398075246432927, phi=0.5174515645943538).on(
cirq.GridQubit(5, 3), cirq.GridQubit(5, 4)),
cirq.FSimGate(theta=1.4902099797510393, phi=0.4552057582549894).on(
cirq.GridQubit(6, 1), cirq.GridQubit(6, 2)),
cirq.FSimGate(theta=1.5376836849431186, phi=0.46265685930712236).on(
cirq.GridQubit(6, 3), cirq.GridQubit(6, 4)),
cirq.FSimGate(theta=1.555185434982808, phi=0.6056351386305033).on(
cirq.GridQubit(6, 5), cirq.GridQubit(6, 6)),
cirq.FSimGate(theta=1.4749003996237158, phi=0.4353609222411594).on(
cirq.GridQubit(7, 3), cirq.GridQubit(7, 4)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * 5.477287511969221).on(cirq.GridQubit(0, 5)),
cirq.rz(np.pi * -5.430304709465478).on(cirq.GridQubit(0, 6)),
cirq.rz(np.pi * 18.225856052586064).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * -18.223227647283533).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * 17.655139057028).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * -17.674635747431363).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * -10.591776080470469).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * 10.298910480004182).on(cirq.GridQubit(2, 8)),
cirq.rz(np.pi * 15.852199817881967).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * -15.80701961724327).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * -9.090152260365358).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * 9.11515312890516).on(cirq.GridQubit(3, 8)),
cirq.rz(np.pi * 13.700045044703652).on(cirq.GridQubit(4, 1)),
cirq.rz(np.pi * -13.658306547235396).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * -7.538336273583833).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * 7.492024275975751).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * 13.968508849527241).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * -13.929220632379753).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * 6.861064422304347).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * -6.840180808502855).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * -3.771658529535837).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * 3.874768068552894).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * -3.800913502275713).on(cirq.GridQubit(6, 1)),
cirq.rz(np.pi * 3.5180566644748446).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * -7.9036364710757425).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * 7.978632373049194).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * 17.0915408373009).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * -17.01034087159648).on(cirq.GridQubit(6, 6)),
cirq.rz(np.pi * -4.825583222537869).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * 4.724856462836412).on(cirq.GridQubit(7, 4)),
]),
cirq.Moment(operations=[
(cirq.X**0.5).on(cirq.GridQubit(0, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(0, 6)),
(cirq.X**0.5).on(cirq.GridQubit(1, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 5)),
(cirq.X**0.5).on(cirq.GridQubit(1, 6)),
(cirq.X**0.5).on(cirq.GridQubit(1, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 6)),
(cirq.X**0.5).on(cirq.GridQubit(2, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 8)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 2)),
(cirq.X**0.5).on(cirq.GridQubit(3, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 8)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 1)),
(cirq.X**0.5).on(cirq.GridQubit(4, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 5)),
(cirq.X**0.5).on(cirq.GridQubit(4, 6)),
(cirq.X**0.5).on(cirq.GridQubit(4, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 1)),
(cirq.X**0.5).on(cirq.GridQubit(5, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 4)),
(cirq.X**0.5).on(cirq.GridQubit(5, 5)),
(cirq.X**0.5).on(cirq.GridQubit(5, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 7)),
(cirq.X**0.5).on(cirq.GridQubit(6, 1)),
(cirq.X**0.5).on(cirq.GridQubit(6, 2)),
(cirq.X**0.5).on(cirq.GridQubit(6, 3)),
(cirq.X**0.5).on(cirq.GridQubit(6, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 6)),
(cirq.X**0.5).on(cirq.GridQubit(7, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(7, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(7, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(7, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * 8.044385426555426).on(cirq.GridQubit(0, 5)),
cirq.rz(np.pi * -8.083380595470867).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * -5.783380074002775).on(cirq.GridQubit(0, 6)),
cirq.rz(np.pi * 5.546523606048641).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * -15.816295096608934).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * 15.811833422443211).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * -13.3598687747566).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * 13.249156584109453).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * -4.127807240413703).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * 4.082519238690215).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * -5.421612643757122).on(cirq.GridQubit(2, 8)),
cirq.rz(np.pi * 5.42765340313407).on(cirq.GridQubit(3, 8)),
cirq.rz(np.pi * -21.179392694559272).on(cirq.GridQubit(4, 1)),
cirq.rz(np.pi * 21.166389776352954).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * -13.252932498710596).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * 13.24022142293596).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * -8.162692838556204).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * 8.223006443218978).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * -12.938755870544817).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * 12.965256899048683).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * -12.724144773112773).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * 12.73446915351482).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * 11.027652291347495).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * -10.570577602838458).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * 7.6694399461790255).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * -7.7088364909653455).on(cirq.GridQubit(5, 7)),
cirq.rz(np.pi * 17.082146626922658).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * -17.06476602620025).on(cirq.GridQubit(7, 2)),
cirq.rz(np.pi * 14.58087327851535).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * -14.563378195920992).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * 10.871739079510629).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * -11.050778817008649).on(cirq.GridQubit(7, 4)),
cirq.rz(np.pi * 14.00817849447214).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * -14.087525609076494).on(cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.FSimGate(theta=1.4937034321050129, phi=0.5388459463555662).on(
cirq.GridQubit(0, 5), cirq.GridQubit(1, 5)),
cirq.FSimGate(theta=1.5015413274420961, phi=0.51076415920643).on(
cirq.GridQubit(0, 6), cirq.GridQubit(1, 6)),
cirq.FSimGate(theta=1.5588791081427968, phi=0.559649620487243).on(
cirq.GridQubit(2, 5), cirq.GridQubit(3, 5)),
cirq.FSimGate(theta=1.5907035825834708, phi=0.5678223287662552).on(
cirq.GridQubit(2, 6), cirq.GridQubit(3, 6)),
cirq.FSimGate(theta=1.5296321276792553, phi=0.537761951313038).on(
cirq.GridQubit(2, 7), cirq.GridQubit(3, 7)),
cirq.FSimGate(theta=1.619276265426104, phi=0.48310297196088736).on(
cirq.GridQubit(2, 8), cirq.GridQubit(3, 8)),
cirq.FSimGate(theta=1.6116663075637374, phi=0.5343172366969327).on(
cirq.GridQubit(4, 1), cirq.GridQubit(5, 1)),
cirq.FSimGate(theta=1.5306030283605572, phi=0.5257102080843467).on(
cirq.GridQubit(4, 2), cirq.GridQubit(5, 2)),
cirq.FSimGate(theta=1.589821065740506, phi=0.5045391214115686).on(
cirq.GridQubit(4, 3), cirq.GridQubit(5, 3)),
cirq.FSimGate(theta=1.5472406430590444, phi=0.5216932173558055).on(
cirq.GridQubit(4, 4), cirq.GridQubit(5, 4)),
cirq.FSimGate(theta=1.5124128267683938, phi=0.5133142626030278).on(
cirq.GridQubit(4, 5), cirq.GridQubit(5, 5)),
cirq.FSimGate(theta=1.5707871303628709, phi=0.5176678491729374).on(
cirq.GridQubit(4, 6), cirq.GridQubit(5, 6)),
cirq.FSimGate(theta=1.5337916352034444, phi=0.5123546847230711).on(
cirq.GridQubit(4, 7), cirq.GridQubit(5, 7)),
cirq.FSimGate(theta=1.596346344028619, phi=0.5104319949477776).on(
cirq.GridQubit(6, 2), cirq.GridQubit(7, 2)),
cirq.FSimGate(theta=1.53597466118183, phi=0.5584919013659856).on(
cirq.GridQubit(6, 3), cirq.GridQubit(7, 3)),
cirq.FSimGate(theta=1.385350861888917, phi=0.5757363921651084).on(
cirq.GridQubit(6, 4), cirq.GridQubit(7, 4)),
cirq.FSimGate(theta=1.614843449053755, phi=0.5542252229839564).on(
cirq.GridQubit(6, 5), cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * -8.908246745659781).on(cirq.GridQubit(0, 5)),
cirq.rz(np.pi * 8.869251576744341).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * 6.283590676347165).on(cirq.GridQubit(0, 6)),
cirq.rz(np.pi * -6.520447144301299).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * 16.126615138480055).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * -16.131076812645777).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * 14.142506057270092).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * -14.253218247917241).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * 4.924729485113265).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * -4.9700174868367535).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * 6.6580043579049155).on(cirq.GridQubit(2, 8)),
cirq.rz(np.pi * -6.651963598527967).on(cirq.GridQubit(3, 8)),
cirq.rz(np.pi * 21.61248038813089).on(cirq.GridQubit(4, 1)),
cirq.rz(np.pi * -21.625483306337212).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * 12.581705877923879).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * -12.594416953698515).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * 7.826508725663096).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * -7.7661951210003215).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * 12.014531408750791).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * -11.988030380246926).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * 11.590471496440383).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * -11.580147116038336).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * -11.55701654221442).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * 12.014091230723457).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * -7.662724143723925).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * 7.623327598937605).on(cirq.GridQubit(5, 7)),
cirq.rz(np.pi * -15.693287261948884).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * 15.710667862671292).on(cirq.GridQubit(7, 2)),
cirq.rz(np.pi * -14.640627714067872).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * 14.658122796662232).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * -10.271185992592658).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * 10.092146255094638).on(cirq.GridQubit(7, 4)),
cirq.rz(np.pi * -14.265609363423946).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * 14.186262248819594).on(cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
(cirq.Y**0.5).on(cirq.GridQubit(0, 5)),
(cirq.X**0.5).on(cirq.GridQubit(0, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 7)),
(cirq.X**0.5).on(cirq.GridQubit(2, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 5)),
(cirq.X**0.5).on(cirq.GridQubit(2, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 7)),
(cirq.X**0.5).on(cirq.GridQubit(2, 8)),
(cirq.X**0.5).on(cirq.GridQubit(3, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 3)),
(cirq.X**0.5).on(cirq.GridQubit(3, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 6)),
(cirq.X**0.5).on(cirq.GridQubit(3, 7)),
(cirq.X**0.5).on(cirq.GridQubit(3, 8)),
(cirq.X**0.5).on(cirq.GridQubit(4, 1)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 2)),
(cirq.X**0.5).on(cirq.GridQubit(4, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 7)),
(cirq.X**0.5).on(cirq.GridQubit(5, 1)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 3)),
(cirq.X**0.5).on(cirq.GridQubit(5, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 1)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 4)),
(cirq.X**0.5).on(cirq.GridQubit(6, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(7, 2)),
(cirq.X**0.5).on(cirq.GridQubit(7, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * -19.484134780175637).on(cirq.GridQubit(1, 4)),
cirq.rz(np.pi * 19.523987288909453).on(cirq.GridQubit(2, 4)),
cirq.rz(np.pi * -4.706584587366488).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * 4.709081406888329).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * -4.644078115073251).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * 4.639398026235451).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * 4.902125678549236).on(cirq.GridQubit(1, 7)),
cirq.rz(np.pi * -4.908456163642546).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * 21.92168532545182).on(cirq.GridQubit(3, 2)),
cirq.rz(np.pi * -21.88587507115056).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * 26.023597923836856).on(cirq.GridQubit(3, 3)),
cirq.rz(np.pi * -26.106962907913907).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * 8.370562501914259).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * -8.461596611893802).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * 10.100639843256841).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * -10.099314675186001).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * 18.263937308298605).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * -18.247908941862203).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * 4.303481743922509).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * -4.595908389782827).on(cirq.GridQubit(6, 1)),
cirq.rz(np.pi * 20.40623181672889).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * -20.409639326033993).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * 13.138499004273484).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * -13.02570710190338).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * 19.994449091768548).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * -20.069061909163636).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * 13.831104618355031).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * -13.786606163793484).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * -15.932071921009928).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * 16.237358555270973).on(cirq.GridQubit(6, 6)),
cirq.rz(np.pi * -15.051306761471112).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * 15.009685791226179).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.FSimGate(theta=1.5423469235530667, phi=0.5388088498512879).on(
cirq.GridQubit(1, 4), cirq.GridQubit(2, 4)),
cirq.FSimGate(theta=1.5684106752459124, phi=0.5414007317481024).on(
cirq.GridQubit(1, 5), cirq.GridQubit(2, 5)),
cirq.FSimGate(theta=1.6152322695478165, phi=0.5160697976136035).on(
cirq.GridQubit(1, 6), cirq.GridQubit(2, 6)),
cirq.FSimGate(theta=1.5040835324508275, phi=0.6761565725975858).on(
cirq.GridQubit(1, 7), cirq.GridQubit(2, 7)),
cirq.FSimGate(theta=1.5144175462386844, phi=0.4680444728781228).on(
cirq.GridQubit(3, 2), cirq.GridQubit(4, 2)),
cirq.FSimGate(theta=1.4668587973263782, phi=0.4976074601121169).on(
cirq.GridQubit(3, 3), cirq.GridQubit(4, 3)),
cirq.FSimGate(theta=1.603651215218248, phi=0.46649538437100246).on(
cirq.GridQubit(3, 5), cirq.GridQubit(4, 5)),
cirq.FSimGate(theta=1.6160334279232749, phi=0.4353897326147861).on(
cirq.GridQubit(3, 6), cirq.GridQubit(4, 6)),
cirq.FSimGate(theta=1.5909523830878005, phi=0.5244700889486827).on(
cirq.GridQubit(3, 7), cirq.GridQubit(4, 7)),
cirq.FSimGate(theta=1.2635580943707443, phi=0.3315124918059815).on(
cirq.GridQubit(5, 1), cirq.GridQubit(6, 1)),
cirq.FSimGate(theta=1.5245711693927642, phi=0.4838906581970925).on(
cirq.GridQubit(5, 2), cirq.GridQubit(6, 2)),
cirq.FSimGate(theta=1.5542388360689805, phi=0.5186534637665338).on(
cirq.GridQubit(5, 3), cirq.GridQubit(6, 3)),
cirq.FSimGate(theta=1.5109427139358562, phi=0.4939388316289224).on(
cirq.GridQubit(5, 4), cirq.GridQubit(6, 4)),
cirq.FSimGate(theta=1.57896484905089, phi=0.5081656554152614).on(
cirq.GridQubit(5, 5), cirq.GridQubit(6, 5)),
cirq.FSimGate(theta=1.5287198766338426, phi=0.5026095497404074).on(
cirq.GridQubit(5, 6), cirq.GridQubit(6, 6)),
cirq.FSimGate(theta=1.501781688539034, phi=0.46799927805932284).on(
cirq.GridQubit(7, 3), cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * 19.672207801277334).on(cirq.GridQubit(1, 4)),
cirq.rz(np.pi * -19.632355292543515).on(cirq.GridQubit(2, 4)),
cirq.rz(np.pi * 4.777874433792896).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * -4.775377614271054).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * 4.198995232832642).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * -4.203675321670441).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * -5.321807436079611).on(cirq.GridQubit(1, 7)),
cirq.rz(np.pi * 5.315476950986302).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * -21.072491731044448).on(cirq.GridQubit(3, 2)),
cirq.rz(np.pi * 21.1083019853457).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * -25.79725021952863).on(cirq.GridQubit(3, 3)),
cirq.rz(np.pi * 25.713885235451578).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * -10.07786364079744).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * 9.986829530817898).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * -11.191871460773655).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * 11.193196628844492).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * -18.61869305225248).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * 18.63472141868888).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * -5.067815524796681).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * 4.775388878936363).on(cirq.GridQubit(6, 1)),
cirq.rz(np.pi * -20.83856101076621).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * 20.835153501461107).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * -12.242421382024746).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * 12.35521328439485).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * -19.32248305368911).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * 19.24787023629402).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * -13.967003503847575).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * 14.01150195840912).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * 15.49043184094976).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * -15.185145206688718).on(cirq.GridQubit(6, 6)),
cirq.rz(np.pi * 16.244630846615102).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * -16.286251816860037).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
(cirq.X**0.5).on(cirq.GridQubit(0, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(0, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 5)),
(cirq.X**0.5).on(cirq.GridQubit(1, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 4)),
(cirq.X**0.5).on(cirq.GridQubit(2, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 6)),
(cirq.X**0.5).on(cirq.GridQubit(2, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 8)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 8)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 1)),
(cirq.X**0.5).on(cirq.GridQubit(4, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 1)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 4)),
(cirq.X**0.5).on(cirq.GridQubit(5, 5)),
(cirq.X**0.5).on(cirq.GridQubit(5, 6)),
(cirq.X**0.5).on(cirq.GridQubit(5, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 1)),
(cirq.X**0.5).on(cirq.GridQubit(6, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 5)),
(cirq.X**0.5).on(cirq.GridQubit(6, 6)),
(cirq.X**0.5).on(cirq.GridQubit(7, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(7, 4)),
(cirq.X**0.5).on(cirq.GridQubit(7, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * -8.040816222527539).on(cirq.GridQubit(1, 4)),
cirq.rz(np.pi * 7.944517331211661).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * -23.983870303178655).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * 23.947691840749943).on(cirq.GridQubit(1, 7)),
cirq.rz(np.pi * 9.52513916974456).on(cirq.GridQubit(2, 4)),
cirq.rz(np.pi * -9.570527436528984).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * -13.084997501357485).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * 12.644483778485537).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * -14.199884115730756).on(cirq.GridQubit(3, 2)),
cirq.rz(np.pi * 14.23701302367147).on(cirq.GridQubit(3, 3)),
cirq.rz(np.pi * -1.4576740888019104).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * 1.4499714261238263).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * -8.542750980814159).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * 8.512493259768608).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * -8.401251133882973).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * 8.52245467467511).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * 7.236894386212986).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * -7.223381665113074).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * -2.0014238777188416).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * 2.057465958360948).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * -6.843134633961698).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * 6.916407045184491).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * 3.0014003009778842).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * -2.8671730694242803).on(cirq.GridQubit(5, 7)),
cirq.rz(np.pi * -10.176126243969842).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * 10.134682918719976).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * -12.347924259148533).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * 12.372739915233888).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * -13.554795435376587).on(cirq.GridQubit(7, 2)),
cirq.rz(np.pi * 13.646281937389634).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * -7.248360514830936).on(cirq.GridQubit(7, 4)),
cirq.rz(np.pi * 7.1642416454689135).on(cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.FSimGate(theta=1.545844435173598, phi=0.5163254336997252).on(
cirq.GridQubit(1, 4), cirq.GridQubit(1, 5)),
cirq.FSimGate(theta=1.5033136051987404, phi=0.5501439149572028).on(
cirq.GridQubit(1, 6), cirq.GridQubit(1, 7)),
cirq.FSimGate(theta=1.5930079664614663, phi=0.5355369376884288).on(
cirq.GridQubit(2, 4), cirq.GridQubit(2, 5)),
cirq.FSimGate(theta=1.59182423935832, phi=-5.773664463980115).on(
cirq.GridQubit(2, 6), cirq.GridQubit(2, 7)),
cirq.FSimGate(theta=1.5886126292316385, phi=0.4838919055156303).on(
cirq.GridQubit(3, 2), cirq.GridQubit(3, 3)),
cirq.FSimGate(theta=1.5286450573669954, phi=0.5113953905811602).on(
cirq.GridQubit(3, 6), cirq.GridQubit(3, 7)),
cirq.FSimGate(theta=1.565622495548066, phi=0.5127256481964074).on(
cirq.GridQubit(4, 2), cirq.GridQubit(4, 3)),
cirq.FSimGate(theta=1.5289739216684795, phi=0.5055240639761313).on(
cirq.GridQubit(4, 4), cirq.GridQubit(4, 5)),
cirq.FSimGate(theta=1.5384796865621224, phi=0.5293381306162406).on(
cirq.GridQubit(4, 6), cirq.GridQubit(4, 7)),
cirq.FSimGate(theta=1.4727562833004122, phi=0.4552443293379814).on(
cirq.GridQubit(5, 2), cirq.GridQubit(5, 3)),
cirq.FSimGate(theta=1.5346175385256955, phi=0.5131039467233695).on(
cirq.GridQubit(5, 4), cirq.GridQubit(5, 5)),
cirq.FSimGate(theta=1.558221035096814, phi=0.4293113178636455).on(
cirq.GridQubit(5, 6), cirq.GridQubit(5, 7)),
cirq.FSimGate(theta=1.5169062231051558, phi=0.46319906116805815).on(
cirq.GridQubit(6, 2), cirq.GridQubit(6, 3)),
cirq.FSimGate(theta=1.5705414623224259, phi=0.4791699064049766).on(
cirq.GridQubit(6, 4), cirq.GridQubit(6, 5)),
cirq.FSimGate(theta=1.5516764540193888, phi=0.505545707839895).on(
cirq.GridQubit(7, 2), cirq.GridQubit(7, 3)),
cirq.FSimGate(theta=1.5699606675525557, phi=0.48292170263262457).on(
cirq.GridQubit(7, 4), cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * 8.953042465034816).on(cirq.GridQubit(1, 4)),
cirq.rz(np.pi * -9.049341356350693).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * 23.28431055044745).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * -23.320489012876163).on(cirq.GridQubit(1, 7)),
cirq.rz(np.pi * -9.054070555108892).on(cirq.GridQubit(2, 4)),
cirq.rz(np.pi * 9.008682288324469).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * 13.750356038389338).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * -14.190869761261286).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * 13.071414269893877).on(cirq.GridQubit(3, 2)),
cirq.rz(np.pi * -13.034285361953161).on(cirq.GridQubit(3, 3)),
cirq.rz(np.pi * 2.0599079899133517).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * -2.067610652591436).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * 8.780337110122234).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * -8.810594831167785).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * 8.199075778124648).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * -8.07787223733251).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * -9.025823706766039).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * 9.039336427865951).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * 2.570829500938612).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * -2.5147874202965053).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * 6.561341949396702).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * -6.48806953817391).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * -3.1851453827247447).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * 3.3193726142783486).on(cirq.GridQubit(5, 7)),
cirq.rz(np.pi * 10.239062711844038).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * -10.280506037093904).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * 14.161779067835406).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * -14.136963411750049).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * 13.413311792027148).on(cirq.GridQubit(7, 2)),
cirq.rz(np.pi * -13.3218252900141).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * 7.960694186099931).on(cirq.GridQubit(7, 4)),
cirq.rz(np.pi * -8.044813055461953).on(cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
(cirq.Y**0.5).on(cirq.GridQubit(0, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(0, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 7)),
(cirq.X**0.5).on(cirq.GridQubit(2, 8)),
(cirq.X**0.5).on(cirq.GridQubit(3, 2)),
(cirq.X**0.5).on(cirq.GridQubit(3, 3)),
(cirq.X**0.5).on(cirq.GridQubit(3, 5)),
(cirq.X**0.5).on(cirq.GridQubit(3, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 8)),
(cirq.X**0.5).on(cirq.GridQubit(4, 1)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 2)),
(cirq.X**0.5).on(cirq.GridQubit(4, 3)),
(cirq.X**0.5).on(cirq.GridQubit(4, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 5)),
(cirq.X**0.5).on(cirq.GridQubit(4, 6)),
(cirq.X**0.5).on(cirq.GridQubit(4, 7)),
(cirq.X**0.5).on(cirq.GridQubit(5, 1)),
(cirq.X**0.5).on(cirq.GridQubit(5, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 7)),
(cirq.X**0.5).on(cirq.GridQubit(6, 1)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 2)),
(cirq.X**0.5).on(cirq.GridQubit(6, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 4)),
(cirq.X**0.5).on(cirq.GridQubit(6, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(7, 2)),
(cirq.X**0.5).on(cirq.GridQubit(7, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(7, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * -9.713975624866094).on(cirq.GridQubit(0, 5)),
cirq.rz(np.pi * 9.760958427369838).on(cirq.GridQubit(0, 6)),
cirq.rz(np.pi * -30.29986888404229).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * 30.302497289344824).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * -30.054485552499738).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * 30.034988862096366).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * 19.050366769065057).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * -19.343232369531343).on(cirq.GridQubit(2, 8)),
cirq.rz(np.pi * -26.08870806725985).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * 26.13388826789855).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * 15.787288476134503).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * -15.762287607594697).on(cirq.GridQubit(3, 8)),
cirq.rz(np.pi * -24.12235236667589).on(cirq.GridQubit(4, 1)),
cirq.rz(np.pi * 24.164090864144143).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * 11.90395452297206).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * -11.950266520580142).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * -23.906047663366408).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * 23.945335880513902).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * -12.64994198965531).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * 12.670825603456805).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * 5.221137480344522).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * -5.118027941327464).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * 9.263573798570924).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * -9.55041239213535).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * 8.765227495500554).on(cirq.GridQubit(6, 1)),
cirq.rz(np.pi * -9.048084333301423).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * 13.422682742974219).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * -13.34768684100077).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * -28.058582817569).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * 28.139782783273418).on(cirq.GridQubit(6, 6)),
cirq.rz(np.pi * 7.346307418341885).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * -7.447034178043343).on(cirq.GridQubit(7, 4)),
]),
cirq.Moment(operations=[
cirq.FSimGate(theta=1.5454967174552687, phi=0.5074540278986153).on(
cirq.GridQubit(0, 5), cirq.GridQubit(0, 6)),
cirq.FSimGate(theta=1.5233234922971755, phi=0.6681144400379464).on(
cirq.GridQubit(1, 5), cirq.GridQubit(1, 6)),
cirq.FSimGate(theta=1.5644541080112795, phi=0.5439498075085039).on(
cirq.GridQubit(2, 5), cirq.GridQubit(2, 6)),
cirq.FSimGate(theta=1.5866139110090092, phi=0.5693597810559818).on(
cirq.GridQubit(2, 7), cirq.GridQubit(2, 8)),
cirq.FSimGate(theta=1.541977006124425, phi=0.6073798124875975).on(
cirq.GridQubit(3, 5), cirq.GridQubit(3, 6)),
cirq.FSimGate(theta=1.5573072833358306, phi=0.5415514987622351).on(
cirq.GridQubit(3, 7), cirq.GridQubit(3, 8)),
cirq.FSimGate(theta=1.5345751514593928, phi=0.472462117170605).on(
cirq.GridQubit(4, 1), cirq.GridQubit(4, 2)),
cirq.FSimGate(theta=1.5138652502397498, phi=0.47710618607286504).on(
cirq.GridQubit(4, 3), cirq.GridQubit(4, 4)),
cirq.FSimGate(theta=1.5849169442855044, phi=0.54346233613361).on(
cirq.GridQubit(4, 5), cirq.GridQubit(4, 6)),
cirq.FSimGate(theta=1.4838884067961586, phi=0.5070681071136852).on(
cirq.GridQubit(5, 1), cirq.GridQubit(5, 2)),
cirq.FSimGate(theta=1.5398075246432927, phi=0.5174515645943538).on(
cirq.GridQubit(5, 3), cirq.GridQubit(5, 4)),
cirq.FSimGate(theta=1.4593314109380113, phi=0.5230636172671492).on(
cirq.GridQubit(5, 5), cirq.GridQubit(5, 6)),
cirq.FSimGate(theta=1.4902099797510393, phi=0.4552057582549894).on(
cirq.GridQubit(6, 1), cirq.GridQubit(6, 2)),
cirq.FSimGate(theta=1.5376836849431186, phi=0.46265685930712236).on(
cirq.GridQubit(6, 3), cirq.GridQubit(6, 4)),
cirq.FSimGate(theta=1.555185434982808, phi=0.6056351386305033).on(
cirq.GridQubit(6, 5), cirq.GridQubit(6, 6)),
cirq.FSimGate(theta=1.4749003996237158, phi=0.4353609222411594).on(
cirq.GridQubit(7, 3), cirq.GridQubit(7, 4)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * 9.325287511969192).on(cirq.GridQubit(0, 5)),
cirq.rz(np.pi * -9.278304709465448).on(cirq.GridQubit(0, 6)),
cirq.rz(np.pi * 30.657856052586013).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * -30.65522764728348).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * 30.087139057028068).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * -30.106635747431437).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * -18.435776080470458).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * 18.142910480004172).on(cirq.GridQubit(2, 8)),
cirq.rz(np.pi * 26.656199817881895).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * -26.611019617243198).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * -15.602152260365296).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * 15.627153128905102).on(cirq.GridQubit(3, 8)),
cirq.rz(np.pi * 23.32004504470358).on(cirq.GridQubit(4, 1)),
cirq.rz(np.pi * -23.27830654723533).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * -12.422336273583753).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * 12.376024275975672).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * 24.032508849527318).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * -23.993220632379824).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * 11.745064422304269).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * -11.724180808502775).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * -5.991658529535789).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * 6.094768068552847).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * -9.293307215154037).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * 9.006468621589612).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * -7.056913502275617).on(cirq.GridQubit(6, 1)),
cirq.rz(np.pi * 6.774056664474749).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * -13.45823318122632).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * 13.53322908319977).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * 28.931540837300908).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * -28.850340871596494).on(cirq.GridQubit(6, 6)),
cirq.rz(np.pi * -7.785583222537938).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * 7.68485646283648).on(cirq.GridQubit(7, 4)),
]),
cirq.Moment(operations=[
(cirq.X**0.5).on(cirq.GridQubit(0, 5)),
(cirq.X**0.5).on(cirq.GridQubit(0, 6)),
(cirq.X**0.5).on(cirq.GridQubit(1, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 5)),
(cirq.X**0.5).on(cirq.GridQubit(1, 6)),
(cirq.X**0.5).on(cirq.GridQubit(1, 7)),
(cirq.X**0.5).on(cirq.GridQubit(2, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 5)),
(cirq.X**0.5).on(cirq.GridQubit(2, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 8)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 6)),
(cirq.X**0.5).on(cirq.GridQubit(3, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 8)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 1)),
(cirq.X**0.5).on(cirq.GridQubit(4, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 4)),
(cirq.X**0.5).on(cirq.GridQubit(4, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 1)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 3)),
(cirq.X**0.5).on(cirq.GridQubit(5, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 5)),
(cirq.X**0.5).on(cirq.GridQubit(5, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 1)),
(cirq.X**0.5).on(cirq.GridQubit(6, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 3)),
(cirq.X**0.5).on(cirq.GridQubit(6, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 5)),
(cirq.X**0.5).on(cirq.GridQubit(6, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(7, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 5)),
(cirq.X**0.5).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * 13.22438542655545).on(cirq.GridQubit(0, 5)),
cirq.rz(np.pi * -13.26338059547089).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * -9.187380074002728).on(cirq.GridQubit(0, 6)),
cirq.rz(np.pi * 8.950523606048595).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * -25.436295096608994).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * 25.43183342244327).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * -21.351868774756507).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * 21.24115658410936).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * -6.643807240413623).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * 6.598519238690134).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * -9.269612643757092).on(cirq.GridQubit(2, 8)),
cirq.rz(np.pi * 9.27565340313404).on(cirq.GridQubit(3, 8)),
cirq.rz(np.pi * -33.75939269455927).on(cirq.GridQubit(4, 1)),
cirq.rz(np.pi * 33.74638977635295).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * -21.096932498710586).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * 21.084221422935954).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * -13.046692838556257).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * 13.107006443219033).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * -20.486755870544844).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * 20.51325689904871).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * -19.82814477311278).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * 19.838469153514826).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * 17.687652291347487).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * -17.230577602838448).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * 12.257439946178984).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * -12.296836490965301).on(cirq.GridQubit(5, 7)),
cirq.rz(np.pi * 27.146146626922736).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * -27.128766026200324).on(cirq.GridQubit(7, 2)),
cirq.rz(np.pi * 23.46087327851529).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * -23.443378195920936).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * 17.157142369360066).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * -17.33618210685809).on(cirq.GridQubit(7, 4)),
cirq.rz(np.pi * 22.592178494472112).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * -22.671525609076465).on(cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.FSimGate(theta=1.4937034321050129, phi=0.5388459463555662).on(
cirq.GridQubit(0, 5), cirq.GridQubit(1, 5)),
cirq.FSimGate(theta=1.5015413274420961, phi=0.51076415920643).on(
cirq.GridQubit(0, 6), cirq.GridQubit(1, 6)),
cirq.FSimGate(theta=1.5588791081427968, phi=0.559649620487243).on(
cirq.GridQubit(2, 5), cirq.GridQubit(3, 5)),
cirq.FSimGate(theta=1.5907035825834708, phi=0.5678223287662552).on(
cirq.GridQubit(2, 6), cirq.GridQubit(3, 6)),
cirq.FSimGate(theta=1.5296321276792553, phi=0.537761951313038).on(
cirq.GridQubit(2, 7), cirq.GridQubit(3, 7)),
cirq.FSimGate(theta=1.619276265426104, phi=0.48310297196088736).on(
cirq.GridQubit(2, 8), cirq.GridQubit(3, 8)),
cirq.FSimGate(theta=1.6116663075637374, phi=0.5343172366969327).on(
cirq.GridQubit(4, 1), cirq.GridQubit(5, 1)),
cirq.FSimGate(theta=1.5306030283605572, phi=0.5257102080843467).on(
cirq.GridQubit(4, 2), cirq.GridQubit(5, 2)),
cirq.FSimGate(theta=1.589821065740506, phi=0.5045391214115686).on(
cirq.GridQubit(4, 3), cirq.GridQubit(5, 3)),
cirq.FSimGate(theta=1.5472406430590444, phi=0.5216932173558055).on(
cirq.GridQubit(4, 4), cirq.GridQubit(5, 4)),
cirq.FSimGate(theta=1.5124128267683938, phi=0.5133142626030278).on(
cirq.GridQubit(4, 5), cirq.GridQubit(5, 5)),
cirq.FSimGate(theta=1.5707871303628709, phi=0.5176678491729374).on(
cirq.GridQubit(4, 6), cirq.GridQubit(5, 6)),
cirq.FSimGate(theta=1.5337916352034444, phi=0.5123546847230711).on(
cirq.GridQubit(4, 7), cirq.GridQubit(5, 7)),
cirq.FSimGate(theta=1.596346344028619, phi=0.5104319949477776).on(
cirq.GridQubit(6, 2), cirq.GridQubit(7, 2)),
cirq.FSimGate(theta=1.53597466118183, phi=0.5584919013659856).on(
cirq.GridQubit(6, 3), cirq.GridQubit(7, 3)),
cirq.FSimGate(theta=1.385350861888917, phi=0.5757363921651084).on(
cirq.GridQubit(6, 4), cirq.GridQubit(7, 4)),
cirq.FSimGate(theta=1.614843449053755, phi=0.5542252229839564).on(
cirq.GridQubit(6, 5), cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * -14.088246745659802).on(cirq.GridQubit(0, 5)),
cirq.rz(np.pi * 14.049251576744364).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * 9.687590676347119).on(cirq.GridQubit(0, 6)),
cirq.rz(np.pi * -9.924447144301253).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * 25.746615138480117).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * -25.75107681264584).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * 22.13450605727).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * -22.245218247917148).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * 7.440729485113184).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * -7.486017486836674).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * 10.506004357904885).on(cirq.GridQubit(2, 8)),
cirq.rz(np.pi * -10.499963598527936).on(cirq.GridQubit(3, 8)),
cirq.rz(np.pi * 34.19248038813088).on(cirq.GridQubit(4, 1)),
cirq.rz(np.pi * -34.20548330633721).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * 20.425705877923868).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * -20.4384169536985).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * 12.71050872566315).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * -12.650195121000372).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * 19.562531408750814).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * -19.53603038024695).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * 18.69447149644039).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * -18.684147116038343).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * -18.21701654221441).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * 18.674091230723448).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * -12.250724143723879).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * 12.21132759893756).on(cirq.GridQubit(5, 7)),
cirq.rz(np.pi * -25.757287261948953).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * 25.774667862671368).on(cirq.GridQubit(7, 2)),
cirq.rz(np.pi * -23.52062771406781).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * 23.538122796662165).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * -16.556589282442097).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * 16.377549544944078).on(cirq.GridQubit(7, 4)),
cirq.rz(np.pi * -22.849609363423916).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * 22.770262248819563).on(cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(0, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(0, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 7)),
(cirq.X**0.5).on(cirq.GridQubit(2, 8)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 7)),
(cirq.X**0.5).on(cirq.GridQubit(3, 8)),
(cirq.X**0.5).on(cirq.GridQubit(4, 1)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 3)),
(cirq.X**0.5).on(cirq.GridQubit(4, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 5)),
(cirq.X**0.5).on(cirq.GridQubit(4, 6)),
(cirq.X**0.5).on(cirq.GridQubit(4, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 1)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 2)),
(cirq.X**0.5).on(cirq.GridQubit(5, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 7)),
(cirq.X**0.5).on(cirq.GridQubit(6, 1)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 2)),
(cirq.X**0.5).on(cirq.GridQubit(6, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 4)),
(cirq.X**0.5).on(cirq.GridQubit(6, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(7, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(7, 3)),
(cirq.X**0.5).on(cirq.GridQubit(7, 4)),
(cirq.X**0.5).on(cirq.GridQubit(7, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * -29.696134780175626).on(cirq.GridQubit(1, 4)),
cirq.rz(np.pi * 29.735987288909445).on(cirq.GridQubit(2, 4)),
cirq.rz(np.pi * -6.926584587366442).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * 6.929081406888282).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * -6.864078115073335).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * 6.859398026235534).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * 7.418125678549155).on(cirq.GridQubit(1, 7)),
cirq.rz(np.pi * -7.424456163642465).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * 33.02168532545184).on(cirq.GridQubit(3, 2)),
cirq.rz(np.pi * -32.98587507115059).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * 39.34359792383697).on(cirq.GridQubit(3, 3)),
cirq.rz(np.pi * -39.42696290791402).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * 12.958562501914345).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * -13.049596611893888).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * 15.428639843256777).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * -15.42731467518594).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * 28.031937308298577).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * -28.01590894186218).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * 6.967481743922609).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * -7.259908389782927).on(cirq.GridQubit(6, 1)),
cirq.rz(np.pi * 31.210231816728815).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * -31.213639326033913).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * 19.946499004273523).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * -19.833707101903418).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * 30.137045801919207).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * -30.211658619314296).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * 21.231104618355).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * -21.186606163793456).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * -24.07207192100989).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * 24.377358555270934).on(cirq.GridQubit(6, 6)),
cirq.rz(np.pi * -23.339306761471114).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * 23.297685791226186).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.FSimGate(theta=1.5423469235530667, phi=0.5388088498512879).on(
cirq.GridQubit(1, 4), cirq.GridQubit(2, 4)),
cirq.FSimGate(theta=1.5684106752459124, phi=0.5414007317481024).on(
cirq.GridQubit(1, 5), cirq.GridQubit(2, 5)),
cirq.FSimGate(theta=1.6152322695478165, phi=0.5160697976136035).on(
cirq.GridQubit(1, 6), cirq.GridQubit(2, 6)),
cirq.FSimGate(theta=1.5040835324508275, phi=0.6761565725975858).on(
cirq.GridQubit(1, 7), cirq.GridQubit(2, 7)),
cirq.FSimGate(theta=1.5144175462386844, phi=0.4680444728781228).on(
cirq.GridQubit(3, 2), cirq.GridQubit(4, 2)),
cirq.FSimGate(theta=1.4668587973263782, phi=0.4976074601121169).on(
cirq.GridQubit(3, 3), cirq.GridQubit(4, 3)),
cirq.FSimGate(theta=1.603651215218248, phi=0.46649538437100246).on(
cirq.GridQubit(3, 5), cirq.GridQubit(4, 5)),
cirq.FSimGate(theta=1.6160334279232749, phi=0.4353897326147861).on(
cirq.GridQubit(3, 6), cirq.GridQubit(4, 6)),
cirq.FSimGate(theta=1.5909523830878005, phi=0.5244700889486827).on(
cirq.GridQubit(3, 7), cirq.GridQubit(4, 7)),
cirq.FSimGate(theta=1.2635580943707443, phi=0.3315124918059815).on(
cirq.GridQubit(5, 1), cirq.GridQubit(6, 1)),
cirq.FSimGate(theta=1.5245711693927642, phi=0.4838906581970925).on(
cirq.GridQubit(5, 2), cirq.GridQubit(6, 2)),
cirq.FSimGate(theta=1.5542388360689805, phi=0.5186534637665338).on(
cirq.GridQubit(5, 3), cirq.GridQubit(6, 3)),
cirq.FSimGate(theta=1.5109427139358562, phi=0.4939388316289224).on(
cirq.GridQubit(5, 4), cirq.GridQubit(6, 4)),
cirq.FSimGate(theta=1.57896484905089, phi=0.5081656554152614).on(
cirq.GridQubit(5, 5), cirq.GridQubit(6, 5)),
cirq.FSimGate(theta=1.5287198766338426, phi=0.5026095497404074).on(
cirq.GridQubit(5, 6), cirq.GridQubit(6, 6)),
cirq.FSimGate(theta=1.501781688539034, phi=0.46799927805932284).on(
cirq.GridQubit(7, 3), cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * 29.884207801277327).on(cirq.GridQubit(1, 4)),
cirq.rz(np.pi * -29.844355292543508).on(cirq.GridQubit(2, 4)),
cirq.rz(np.pi * 6.997874433792849).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * -6.995377614271008).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * 6.418995232832726).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * -6.423675321670527).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * -7.8378074360795305).on(cirq.GridQubit(1, 7)),
cirq.rz(np.pi * 7.831476950986221).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * -32.172491731044474).on(cirq.GridQubit(3, 2)),
cirq.rz(np.pi * 32.20830198534573).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * -39.11725021952874).on(cirq.GridQubit(3, 3)),
cirq.rz(np.pi * 39.03388523545169).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * -14.665863640797525).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * 14.574829530817984).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * -16.519871460773594).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * 16.52119662884443).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * -28.386693052252454).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * 28.402721418688852).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * -7.731815524796781).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * 7.439388878936463).on(cirq.GridQubit(6, 1)),
cirq.rz(np.pi * -31.64256101076613).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * 31.63915350146103).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * -19.050421382024783).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * 19.16321328439489).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * -29.465079763839764).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * 29.390466946444676).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * -21.367003503847553).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * 21.411501958409097).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * 23.630431840949722).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * -23.32514520668868).on(cirq.GridQubit(6, 6)),
cirq.rz(np.pi * 24.532630846615117).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * -24.574251816860045).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
(cirq.Y**0.5).on(cirq.GridQubit(0, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(0, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 6)),
(cirq.X**0.5).on(cirq.GridQubit(1, 7)),
(cirq.X**0.5).on(cirq.GridQubit(2, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 8)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 2)),
(cirq.X**0.5).on(cirq.GridQubit(3, 3)),
(cirq.X**0.5).on(cirq.GridQubit(3, 5)),
(cirq.X**0.5).on(cirq.GridQubit(3, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 8)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 1)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 7)),
(cirq.X**0.5).on(cirq.GridQubit(5, 1)),
(cirq.X**0.5).on(cirq.GridQubit(5, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 3)),
(cirq.X**0.5).on(cirq.GridQubit(5, 4)),
(cirq.X**0.5).on(cirq.GridQubit(5, 5)),
(cirq.X**0.5).on(cirq.GridQubit(5, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 1)),
(cirq.X**0.5).on(cirq.GridQubit(6, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 3)),
(cirq.X**0.5).on(cirq.GridQubit(6, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 5)),
(cirq.X**0.5).on(cirq.GridQubit(6, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(7, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * -11.88881622252751).on(cirq.GridQubit(1, 4)),
cirq.rz(np.pi * 11.792517331211629).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * -34.93587030317863).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * 34.899691840749924).on(cirq.GridQubit(1, 7)),
cirq.rz(np.pi * 13.66913916974463).on(cirq.GridQubit(2, 4)),
cirq.rz(np.pi * -13.714527436529053).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * -19.300997501357458).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * 18.86048377848551).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * -20.26788411573081).on(cirq.GridQubit(3, 2)),
cirq.rz(np.pi * 20.30501302367152).on(cirq.GridQubit(3, 3)),
cirq.rz(np.pi * -2.1976740888018944).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * 2.1899714261238103).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * -12.39075098081413).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * 12.360493259768578).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * -12.10125113388289).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * 12.22245467467503).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * 10.936894386213037).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * -10.923381665113125).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * -2.8894238777188748).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * 2.945465958360982).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * -10.099134633961603).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * 10.172407045184396).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * 4.629400300977903).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * -4.495173069424299).on(cirq.GridQubit(5, 7)),
cirq.rz(np.pi * -15.060126243969762).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * 15.018682918719897).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * -18.34652096929912).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * 18.371336625384476).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * -19.622795435376638).on(cirq.GridQubit(7, 2)),
cirq.rz(np.pi * 19.714281937389686).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * -10.948360514830984).on(cirq.GridQubit(7, 4)),
cirq.rz(np.pi * 10.864241645468965).on(cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.FSimGate(theta=1.545844435173598, phi=0.5163254336997252).on(
cirq.GridQubit(1, 4), cirq.GridQubit(1, 5)),
cirq.FSimGate(theta=1.5033136051987404, phi=0.5501439149572028).on(
cirq.GridQubit(1, 6), cirq.GridQubit(1, 7)),
cirq.FSimGate(theta=1.5930079664614663, phi=0.5355369376884288).on(
cirq.GridQubit(2, 4), cirq.GridQubit(2, 5)),
cirq.FSimGate(theta=1.59182423935832, phi=-5.773664463980115).on(
cirq.GridQubit(2, 6), cirq.GridQubit(2, 7)),
cirq.FSimGate(theta=1.5886126292316385, phi=0.4838919055156303).on(
cirq.GridQubit(3, 2), cirq.GridQubit(3, 3)),
cirq.FSimGate(theta=1.5286450573669954, phi=0.5113953905811602).on(
cirq.GridQubit(3, 6), cirq.GridQubit(3, 7)),
cirq.FSimGate(theta=1.565622495548066, phi=0.5127256481964074).on(
cirq.GridQubit(4, 2), cirq.GridQubit(4, 3)),
cirq.FSimGate(theta=1.5289739216684795, phi=0.5055240639761313).on(
cirq.GridQubit(4, 4), cirq.GridQubit(4, 5)),
cirq.FSimGate(theta=1.5384796865621224, phi=0.5293381306162406).on(
cirq.GridQubit(4, 6), cirq.GridQubit(4, 7)),
cirq.FSimGate(theta=1.4727562833004122, phi=0.4552443293379814).on(
cirq.GridQubit(5, 2), cirq.GridQubit(5, 3)),
cirq.FSimGate(theta=1.5346175385256955, phi=0.5131039467233695).on(
cirq.GridQubit(5, 4), cirq.GridQubit(5, 5)),
cirq.FSimGate(theta=1.558221035096814, phi=0.4293113178636455).on(
cirq.GridQubit(5, 6), cirq.GridQubit(5, 7)),
cirq.FSimGate(theta=1.5169062231051558, phi=0.46319906116805815).on(
cirq.GridQubit(6, 2), cirq.GridQubit(6, 3)),
cirq.FSimGate(theta=1.5705414623224259, phi=0.4791699064049766).on(
cirq.GridQubit(6, 4), cirq.GridQubit(6, 5)),
cirq.FSimGate(theta=1.5516764540193888, phi=0.505545707839895).on(
cirq.GridQubit(7, 2), cirq.GridQubit(7, 3)),
cirq.FSimGate(theta=1.5699606675525557, phi=0.48292170263262457).on(
cirq.GridQubit(7, 4), cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * 12.801042465034786).on(cirq.GridQubit(1, 4)),
cirq.rz(np.pi * -12.897341356350665).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * 34.236310550447435).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * -34.27248901287614).on(cirq.GridQubit(1, 7)),
cirq.rz(np.pi * -13.19807055510896).on(cirq.GridQubit(2, 4)),
cirq.rz(np.pi * 13.152682288324536).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * 19.96635603838931).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * -20.40686976126126).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * 19.13941426989393).on(cirq.GridQubit(3, 2)),
cirq.rz(np.pi * -19.102285361953214).on(cirq.GridQubit(3, 3)),
cirq.rz(np.pi * 2.7999079899133363).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * -2.80761065259142).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * 12.628337110122207).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * -12.658594831167758).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * 11.899075778124569).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * -11.777872237332431).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * -12.725823706766091).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * 12.739336427866004).on(cirq.GridQubit(4, 7)),
cirq.rz(np.pi * 3.458829500938646).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * -3.4027874202965385).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * 9.817341949396608).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * -9.744069538173814).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * -4.8131453827247626).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * 4.9473726142783665).on(cirq.GridQubit(5, 7)),
cirq.rz(np.pi * 15.12306271184396).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * -15.164506037093826).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * 20.160375777985994).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * -20.13556012190064).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * 19.481311792027203).on(cirq.GridQubit(7, 2)),
cirq.rz(np.pi * -19.389825290014155).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * 11.660694186099983).on(cirq.GridQubit(7, 4)),
cirq.rz(np.pi * -11.744813055462004).on(cirq.GridQubit(7, 5)),
]),
cirq.Moment(operations=[
(cirq.X**0.5).on(cirq.GridQubit(0, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(0, 6)),
(cirq.X**0.5).on(cirq.GridQubit(1, 4)),
(cirq.X**0.5).on(cirq.GridQubit(1, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(1, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 4)),
(cirq.X**0.5).on(cirq.GridQubit(2, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 7)),
(cirq.X**0.5).on(cirq.GridQubit(2, 8)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 6)),
(cirq.X**0.5).on(cirq.GridQubit(3, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 8)),
(cirq.X**0.5).on(cirq.GridQubit(4, 1)),
(cirq.X**0.5).on(cirq.GridQubit(4, 2)),
(cirq.X**0.5).on(cirq.GridQubit(4, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 4)),
(cirq.X**0.5).on(cirq.GridQubit(4, 5)),
(cirq.X**0.5).on(cirq.GridQubit(4, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 1)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 2)),
(cirq.X**0.5).on(cirq.GridQubit(5, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(5, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 1)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 2)),
(cirq.X**0.5).on(cirq.GridQubit(6, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(7, 2)),
(cirq.X**0.5).on(cirq.GridQubit(7, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(7, 4)),
(cirq.X**0.5).on(cirq.GridQubit(7, 5)),
(cirq.X**0.5).on(cirq.GridQubit(8, 3)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * -13.561975624866065).on(cirq.GridQubit(0, 5)),
cirq.rz(np.pi * 13.608958427369807).on(cirq.GridQubit(0, 6)),
cirq.rz(np.pi * -42.731868884042235).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * 42.73449728934477).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * -42.48648555249982).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * 42.46698886209646).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * 26.894366769065044).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * -27.18723236953133).on(cirq.GridQubit(2, 8)),
cirq.rz(np.pi * -36.89270806725978).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * 36.93788826789848).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * 22.299288476134443).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * -22.274287607594637).on(cirq.GridQubit(3, 8)),
cirq.rz(np.pi * -33.74235236667582).on(cirq.GridQubit(4, 1)),
cirq.rz(np.pi * 33.78409086414407).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * 16.787954522971983).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * -16.834266520580062).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * -33.970047663366486).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * 34.00933588051398).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * -17.533941989655233).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * 17.554825603456727).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * 7.441137480344476).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * -7.338027941327417).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * 12.963573798570843).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * -13.250412392135269).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * 12.021227495500458).on(cirq.GridQubit(6, 1)),
cirq.rz(np.pi * -12.30408433330133).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * 18.97727945312479).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * -18.902283551151342).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * -39.89858281756901).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * 39.97978278327343).on(cirq.GridQubit(6, 6)),
cirq.rz(np.pi * 10.306307418341955).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * -10.407034178043412).on(cirq.GridQubit(7, 4)),
]),
cirq.Moment(operations=[
cirq.FSimGate(theta=1.5454967174552687, phi=0.5074540278986153).on(
cirq.GridQubit(0, 5), cirq.GridQubit(0, 6)),
cirq.FSimGate(theta=1.5233234922971755, phi=0.6681144400379464).on(
cirq.GridQubit(1, 5), cirq.GridQubit(1, 6)),
cirq.FSimGate(theta=1.5644541080112795, phi=0.5439498075085039).on(
cirq.GridQubit(2, 5), cirq.GridQubit(2, 6)),
cirq.FSimGate(theta=1.5866139110090092, phi=0.5693597810559818).on(
cirq.GridQubit(2, 7), cirq.GridQubit(2, 8)),
cirq.FSimGate(theta=1.541977006124425, phi=0.6073798124875975).on(
cirq.GridQubit(3, 5), cirq.GridQubit(3, 6)),
cirq.FSimGate(theta=1.5573072833358306, phi=0.5415514987622351).on(
cirq.GridQubit(3, 7), cirq.GridQubit(3, 8)),
cirq.FSimGate(theta=1.5345751514593928, phi=0.472462117170605).on(
cirq.GridQubit(4, 1), cirq.GridQubit(4, 2)),
cirq.FSimGate(theta=1.5138652502397498, phi=0.47710618607286504).on(
cirq.GridQubit(4, 3), cirq.GridQubit(4, 4)),
cirq.FSimGate(theta=1.5849169442855044, phi=0.54346233613361).on(
cirq.GridQubit(4, 5), cirq.GridQubit(4, 6)),
cirq.FSimGate(theta=1.4838884067961586, phi=0.5070681071136852).on(
cirq.GridQubit(5, 1), cirq.GridQubit(5, 2)),
cirq.FSimGate(theta=1.5398075246432927, phi=0.5174515645943538).on(
cirq.GridQubit(5, 3), cirq.GridQubit(5, 4)),
cirq.FSimGate(theta=1.4593314109380113, phi=0.5230636172671492).on(
cirq.GridQubit(5, 5), cirq.GridQubit(5, 6)),
cirq.FSimGate(theta=1.4902099797510393, phi=0.4552057582549894).on(
cirq.GridQubit(6, 1), cirq.GridQubit(6, 2)),
cirq.FSimGate(theta=1.5376836849431186, phi=0.46265685930712236).on(
cirq.GridQubit(6, 3), cirq.GridQubit(6, 4)),
cirq.FSimGate(theta=1.555185434982808, phi=0.6056351386305033).on(
cirq.GridQubit(6, 5), cirq.GridQubit(6, 6)),
cirq.FSimGate(theta=1.4749003996237158, phi=0.4353609222411594).on(
cirq.GridQubit(7, 3), cirq.GridQubit(7, 4)),
]),
cirq.Moment(operations=[
cirq.rz(np.pi * 13.17328751196916).on(cirq.GridQubit(0, 5)),
cirq.rz(np.pi * -13.126304709465419).on(cirq.GridQubit(0, 6)),
cirq.rz(np.pi * 43.08985605258596).on(cirq.GridQubit(1, 5)),
cirq.rz(np.pi * -43.08722764728342).on(cirq.GridQubit(1, 6)),
cirq.rz(np.pi * 42.51913905702814).on(cirq.GridQubit(2, 5)),
cirq.rz(np.pi * -42.53863574743151).on(cirq.GridQubit(2, 6)),
cirq.rz(np.pi * -26.279776080470445).on(cirq.GridQubit(2, 7)),
cirq.rz(np.pi * 25.98691048000416).on(cirq.GridQubit(2, 8)),
cirq.rz(np.pi * 37.46019981788182).on(cirq.GridQubit(3, 5)),
cirq.rz(np.pi * -37.415019617243125).on(cirq.GridQubit(3, 6)),
cirq.rz(np.pi * -22.114152260365234).on(cirq.GridQubit(3, 7)),
cirq.rz(np.pi * 22.13915312890504).on(cirq.GridQubit(3, 8)),
cirq.rz(np.pi * 32.9400450447035).on(cirq.GridQubit(4, 1)),
cirq.rz(np.pi * -32.89830654723525).on(cirq.GridQubit(4, 2)),
cirq.rz(np.pi * -17.306336273583675).on(cirq.GridQubit(4, 3)),
cirq.rz(np.pi * 17.260024275975592).on(cirq.GridQubit(4, 4)),
cirq.rz(np.pi * 34.09650884952739).on(cirq.GridQubit(4, 5)),
cirq.rz(np.pi * -34.057220632379895).on(cirq.GridQubit(4, 6)),
cirq.rz(np.pi * 16.629064422304193).on(cirq.GridQubit(5, 1)),
cirq.rz(np.pi * -16.6081808085027).on(cirq.GridQubit(5, 2)),
cirq.rz(np.pi * -8.211658529535743).on(cirq.GridQubit(5, 3)),
cirq.rz(np.pi * 8.3147680685528).on(cirq.GridQubit(5, 4)),
cirq.rz(np.pi * -12.993307215153958).on(cirq.GridQubit(5, 5)),
cirq.rz(np.pi * 12.706468621589535).on(cirq.GridQubit(5, 6)),
cirq.rz(np.pi * -10.31291350227552).on(cirq.GridQubit(6, 1)),
cirq.rz(np.pi * 10.030056664474653).on(cirq.GridQubit(6, 2)),
cirq.rz(np.pi * -19.012829891376892).on(cirq.GridQubit(6, 3)),
cirq.rz(np.pi * 19.08782579335034).on(cirq.GridQubit(6, 4)),
cirq.rz(np.pi * 40.77154083730092).on(cirq.GridQubit(6, 5)),
cirq.rz(np.pi * -40.690340871596504).on(cirq.GridQubit(6, 6)),
cirq.rz(np.pi * -10.745583222538006).on(cirq.GridQubit(7, 3)),
cirq.rz(np.pi * 10.644856462836547).on(cirq.GridQubit(7, 4)),
]),
cirq.Moment(operations=[
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(0, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(0, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 5)),
(cirq.X**0.5).on(cirq.GridQubit(1, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(1, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(2, 4)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 5)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 6)),
(cirq.X**0.5).on(cirq.GridQubit(2, 7)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(2, 8)),
(cirq.X**0.5).on(cirq.GridQubit(3, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 3)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 5)),
(cirq.X**0.5).on(cirq.GridQubit(3, 6)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(3, 7)),
(cirq.Y**0.5).on(cirq.GridQubit(3, 8)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 1)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(4, 3)),
(cirq.X**0.5).on(cirq.GridQubit(4, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 6)),
(cirq.Y**0.5).on(cirq.GridQubit(4, 7)),
(cirq.X**0.5).on(cirq.GridQubit(5, 1)),
(cirq.X**0.5).on(cirq.GridQubit(5, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(5, 5)),
(cirq.X**0.5).on(cirq.GridQubit(5, 6)),
(cirq.X**0.5).on(cirq.GridQubit(5, 7)),
(cirq.X**0.5).on(cirq.GridQubit(6, 1)),
(cirq.X**0.5).on(cirq.GridQubit(6, 2)),
cirq.PhasedXPowGate(phase_exponent=0.25,
exponent=0.5).on(cirq.GridQubit(6, 3)),
(cirq.X**0.5).on(cirq.GridQubit(6, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(6, 5)),
(cirq.X**0.5).on(cirq.GridQubit(6, 6)),
(cirq.X**0.5).on(cirq.GridQubit(7, 2)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 3)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 4)),
(cirq.Y**0.5).on(cirq.GridQubit(7, 5)),
(cirq.Y**0.5).on(cirq.GridQubit(8, 3)),
]),
])
| [
"tony.bruguier@gmail.com"
] | tony.bruguier@gmail.com |
664508da8d95fe0b4e34ddd2e210b0035ce7fab8 | 1e0203f40d4cffed0d64449edeaea00311f4b732 | /minimum-cost-to-move-chips-to-the-same-position/solution.py | 88a9a03183306b9e7d560942705966f3b87e3842 | [] | no_license | childe/leetcode | 102e87dd8d918877f64e7157d45f3f45a607b9e4 | d2e8b2dca40fc955045eb62e576c776bad8ee5f1 | refs/heads/master | 2023-01-12T01:55:26.190208 | 2022-12-27T13:25:27 | 2022-12-27T13:25:27 | 39,767,776 | 2 | 1 | null | 2020-10-13T01:29:05 | 2015-07-27T10:05:04 | Python | UTF-8 | Python | false | false | 698 | py | #!/usr/bin/env python3
"""
https://leetcode.cn/problems/minimum-cost-to-move-chips-to-the-same-position/
1 <= position.length <= 100
1 <= position[i] <= 10^9
"""
class Solution:
    def minCostToMoveChips(self, position: list[int]) -> int:
        """Minimum total cost to gather all chips onto one position.

        Moving a chip by 2 is free and moving it by 1 costs 1, so only
        each chip's parity matters: collect everything onto a single even
        or single odd square, whichever side requires fewer parity flips.

        >>> s = Solution()
        >>> s.minCostToMoveChips(position=[1, 2, 3])
        1
        >>> s.minCostToMoveChips(position=[2, 2, 2, 3, 3])
        2
        >>> s.minCostToMoveChips(position=[1, 1000000000])
        1
        """
        # Count chips sitting on odd positions in one pass.
        odd = sum(p & 1 for p in position)
        # The cheaper target parity costs one move per chip of the other parity.
        return min(odd, len(position) - odd)
| [
"rmself@qq.com"
] | rmself@qq.com |
3cb04f968561e9aa25f6ab04a58640de1dca4fd9 | 5797109b344053721f354b8cd8bd654307068527 | /a4/loop-same-line.py | 30c645cc04e0a9633c40591edc1b20d47963eead | [] | no_license | susanphone/csci107-assignments | 9135b87fac8ef74f105e5aa87be6898e16a5075c | 0f2f0173d26e52667a48940c1b30a31b5b90e87b | refs/heads/master | 2021-08-17T11:46:21.814547 | 2020-01-12T03:31:42 | 2020-01-12T03:31:42 | 219,920,069 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 388 | py | string_asterisks = input("Ener the number of asterisks: ")
asterisks = int(string_asterisks)
asterisks = int(input("enter the number of asterisks"))
print("*" * asterisks)
#same as above.
"""
line_of_asterisks = ""
for asterisk in range (asterisks):
# line_of_asterisks += "*" <----- same as one below
line_of_asterisks = line_of_asterisks + "*"
print(line_of_asterisks)
"""
| [
"susan.a.mccartney12@gmail.com"
] | susan.a.mccartney12@gmail.com |
86926e47b54c38a35829cb0b930686dcdfe099c0 | c5a5bcdbb63869465107156cfe2f03da2c7fb8f7 | /Smoke_test_automated/features/pages/student_unenroll_page.py | b75c3bf4c809235adaafe0bc66b72eef1c12e0fb | [] | no_license | Jogues/SmokeTest | dbebadd57fc65f7cd93fbdac0c87b8a4ac5ca875 | 615363199eada0af6503d6dac9465d4445e19481 | refs/heads/master | 2021-04-09T10:20:23.488841 | 2018-03-15T07:00:55 | 2018-03-15T07:00:55 | 107,356,819 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,218 | py | from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.select import Select
import time
#locators
settingButton = (By.CSS_SELECTOR, '#actions-dropdown-link-0 > span.fa.fa-cog')
myCoursesLink = (By.LINK_TEXT, 'Automated Course')
myCourses = (By.ID, 'my-courses')
dropdownUnenroll = (By.CSS_SELECTOR, '#unenroll-0')
unenrollButton = (By.CSS_SELECTOR, '#unenroll_form > div > input[type="submit"]')
def courses_available(context):
wait = WebDriverWait(context.browser, 20)
elem = wait.until(EC.visibility_of_element_located(myCoursesLink))
def unenroll_from_course(context):
wait = WebDriverWait(context.browser, 20)
elem = wait.until(EC.visibility_of_element_located(settingButton))
elem.click()
elem2 = wait.until(EC.visibility_of_element_located(dropdownUnenroll))
elem2.click()
time.sleep(3)
elem3 = wait.until(EC.visibility_of_element_located(unenrollButton))
elem3.click()
def successfully_unenroll_student(context):
time.sleep(3)
context.browser.save_screenshot('student unenroll from automated course.png')
| [
"noreply@github.com"
] | noreply@github.com |
3242f88ecdf9093acc3e2f8932964d234244f5dc | 0c83546e55b8e3a0f0ce28e69541d2f79d093909 | /transfer.py | 0f14e40f75b60d8d745039d14d6ae66d0acc9882 | [] | no_license | monaanvari/Neural-Style-Transfer-Reddit-Wallpaper | e6a579e7afebe6d60ee77c3feec60b063f18cd2c | 859eecad6f0cbc4f95a1017005b3c8f937c5fcd8 | refs/heads/main | 2023-02-09T01:37:53.686299 | 2020-12-29T22:31:07 | 2020-12-29T22:31:07 | 325,150,505 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,146 | py |
'''built using tensorflow's style transfer tutorial'''
import os
import time
import PIL.Image
import matplotlib.pyplot as plt
import matplotlib as mpl
import numpy as np
import tensorflow as tf
from os.path import expanduser
home = expanduser("~")
content_layers = ['block5_conv2']
style_layers = ['block1_conv1',
'block2_conv1',
'block3_conv1',
'block4_conv1',
'block5_conv1']
total_variation_weight=30
class StyleContentModel(tf.keras.models.Model):
def __init__(self):
super(StyleContentModel, self).__init__()
self.vgg = vggLayers(style_layers+content_layers)
self.vgg.trainable = False
self.style_weight = 1e-2
self.content_weight = 1e-2
def call(self, inputs):
inputs = inputs*255.0
preprocessed_input = tf.keras.applications.vgg19.preprocess_input(inputs)
outputs = self.vgg(preprocessed_input)
style_outputs = outputs[:len(style_layers)]
content_outputs = outputs[len(style_layers):]
style_outputs = [gramMatrix(style_output) for style_output in style_outputs]
style_dict = {name: output for name, output in zip(style_layers, style_outputs)}
content_dict = {name: output for name, output in zip(content_layers, content_outputs)}
return {'content':content_dict, 'style':style_dict}
def getTargets(self, content_image, style_image):
self.style_targets = self.call(style_image)['style']
self.content_targets = self.call(content_image)['content']
def styleContentLoss(self, outputs):
style_outputs = outputs['style']
content_outputs = outputs['content']
style_loss = tf.add_n([tf.reduce_mean((style_outputs[name]-self.style_targets[name])**2)
for name in style_outputs.keys()])
style_loss *= self.style_weight / len(style_outputs)
content_loss = tf.add_n([tf.reduce_mean((content_outputs[name]-self.content_targets[name])**2)
for name in content_outputs.keys()])
content_loss *= self.content_weight / len(content_outputs)
loss = style_loss + content_loss
return loss
@tf.function()
def train_step(self,image):
with tf.GradientTape() as tape:
outputs = self.call(image)
loss = self.styleContentLoss(outputs)
loss += total_variation_weight*tf.image.total_variation(image)
grad = tape.gradient(loss, image)
self.opt.apply_gradients([(grad,image)])
image.assign(clip0_1(image))
def train(self,image, epochs, steps_per_epoch, opt):
self.opt = opt
start = time.time()
step = 0
for n in range(epochs):
for m in range(steps_per_epoch):
step += 1
self.train_step(image)
print(".", end='')
print("Train step: {}".format(step))
end = time.time()
print("Total time: {:.1f}".format(end-start))
def imshow(image, title=None):
if len(image.shape) > 3:
image = tf.squeeze(image, axis=0)
plt.imshow(image)
if title:
plt.title(title)
def load_img(path_to_img):
max_dim = 512
img = tf.io.read_file(path_to_img)
img = tf.image.decode_image(img, channels=3)
img = tf.image.convert_image_dtype(img, tf.float32)
shape = tf.cast(tf.shape(img)[:-1], tf.float32)
long_dim = max(shape)
scale = max_dim / long_dim
new_shape = tf.cast(shape * scale, tf.int32)
img = tf.image.resize(img, new_shape)
img = img[tf.newaxis, :]
return img
def tensorToImage(tensor):
tensor = tensor*255
tensor = np.array(tensor, dtype=np.uint8)
if np.ndim(tensor)>3:
assert tensor.shape[0] == 1
tensor = tensor[0]
return PIL.Image.fromarray(tensor)
def classifyWithVgg(image):
x = tf.keras.applications.vgg19.preprocess_input(image)
x = tf.image.resize(x, (224, 224))
vgg = tf.keras.applications.VGG19(include_top=True, weights='imagenet')
prediction_probabilities = vgg(x)
predicted_top_5 = tf.keras.applications.vgg19.decode_predictions(prediction_probabilities.numpy())[0]
[(class_name, prob) for (number, class_name, prob) in predicted_top_5]
print(predicted_top_5)\
def vggLayers(layer_names):
vgg = tf.keras.applications.VGG19(include_top=False, weights='imagenet')
outputs = [vgg.get_layer(name).output for name in layer_names]
vgg.trainable = False
model = tf.keras.Model([vgg.input], outputs)
return model
def printVggLayers(layers, outputs):
for name, output in zip(layers, outputs):
print(name)
print(" shape: ", output.numpy().shape)
print(" min: ", output.numpy().min())
print(" max: ", output.numpy().max())
print(" mean: ", output.numpy().mean())
print()
def printModelOutputDetails(model, image):
results = model.call(tf.constant(image))
print('Styles:')
for name, output in sorted(results['style'].items()):
print(" ", name)
print(" shape: ", output.numpy().shape)
print(" min: ", output.numpy().min())
print(" max: ", output.numpy().max())
print(" mean: ", output.numpy().mean())
print()
print("Contents:")
for name, output in sorted(results['content'].items()):
print(" ", name)
print(" shape: ", output.numpy().shape)
print(" min: ", output.numpy().min())
print(" max: ", output.numpy().max())
print(" mean: ", output.numpy().mean())
def gramMatrix(input_tensor):
result = tf.linalg.einsum('bijc,bijd->bcd', input_tensor, input_tensor)
input_shape = tf.shape(input_tensor)
num_locations = tf.cast(input_shape[1]*input_shape[2], tf.float32)
return result/num_locations
def clip0_1(image):
return tf.clip_by_value(image, clip_value_min=0, clip_value_max=1)
def transfer(mainImageUrl, styleImageUrl):
mpl.rcParams['figure.figsize'] = (12,12)
mpl.rcParams['axes.grid'] = False
opt = tf.optimizers.Adam(learning_rate=0.006, beta_1=0.99, epsilon=1e-1)
try:
os.remove(home+'/.keras/datasets/NeuralStyleTransferMain')
os.remove(home+'/.keras/datasets/NeuralStyleTransferStyle')
except:
pass
content_path = tf.keras.utils.get_file('NeuralStyleTransferMain', mainImageUrl)
style_path = tf.keras.utils.get_file('NeuralStyleTransferStyle',styleImageUrl)
content_image = load_img(content_path)
style_image = load_img(style_path)
# classifyWithVgg(content_image)
model = StyleContentModel()
model.getTargets(content_image, style_image)
image = tf.Variable(content_image)
# printVggLayers(style_layers, style_output)
# printModelOutputDetails(model, image)
model.train(image, 10, 300, opt)
plt.subplot(2, 2, 1)
imshow(content_image, 'Content Image')
plt.subplot(2, 2, 2)
imshow(style_image, 'Style Image')
plt.subplot(2, 2, 3)
imshow(np.asarray(image), 'result')
plt.show()
return tensorToImage(image)
| [
"mona@DN0a1e4bc1.SUNet"
] | mona@DN0a1e4bc1.SUNet |
e47531fea5a7ce4506ea4da759be3bf3eac25158 | b99f3fc079b092aae8c58438ea5ff3d5517619f7 | /DailyProgrammer/AnagramDetector.py | 3ab27179b41fc2fcd4f1631f75abffad1b5cec90 | [] | no_license | billykeyss/CodingQustions | 46eeced13275cd9e98c0c7eb91f31bc07f43f688 | f64427287e7497af6dd1b8481ab70c71913f174e | refs/heads/master | 2022-05-10T05:24:32.089050 | 2022-05-06T01:50:00 | 2022-05-06T01:50:00 | 59,133,545 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,361 | py | # You'll be given two words or sets of words separated by a question mark. Your task is to replace the question mark with information about the validity of the anagram. Example:
# "Clint Eastwood" ? "Old West Action"
# "parliament" ? "partial man"
#
# "Clint Eastwood" is an anagram of "Old West Action"
# "parliament" is NOT an anagram of "partial man"
# https://www.reddit.com/r/dailyprogrammer/comments/52enht/20160912_challenge_283_easy_anagram_detector/
def anagramDet(anagramString):
firstString = anagramString.split("?")[0].lower()
secondString = anagramString.split("?")[1].lower()
firstStringSorted = ''.join(sorted(firstString.replace(" ", "").replace('"', "")))
secondStringSorted = ''.join(sorted(secondString.replace(" ", "").replace('"', "")))
if firstStringSorted != secondStringSorted:
return firstString + " is NOT an anagram of " + secondString
else:
return firstString + " is an anagram of " + secondString
print anagramDet('"Clint Eastwood" ? "Old West Action"')
print anagramDet('"parliament" ? "partial man"')
print anagramDet('"wisdom" ? "mid sow"')
print anagramDet('"Seth Rogan" ? "Gathers No"')
print anagramDet('"Reddit" ? "Eat Dirt"')
print anagramDet('"Schoolmaster" ? "The classroom"')
print anagramDet('"Astronomers" ? "Moon starer"')
print anagramDet('"Dormitory" ? "Dirty Rooms"')
| [
"billhuang@live.ca"
] | billhuang@live.ca |
0fd7fb6a397224ee474e35019801fe163bad38be | 41e9702da8b5d5eba67b4df1a31cdf072bdf99c5 | /Basic_Python/EighthClass_Lambda.py | fb22b5a532a750a424dd4149081274a2e2ad0533 | [] | no_license | Rapha-Y/My_Python_Studies | b72badc3f02ca80e6e57556cee13066a9630baa0 | 8c45fed56eb8ab9b3e38ba05a19d3c2008b256e3 | refs/heads/master | 2022-09-26T00:31:36.104303 | 2020-06-01T17:38:29 | 2020-06-01T17:38:29 | 267,406,447 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 411 | py | word_counter = lambda item_list: [len(i) for i in item_list]
fruits = ["Caju", "Morango", "Melão", "Pêssego", "Pitaya"]
print(word_counter(fruits))
sum = lambda a, b: a + b
print(sum(5, 10))
calculator = {
'sum': lambda a, b: a + b,
'sub': lambda a, b: a - b,
'mul': lambda a, b: a * b,
'div': lambda a, b: a / b
}
print(calculator['div'](10, 2))
soma = calculator['sum']
print(soma(5, 5)) | [
"raph.yuki@gmail.com"
] | raph.yuki@gmail.com |
6b55598316455e43f008e4b6dad8851ba4ed3aa7 | e9a3f4a6f8828597dae8af8ea318b444af1798ba | /mag_ng/users/migrations/0003_auto_20200818_0517.py | f4d959172de433cee25454c2887bbea24208b12e | [] | no_license | kinsomaz/Online-Magazine-Website | c4a0b3b067a28202763a3646e02db9355e2e98a7 | dbb02225af2202913ea7dcc076f5af0052db117c | refs/heads/master | 2022-12-04T00:46:31.619920 | 2020-08-21T12:53:58 | 2020-08-21T12:53:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 426 | py | # Generated by Django 3.1 on 2020-08-18 04:17
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0002_auto_20200818_0506'),
]
operations = [
migrations.AlterField(
model_name='customuser',
name='username',
field=models.CharField(max_length=20, unique=True, verbose_name='username'),
),
]
| [
"alameenraji31@gmail.com"
] | alameenraji31@gmail.com |
2a653c6e01d45ec49bc2116acff021a462af90df | 5c4f7d36cfb3fe1791dfefa5a679bcc5e784490d | /tp2/tabla_5_zotenko_analisis.py | a205b6fdf2e72601f0133370340bd72f88d7f15c | [] | no_license | CarlosAndresRiosChavez/ProyectoRedes | 1ac43dbfe380ef696594c1dd1dab59bd98888914 | eb91bbf1f64e886664799d3d28e2d9facba95ae4 | refs/heads/master | 2020-03-27T05:38:34.377184 | 2018-12-05T15:52:12 | 2018-12-05T15:52:12 | 146,036,730 | 0 | 0 | null | 2018-09-24T16:18:48 | 2018-08-24T20:26:50 | HTML | UTF-8 | Python | false | false | 4,617 | py | from __future__ import division
import networkx as nx
import numpy as np
def ldata(archive):
f=open(archive)
data=[]
for line in f:
line=line.strip()
col=line.split()
data.append(col)
return data
# archivos de redes
file_y2h = "./dataset/yeast_Y2H_curado.txt"
file_apms = "./dataset/yeast_AP-MS_curado.txt"
file_lit = "./dataset/yeast_LIT_curado.txt"
file_reg = "./dataset/yeast_LIT_Reguly_curado.txt"
# archivos de pares totales
pares_y2h = "./dataset/pares_totales_y2h.txt"
pares_apms = "./dataset/pares_totales_apms.txt"
pares_lit = "./dataset/pares_totales_lit.txt"
pares_reg = "./dataset/pares_totales_reg.txt"
# cargo los alpha y beta de cada red. La 1ra tupla tiene los valores simulados, la 2da los valores de las regresiones
# siempre el primero es alpha y el 2do beta
alpha_beta_y2h = [(0.009, 0.11), (0.0206, 0.1606)]
alpha_beta_apms = [(0.035, 0.19), (0.0406, 0.2221)]
alpha_beta_lit = [(0.04, 0.2), (0.0724, 0.2803)]
alpha_beta_reg = [(0.04, 0.14), (0.0485, 0.0575)]
# acomodo las cosas para iterar
files_redes = [file_y2h, file_apms, file_lit, file_reg]
files_pares = [pares_y2h, pares_apms, pares_lit, pares_reg]
nombres = ["red Y2H", "red AP-MS", "red LIT", "red REG"]
alphas_y_betas = [alpha_beta_y2h, alpha_beta_apms, alpha_beta_lit, alpha_beta_reg]
# lista de esenciales:
lista_esenciales_raw = ldata("./dataset/Essential_ORFs_paperHe_curado.txt")
lista_esenciales = []
for l in lista_esenciales_raw:
lista_esenciales.append(l[0])
set_esenciales = set(lista_esenciales)
# defino una funcion que calcula la P_E de un nodo perteneciente a un grafo, con valores de alpha y beta establecidos por un indice "red", donde ademas hay que decir si son simulados o experimentales:
# POR DEFECTO usa los valores simulados
def P_E(nodo, grafo, red, simulado=True):
k = grafo.degree(nodo)
if simulado == True:
alpha = alphas_y_betas[red][0][0]
beta = alphas_y_betas[red][0][1]
if simulado == False:
alpha = alphas_y_betas[red][1][0]
beta = alphas_y_betas[red][1][1]
p = 1 - ( (1 - alpha)**k )*(1 - beta)
return p
############################################################################################################################################
############################################################################################################################################
############################################################################################################################################
## en el caso experimental me estan quedando alpha negativos???
# aca defino si trabajo usando los simulados o los experimentales
es_simulado = True
if es_simulado == True:
print("Usando parametros SIMULADOS")
results = open("./numero_esperado_simulados.txt", "w")
results.write("Usando parametros simulados:\n\n")
if es_simulado == False:
print("Usando parametros EXPERIMENTALES")
results = open("./numero_esperado_experimentales.txt", "w")
results.write("Usando parametros experimentales:\n\n")
results.write("\t\t\tReal\tModelado\n")
for i in range(len(nombres)):
# defino el grafo para pedir el grado de los nodos
lista = ldata(files_redes[i])
G = nx.Graph()
G.add_edges_from(lista)
# selecciono la lista de pares TOTALES correspondiente a la red:
lista_pares = ldata(files_pares[i])
mismo_tipo = []
# separo CUALES pares son del mismo tipo:
for l in lista_pares:
set_l = set(l)
interseccion = set_l.intersection(set_esenciales)
if (len(interseccion) == 0) or (len(interseccion) == 2):
mismo_tipo.append(l)
# los cuento:
cant_mismo_tipo = len(mismo_tipo)
# teniendo los pares del mismo tipo, para cada nodo calculo P_E
proba_mismo_tipo = []
for m in mismo_tipo:
pmt = P_E(m[0], G, i, es_simulado)*P_E(m[1], G, i, es_simulado) + (1 - P_E(m[0], G, i, es_simulado))*(1 - P_E(m[1], G, i, es_simulado))
proba_mismo_tipo.append(pmt)
# la suma de todas las probabilidades de que cada par sea del mismo tipo:
numero_esperado = sum(proba_mismo_tipo)
print("=======")
print(nombres[i])
print("=======")
print("Numero esperado = %d" % (numero_esperado))
print("Numero real = %d" % cant_mismo_tipo)
results.write("%s\t\t%d\t\t%d\n" % (nombres[i], cant_mismo_tipo, numero_esperado))
results.close()
| [
"noreply@github.com"
] | noreply@github.com |
d1441b012702f2751b0bea9934251ad4628a2b71 | afd2087e80478010d9df66e78280f75e1ff17d45 | /torch/ao/pruning/_experimental/pruner/prune_functions.py | c4c94e0887adf43ca07ec7018d1b5e9703519da6 | [
"BSD-3-Clause",
"BSD-2-Clause",
"LicenseRef-scancode-secret-labs-2011",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0"
] | permissive | pytorch/pytorch | 7521ac50c47d18b916ae47a6592c4646c2cb69b5 | a6f7dd4707ac116c0f5fb5f44f42429f38d23ab4 | refs/heads/main | 2023-08-03T05:05:02.822937 | 2023-08-03T00:40:33 | 2023-08-03T04:14:52 | 65,600,975 | 77,092 | 24,610 | NOASSERTION | 2023-09-14T21:58:39 | 2016-08-13T05:26:41 | Python | UTF-8 | Python | false | false | 18,831 | py | """
Collection of conversion functions for linear / conv2d structured pruning
Also contains utilities for bias propagation
"""
from typing import cast, Optional, Callable, Tuple
import torch
from torch import nn, Tensor
from torch.nn.utils import parametrize
from torch.nn.utils.parametrize import ParametrizationList
from .parametrization import FakeStructuredSparsity, BiasHook
# BIAS PROPAGATION
def _remove_bias_handles(module: nn.Module) -> None:
if hasattr(module, "_forward_hooks"):
bias_hooks = []
for key, hook in module._forward_hooks.items():
if isinstance(hook, BiasHook):
bias_hooks.append(key)
for key in bias_hooks:
del module._forward_hooks[key]
def _get_adjusted_next_layer_bias(
next_layer: nn.Module, pruned_biases: Tensor, mask: Tensor
) -> nn.Parameter:
r"""Returns new adjusted bias for the second supported module"""
if parametrize.is_parametrized(next_layer):
# need to access original weight
parametrization_dict = cast(nn.ModuleDict, next_layer.parametrizations)
weight_parameterizations = cast(
ParametrizationList, parametrization_dict.weight
)
next_weight = weight_parameterizations.original
else:
next_weight = cast(Tensor, next_layer.weight)
scaling_weight = next_weight[:, ~mask]
if isinstance(next_layer, nn.Conv2d): # checking for Conv2d
# Propagating first layer pruned biases and calculating the new second layer bias
# involves more steps since the Conv2d scaling weight has extra dimensions,
# so adding bias involves broadcasting, logically:
# for each channel k in range(oC):
# scaled_biases = sum(first_bias[pruned_idx] @ next_weight[k, pruned_idx, :, :].T)
# new_next_bias[k] = old_next_bias[k] + scaled_biases
scaling_product = torch.matmul(
pruned_biases.reshape(1, -1), torch.transpose(scaling_weight, 1, 2)
)
sum_range = list(range(len(scaling_product.shape)))[
1:
] # all but the first dimension
scaled_biases = torch.sum(scaling_product, sum_range)
elif isinstance(next_layer, nn.Linear): # Linear
scaled_biases = torch.matmul(
pruned_biases, torch.transpose(scaling_weight, 0, 1)
) # recall b2_new = b1 @ w2.T + b2
else:
raise NotImplementedError(f"Type {type(next_layer)} not supported yet.")
if (
parametrize.is_parametrized(next_layer)
and getattr(next_layer, "_bias", None) is not None
): # next_layer is parametrized & has original bias ._bias
adjusted_bias = nn.Parameter(scaled_biases + next_layer._bias)
elif (
not parametrize.is_parametrized(next_layer) and next_layer.bias is not None
): # next_layer not parametrized & has .bias
adjusted_bias = nn.Parameter(scaled_biases + next_layer.bias)
else: # next_layer has no bias
adjusted_bias = nn.Parameter(scaled_biases)
return adjusted_bias
def _prune_module_bias(module: nn.Module, mask: Tensor) -> None:
r"""Applies mask to given modules bias"""
# prune bias along with weights, discard pruned indices of bias
original_bias = cast(Tensor, getattr(module, "_bias", module.bias))
if original_bias is not None:
module.bias = nn.Parameter(original_bias[mask])
# remove _bias parameter
if hasattr(module, "_bias"):
delattr(module, "_bias")
def _propogate_module_bias(module: nn.Module, mask: Tensor) -> Optional[Tensor]:
r"""
In the case that we need to propagate biases, this function will return the biases we need
"""
# set current module bias
if module.bias is not None:
module.bias = nn.Parameter(cast(Tensor, module.bias)[mask])
elif getattr(module, "_bias", None) is not None:
module.bias = nn.Parameter(cast(Tensor, module._bias)[mask])
# get pruned biases to propagate to subsequent layer
if getattr(module, "_bias", None) is not None:
pruned_biases = cast(Tensor, module._bias)[~mask]
else:
pruned_biases = None
if hasattr(module, "_bias"):
delattr(module, "_bias")
return pruned_biases
# LINEAR
def _prune_linear_helper(linear: nn.Linear) -> Tensor:
# expects linear to be a parameterized linear module
parametrization_dict = cast(nn.ModuleDict, linear.parametrizations)
weight_parameterizations = cast(ParametrizationList, parametrization_dict.weight)
for p in weight_parameterizations:
if isinstance(p, FakeStructuredSparsity):
mask = cast(Tensor, p.mask)
with torch.no_grad():
parametrize.remove_parametrizations(linear, "weight", leave_parametrized=True)
linear.weight = nn.Parameter(linear.weight[mask])
linear.out_features = linear.weight.shape[0]
_remove_bias_handles(linear)
return mask
def prune_linear(linear: nn.Linear) -> None:
mask = _prune_linear_helper(linear)
if getattr(linear, "prune_bias", False):
_prune_module_bias(linear, mask)
def prune_linear_linear(linear1: nn.Linear, linear2: nn.Linear) -> None:
prune_linear_activation_linear(linear1, None, linear2)
def prune_linear_activation_linear(
linear1: nn.Linear,
activation: Optional[Callable[[Tensor], Tensor]],
linear2: nn.Linear,
):
mask = _prune_linear_helper(linear1)
if getattr(linear1, "prune_bias", False):
_prune_module_bias(linear1, mask)
else:
pruned_biases = _propogate_module_bias(linear1, mask)
if pruned_biases is not None:
if activation:
pruned_biases = activation(pruned_biases)
linear2.bias = _get_adjusted_next_layer_bias(linear2, pruned_biases, mask)
with torch.no_grad():
if parametrize.is_parametrized(linear2):
parametrization_dict = cast(nn.ModuleDict, linear2.parametrizations)
weight_parameterizations = cast(
ParametrizationList, parametrization_dict.weight
)
weight_parameterizations.original = nn.Parameter(
weight_parameterizations.original[:, mask]
)
linear2.in_features = weight_parameterizations.original.shape[1]
else:
linear2.weight = nn.Parameter(linear2.weight[:, mask])
linear2.in_features = linear2.weight.shape[1]
# CONV2D
def _prune_conv2d_helper(conv2d: nn.Conv2d) -> Tensor:
parametrization_dict = cast(nn.ModuleDict, conv2d.parametrizations)
weight_parameterizations = cast(ParametrizationList, parametrization_dict.weight)
for p in weight_parameterizations:
if isinstance(p, FakeStructuredSparsity):
mask = cast(Tensor, p.mask)
with torch.no_grad():
parametrize.remove_parametrizations(conv2d, "weight", leave_parametrized=True)
conv2d.weight = nn.Parameter(conv2d.weight[mask])
conv2d.out_channels = conv2d.weight.shape[0]
_remove_bias_handles(conv2d)
return mask
def prune_conv2d_padded(conv2d_1: nn.Conv2d) -> None:
parametrization_dict = cast(nn.ModuleDict, conv2d_1.parametrizations)
weight_parameterizations = cast(ParametrizationList, parametrization_dict.weight)
for p in weight_parameterizations:
if isinstance(p, FakeStructuredSparsity):
mask = cast(Tensor, p.mask)
with torch.no_grad():
parametrize.remove_parametrizations(conv2d_1, "weight", leave_parametrized=True)
if getattr(conv2d_1, "_bias", None) is not None:
if (
conv2d_1.bias is not None
): # conv2d_1 has original bias and bias propagated from previous layer
new_bias = torch.zeros(conv2d_1.bias.shape)
new_bias[mask] = conv2d_1.bias[mask]
# adjusted bias that to keep in conv2d_1
new_bias[~mask] = cast(Tensor, conv2d_1._bias)[~mask]
# pruned biases that are kept instead of propagated
conv2d_1.bias = nn.Parameter(new_bias)
else: # conv2d_1 has only original bias
conv2d_1.bias = nn.Parameter(cast(Tensor, conv2d_1._bias))
else:
# no original bias, only propagated bias
if (
conv2d_1.bias is not None
): # conv2d_1 has bias propagated from previous layer
conv2d_1.bias.data[~mask] = 0
if hasattr(conv2d_1, "_bias"):
delattr(conv2d_1, "_bias")
def prune_conv2d(conv2d: nn.Conv2d) -> None:
mask = _prune_conv2d_helper(conv2d)
if getattr(conv2d, "prune_bias", False):
_prune_module_bias(conv2d, mask)
def prune_conv2d_conv2d(conv2d_1: nn.Conv2d, conv2d_2: nn.Conv2d) -> None:
prune_conv2d_activation_conv2d(conv2d_1, None, conv2d_2)
def prune_conv2d_activation_conv2d(
conv2d_1: nn.Conv2d,
activation: Optional[Callable[[Tensor], Tensor]],
conv2d_2: nn.Conv2d,
):
r"""
Fusion Pattern for conv2d -> some activation module / function -> conv2d layers
"""
parametrization_dict = cast(nn.ModuleDict, conv2d_1.parametrizations)
weight_parameterizations = cast(ParametrizationList, parametrization_dict.weight)
for p in weight_parameterizations:
if isinstance(p, FakeStructuredSparsity):
mask = cast(Tensor, p.mask)
prune_bias = getattr(conv2d_1, "prune_bias", False)
if (
hasattr(conv2d_2, "padding")
and cast(Tuple[int], conv2d_2.padding) > (0, 0)
and (conv2d_1.bias is not None or getattr(conv2d_1, "_bias", None) is not None)
):
prune_conv2d_padded(conv2d_1)
else:
mask = _prune_conv2d_helper(conv2d_1)
if prune_bias:
_prune_module_bias(conv2d_1, mask)
else:
pruned_biases = _propogate_module_bias(conv2d_1, mask)
if pruned_biases is not None:
if activation:
pruned_biases = activation(pruned_biases)
conv2d_2.bias = _get_adjusted_next_layer_bias(
conv2d_2, pruned_biases, mask
)
if (
not (
hasattr(conv2d_2, "padding")
and cast(Tuple[int], conv2d_2.padding) > (0, 0)
)
or conv2d_1.bias is None
):
with torch.no_grad():
if parametrize.is_parametrized(conv2d_2):
parametrization_dict = cast(
nn.ModuleDict, conv2d_2.parametrizations
)
weight_parameterizations = cast(
ParametrizationList, parametrization_dict.weight
)
weight_parameterizations.original = nn.Parameter(
weight_parameterizations.original[:, mask]
)
conv2d_2.in_channels = weight_parameterizations.original.shape[1]
else:
conv2d_2.weight = nn.Parameter(conv2d_2.weight[:, mask])
conv2d_2.in_channels = conv2d_2.weight.shape[1]
def prune_conv2d_pool_activation_conv2d(
c1: nn.Conv2d,
pool: nn.Module,
activation: Optional[Callable[[Tensor], Tensor]],
c2: nn.Conv2d,
) -> None:
prune_conv2d_activation_conv2d(c1, activation, c2)
def prune_conv2d_activation_pool_conv2d(
c1: nn.Conv2d,
activation: Optional[Callable[[Tensor], Tensor]],
pool: nn.Module,
c2: nn.Conv2d,
) -> None:
prune_conv2d_activation_conv2d(c1, activation, c2)
def prune_conv2d_pool_flatten_linear(
conv2d: nn.Conv2d,
pool: nn.Module,
flatten: Optional[Callable[[Tensor], Tensor]],
linear: nn.Linear,
) -> None:
mask = _prune_conv2d_helper(conv2d)
# We map the pruned indices of the Conv2d output to the flattened indices of the Linear following the Flatten layer.
# we determine the flattening scale (h * w), and readjust `first_pruned_indices`
# (each idx maps to range idx * h * w to (idx+1) * h * w), `first_valid_indices`,
# and `pruned_biases` (repeat each bias by h * w).
if parametrize.is_parametrized(linear):
parametrization_dict = cast(nn.ModuleDict, linear.parametrizations)
weight_parameterizations = cast(
ParametrizationList, parametrization_dict.weight
)
linear_ic = weight_parameterizations.original.shape[1]
else:
linear_ic = linear.weight.shape[1]
conv2d_oc = len(mask)
assert (
linear_ic % conv2d_oc == 0
), f"Flattening from dimensions {conv2d_oc} to {linear_ic} not supported"
flatten_scale = linear_ic // conv2d_oc
flattened_mask = torch.tensor(
[[val] * flatten_scale for val in mask], dtype=torch.bool, device=mask.device
).flatten()
if getattr(conv2d, "prune_bias", False):
_prune_module_bias(conv2d, mask)
else:
pruned_biases = cast(Tensor, _propogate_module_bias(conv2d, mask))
flattened_pruned_biases = torch.tensor(
[[bias] * flatten_scale for bias in pruned_biases], device=mask.device
).flatten()
linear.bias = _get_adjusted_next_layer_bias(
linear, flattened_pruned_biases, flattened_mask
)
with torch.no_grad():
if parametrize.is_parametrized(linear):
parametrization_dict = cast(nn.ModuleDict, linear.parametrizations)
weight_parameterizations = cast(
ParametrizationList, parametrization_dict.weight
)
weight_parameterizations.original = nn.Parameter(
weight_parameterizations.original[:, flattened_mask]
)
linear.in_features = weight_parameterizations.original.shape[1]
else:
linear.weight = nn.Parameter(linear.weight[:, flattened_mask])
linear.in_features = linear.weight.shape[1]
def prune_lstm_output_linear(
lstm: nn.LSTM, getitem: Callable, linear: nn.Linear
) -> None:
prune_lstm_output_layernorm_linear(lstm, getitem, None, linear)
def prune_lstm_output_layernorm_linear(
lstm: nn.LSTM,
getitem: Callable,
layernorm: Optional[nn.LayerNorm],
linear: nn.Linear,
) -> None:
for i in range(lstm.num_layers):
if parametrize.is_parametrized(lstm, f"weight_ih_l{i}"):
parametrization_dict = cast(nn.ModuleDict, lstm.parametrizations)
weight_parameterizations = cast(
ParametrizationList, parametrization_dict[f"weight_ih_l{i}"]
)
mask = weight_parameterizations[0].mask
with torch.no_grad():
parametrize.remove_parametrizations(
lstm, f"weight_ih_l{i}", leave_parametrized=True
)
setattr(
lstm,
f"weight_ih_l{i}",
nn.Parameter(getattr(lstm, f"weight_ih_l{i}")[mask]),
)
setattr(
lstm,
f"bias_ih_l{i}",
nn.Parameter(getattr(lstm, f"bias_ih_l{i}")[mask]),
)
if parametrize.is_parametrized(lstm, f"weight_hh_l{i}"):
parametrization_dict = cast(nn.ModuleDict, lstm.parametrizations)
weight_parameterizations = cast(
ParametrizationList, parametrization_dict[f"weight_hh_l{i}"]
)
mask = weight_parameterizations[0].mask
with torch.no_grad():
parametrize.remove_parametrizations(
lstm, f"weight_hh_l{i}", leave_parametrized=True
)
# splitting out hidden-hidden masks
W_hi, W_hf, W_hg, W_ho = torch.split(
getattr(lstm, f"weight_hh_l{i}"), lstm.hidden_size
)
M_hi, M_hf, M_hg, M_ho = torch.split(mask, lstm.hidden_size)
# resize each individual weight separately
W_hi = W_hi[M_hi][:, M_hi]
W_hf = W_hf[M_hf][:, M_hf]
W_hg = W_hg[M_hg][:, M_hg]
W_ho = W_ho[M_ho][:, M_ho]
# concat, use this as new weight
new_weight = torch.cat((W_hi, W_hf, W_hg, W_ho))
setattr(lstm, f"weight_hh_l{i}", nn.Parameter(new_weight))
setattr(
lstm,
f"bias_hh_l{i}",
nn.Parameter(getattr(lstm, f"bias_hh_l{i}")[mask]),
)
# If this is the final layer, then we need to prune linear layer columns
if i + 1 == lstm.num_layers:
lstm.hidden_size = int(M_hi.sum())
with torch.no_grad():
if parametrize.is_parametrized(linear):
parametrization_dict = cast(
nn.ModuleDict, linear.parametrizations
)
weight_parameterizations = cast(
ParametrizationList, parametrization_dict.weight
)
weight_parameterizations.original = nn.Parameter(
weight_parameterizations.original[:, M_ho]
)
linear.in_features = weight_parameterizations.original.shape[1]
else:
linear.weight = nn.Parameter(linear.weight[:, M_ho])
linear.in_features = linear.weight.shape[1]
# if layernorm module, prune weight and bias
if layernorm is not None:
layernorm.normalized_shape = (linear.in_features,)
layernorm.weight = nn.Parameter(layernorm.weight[M_ho])
layernorm.bias = nn.Parameter(layernorm.bias[M_ho])
# otherwise need to prune the columns of the input of the next LSTM layer
else:
with torch.no_grad():
if parametrize.is_parametrized(lstm, f"weight_ih_l{i+1}"):
parametrization_dict = cast(
nn.ModuleDict, lstm.parametrizations
)
weight_parameterizations = cast(
ParametrizationList,
getattr(parametrization_dict, f"weight_ih_l{i+1}"),
)
weight_parameterizations.original = nn.Parameter(
weight_parameterizations.original[:, M_ho]
)
else:
next_layer_weight = getattr(lstm, f"weight_ih_l{i+1}")
setattr(
lstm,
f"weight_ih_l{i+1}",
nn.Parameter(next_layer_weight[:, M_ho]),
)
| [
"pytorchmergebot@users.noreply.github.com"
] | pytorchmergebot@users.noreply.github.com |
de3776c915a726bc67fd41a7bfba4a7611cdb4b0 | 804bcf4e04aba101440786cdc42db5fa8b818f76 | /new_api/api/article.py | e4502a844769e7bd7091bc757389d90b24d33a1a | [] | no_license | guateam/LabGuide | 481d962139ba182db47faab0f013f8f9e6eefdcf | 75a1162bac10742e7c0d88f85b4ea8acf658753e | refs/heads/master | 2022-11-21T19:08:25.054258 | 2020-04-14T07:01:29 | 2020-04-14T07:01:29 | 186,096,911 | 2 | 0 | null | 2020-07-30T15:01:16 | 2019-05-11T06:33:44 | Vue | UTF-8 | Python | false | false | 6,946 | py | from flasgger import swag_from
from flask import Blueprint, request
from new_api.db import database
from new_api.rights_control.models.AddAllArticle import AddAllArticle
from new_api.rights_control.models.AddAllArticleTag import AddAllArticleTag
from new_api.rights_control.models.AddArticle import AddArticle
from new_api.rights_control.models.AddArticleTag import AddArticleTag
from new_api.rights_control.models.ChangeAllArticle import ChangeAllArticle
from new_api.rights_control.models.ChangeAllArticleTag import ChangeAllArticleTag
from new_api.rights_control.models.ChangeArticle import ChangeArticle
from new_api.rights_control.models.ChangeArticleTag import ChangeArticleTag
from new_api.rights_control.models.DeleteAllArticle import DeleteAllArticle
from new_api.rights_control.models.DeleteAllArticleTag import DeleteAllArticleTag
from new_api.rights_control.models.DeleteArticle import DeleteArticle
from new_api.rights_control.models.DeleteArticleTag import DeleteArticleTag
from new_api.rights_control.models.ReadAllArticle import ReadAllArticle
from new_api.rights_control.models.ReadArticle import ReadArticle
from new_api.rights_control.models.ReadHistoryArticle import ReadHistoryArticle
from new_api.rights_control.rights_control import right_required
from new_api.util.def_methods import reply_json, get_user_id, get_dicts_from_models, \
login_required
article = Blueprint('article', __name__)
@article.route('/get_article')
@login_required
@right_required([ReadArticle, ReadAllArticle])
@swag_from('docs/article/get_article.yml')
def get_article():
    """Return one article, looked up by the `article_id` query parameter.

    Replies with code 1 and the formatted article dict on success,
    or code -7 when no matching article exists.
    """
    article_id = request.values.get('article_id')
    record = database.get('Article', [database.get_model('Article').ID == article_id], first=True)
    if not record:
        # Unknown article id.
        return reply_json(-7)
    return reply_json(1, record.get_dict(formatted=True))
@article.route('/add_article', methods=['POST'])
@login_required
@right_required([AddArticle, AddAllArticle])
@swag_from('docs/article/add_article.yml')
def add_article():
    """Create a new article from form data and record its first history entry.

    Replies with code 1 on success, -1 when the insert fails.
    """
    token = request.form['token']
    payload = {
        'content': request.form['content'],
        'title': request.form['title'],
        'tag': request.form['tag'],
        'author': get_user_id(token),
    }
    created = database.add('Article', payload)
    if not created:
        return reply_json(-1)
    # Log the initial revision in the history table.
    database.add('History', created.get_history_format())
    return reply_json(1)
@article.route('/change_article', methods=['POST'])
@login_required
@right_required([ChangeArticle, ChangeAllArticle])
@swag_from('docs/article/change_article.yml')
def change_article():
    """Update an article's content/title/tag and append a history entry.

    Replies with code 1 on success, -1 when the update fails.
    """
    token = request.form['token']
    article_id = request.form['article_id']
    updated = database.update(
        'Article',
        [database.get_model('Article').ID == article_id],
        {'content': request.form['content'],
         'title': request.form['title'],
         'tag': request.form['tag'],
         'changer': get_user_id(token)})
    if not updated:
        return reply_json(-1)
    # Record who made this revision in the history table.
    database.add('History', updated.get_history_format(user_id=get_user_id(token)))
    return reply_json(1)
@article.route('/delete_article', methods=['POST'])
@login_required
@right_required([DeleteArticle, DeleteAllArticle])
@swag_from('docs/article/delete_article.yml')
def delete_article():
    """Delete the article whose ID is given in the POST form.

    Replies with code 1 on success, -1 when the delete fails.
    """
    target_id = request.form['article_id']
    removed = database.delete('Article', [database.get_model('Article').ID == target_id])
    return reply_json(1) if removed else reply_json(-1)
@article.route('/add_article_tag', methods=['POST'])
@login_required
@right_required([AddArticleTag, AddAllArticleTag])
@swag_from('docs/article/add_article_tag.yml')
def add_article_tag():
    """Attach a new tag (name/type/icon/description) to an article.

    Replies with code 1 on success, -1 when the insert fails.
    """
    new_tag = {
        'article_id': request.form['article_id'],
        'name': request.form['name'],
        'description': request.form['description'],
        'icon': request.form['icon'],
        'tag_type': request.form['tag_type'],
    }
    created = database.add('ArticleTag', new_tag)
    return reply_json(1) if created else reply_json(-1)
@article.route('/change_article_tag', methods=['POST'])
@login_required
@right_required([ChangeArticleTag, ChangeAllArticleTag])
@swag_from('docs/article/change_article_tag.yml')
def change_article_tag():
    """Update an existing article tag's name/type/icon/description.

    Replies with code 1 on success, -1 when the update fails.
    """
    tag_id = request.form['tag_id']
    changes = {
        'name': request.form['name'],
        'description': request.form['description'],
        'icon': request.form['icon'],
        'tag_type': request.form['tag_type'],
    }
    updated = database.update('ArticleTag', [database.get_model('ArticleTag').id == tag_id], changes)
    return reply_json(1) if updated else reply_json(-1)
@article.route('/delete_article_tag', methods=['POST'])
@login_required
@right_required([DeleteArticleTag, DeleteAllArticleTag])
@swag_from('docs/article/delete_article_tag.yml')
def delete_article_tag():
    """Delete the article tag whose ID is given in the POST form.

    Replies with code 1 on success, -1 when the delete fails.
    """
    tag_id = request.form['tag_id']
    removed = database.delete('ArticleTag', [database.get_model('ArticleTag').id == tag_id])
    return reply_json(1) if removed else reply_json(-1)
@article.route('/get_article_tag')
@login_required
@right_required([ReadArticle, ReadAllArticle])
@swag_from('docs/article/get_article_tag.yml')
def get_article_tag():
    """Return every tag attached to the article given by `article_id`."""
    article_id = request.values.get('article_id')
    matches = database.get('ArticleTag', [database.get_model('ArticleTag').article_id == article_id])
    return reply_json(1, data=get_dicts_from_models(matches))
@article.route('/get_history')
@login_required
@right_required([ReadArticle, ReadAllArticle])
@swag_from('docs/article/get_history.yml')
def get_history():
    """Return the revision history list for the article given by `article_id`."""
    article_id = request.values.get('article_id')
    entries = database.get('History', [database.get_model('History').article_id == article_id])
    return reply_json(1, get_dicts_from_models(entries, formatted=True))
@article.route('/get_history_article')
@login_required
@right_required([ReadHistoryArticle, ReadAllArticle])
@swag_from('docs/article/get_history_article.yml')
def get_history_article():
    """Return one historical revision of an article, looked up by `history_id`.

    Replies with code 1 and the formatted revision dict, or -7 when the
    revision does not exist.
    """
    history_id = request.values.get('history_id')
    entry = database.get('History', [database.get_model('History').id == history_id], first=True)
    if entry:
        return reply_json(1, entry.get_dict(formatted=True))
    return reply_json(-7)
| [
"zhangyu199946@gmail.com"
] | zhangyu199946@gmail.com |
4dac548217bff8aa8e8c3333e5616a4dec3b1b16 | e04183c57141b34a4b82aa94542bb5b6e5c666db | /f04_python_code/pyvista_learning/pyvista_01_uniform_grid.py | c247c816a182d8e532f70f5204b6c01225585930 | [] | no_license | ivandudkov/dynamic-surface-class | ac32758e7ea057bc451f24ea0c7831c11efeef8b | 6e58beefe884f312f55ea9460a629568529994ee | refs/heads/main | 2023-07-15T02:15:14.606915 | 2021-08-30T14:41:49 | 2021-08-30T14:41:49 | 386,748,171 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,069 | py | # PyVista tutorial
# https://www.youtube.com/watch?v=FmNmRBsEBHE&ab_channel=SoftwareUnderground
# https://github.com/banesullivan/transform-2021
import numpy as np
import pyvista as pv
# Step 1 -- build a 3D scalar field as a NumPy array:
# 1000 evenly spaced samples on [0, 10], shaped into a 20 x 5 x 10 volume.
field = np.linspace(0, 10, 1000).reshape((20, 5, 10))

# Step 2 -- create the spatial reference: an empty PyVista uniform grid.
mesh = pv.UniformGrid()

# Step 3 -- set the grid dimensionality. Because the data will live on the
# CELLS of the grid, there is one more point than cell along each axis,
# hence the "+ 1". (To attach the data to the POINTS instead, the
# dimensions would equal field.shape exactly, with no "+ 1".)
mesh.dimensions = np.array(field.shape) + 1

# Step 4 -- place and scale the grid in space.
mesh.origin = (100, 33, 55.6)  # bottom-left corner of the data set
mesh.spacing = [1, 5, 2]       # cell (voxel) size along each axis

# Step 5 -- attach the values to the cell data. PyVista expects the array
# flattened in Fortran ('F') order.
mesh.cell_arrays['values'] = field.flatten(order='F')

# Step 5.1 -- visualize the mesh.
mesh.plot(show_edges=True)

# Step 6 (alternative) -- to attach the values to the point data instead
# (remember: dimensions would then be field.shape, without the "+ 1"):
# mesh.point_arrays['values'] = field.flatten(order='F')
# mesh.plot(show_edges=True)
| [
"72301964+Spood-UR@users.noreply.github.com"
] | 72301964+Spood-UR@users.noreply.github.com |
cafd16de8f3a991bc1f93e7117a77c4e815657f0 | ea2bb35913e63c8a54b53a050c2ffe8067ab52e1 | /panepinto/collect.py | 42938746c2c7df6a986d4e6bf33d89cfc17363c8 | [] | no_license | jamesaoverton/obi-panepinto | 25250dfe809422423af5470126108f6eebfeea58 | 5a81d856ab4189b1b0bfcd2bed86476947b02d18 | refs/heads/master | 2021-01-01T03:44:33.641612 | 2016-05-13T14:57:27 | 2016-05-13T14:57:27 | 58,749,380 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,369 | py | #!/usr/bin/env python3
import argparse, csv, re
def main():
    """Collect labels and terms from a HOWL file, then print HOWL stanzas.

    First pass (howl): gather every `type:> ...` value as a term, and every
    non-blank, non-comment line that directly follows a blank line as a
    label. Second pass (table): for each TSV row whose label (column 1,
    with any `@en` suffix removed) is a known term, print one line per
    non-empty cell using the column header as a template (`%` in a header
    is replaced by the cell value; a `SUBJECT` header prints the raw cell).
    """
    # Parse arguments
    parser = argparse.ArgumentParser(
        description='Collect labels and terms from a HOWL file')
    parser.add_argument('howl',
                        type=argparse.FileType('r'),
                        help='a HOWL file')
    parser.add_argument('table',
                        type=argparse.FileType('r'),
                        help='a TSV file')
    args = parser.parse_args()

    labels = set()
    terms = set()
    last = None
    for line in args.howl:
        if line.strip() != '' and not line.startswith('#'):
            if line.startswith('type:> '):
                terms.add(line[7:].strip())
            # A content line that directly follows a blank line is a label.
            if last and last.strip() == '':
                labels.add(line.strip())
        last = line

    for label in sorted(labels):
        # Slugify the label into an identifier: runs of non-word chars -> '-'.
        print('<' + re.sub(r'\W+', '-', label) + '>')
        print('label: ' + label)
        print()

    rows = csv.reader(args.table, delimiter='\t')
    headers = next(rows)
    for row in rows:
        # BUGFIX: row[1] is read below, so rows with fewer than TWO cells
        # must be skipped (the original only guarded against empty rows,
        # which crashed with IndexError on single-cell rows).
        if len(row) < 2:
            continue
        label = row[1].replace('@en', '')
        if label not in terms:
            continue
        for i in range(0, min(len(headers), len(row))):
            if row[i].strip() == '':
                continue
            if headers[i] == 'SUBJECT':
                print(row[i])
            elif '%' in headers[i]:
                print(headers[i].replace('%', row[i]))
            else:
                print(headers[i] + ' ' + row[i])
        print()
print()
# Run the main() function.
if __name__ == "__main__":
main()
| [
"james@overton.ca"
] | james@overton.ca |
a95ad2492276f677f3e6732dccda73ecd536c253 | 040a669f8d27bcf62374e8ae5ce2a2b93a55f681 | /tests/test_new_user.py | ff99bb5887702db7ae3a47122187c770bc4c034d | [] | no_license | Pietertjie000108/Coding-Clinic-Booking-System | 5f31e6b48a15c2c82139c1960419c7903ee70795 | d1ad0a57b52b086b3ffd110c0a99dbe72e8c5afd | refs/heads/main | 2023-01-29T06:45:46.338705 | 2020-12-12T20:21:04 | 2020-12-12T20:21:04 | 309,633,933 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,487 | py | import sys, os, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)
import unittest
from authenticator.new_user import new_username, password, validate, user_and_pass
class Test_new_user_creation(unittest.TestCase):
    """Tests for module-level values imported from authenticator.new_user.

    NOTE(review): every check below uses assertNotEqual between a name
    imported from authenticator.new_user and a string/None literal. Unless
    those imported names happen to equal those literals at import time, the
    assertions are tautological and never call the functions with the local
    `username` values -- verify the intent against authenticator/new_user.py.
    """

    def test_incorrect_input(self):
        # NOTE(review): `username` is assigned but never passed anywhere.
        username = " "
        self.maxDiff = None
        self.assertNotEqual(new_username,"Please enter the correct username.")

    def test_new_user_1(self):
        # NOTE(review): `username` is assigned but never passed anywhere.
        username = "jkokot"
        self.maxDiff = None
        self.assertNotEqual(new_username,"That user already exists.")

    def test_new_user_2(self):
        # NOTE(review): `username` is assigned but never passed anywhere.
        username = "TaiSMail"
        self.maxDiff = None
        self.assertNotEqual(new_username,"That user already exists.")

    def test_auth_password(self) :
        # Compares the imported `password` value against a prompt string.
        self.maxDiff = None
        self.assertNotEqual(password,"Please re-enter password: ")

    def test_auth_validate(self):
        # Compares the imported `validate` value against each message string.
        self.assertNotEqual(validate,"Make sure your password is at lest 8 letters")
        self.assertNotEqual(validate,"Make sure your password has a number in it")
        self.assertNotEqual(validate,"Make sure your password has a capital letter in it")
        self.assertNotEqual(validate,"Your password seems fine")

    def test_auth_user_and_pass(self):
        # Asserts the imported `user_and_pass` value is not None.
        self.maxDiff = None
        self.assertNotEqual(user_and_pass, None)
if __name__ == "__main__":
unittest.main() | [
"67334887+Bubbles1206@users.noreply.github.com"
] | 67334887+Bubbles1206@users.noreply.github.com |
ef769a8b18d70bdc17a21f8b9b959edcf4ea4e8c | be74e1dbaab580a7e4024a595958d1893469863a | /Python/LetterTilePossibilities.py | 4b6263e334207669ea238fa51c1426960635c806 | [] | no_license | super0xie/Solution | 0392b2577c31415c60ebcbeedb3dedcb01c66f02 | d028c2d076b1136ee30673faae5844954674961d | refs/heads/master | 2021-06-21T12:34:40.968649 | 2020-12-06T05:27:34 | 2020-12-06T05:27:34 | 130,928,761 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 930 | py | class Solution:
f = [1, 1, 2, 6, 24, 120, 720, 5040]
def numTilePossibilities(self, tiles: str) -> int:
map = {}
for i in range(len(tiles)):
if tiles[i] in map:
map[tiles[i]] += 1
else:
map[tiles[i]] = 1
count = []
for i in map:
if map[i] > 0:
count.append(map[i])
self.res = 0
self.dfs(0, 0, count, [0]*len(count))
return int(self.res)
def dfs(self, idx, sum, count, n):
if idx == len(count):
if(sum > 0):
r = self.f[sum]
for i in range(len(count)):
r /= self.f[n[i]]
self.res += r
return
for i in range(count[idx]+1):
n[idx] = i
self.dfs(idx+1, sum+i, count, n)
# Guard the manual smoke test so importing this module has no side effects.
if __name__ == "__main__":
    test = Solution()
    print(test.numTilePossibilities("AAB"))  # expected: 8
| [
"sxie@alpine-la.com"
] | sxie@alpine-la.com |
d8aa1cf989be9f934534f4189d386207d403ad6d | 12101c4a2c3aeb8eca88c3db89af2aed69208190 | /stream_test.py | 698e377c1c98591f36d05601127cd6987d8f45ad | [] | no_license | BitFloyd/VisBiz | 8085d7f9e32c8fd9b9b66d309f62a889d87f778a | 9b0bcb99091b9845dbaaf3be1b4cd6e8bb2b2c67 | refs/heads/master | 2021-05-09T15:16:21.786198 | 2018-01-31T20:31:32 | 2018-01-31T20:31:32 | 119,087,163 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 786 | py | import cv2
from classico.faces import FaceDetector as FD
import os
# Live webcam preview with detected faces drawn on each frame; quit with 'q'.
cap = cv2.VideoCapture(0)

# NOTE(review): the original passed haarcascade_profileface.xml as
# `face_cascade` and haarcascade_frontalface_default.xml as
# `face_profile_cascade` -- the opposite of what the parameter names
# suggest. The two paths are swapped here so each cascade matches its
# parameter; confirm against FaceDetector's definition.
fd = FD(face_cascade=os.path.join('..', 'data_folder', 'haarcascades', 'haarcascade_frontalface_default.xml'),
        face_profile_cascade=os.path.join('..', 'data_folder', 'haarcascades', 'haarcascade_profileface.xml'),
        eye_cascade=None,
        eyes=False)

try:
    while True:
        # Capture frame-by-frame; `ret` is False when no frame is available.
        ret, frame = cap.read()
        # Run face detection and draw the results on the frame.
        detect_frame = fd.detect_face_from_img(frame)
        # Display the resulting frame.
        cv2.imshow('frame',detect_frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
finally:
    # Release the camera and close the window even on error or interrupt.
    cap.release()
    cv2.destroyAllWindows()
cv2.destroyAllWindows() | [
"sby.jcub@gmail.com"
] | sby.jcub@gmail.com |
7acc20e556f673db572b0311c98a3a3398d490bd | f8cc4185ae04a926b05eaa6ff1c77d36f082dfae | /Python/Fundamentals Practice/MultiplesSumAverage.py | 0de0d4b18f6216bf6bd8a6cb6f45cbacff0fd15e | [] | no_license | KMA91/DojoAssignments | 9a51edcb2eec224ea8e7f73e3789ce791b464865 | c7b06b059e397c280fe17e341dc35c4123502344 | refs/heads/master | 2021-01-22T23:27:02.908845 | 2019-04-11T08:04:29 | 2019-04-11T08:04:29 | 85,640,158 | 0 | 0 | null | 2017-08-22T19:11:54 | 2017-03-21T00:13:20 | null | UTF-8 | Python | false | false | 221 | py | #Part 1
for count in range(1, 1001, 2):
print count
#Part 2
for count in range(5, 1000001, 5):
print count
#Sum List
a = [1, 2, 5, 10, 255, 3]
sum = 0
for i in a:
sum+=i
print sum
#Average List
print sum/len(a) | [
"kevinma91@yahoo.com"
] | kevinma91@yahoo.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.