blob_id
stringlengths
40
40
directory_id
stringlengths
40
40
path
stringlengths
3
288
content_id
stringlengths
40
40
detected_licenses
listlengths
0
112
license_type
stringclasses
2 values
repo_name
stringlengths
5
115
snapshot_id
stringlengths
40
40
revision_id
stringlengths
40
40
branch_name
stringclasses
684 values
visit_date
timestamp[us]date
2015-08-06 10:31:46
2023-09-06 10:44:38
revision_date
timestamp[us]date
1970-01-01 02:38:32
2037-05-03 13:00:00
committer_date
timestamp[us]date
1970-01-01 02:38:32
2023-09-06 01:08:06
github_id
int64
4.92k
681M
star_events_count
int64
0
209k
fork_events_count
int64
0
110k
gha_license_id
stringclasses
22 values
gha_event_created_at
timestamp[us]date
2012-06-04 01:52:49
2023-09-14 21:59:50
gha_created_at
timestamp[us]date
2008-05-22 07:58:19
2023-08-21 12:35:19
gha_language
stringclasses
147 values
src_encoding
stringclasses
25 values
language
stringclasses
1 value
is_vendor
bool
2 classes
is_generated
bool
2 classes
length_bytes
int64
128
12.7k
extension
stringclasses
142 values
content
stringlengths
128
8.19k
authors
listlengths
1
1
author_id
stringlengths
1
132
80a6ff4cb5ecc084d016f9301da878f259878fa9
68d38b305b81e0216fa9f6769fe47e34784c77f2
/alascrapy/spiders/digitalspy.py
0a6d6b49df371dfbc569048703b9ba89f5d402f1
[]
no_license
ADJet1437/ScrapyProject
2a6ed472c7c331e31eaecff26f9b38b283ffe9c2
db52844411f6dac1e8bd113cc32a814bd2ea3632
refs/heads/master
2022-11-10T05:02:54.871344
2020-02-06T08:01:17
2020-02-06T08:01:17
237,448,562
0
0
null
null
null
null
UTF-8
Python
false
false
3,075
py
# -*- coding: utf8 -*-
from datetime import datetime

from scrapy.http import Request

from alascrapy.spiders.base_spiders.ala_spider import AlaSpider
from alascrapy.lib.generic import get_full_url
import alascrapy.lib.dao.incremental_scraping as incremental_utils


class DigitalSpySpider(AlaSpider):
    """Spider for professional reviews in the digitalspy.co.uk tech section.

    Walks the paginated review listing, stopping once it reaches reviews
    older than the newest review already stored for this source.
    """

    name = 'digitalspy'
    allowed_domains = ['digitalspy.co.uk']
    start_urls = ['http://www.digitalspy.co.uk/tech/review/']

    def __init__(self, *args, **kwargs):
        # BUG FIX: the original called
        #   super(DigitalSpySpider, self).__init__(self, *args, **kwargs)
        # passing `self` twice (once implicitly via the bound super() call,
        # once explicitly), which shifts every positional argument.
        super(DigitalSpySpider, self).__init__(*args, **kwargs)
        # Newest review date already persisted; used as the incremental
        # crawl cut-off in continue_to_next_page().
        self.stored_last_date = incremental_utils.get_latest_pro_review_date(
            self.mysql_manager, self.spider_conf["source_id"])

    def parse(self, response):
        """Yield a request per review link, then follow pagination."""
        next_page_xpath = "//*[contains(@class, 'pagination')]//a[@title='Next']/@href"
        review_urls = self.extract_list(
            response.xpath("//*[@class='content_area']//a[@class='component']/@href"))
        for review_url in review_urls:
            review_url = get_full_url(response, review_url)
            request = Request(review_url, callback=self.parse_review)
            yield request

        if self.continue_to_next_page(response):
            next_page = self.extract(response.xpath(next_page_xpath))
            if next_page:
                next_page = get_full_url(response, next_page)
                request = Request(next_page, callback=self.parse)
                yield request

    def continue_to_next_page(self, response):
        """Return False once the page's oldest review predates the stored cut-off."""
        if not self.stored_last_date:
            return True
        review_date_xpath = "//*[@class='content_area']//time/@datetime"
        review_dates = self.extract_list(response.xpath(review_date_xpath))
        if review_dates:
            last_date_string = review_dates[-1]
            # Trailing 4 characters (timezone suffix) are stripped before parsing.
            last_review_date = datetime.strptime(last_date_string[0:-4],
                                                 "%Y-%m-%d:%H:%M")
            if self.stored_last_date > last_review_date:
                return False
        return True

    def parse_review(self, response):
        """Extract one product item and one PRO review item from a review page."""
        product_xpaths = {
            "ProductName": "(//*[@id='articleimage'])[1]//img/@alt",
            "PicURL": "(//*[@property='og:image'])[1]/@content",
            "OriginalCategoryName": "(//*[@class='category-chicklets']/li)[last()]//text()"
        }
        review_xpaths = {
            "ProductName": "(//*[@id='articleimage'])[1]//img/@alt",
            "TestTitle": "//*[@property='og:title']/@content",
            "TestSummary": "//*[@property='og:description']/@content",
            "Author": "//a[@rel='author']/text()",
            "TestDateText": "//time/@datetime"
        }
        product = self.init_item_by_xpaths(response, "product", product_xpaths)
        review = self.init_item_by_xpaths(response, "review", review_xpaths)
        review["DBaseCategoryName"] = "PRO"
        # Normalize the page's datetime (timezone suffix stripped) to the
        # storage format used by the pipeline.
        review["TestDateText"] = datetime.strptime(
            review["TestDateText"][0:-4],
            "%Y-%m-%d:%H:%M").strftime("%Y-%m-%d %H:%M:00")
        yield product
        yield review
[ "liangzijie1437@gmail.com" ]
liangzijie1437@gmail.com
bb1c11a5dec23753cf0d15af74e5a1e3f8fb3803
0b88201be895a25c8c321481615b4965f529d6da
/CDTB_Seg/model/sentence.py
c4f574f16d8c9e287b9e6e110b3fa970a1cc8c39
[ "BSD-2-Clause", "MIT" ]
permissive
NLP-Discourse-SoochowU/segmenter2020
1e8335da56b26f52ed48eb462047b9fe9b1e10df
fd71b353c59bcb82ec2cd0bebf943040756faa63
refs/heads/master
2023-01-13T23:14:37.078780
2020-11-24T05:07:26
2020-11-24T05:07:26
283,890,012
0
1
null
null
null
null
UTF-8
Python
false
false
748
py
# -*- coding: utf-8 -*- """ @Author: Lyzhang @Date: @Description: 封装句子,句法信息等 """ from stanfordcorenlp import StanfordCoreNLP from model.edu import EDU path_to_jar = 'stanford-corenlp-full-2018-02-27' nlp = StanfordCoreNLP(path_to_jar) class Sentence: def __init__(self, sentence, edus_list): self.edus = self.build_edus(edus_list) self.sentence_txt = sentence self.dependency = self.gen_dependency() @staticmethod def build_edus(edus_list): edus_ = list() for edu in edus_list: edus_.append(EDU(edu, nlp)) return edus_ def gen_dependency(self): dep = nlp.dependency_parse(self.sentence_txt) return dep
[ "longyin128128@163.com" ]
longyin128128@163.com
40e1d47660e1a1180f87336736369de34c5b4c7a
5a93d5eadf55513020c0c5149b2bc8a52d8ea4c0
/core/ui_mixins/input_panel.py
c491e98d7997e5d3f0827ff5c301b670910427f5
[ "MIT" ]
permissive
jmcollis/GitSavvy
b14dfe7485aa2d7c37c9bd3615a6b9be9fe274e8
de7d01539931b4344a296bd71ed87d9754389f6a
refs/heads/master
2022-10-06T23:38:36.821013
2022-09-26T14:21:33
2022-09-26T14:21:33
165,129,291
0
0
MIT
2019-01-10T20:51:57
2019-01-10T20:51:57
null
UTF-8
Python
false
false
398
py
import sublime def show_single_line_input_panel( caption, initial_text, on_done, on_change=None, on_cancel=None, select_text=True): window = sublime.active_window() v = window.show_input_panel(caption, initial_text, on_done, on_change, on_cancel) if select_text: v.run_command("select_all") v.settings().set("git_savvy.single_line_input_panel", True) return v
[ "randy.cs.lai@gmail.com" ]
randy.cs.lai@gmail.com
0592131ab071183ad4eb44c54e560f11e46ede34
1f0ebcb6f428244c3283466c7f98944349f3df48
/greendoge/wallet/transaction_record.py
256fa58a22fee0e4a5e238d5ab70ed037423f6ee
[ "Apache-2.0" ]
permissive
ymcage/greendoge-blockchain
4b53433c26221ea6cf5665b9a134fff25c676e22
42d5440c3899419f4aa544908a50b1ed78799c13
refs/heads/main
2023-06-10T06:54:40.391343
2021-07-07T11:39:31
2021-07-07T11:39:31
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,501
py
from dataclasses import dataclass
from typing import List, Optional, Tuple

from greendoge.consensus.coinbase import pool_parent_id, farmer_parent_id
from greendoge.types.blockchain_format.coin import Coin
from greendoge.types.blockchain_format.sized_bytes import bytes32
from greendoge.types.mempool_inclusion_status import MempoolInclusionStatus
from greendoge.types.spend_bundle import SpendBundle
from greendoge.util.ints import uint8, uint32, uint64
from greendoge.util.streamable import Streamable, streamable
from greendoge.wallet.util.transaction_type import TransactionType


@dataclass(frozen=True)
@streamable
class TransactionRecord(Streamable):
    """
    Used for storing transaction data and status in wallets.
    """

    confirmed_at_height: uint32
    created_at_time: uint64
    to_puzzle_hash: bytes32
    amount: uint64
    fee_amount: uint64
    confirmed: bool
    sent: uint32
    spend_bundle: Optional[SpendBundle]
    additions: List[Coin]
    removals: List[Coin]
    wallet_id: uint32

    # Represents the list of peers that we sent the transaction to, whether each one
    # included it in the mempool, and what the error message (if any) was
    sent_to: List[Tuple[str, uint8, Optional[str]]]
    trade_id: Optional[bytes32]
    type: uint32  # TransactionType
    name: bytes32

    def is_in_mempool(self) -> bool:
        """True when any peer we sent the bundle to reported SUCCESS."""
        # Transactions still pending inclusion report False.
        return any(
            MempoolInclusionStatus(status) == MempoolInclusionStatus.SUCCESS
            for _, status, _ in self.sent_to
        )

    def height_farmed(self, genesis_challenge) -> Optional[uint32]:
        """Block height at which this reward was farmed, or None."""
        # Only confirmed reward transactions can have a farmed height.
        if not self.confirmed:
            return None
        if self.type not in (TransactionType.FEE_REWARD, TransactionType.COINBASE_REWARD):
            return None
        # Scan back up to 100 blocks for the reward coin's deterministic parent.
        for block_index in range(self.confirmed_at_height, self.confirmed_at_height - 100, -1):
            if block_index < 0:
                return None
            pool_parent = pool_parent_id(uint32(block_index), genesis_challenge)
            farmer_parent = farmer_parent_id(uint32(block_index), genesis_challenge)
            reward_parent = self.additions[0].parent_coin_info
            if pool_parent == reward_parent or farmer_parent == reward_parent:
                return uint32(block_index)
        return None
[ "83430349+lionethan@users.noreply.github.com" ]
83430349+lionethan@users.noreply.github.com
2a5e6f9ac45e880f664a9ce232fab5f208239894
544fe02a27cc4d987724b1bf45c2ba2994676521
/Q6.2_brain_teaser.py
f3c435d0d39f9e66d72d19cb75fb37c344115fe5
[ "Unlicense" ]
permissive
latika18/learning
1e7a6dbdea399b845970317dc62089911a13df1c
a57c9aacc0157bf7c318f46c1e7c4971d1d55aea
refs/heads/master
2021-06-16T19:20:28.146547
2019-09-03T06:43:28
2019-09-03T06:43:28
115,537,386
0
0
null
null
null
null
UTF-8
Python
false
false
366
py
There is an 8x8 chess board in which two diagonally opposite corners have been cut off. You are given 31 dominos, and a single domino can cover exactly two squares. Can you use the 31 dominos to cover the entire board? Prove your answer (by providing an example, or showing why it’s impossible). _ ________________________________________________________________
[ "noreply@github.com" ]
latika18.noreply@github.com
6439de8c26738686bf36333467e5cb2eb4570782
c0973d6939ef419ed3d261d95167d537499a553a
/OnePy/builtin_module/backtest_stock/stock_bar.py
3c54b51c0fd2d7d4c0792869b53a43b9813f685e
[ "MIT" ]
permissive
mj3428/OnePy
0c6e4be9b4bb36ae66b566dfa85cd44bae2a07de
8dc13fc21502daa5786aecaa4451ccba32fc8a14
refs/heads/master
2020-04-05T10:28:33.550915
2018-11-08T04:07:05
2018-11-08T04:07:05
134,518,682
0
0
MIT
2018-05-23T05:38:12
2018-05-23T05:38:11
null
UTF-8
Python
false
false
772
py
from OnePy.sys_module.models.base_bar import BarBase class BarAshares(BarBase): @property def pre_date(self) -> str: return self.previous_ohlc['date'] @property def pre_open(self) -> float: return self.previous_ohlc['open'] @property def pre_high(self) -> float: return self.previous_ohlc['high'] @property def pre_low(self) -> float: return self.previous_ohlc['low'] @property def pre_close(self) -> float: return self.previous_ohlc['close'] @property def pre_volume(self) -> float: return self.previous_ohlc['volume'] @property def limit_up(self): return self.pre_close*1.1 @property def limit_down(self): return self.pre_close*0.9
[ "chenjiayicjy@126.com" ]
chenjiayicjy@126.com
2c26f31e646ce45e3dd3e92987df40f489341d47
9788d21a60e7f97cd8dcc6d28a280901cfff7d99
/app/decorators.py
f926a1a5112d53b9c9baa525e6dfced11ed3b4ef
[]
no_license
Tiierr/MovieRental
b928d08b3a139c7c20fbdf1351402d2d8d700ab9
69bfaf3726aa7bedb58ef63a47d5e7b4476b08d9
refs/heads/master
2021-06-11T23:43:12.271576
2017-01-02T15:16:11
2017-01-02T15:16:11
null
0
0
null
null
null
null
UTF-8
Python
false
false
667
py
from functools import wraps from flask import abort from flask_login import current_user from .models import Permission def permission_required(permission): """ 检查用户权限的自定义修饰器。 如果用户不具有指定权限, 则返回 ``403`` 错误码, 即 HTTP ``禁止`` 错误。 """ def decorator(f): @wraps(f) def decorated_function(*args, **kwargs): if not current_user.can(permission): abort(403) return f(*args, **kwargs) return decorated_function return decorator def admin_required(f): return permission_required(Permission.ADMINISTER)(f)
[ "rayyu03@163.com" ]
rayyu03@163.com
17ece5964bcf66ee4adb69316fe20ee8aef56d8d
4fc87c7c55d431943eba76caaa76cc889e99bd3f
/npf/contrib/address/migrations/0002_auto_20151027_1114.py
c5dad8fa654fa93453863a66fbca651cc1f86e22
[]
no_license
Bonasolvo/npf-dev-roles
c774359b79642ae9ca2c82daeb0591677bd8e88c
dbde9493f2d23fd238dd3a6d8771bbbc5a650724
refs/heads/master
2016-09-01T05:35:50.246086
2015-12-15T07:02:40
2015-12-15T07:02:40
48,026,149
2
0
null
null
null
null
UTF-8
Python
false
false
2,988
py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import fias.fields.address


class Migration(migrations.Migration):
    """Create House/Street tables, Address/Socr FIAS proxies, and FIAS FK links."""

    dependencies = [
        ('fias', '0001_initial'),
        ('address', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='House',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
                ('street', models.CharField(editable=False, blank=True, max_length=255, db_index=True, verbose_name='Улица')),
                ('index', models.PositiveIntegerField(blank=True, null=True, verbose_name='Почтовый индекс')),
                ('house', models.PositiveSmallIntegerField(blank=True, null=True, verbose_name='Дом')),
                ('corps', models.CharField(blank=True, max_length=2, null=True, verbose_name='Корпус')),
                ('apartment', models.PositiveSmallIntegerField(blank=True, null=True, verbose_name='Квартира')),
            ],
            options={
                'db_table': 'zlk_house',
            },
        ),
        migrations.CreateModel(
            name='Street',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
                ('street', models.CharField(editable=False, blank=True, max_length=255, db_index=True, verbose_name='Улица')),
            ],
            options={
                'db_table': 'zlk_street',
            },
        ),
        # Proxy over fias.addrobj — no own table.
        migrations.CreateModel(
            name='Address',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('fias.addrobj',),
        ),
        # Proxy over fias.socrbase — no own table.
        migrations.CreateModel(
            name='Socr',
            fields=[
            ],
            options={
                'verbose_name': 'Сокращениие наименования адресного объекта',
                'verbose_name_plural': 'Список сокращений',
                'proxy': True,
            },
            bases=('fias.socrbase',),
        ),
        migrations.AddField(
            model_name='street',
            name='fias_street',
            field=fias.fields.address.AddressField(related_name='+', verbose_name='Улица', blank=True, to='fias.AddrObj', null=True),
        ),
        migrations.AddField(
            model_name='house',
            name='fias_house',
            field=fias.fields.address.AddressField(related_name='+', verbose_name='Дом', blank=True, to='fias.AddrObj', db_column='fiashouse', null=True),
        ),
        migrations.AddField(
            model_name='house',
            name='fias_street',
            field=fias.fields.address.AddressField(related_name='+', verbose_name='Улица', blank=True, to='fias.AddrObj', null=True),
        ),
    ]
[ "tymashh@Mac-Tymashh.local" ]
tymashh@Mac-Tymashh.local
2144800d8fcec458abe686f8dab5297deb877026
8a1cc9342312f794c48a857de1444a70491a75fa
/item/admin.py
6d0f120c83410ffb372120fdb92117f26cefbc13
[]
no_license
BijoySingh/Project-Hermes-Django
7adaf6757bc605e9f3781d915c4250bcc348fb47
7cb50402e612ea287d5922a2716a30b7888a0d70
refs/heads/master
2016-09-01T07:04:05.259487
2016-04-09T10:25:21
2016-04-09T10:25:21
55,672,434
1
0
null
null
null
null
UTF-8
Python
false
false
796
py
from django.contrib import admin # Register your models here. from item.models import Item, Comment, Photo, Rating, Reaction @admin.register(Item) class ItemAdmin(admin.ModelAdmin): list_display = ['id', 'title', 'latitude', 'longitude', 'rating', 'author'] @admin.register(Comment) class CommentAdmin(admin.ModelAdmin): list_display = ['id', 'comment', 'upvotes', 'downvotes', 'flags', 'author'] @admin.register(Photo) class PhotoAdmin(admin.ModelAdmin): list_display = ['id', 'picture', 'upvotes', 'downvotes', 'flags', 'author'] @admin.register(Rating) class RatingAdmin(admin.ModelAdmin): list_display = ['id', 'author', 'item', 'rating'] @admin.register(Reaction) class ReactionAdmin(admin.ModelAdmin): list_display = ['id', 'author', 'reactable', 'reaction']
[ "bijoysingh693@gmail.com" ]
bijoysingh693@gmail.com
234d119020fbf20a956843715d516476ec476f75
7b33e9ab949ef2cd985e56abea4a7c0e5d53e5a5
/examples/enwik8_deepspeed/train.py
40980f744df6c18b7b981b8682c8aa486aabcbfc
[ "MIT" ]
permissive
karim-ahmed/linear-attention-transformer
3a416226b5f0707756e3044e6dd1dd65f6239ba7
dd3f7b63fe2235cba97ab1a95840d0484c70c068
refs/heads/master
2023-04-04T01:14:37.871715
2021-04-14T03:14:44
2021-04-14T03:14:44
null
0
0
null
null
null
null
UTF-8
Python
false
false
3,789
py
import deepspeed

from linear_attention_transformer import LinearAttentionTransformerLM
from linear_attention_transformer.autoregressive_wrapper import AutoregressiveWrapper

import argparse
import random
import tqdm
import gzip
import numpy as np
import torch
import torch.optim as optim
from torch.nn import functional as F
from torch.utils.data import DataLoader, Dataset


def add_argument():
    """Build the CLI parser (incl. deepspeed config args) and parse argv."""
    parser = argparse.ArgumentParser(description='enwik8')

    parser.add_argument('--with_cuda', default=False, action='store_true',
                        help='use CPU in case there\'s no GPU support')
    parser.add_argument('--use_ema', default=False, action='store_true',
                        help='whether use exponential moving average')
    parser.add_argument('-b', '--batch_size', default=32, type=int,
                        help='mini-batch size (default: 32)')
    parser.add_argument('-e', '--epochs', default=30, type=int,
                        help='number of total epochs (default: 30)')
    parser.add_argument('--local_rank', type=int, default=-1,
                        help='local rank passed from distributed launcher')

    parser = deepspeed.add_config_arguments(parser)
    args = parser.parse_args()
    return args


# constants
VALIDATE_EVERY = 100
GENERATE_EVERY = 500
GENERATE_LENGTH = 1024
SEQ_LEN = 4096


# helpers
def decode_token(token):
    """Map a byte token to a printable character (control bytes -> space)."""
    return str(chr(max(32, token)))


def decode_tokens(tokens):
    """Decode a sequence of byte tokens into a string."""
    return ''.join(list(map(decode_token, tokens)))


# instantiate model
model = LinearAttentionTransformerLM(
    num_tokens=256,
    dim=512,
    depth=8,
    max_seq_len=SEQ_LEN,
    heads=8,
    causal=True,
    reversible=True,
    blindspot_size=2,
    n_local_attn_heads=(8, 8, 8, 8, 4, 4, 2, 2)
)

model = AutoregressiveWrapper(model)
model.cuda()

# prepare enwik8 data: first 90M bytes train, next 5M validation
with gzip.open('./data/enwik8.gz') as file:
    # BUG FIX: np.fromstring is deprecated (removed for binary input in
    # newer NumPy); np.frombuffer + copy() is the supported equivalent
    # (copy keeps the array writable, as fromstring's copy was).
    X = np.frombuffer(file.read(int(95e6)), dtype=np.uint8).copy()
    trX, vaX = np.split(X, [int(90e6)])
    data_train, data_val = torch.from_numpy(trX), torch.from_numpy(vaX)


class TextSamplerDataset(Dataset):
    """Samples random (seq_len+1)-long windows from a 1-D token tensor."""

    def __init__(self, data, seq_len):
        super().__init__()
        self.data = data
        self.seq_len = seq_len

    def __getitem__(self, index):
        # Random window start; the index argument is deliberately ignored.
        rand_start = torch.randint(0, self.data.size(0) - self.seq_len - 1, (1,))
        full_seq = self.data[rand_start: rand_start + self.seq_len + 1].long()
        return full_seq, torch.ones_like(full_seq).bool()

    def __len__(self):
        return self.data.size(0) // self.seq_len


train_dataset = TextSamplerDataset(data_train, SEQ_LEN)
val_dataset = TextSamplerDataset(data_val, SEQ_LEN)

# setup deepspeed
cmd_args = add_argument()
model_engine, optimizer, trainloader, _ = deepspeed.initialize(
    args=cmd_args,
    model=model,
    model_parameters=model.parameters(),
    training_data=train_dataset)

# training
for i, (data, mask) in enumerate(trainloader):
    model_engine.train()
    data = data.to(model_engine.local_rank)
    loss = model_engine(data, return_loss=True, randomly_truncate_sequence=True)
    model_engine.backward(loss)
    model_engine.step()
    print(loss.item())

    if i % VALIDATE_EVERY == 0:
        model.eval()
        with torch.no_grad():
            inp, _ = random.choice(val_dataset)
            loss = model(inp[None, :].cuda(), return_loss=True)
            print(f'validation loss: {loss.item()}')

    if i != 0 and model_engine.local_rank == 0 and i % GENERATE_EVERY == 0:
        model.eval()
        inp, _ = random.choice(val_dataset)
        print(inp.shape, inp)
        prime = decode_tokens(inp)
        # BUG FIX: the original did print(f'%s \n\n %s', (prime, '*' * 100)),
        # which printed the literal template string followed by the tuple.
        print(prime + ' \n\n ' + '*' * 100)

        sample = model.generate(inp.cuda(), GENERATE_LENGTH)
        output_str = decode_tokens(sample)
        print(output_str)
[ "lucidrains@gmail.com" ]
lucidrains@gmail.com
0e8a1b16ca9cc75722a90fa7c6b30afde32791a2
2e0908cf2dce87036e9b9d32f23d7b7e20b2127f
/tests/test_algebra_onnx_operator_mixin_syntax.py
76c44cd4a7b0b1415b9dfb9445e83e909f87b88a
[ "MIT" ]
permissive
jtpils/sklearn-onnx
a74b621c47ee02f4b0775c6f99091f6d79873650
5a065b767ec0e658f671e6313c2a3392b0dc81d6
refs/heads/master
2020-06-21T10:52:57.936832
2019-07-17T15:53:40
2019-07-17T15:53:40
null
0
0
null
null
null
null
UTF-8
Python
false
false
4,561
py
import unittest

import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.cluster import KMeans
from sklearn.pipeline import make_pipeline
from skl2onnx import convert_sklearn, to_onnx, wrap_as_onnx_mixin
from skl2onnx.common.data_types import FloatTensorType
from skl2onnx.algebra.onnx_ops import OnnxSub, OnnxDiv
from skl2onnx.algebra.onnx_operator_mixin import OnnxOperatorMixin
from test_utils import dump_data_and_model


class CustomOpTransformer(BaseEstimator, TransformerMixin, OnnxOperatorMixin):
    """Standard-scaler-like transformer that can emit its own ONNX graph."""

    def __init__(self):
        BaseEstimator.__init__(self)
        TransformerMixin.__init__(self)

    def fit(self, X, y=None):
        # Column-wise mean and std used by transform().
        self.W_ = np.mean(X, axis=0)
        self.S_ = np.std(X, axis=0)
        return self

    def transform(self, X):
        return (X - self.W_) / self.S_

    def onnx_shape_calculator(self):
        # Output type mirrors the input type.
        def shape_calculator(operator):
            operator.outputs[0].type = operator.inputs[0].type
        return shape_calculator

    def to_onnx_operator(self, inputs=None, outputs=('Y', )):
        if inputs is None:
            raise RuntimeError("inputs should contain one name")
        i0 = self.get_inputs(inputs, 0)
        W = self.W_
        S = self.S_
        # ONNX equivalent of transform(): (X - W) / S.
        return OnnxDiv(OnnxSub(i0, W), S, output_names=outputs)


class TestOnnxOperatorMixinSyntax(unittest.TestCase):
    """Covers the four conversion syntaxes, alone and inside a pipeline."""

    def test_way1_convert_sklean(self):
        X = np.arange(20).reshape(10, 2)
        tr = KMeans(n_clusters=2)
        tr.fit(X)
        onx = convert_sklearn(
            tr, initial_types=[('X', FloatTensorType((1, X.shape[1])))])
        dump_data_and_model(
            X.astype(np.float32), tr, onx,
            basename="MixinWay1ConvertSklearn")

    def test_way2_to_onnx(self):
        X = np.arange(20).reshape(10, 2)
        tr = KMeans(n_clusters=2)
        tr.fit(X)
        onx = to_onnx(tr, X.astype(np.float32))
        dump_data_and_model(
            X.astype(np.float32), tr, onx,
            basename="MixinWay2ToOnnx")

    def test_way3_mixin(self):
        X = np.arange(20).reshape(10, 2)
        tr = KMeans(n_clusters=2)
        tr.fit(X)
        tr_mixin = wrap_as_onnx_mixin(tr)
        # First call without data must fail with a specific message.
        try:
            onx = tr_mixin.to_onnx()
        except RuntimeError as e:
            assert "Method enumerate_initial_types" in str(e)
        onx = tr_mixin.to_onnx(X.astype(np.float32))
        dump_data_and_model(
            X.astype(np.float32), tr, onx,
            basename="MixinWay3OnnxMixin")

    def test_way4_mixin_fit(self):
        X = np.arange(20).reshape(10, 2)
        tr = wrap_as_onnx_mixin(KMeans(n_clusters=2))
        tr.fit(X)
        onx = tr.to_onnx(X.astype(np.float32))
        dump_data_and_model(
            X.astype(np.float32), tr, onx,
            basename="MixinWay4OnnxMixin2")

    def test_pipe_way1_convert_sklean(self):
        X = np.arange(20).reshape(10, 2)
        tr = make_pipeline(CustomOpTransformer(), KMeans(n_clusters=2))
        tr.fit(X)
        onx = convert_sklearn(
            tr, initial_types=[('X', FloatTensorType((1, X.shape[1])))])
        dump_data_and_model(
            X.astype(np.float32), tr, onx,
            basename="MixinPipeWay1ConvertSklearn")

    def test_pipe_way2_to_onnx(self):
        X = np.arange(20).reshape(10, 2)
        tr = make_pipeline(CustomOpTransformer(), KMeans(n_clusters=2))
        tr.fit(X)
        onx = to_onnx(tr, X.astype(np.float32))
        dump_data_and_model(
            X.astype(np.float32), tr, onx,
            basename="MixinPipeWay2ToOnnx")

    def test_pipe_way3_mixin(self):
        X = np.arange(20).reshape(10, 2)
        tr = make_pipeline(CustomOpTransformer(), KMeans(n_clusters=2))
        tr.fit(X)
        tr_mixin = wrap_as_onnx_mixin(tr)
        try:
            onx = tr_mixin.to_onnx()
        except RuntimeError as e:
            assert "Method enumerate_initial_types" in str(e)
        onx = tr_mixin.to_onnx(X.astype(np.float32))
        dump_data_and_model(
            X.astype(np.float32), tr, onx,
            basename="MixinPipeWay3OnnxMixin")

    def test_pipe_way4_mixin_fit(self):
        X = np.arange(20).reshape(10, 2)
        tr = wrap_as_onnx_mixin(make_pipeline(
            CustomOpTransformer(), KMeans(n_clusters=2)))
        tr.fit(X)
        onx = tr.to_onnx(X.astype(np.float32))
        dump_data_and_model(
            X.astype(np.float32), tr, onx,
            basename="MixinPipeWay4OnnxMixin2")


if __name__ == "__main__":
    unittest.main()
[ "noreply@github.com" ]
jtpils.noreply@github.com
a81398619c9e57b2cc4e2944013390a7e0a0f278
cad762658ab8326d7f43bba6f69df35a8b770e34
/pymarkdown/extension_impl.py
3cbc7feccab92d85f19b654a097b64f69975075c
[ "MIT" ]
permissive
ExternalRepositories/pymarkdown
9c248b519791a4c869d1e71fa405c06d15ce553b
479ace2d2d9dd5def81c72ef3b58bce6fb76f594
refs/heads/main
2023-08-28T03:45:25.536530
2021-10-31T19:39:22
2021-10-31T19:39:22
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,698
py
""" Module to allow for the details on the extension to be encapsulated. """ # pylint: disable=too-many-instance-attributes class ExtensionDetails: """ Class to allow for the details on the extension to be encapsulated. """ # pylint: disable=too-many-arguments def __init__( self, extension_id, extension_name, extension_description, extension_enabled_by_default, extension_version, extension_interface_version, extension_url=None, extension_configuration=None, ): ( self.__extension_id, self.__extension_name, self.__extension_description, self.__extension_enabled_by_default, self.__extension_version, self.__extension_interface_version, self.__extension_url, self.__extension_configuration, ) = ( extension_id, extension_name, extension_description, extension_enabled_by_default, extension_version, extension_interface_version, extension_url, extension_configuration, ) # pylint: enable=too-many-arguments @property def extension_id(self): """ Property to get the id of the extension. """ return self.__extension_id @property def extension_name(self): """ Property to get the name of the extension. """ return self.__extension_name @property def extension_description(self): """ Property to get the short description of the extension. """ return self.__extension_description @property def extension_enabled_by_default(self): """ Property to get whether the extension is enabled by default. """ return self.__extension_enabled_by_default @property def extension_version(self): """ Property to get the version of the extension. """ return self.__extension_version @property def extension_interface_version(self): """ Property to get the interface version of the extension. """ return self.__extension_interface_version @property def extension_url(self): """ Property to get the optional url for the extension. """ return self.__extension_url @property def extension_configuration(self): """ Property to get the optional configuration items for the extension. 
""" return self.__extension_configuration # pylint: enable=too-many-instance-attributes
[ "jack.de.winter@outlook.com" ]
jack.de.winter@outlook.com
e8d0c61636321004aa67ee998a47d378eddd38ae
52a4d869976a97498bdf56a8d0ff92cac138a136
/Bioinformatics Textbook Track/Chapter 2/rosalind_ba2e.py
906b2d24c3cd9f5d49524b64c49fbd545e591f26
[]
no_license
aakibinesar/Rosalind
d726369a787d848cc378976b886189978a60a3a5
375bbdbfb16bf11b2f980701bbd0ba74a1605cdb
refs/heads/master
2022-08-18T09:36:00.941080
2020-05-24T18:49:38
2020-05-24T18:49:38
264,722,651
0
0
null
2020-05-17T17:51:03
2020-05-17T17:40:59
null
UTF-8
Python
false
false
1,608
py
def greedymotifsearch(dna,k,t): best = [s[:k] for s in dna] for i in range(len(dna[0])-k+1): tempbest = [dna[0][i:i+k]] for m in range(1,t): matrix = motifsToProfile(tempbest) # different from ba2d tempbest.append(profileMostProbablekmer(dna[m],k,matrix)) if score(tempbest) < score(best): best = tempbest return best def score(motifs): z = zip(*motifs) thescore = 0 for string in z: score = len(string) - max([string.count('A'), string.count('C'), string.count('G'), string.count('T')]) thescore += score return thescore def motifsToProfile(motifs): d = {} n = float(len(motifs)) z = list(zip(*motifs)) for i in range(len(z)): d.setdefault('A', []).append((z[i].count('A')+1)/n/2) d.setdefault('C', []).append((z[i].count('C')+1)/n/2) d.setdefault('G', []).append((z[i].count('G')+1)/n/2) d.setdefault('T', []).append((z[i].count('T')+1)/n/2) return d def profileMostProbablekmer(text, k , matrix): maxp = None probablekmer = None for i in range(len(text)-k+1): kmer = text[i:i+k] pt = 1 for j in range(k): p = matrix[kmer[j]][j] pt *=p if maxp == None or pt > maxp: maxp = pt probablekmer = kmer return probablekmer with open('rosalind_ba2e.txt') as f: k,t = map(int,f.readline().rstrip().split(' ')) strings = [st.rstrip() for st in f.readlines()] print('\n'.join(greedymotifsearch(strings,k,t))) # bug: may be wrong , try several times
[ "noreply@github.com" ]
aakibinesar.noreply@github.com
3767e7c945abe719f74b7ea747b807c21211ab0e
2f89231a207b89acda1c46aba0b03572fb1da8aa
/main.py
b97b41680283a8d71d2b5d7c127bf9028765e375
[]
no_license
BentleyJOakes/wordpress_extractor
8649487562a7113effced06cd06a50ffcf53dff9
02891890b801515e2c7873ccb95f62692a354fa3
refs/heads/master
2023-01-30T01:50:43.290318
2020-12-09T22:18:13
2020-12-09T22:18:13
297,141,840
0
0
null
null
null
null
UTF-8
Python
false
false
788
py
import argparse from scrapy.crawler import CrawlerProcess from WordpressSpider import WordpressSpider from scrapy.utils.project import get_project_settings class WordPressExtractor: def __init__(self): self.process = CrawlerProcess(get_project_settings()) def parse(self, start_urls): for surl in start_urls: WordpressSpider.start_urls = [surl] self.process.crawl(WordpressSpider, domain = surl) self.process.start() if __name__ == "__main__": parser = argparse.ArgumentParser( usage="%(prog)s [OPTION] URL", description="Extract a WordPress site to HTML." ) parser.add_argument('files', nargs='*') args = parser.parse_args() wpe = WordPressExtractor() wpe.parse(list(args.files))
[ "bentleyjoakes@gmail.com" ]
bentleyjoakes@gmail.com
49aeb0cb38e74c85e659bb3dc53c57a0ff8bf0bd
c90674d955fe1399c0e99cf34437e583d1cf9fb9
/application6-webcam motion detector/facedetection/script_face_detector.py
9e1d75fde667176ccd297abb50c5542b6827f488
[]
no_license
TrellixVulnTeam/My_python_code_QQZ2
556878cbe4f8d6d92e71f48285a6d2439b10ca81
8cd8b697d92e1a79cce109baf560eeff27717ce8
refs/heads/master
2023-03-19T15:26:35.836114
2018-06-29T14:09:06
2018-06-29T14:09:06
null
0
0
null
null
null
null
UTF-8
Python
false
false
496
py
import cv2 face_cascade=cv2.CascadeClassifier("haarcascade_frontalface_default.xml") img=cv2.imread("people-6.jpg") gray_img=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) faces=face_cascade.detectMultiScale(gray_img,scaleFactor=1.05,minNeighbors=5) print(type(faces)) print(faces) for x,y,w,h in faces: img=cv2.rectangle(img, (x,y),(x+w,y+h),(0,255,0),3) resized=cv2.resize(img,(int(img.shape[1]/2),int(img.shape[0]/2))) cv2.imshow("Gray",img) cv2.waitKey(0) cv2.destroyAllWindows()
[ "apple@Apples-MacBook-Pro.local" ]
apple@Apples-MacBook-Pro.local
6b676d38806dcbf551a398d20280036d0eda2c59
4631798b64f2118b7d8e64483a14d7485163358b
/pizzaim.py
899a6b086debd422f3812a74737846fb8e184845
[]
no_license
royrowe/python
288680aba27b8c2d46368250b45fb1672427fe6a
dc7cebd56aa1bee7b2afd91e3a2a4b03f1775ba5
refs/heads/master
2020-04-15T20:05:02.587794
2019-01-10T02:53:08
2019-01-10T02:53:08
164,978,105
0
0
null
null
null
null
UTF-8
Python
false
false
305
py
#!/usr/bin/env python ''' @File :8.6.1_pizza.py @Copyright :luoming @Date : @Desc : ''' def make_pizza(size,*toppings): '''概述要制作的匹萨''' print("\nMaking a "+str(size)+"-inch pizza with the following toppings:") for topping in toppings: print("-"+ topping)
[ "your email" ]
your email
30d50006b8f6f9c5ac6db3ed6ed1ae6ae151b195
312d8dbbf980bf164f210e7935b17dc08d64ff87
/Model/repeat1_link_prediction_appeared_utilize_existing_attribute/STGGNN/main.py
97389c092854b118fe55455b3f04168c8e8d4bd8
[]
no_license
hsack6/OWGP_NBA
27dafbd6e59c17ce4a66e92132ee56782e2126bf
56656efb5884cd9f806e476a92c5e6485c71adeb
refs/heads/master
2023-02-25T09:52:05.165494
2021-02-03T12:44:04
2021-02-03T12:44:04
288,363,250
0
0
null
null
null
null
UTF-8
Python
false
false
6,315
py
import argparse import random import pandas as pd import torch import torch.nn as nn import torch.optim as optim from model import STGGNN from utils.train import train from utils.valid import valid from utils.test import test from utils.inference import inference from utils.data.dataset import BADataset from utils.data.dataloader import BADataloader from utils.pytorchtools import EarlyStopping import sys import os current_dir = os.path.dirname(os.path.abspath("__file__")) sys.path.append( str(current_dir) + '/../../../' ) from setting_param import Model_repeat1_link_prediction_appeared_utilize_existing_attribute_InputDir as InputDir from setting_param import Model_repeat1_link_prediction_appeared_utilize_existing_attribute_STGGNN_OutputDir as OutputDir from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_worker from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_batchSize from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_lr from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_init_L from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_annotation_dim from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_state_dim from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_output_dim from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_n_steps from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_niter from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_patience parser = argparse.ArgumentParser() parser.add_argument('--workers', type=int, help='number of data loading workers', default=repeat1_link_prediction_appeared_utilize_existing_attribute_worker) parser.add_argument('--batchSize', type=int, 
default=repeat1_link_prediction_appeared_utilize_existing_attribute_batchSize, help='input batch size') parser.add_argument('--state_dim', type=int, default=repeat1_link_prediction_appeared_utilize_existing_attribute_state_dim, help='GGNN hidden state size') parser.add_argument('--annotation_dim', type=int, default=repeat1_link_prediction_appeared_utilize_existing_attribute_annotation_dim, help='GGNN input annotation size') parser.add_argument('--output_dim', type=int, default=repeat1_link_prediction_appeared_utilize_existing_attribute_output_dim, help='Model output state size') parser.add_argument('--init_L', type=int, default=repeat1_link_prediction_appeared_utilize_existing_attribute_init_L, help='number of observation time step') parser.add_argument('--niter', type=int, default=repeat1_link_prediction_appeared_utilize_existing_attribute_niter, help='number of epochs to train for') parser.add_argument('--n_steps', type=int, default=repeat1_link_prediction_appeared_utilize_existing_attribute_n_steps, help='propogation steps number of GGNN') parser.add_argument('--patience', type=int, default=repeat1_link_prediction_appeared_utilize_existing_attribute_patience, help='Early stopping patience') parser.add_argument('--lr', type=float, default=repeat1_link_prediction_appeared_utilize_existing_attribute_lr, help='learning rate') parser.add_argument('--cuda', action='store_true', help='enables cuda') parser.add_argument('--verbal', action='store_true', help='print training info or not') parser.add_argument('--manualSeed', type=int, help='manual seed') opt = parser.parse_args() print(opt) if opt.manualSeed is None: opt.manualSeed = random.randint(1, 10000) print("Random Seed: ", opt.manualSeed) random.seed(opt.manualSeed) torch.manual_seed(opt.manualSeed) opt.dataroot = InputDir if opt.cuda: torch.cuda.manual_seed_all(opt.manualSeed) opt.L = opt.init_L def main(opt): train_dataset = BADataset(opt.dataroot, opt.L, True, False, False) train_dataloader = 
BADataloader(train_dataset, batch_size=opt.batchSize, \ shuffle=True, num_workers=opt.workers, drop_last=True) valid_dataset = BADataset(opt.dataroot, opt.L, False, True, False) valid_dataloader = BADataloader(valid_dataset, batch_size=opt.batchSize, \ shuffle=True, num_workers=opt.workers, drop_last=True) test_dataset = BADataset(opt.dataroot, opt.L, False, False, True) test_dataloader = BADataloader(test_dataset, batch_size=opt.batchSize, \ shuffle=True, num_workers=opt.workers, drop_last=True) all_dataset = BADataset(opt.dataroot, opt.L, False, False, False) all_dataloader = BADataloader(all_dataset, batch_size=opt.batchSize, \ shuffle=False, num_workers=opt.workers, drop_last=False) opt.n_edge_types = train_dataset.n_edge_types opt.n_node = train_dataset.n_node net = STGGNN(opt, kernel_size=2, n_blocks=1, state_dim_bottleneck=opt.state_dim, annotation_dim_bottleneck=opt.annotation_dim) net.double() print(net) criterion = nn.BCELoss() if opt.cuda: net.cuda() criterion.cuda() optimizer = optim.Adam(net.parameters(), lr=opt.lr) early_stopping = EarlyStopping(patience=opt.patience, verbose=True) os.makedirs(OutputDir, exist_ok=True) train_loss_ls = [] valid_loss_ls = [] test_loss_ls = [] for epoch in range(0, opt.niter): train_loss = train(epoch, train_dataloader, net, criterion, optimizer, opt) valid_loss = valid(valid_dataloader, net, criterion, opt) test_loss = test(test_dataloader, net, criterion, opt) train_loss_ls.append(train_loss) valid_loss_ls.append(valid_loss) test_loss_ls.append(test_loss) early_stopping(valid_loss, net, OutputDir) if early_stopping.early_stop: print("Early stopping") break df = pd.DataFrame({'epoch':[i for i in range(1, len(train_loss_ls)+1)], 'train_loss': train_loss_ls, 'valid_loss': valid_loss_ls, 'test_loss': test_loss_ls}) df.to_csv(OutputDir + '/loss.csv', index=False) net.load_state_dict(torch.load(OutputDir + '/checkpoint.pt')) inference(all_dataloader, net, criterion, opt, OutputDir) if __name__ == "__main__": main(opt)
[ "yamasaki.shohei@ist.osaka-u.ac.jp" ]
yamasaki.shohei@ist.osaka-u.ac.jp
9b382f90b9dd2ca867af5f2dcd6efb0088ac50d7
ae7e36ede21f38fce15d31caf12d235d76276d54
/tests/component/deb/test_import_dsc.py
41d081e5dee9013cb645ec380bed530384e7e13e
[]
no_license
pombreda/git-buildpackage-rpm
08e658847b5608b21f59abb2f7da0ebec333df45
bd72c30f029c2099ec3ed792b82eefcaedbc79b6
refs/heads/master
2020-12-30T19:45:18.763908
2015-03-31T15:59:37
2015-04-01T06:48:20
null
0
0
null
null
null
null
UTF-8
Python
false
false
3,483
py
# vim: set fileencoding=utf-8 : # # (C) 2013,2014 Guido Günther <agx@sigxcpu.org> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA import os from tests.component import (ComponentTestBase, ComponentTestGitRepository) from tests.component.deb import DEB_TEST_DATA_DIR from nose.tools import ok_ from gbp.scripts.import_dsc import main as import_dsc class TestImportDsc(ComponentTestBase): """Test importing of debian source packages""" def test_debian_import(self): """Test that importing of debian native packages works""" def _dsc(version): return os.path.join(DEB_TEST_DATA_DIR, 'dsc-native', 'git-buildpackage_%s.dsc' % version) dsc = _dsc('0.4.14') assert import_dsc(['arg0', dsc]) == 0 repo = ComponentTestGitRepository('git-buildpackage') self._check_repo_state(repo, 'master', ['master']) assert len(repo.get_commits()) == 1 os.chdir('git-buildpackage') dsc = _dsc('0.4.15') assert import_dsc(['arg0', dsc]) == 0 self._check_repo_state(repo, 'master', ['master']) assert len(repo.get_commits()) == 2 dsc = _dsc('0.4.16') assert import_dsc(['arg0', dsc]) == 0 self._check_repo_state(repo, 'master', ['master']) assert len(repo.get_commits()) == 3 def test_create_branches(self): """Test if creating missing branches works""" def _dsc(version): return os.path.join(DEB_TEST_DATA_DIR, 'dsc-3.0', 'hello-debhelper_%s.dsc' % version) dsc = 
_dsc('2.6-2') assert import_dsc(['arg0', '--verbose', '--pristine-tar', '--debian-branch=master', '--upstream-branch=upstream', dsc]) == 0 repo = ComponentTestGitRepository('hello-debhelper') os.chdir('hello-debhelper') assert len(repo.get_commits()) == 2 self._check_repo_state(repo, 'master', ['master', 'pristine-tar', 'upstream']) dsc = _dsc('2.8-1') assert import_dsc(['arg0', '--verbose', '--pristine-tar', '--debian-branch=foo', '--upstream-branch=bar', '--create-missing-branches', dsc]) == 0 self._check_repo_state(repo, 'master', ['bar', 'foo', 'master', 'pristine-tar', 'upstream']) commits, expected = len(repo.get_commits()), 2 ok_(commits == expected, "Found %d commit instead of %d" % (commits, expected))
[ "agx@sigxcpu.org" ]
agx@sigxcpu.org
3092838cfce71af4c34d48f0d6bbbaf825881518
bc565ca3361eb7119a6ff757201e550c20e1ea84
/Programmers/Lv1/lv1_가운데글자가져오기.py
a1018cc534f5910aa9e173681eb9b1391cd9d583
[]
no_license
seoyoungsoo/CodingTest-Python
1f9a3caaa3a424f4f7bd0e01a30664b183aaf5eb
d47cb46dd78f52b7cfa26846e8e77b63a931161e
refs/heads/master
2023-04-21T20:25:46.663686
2021-05-17T12:17:21
2021-05-17T12:17:21
351,439,429
0
0
null
null
null
null
UTF-8
Python
false
false
321
py
# 가운데 글자 가져오기 def solution(s): answer = '' slen = len(s) if slen % 2 == 0: mid = int(slen / 2 - 1) answer += s[mid] answer += s[mid + 1] else: mid = int(slen / 2) answer += s[mid] return answer # testcase 1 s = "abcde" print(solution(s))
[ "iulove37@naver.com" ]
iulove37@naver.com
ab908d2c9e608b5422fd8f8fed60465b577c9043
587dbdf730b6cc3e693efc5dca5d83d1dd35ee1a
/leetcode/1-300/25.py
933cf1a3090bdb12225bd9e756ef2cfc34a4c9f9
[]
no_license
Rivarrl/leetcode_python
8db2a15646d68e4d84ab263d8c3b6e38d8e3ea99
dbe8eb449e5b112a71bc1cd4eabfd138304de4a3
refs/heads/master
2021-06-17T15:21:28.321280
2021-03-11T07:28:19
2021-03-11T07:28:19
179,452,345
3
1
null
null
null
null
UTF-8
Python
false
false
1,034
py
# -*- coding: utf-8 -*- # ====================================== # @File : 25.py # @Time : 2020/5/16 0:07 # @Author : Rivarrl # ====================================== from algorithm_utils import * class Solution: """ [25. K 个一组翻转链表](https://leetcode-cn.com/problems/reverse-nodes-in-k-group/) """ @timeit def reverseKGroup(self, head: ListNode, k: int) -> ListNode: p = head n = 0 while p: n += 1 p = p.next dummy = last = ListNode(-1) p = head while n > 0: if n < k: last.next = p break q = p for _ in range(k-1): t = p.next p.next = t.next t.next = q q = t last.next = q last = p p = p.next n -= k return dummy.next if __name__ == '__main__': a = Solution() x = construct_list_node([1,2,3,4,5]) a.reverseKGroup(x, 3)
[ "1049793871@qq.com" ]
1049793871@qq.com
1f59d88201b38289c14c3124ed3a3da301396303
6c8d8b1c5b02e3181efd41b5b227f8905f474fa9
/clustering/clustering_categorical_peoples_interests/main.py
f024becc456028afc3f32d2de04be24de61c83c5
[]
no_license
DXV-HUST-SoICT/data_mining_mini_projects
9203990a620546fb61ee571090ef51e16242054d
c1010a4f39b5b114ad58ae1f3224435c3a84f50e
refs/heads/master
2022-11-01T08:38:21.141429
2020-06-16T12:01:32
2020-06-16T12:01:32
257,429,399
0
0
null
null
null
null
UTF-8
Python
false
false
3,780
py
from time import time import numpy as np import matplotlib.pyplot as plt import pandas as pd import joblib from sklearn import metrics from sklearn.cluster import KMeans, DBSCAN from sklearn.datasets import load_digits from sklearn.decomposition import PCA from sklearn.preprocessing import scale from sklearn import preprocessing np.random.seed(42) start = time() # X, y = load_digits(return_X_y=True) raw_data = pd.read_csv('./data/kaggle_Interests_group.csv') X = raw_data.drop(columns = ['group', 'grand_tot_interests']) y = raw_data['group'] X = dict(X) for key in X: for i in range(len(X[key])): if X[key][i] != 1: X[key][i] = 0 X = pd.DataFrame(X) data = scale(X) le = preprocessing.LabelEncoder() fit_list = y le.fit(fit_list) y = le.transform(y) labels = y n_samples, n_features = data.shape n_clusters = 10 print("n_clusters: %d, \t n_samples %d, \t n_features %d" % (n_clusters, n_samples, n_features)) print(82 * '_') print('init\ttime\tinertia\tsilhouette') def bench_k_means(estimator, name, data): t0 = time() estimator.fit(data) silhouette_score = metrics.silhouette_score(data, estimator.labels_, metric='euclidean') print('%-9s\t%.2fs\t%i\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f' % (name, (time() - t0), estimator.inertia_, metrics.homogeneity_score(labels, estimator.labels_), metrics.completeness_score(labels, estimator.labels_), metrics.v_measure_score(labels, estimator.labels_), metrics.adjusted_rand_score(labels, estimator.labels_), metrics.adjusted_mutual_info_score(labels, estimator.labels_), silhouette_score)) filename = './model/' + str(start) + '_' + name + '_n_clusters_' + str(n_clusters) + '_silhouette_score_' + str(silhouette_score) + '.sav' joblib.dump(estimator, filename) pca = PCA(n_components=n_clusters).fit(data) estimators = dict() estimators['k-means_k-means++'] = KMeans(init='k-means++', n_clusters=n_clusters, n_init=10) estimators['k-means_random'] = KMeans(init='random', n_clusters=n_clusters, n_init=10) estimators['k-means_PCA-based'] = 
KMeans(init=pca.components_, n_clusters=n_clusters, n_init=1) for name in estimators: # name = 'kmeans k-means++' estimator = estimators[name] bench_k_means(estimator=estimator, name=name, data=data) print(82 * '_') # ########################################################### # Visualize the results on PCA-reduced data reduced_data = PCA(n_components=2).fit_transform(data) kmeans = KMeans(init='k-means++', n_clusters=n_clusters, n_init=10) kmeans.fit(reduced_data) # Step size of the mesh. Decrease to increase the quality of the VQ. h = 0.02 # point in the mesh [x_min, x_max]x[y_min, y_max]. # Plot the decision boundary. For that, we will assign a color to each x_min, x_max = reduced_data[:, 0].min() - 1, reduced_data[:, 0].max() + 1 y_min, y_max = reduced_data[:, 1].min() - 1, reduced_data[:, 1].max() + 1 xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h)) # Obtain labels for each point in mesh. Use last trained model. Z = kmeans.predict(np.c_[xx.ravel(), yy.ravel()]) # Put the result into a color plot Z = Z.reshape(xx.shape) plt.figure(1) plt.clf() plt.imshow(Z, interpolation='nearest', extent=(xx.min(), xx.max(), yy.min(), yy.max()), cmap=plt.cm.Paired, aspect='auto', origin='lower') plt.plot(reduced_data[:, 0], reduced_data[:, 1], 'k.', markersize=2) # Plot the centroids as a white X centroids = kmeans.cluster_centers_ plt.scatter(centroids[:, 0], centroids[:, 1], marker='x', s=169, linewidths=3, color='w', zorder=10) plt.title('K-means clustering on the kaggle_Interests_group dataset (PCA-reduced data)\n' 'Centroids are marked with white cross') plt.xlim(x_min, x_max) plt.ylim(y_min, y_max) plt.xticks(()) plt.yticks(()) plt.show()
[ "vuong.1998@gmail.com" ]
vuong.1998@gmail.com
74f80006fb22a26cc258bf1607e458416ed5a188
a34e3d435f48ef87477d3ae13ca8a43015e5052c
/fw2/z.py
8986497000a50feaecab9d3e7771b510e72b2e36
[]
no_license
haehn/sandbox
636069372fc7bb7fd72b5fde302f42b815e8e9b0
e49a0a30a1811adb73577ff697d81db16ca82808
refs/heads/master
2021-01-22T03:39:03.415863
2015-02-11T23:16:22
2015-02-11T23:16:22
26,128,048
1
0
null
null
null
null
UTF-8
Python
false
false
561
py
#!/usr/bin/env python import os import sys import _zstack def print_help( script_name ): ''' ''' description = '' print description print print 'Usage: ' + script_name + ' INPUT_DIRECTORY' print # # entry point # if __name__ == "__main__": # always show the help if no arguments were specified if len(sys.argv) != 0 and len( sys.argv ) < 2: print_help( sys.argv[0] ) sys.exit( 1 ) input_dir = sys.argv[1] manager = _zstack.Manager(input_dir) manager.start() webserver = _zstack.WebServer(manager) webserver.start()
[ "haehn@seas.harvard.edu" ]
haehn@seas.harvard.edu
84b57fa7d45db79a5a17f56d0c3453631c7fc231
ab39a61ff9882e8b06ea6f0d2939dbccb7b75bd5
/src/views.py
1c48266966e228cc0569f6405f284cbb62f5bc27
[]
no_license
ShipraShalini/NotificationCenter
1be9c0774bfce2d75f3c31bef852150d7cb60281
e544a53645b411977c19c7317b80cefe35e8f720
refs/heads/master
2021-01-19T21:40:35.884152
2017-02-26T19:06:43
2017-02-26T19:06:43
82,533,008
0
0
null
null
null
null
UTF-8
Python
false
false
1,289
py
from apscheduler.jobstores.base import JobLookupError from django.http import HttpResponse from django.shortcuts import render_to_response from django.views import View from django.views.generic import FormView from src.forms import NotificationForm, ModifyNotificationForm from src.notification_utils import get_notifications class NotificationView(FormView): template_name = 'notification_form.html' form_class = NotificationForm def form_valid(self, form): job_id = form.schedule_notification() return render_to_response("success.html", context={"job_id_scheduled": job_id}) class ModifyNotificationView(FormView): template_name = 'notification_form.html' form_class = ModifyNotificationForm def form_valid(self, form): try: job_id, action = form.modify() except JobLookupError as e: return render_to_response("error.html", context={"etype": e.__class__.__name__, "message": e.message}) return render_to_response("success.html", context={"job_id_modified": job_id, "action": action}) class ListNotificationView(View): http_method_names = ['get'] def get(self, request): job_list = get_notifications() return HttpResponse(job_list, content_type='application/json')
[ "code.shipra@gmail.com" ]
code.shipra@gmail.com
a70e6430dc82722e95b7aa187dba90f4f65b2478
d860a2c1fa8fffc76a9101e4f91cecc80c27e802
/leetcode/388_Longest_Absolute_File_Path.py
b5c85ee31daee9b4d6fef541d42bacebabeddfca
[]
no_license
heroming/algorithm
80ea8f00ac049b0bc815140253568484e49c39e3
18e510f02bff92bc45cceb7090a79fbd40c209ec
refs/heads/master
2021-01-19T01:27:31.676356
2019-06-09T08:51:16
2019-06-09T08:51:16
62,952,889
3
1
null
null
null
null
UTF-8
Python
false
false
2,833
py
#!/usr/bin/env python # -*- coding: utf-8 -*- class ListNode: def __init__(self, x): self.val = x self.next = None def show(self) : it, v = self, [] while it : v.append(it.val) it = it.next print v class TreeNode(object): def __init__(self, x): self.val = x self.left = None self.right = None class TreeOrder(object): def preorder(self, root) : ans = [] self.dfs(root, ans, 0) print ans def inorder(self, root) : ans = [] self.dfs(root, ans, 1) print ans def postoder(self, root) : ans = [] self.dfs(root, ans, 2) print ans def dfs(self, root, ans, flag) : if root == None : return if (flag == 0) : ans.append(root.val) self.dfs(root.left, ans, flag) if (flag == 1) : ans.append(root.val) self.dfs(root.right, ans, flag) if (flag == 2) : ans.append(root.val) class TrieNode(object) : n = 26 def __init__(self) : self.finish = False self.data = [None for i in range(self.n)] class MinHeap(object) : def __init__(self) : self.data = [] def heapify(self, k) : while True : idx = k l, r = (k << 1) | 1, (k + 1) << 1 if l < len(self.data) and self.data[l] < self.data[idx] : idx = l if r < len(self.data) and self.data[r] < self.data[idx] : idx = r if idx == k : break self.data[k], self.data[idx] = self.data[idx], self.data[k] k = idx def push(self, x) : k = len(self.data) self.data.append(x) while k > 0 : p = (k - 1) >> 1 if self.data[p] <= self.data[k] : break self.data[p], self.data[k] = self.data[k], self.data[p] k = p def top(self) : if not self.data : return None return self.data[0] def pop(self) : if not self.data : return None ret = self.data[0] self.data[0] = self.data[-1] self.data.pop() self.heapify(0) return ret def size() : return len(self.data) def isEmpty(self) : return len(self.data) == 0 class Solution(object) : def lengthLongestPath(self, s) : v = [] ans, cnt = 0, 0 t = s.split("\n") for x in t : k = x.count('\t') while k < len(v) : cnt -= len(v[-1]) v.pop() v.append(x[k:]) cnt += len(v[-1]) if '.' 
in v[-1] : ans = max(ans, cnt + len(v) - 1) return ans if __name__ == '__main__' : so = Solution()
[ "heroming7788@gmail.com" ]
heroming7788@gmail.com
3b7225d69c4c077284ccdc74dae85cbbc5634161
37530ffbb3d14cc9c7307fb4b1a276d9d26516dc
/api/migrations/0015_auto_20180831_1847.py
cf66e98858b6394528d104f6c5617359338775ad
[]
no_license
aballah-chamakh/Repair-api
567a4f81107569bec67072fa598aa76384343752
22574a9b69df8d62c5f027146fc21d6abf4b245a
refs/heads/master
2020-04-10T12:46:32.309178
2018-12-09T11:20:07
2018-12-09T11:20:07
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,494
py
# Generated by Django 2.0.7 on 2018-08-31 16:47 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('api', '0014_auto_20180630_0054'), ] operations = [ migrations.CreateModel( name='Comment', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('content', models.TextField()), ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.AlterField( model_name='offer', name='categorie', field=models.CharField(choices=[('web development', 'Web development'), ('all categories', 'All categories'), ('web development', 'Web designe')], max_length=30), ), migrations.AlterField( model_name='profile', name='city', field=models.CharField(choices=[('bizert', 'Bizert'), ('city', 'City'), ('ariana', 'Ariana'), ('tunis', 'Tunis'), ('sfax', 'Sfax')], max_length=30), ), migrations.AddField( model_name='comment', name='product', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Offer'), ), ]
[ "chamakhabdallah8@gmail.com" ]
chamakhabdallah8@gmail.com
2aa60f59119f97962eebc18716cb636a07ebabe5
255e19ddc1bcde0d3d4fe70e01cec9bb724979c9
/dockerized-gists/dc3335ee46ab9f650b19885e8ade6c7a/snippet.py
cc0b25145b92da727de388094323b861968c2325
[ "MIT" ]
permissive
gistable/gistable
26c1e909928ec463026811f69b61619b62f14721
665d39a2bd82543d5196555f0801ef8fd4a3ee48
refs/heads/master
2023-02-17T21:33:55.558398
2023-02-11T18:20:10
2023-02-11T18:20:10
119,861,038
76
19
null
2020-07-26T03:14:55
2018-02-01T16:19:24
Python
UTF-8
Python
false
false
1,929
py
#!/bin/python from flashtext.keyword import KeywordProcessor import random import string import re import time def get_word_of_length(str_length): # generate a random word of given length return ''.join(random.choice(string.ascii_lowercase) for _ in range(str_length)) # generate a list of 100K words of randomly chosen size all_words = [get_word_of_length(random.choice([3, 4, 5, 6, 7, 8])) for i in range(100000)] print('Count | FlashText | Regex ') print('-------------------------------') for keywords_length in range(1, 20002, 1000): # chose 5000 terms and create a string to search in. all_words_chosen = random.sample(all_words, 5000) story = ' '.join(all_words_chosen) # get unique keywords from the list of words generated. unique_keywords_sublist = list(set(random.sample(all_words, keywords_length))) # compile regex # source: https://stackoverflow.com/questions/6116978/python-replace-multiple-strings rep = dict([(key, '_keyword_') for key in unique_keywords_sublist]) compiled_re = re.compile("|".join(rep.keys())) # add keywords to flashtext keyword_processor = KeywordProcessor() for keyword in unique_keywords_sublist: keyword_processor.add_keyword(keyword, '_keyword_') # time the modules start = time.time() _ = keyword_processor.replace_keywords(story) mid = time.time() _ = compiled_re.sub(lambda m: rep[re.escape(m.group(0))], story) end = time.time() # print output print(str(keywords_length).ljust(6), '|', "{0:.5f}".format(mid - start).ljust(9), '|', "{0:.5f}".format(end - mid).ljust(9), '|',) # Count | FlashText | Regex # ------------------------------- # 1 | 0.02141 | 0.00004 | # 1001 | 0.02498 | 0.13180 | # 5001 | 0.03147 | 0.59799 | # 10001 | 0.02858 | 1.08717 | # 15001 | 0.02734 | 1.51461 | # 20001 | 0.03109 | 1.76158 |
[ "gistshub@gmail.com" ]
gistshub@gmail.com
c164d54f8fd02d4a2faf4337c691176cb3244813
11d3f0fcf4a968a6b612f2b85d242cbbbabc7e07
/services/convert-document/test.py
ba6972a0ac5a807125ea05f14912997425a99161
[ "MIT" ]
permissive
OpenUpSA/eskom-enquiry
21d728e6e7ceceed84fe646a439d9aee8c4222d5
71dfa8aa00688a83df24a4afaeb9e5639565bbbb
refs/heads/master
2023-05-11T11:23:18.561070
2018-08-24T12:51:55
2018-08-24T12:51:55
137,739,474
0
1
MIT
2023-05-01T19:47:30
2018-06-18T10:40:42
Python
UTF-8
Python
false
false
654
py
import os import sys import signal import requests from multiprocessing import Pool signal.signal(signal.SIGINT, signal.SIG_IGN) url = os.environ.get('UNOSERVICE_URL') def request(i): path = sys.argv[1] files = {'file': open(path, 'rb')} data = {'extension': 'docx'} # print('send request') res = requests.post(url, files=files, data=data) # message = res.text if res.status_code != 200 else '' print(res.status_code, res.content[:20]) # print(res.content == open(path, 'rb').read()) pool = Pool(20) try: pool.map(request, range(10000)) except KeyboardInterrupt: pool.terminate() pool.join() # request(5)
[ "friedrich@pudo.org" ]
friedrich@pudo.org
1c7a40531f47be5347ae4c77918c932197355793
982fac39f5a2232f3976789bbc265cdc2cc8be08
/bqskit/compiler/search/generator.py
81d1f4eeac8e6ce93567fb2d735539f9377ff711
[ "LicenseRef-scancode-unknown-license-reference", "BSD-2-Clause" ]
permissive
mtreinish/bqskit
8516575eef46241e426f78b96d047987e6c9b2df
3083218c2f4e3c3ce4ba027d12caa30c384d7665
refs/heads/master
2023-08-06T20:11:01.278308
2021-08-09T19:55:30
2021-08-09T19:55:30
null
0
0
null
null
null
null
UTF-8
Python
false
false
884
py
"""This module implements the LayerGenerator base class.""" from __future__ import annotations import abc from typing import Any from bqskit.ir.circuit import Circuit from bqskit.qis.state.state import StateVector from bqskit.qis.unitary.unitarymatrix import UnitaryMatrix class LayerGenerator(abc.ABC): """ The LayerGenerator base class. Search based synthesis uses the layer generator to generate the root node and the successors of a node. """ @abc.abstractmethod def gen_initial_layer( self, target: UnitaryMatrix | StateVector, data: dict[str, Any], ) -> Circuit: """Generate the initial layer for search.""" @abc.abstractmethod def gen_successors( self, circuit: Circuit, data: dict[str, Any], ) -> list[Circuit]: """Generate the successors of a circuit node."""
[ "edyounis123@gmail.com" ]
edyounis123@gmail.com
b2b5040251b52f2a7d00a3fcd69aff1df9c96f97
163bbb4e0920dedd5941e3edfb2d8706ba75627d
/Code/CodeRecords/2408/60648/267250.py
42fd2a73853aa61e8e1101e5966296a105b18cc4
[]
no_license
AdamZhouSE/pythonHomework
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
ffc5606817a666aa6241cfab27364326f5c066ff
refs/heads/master
2022-11-24T08:05:22.122011
2020-07-28T16:21:24
2020-07-28T16:21:24
259,576,640
2
1
null
null
null
null
UTF-8
Python
false
false
793
py
class Solution: def numPrimeArrangements(self, n: int) -> int: # (sum(prime)) 的阶乘 乘以 (n - sum(prime)) 的阶乘 def countPrimes(n: int) -> int: if n < 2: return 0 prime = [1] * (n + 1) prime[0] = prime[1] = 0 for i in range(2, int(n**0.5) +1): if prime[i] == 1: prime[i*i:n + 1:i] = [0]*len(prime[i*i:n + 1:i]) return sum(prime) def func(n): if n == 0 or n == 1: return 1 else: return (n * func(n - 1)) return func(countPrimes(n)) * func(n - countPrimes(n)) % (10**9 + 7) if __name__=="__main__": s=int(input()) x=Solution().numPrimeArrangements(s) print(x)
[ "1069583789@qq.com" ]
1069583789@qq.com
b09d38372acd5147033c1441a46c1221bd6cffee
5442f2f71e36419ad23894d3dd527837029e42f3
/ramda/memoize.py
90f43e8a162f1d4455b1acae4b5aa05196ba2aa1
[ "MIT" ]
permissive
zequequiel/ramda.py
5e6a23765598550ecaf1a76b785f4fde7bc11ea4
eac054163de535520659ce6269536355d5e89865
refs/heads/master
2020-09-11T16:43:07.989407
2019-09-06T12:07:26
2019-09-06T12:10:43
null
0
0
null
null
null
null
UTF-8
Python
false
false
533
py
import hashlib import json from ramda.memoize_with import memoize_with def memoize(f): """Creates a new function that, when invoked, caches the result of calling fn for a given argument set and returns the result. Subsequent calls to the memoized fn with the same argument set will not result in an additional call to fn; instead, the cached result for that set of arguments will be returned""" def hash(*args): return hashlib.sha256(json.dumps(args).encode("utf-8")).hexdigest() return memoize_with(hash, f)
[ "slava.ganzin@gmail.com" ]
slava.ganzin@gmail.com
6907cd3956eefc7814e16763b87275820f02964b
f0d713996eb095bcdc701f3fab0a8110b8541cbb
/xdSKkXQkkMroNzq8C_8.py
6a7c8f49d0986abe2427cee6cbaff07659418e65
[]
no_license
daniel-reich/turbo-robot
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
a7a25c63097674c0a81675eed7e6b763785f1c41
refs/heads/main
2023-03-26T01:55:14.210264
2021-03-23T16:08:01
2021-03-23T16:08:01
350,773,815
0
0
null
null
null
null
UTF-8
Python
false
false
521
py
""" Create a function that counts how many D's are in a sentence. ### Examples count_d("My friend Dylan got distracted in school.") ➞ 4 count_d("Debris was scattered all over the yard.") ➞ 3 count_d("The rodents hibernated in their den.") ➞ 3 ### Notes * Your function must be case-insensitive. * Remember to `return` the result. * Check the **Resources** for help. """ def count_d(sentence): d = 'd' D = 'D' count = sentence.count(d) + sentence.count(D) return count
[ "daniel.reich@danielreichs-MacBook-Pro.local" ]
daniel.reich@danielreichs-MacBook-Pro.local
78d5a9d6714ee5729fc953d355fd9ece21e45447
b507751bb8adbf0c3270e399dab00259371afd05
/setup.py
aa38ce6ee4a06ff0d1640bb6106e9f645240be61
[]
no_license
sdkwe/pywe-component-token
2c7bd3fe05b95dcd70b5c49f516e8936519b9a73
020d1be06ffd0d8a79e5cb34dd9ecb4a09d740ed
refs/heads/master
2020-03-07T19:05:16.951405
2018-05-01T20:10:31
2018-05-01T20:10:31
127,661,479
0
0
null
null
null
null
UTF-8
Python
false
false
1,205
py
# -*- coding: utf-8 -*- from setuptools import setup version = '1.1.0' setup( name='pywe-component-token', version=version, keywords='Wechat Weixin Component Token', description='Wechat Component Token Module for Python.', long_description=open('README.rst').read(), url='https://github.com/sdkwe/pywe-component-token', author='Hackathon', author_email='kimi.huang@brightcells.com', packages=['pywe_component_token'], py_modules=[], install_requires=['pywe_base', 'pywe_component_ticket', 'pywe_exception', 'pywe_storage'], classifiers=[ 'License :: OSI Approved :: BSD License', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Topic :: Software Development :: Libraries :: Python Modules', ], )
[ "brightcells@gmail.com" ]
brightcells@gmail.com
69380cac821df4b434fe8e0bbea627355081cb50
161fd6370ffa0b35ecd50719d6266224da597ee0
/Python/Django/sports/views2.py
23bfca83cec0e4ed4b02022eb70bc98072091a6a
[]
no_license
ebergstein/DojoAssignments
a30fd8b36442bff2a4253902a591ad11f191fc12
3ad9ac65073c733ead32b93ce4be19af5369fccf
refs/heads/master
2021-06-19T09:48:23.100713
2017-06-30T04:24:35
2017-06-30T04:24:35
82,743,546
0
1
null
null
null
null
UTF-8
Python
false
false
472
py
from django.shortcuts import render, redirect from .models import League, Team, Player from . import team_maker def index(request): context = { "leagues": League.objects.filter(name__contains="Womens"), """"teams": Team.objects.all(), "players": Player.objects.all(),""" } return render(request, "leagues/index.html", context) def make_data(request): team_maker.gen_leagues(10) team_maker.gen_teams(50) team_maker.gen_players(200) return redirect("index")
[ "ebergstein@sbcglobal.net" ]
ebergstein@sbcglobal.net
e2cc167ab3cd9c985b8979c1c3ad79f7754354a2
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p03494/s213100148.py
504f8df2d6619e1e1087cee14a9f5a8c697d6b08
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
173
py
N = int(input()) A = list(map(int,input().split())) def X(x): y = 0 while x%2==0: x = x/2 y = y+1 return y a = [X(A[i]) for i in range(0,N)] print(min(a))
[ "66529651+Aastha2104@users.noreply.github.com" ]
66529651+Aastha2104@users.noreply.github.com
7e006593e39f695a39c0508dbad1b2e8fa0889c9
90be755a741d6c93dd59d4acef8b27b4cf93ff54
/src/elsia/scripts/coarse2fine.py
4012618330283e3d1b0f119612ad5aaeaa7aa7c6
[]
no_license
karry3775/Elsia_ws
05aa5786a6f3f64b70c7ceafead6d72d4ca18bab
031f8006e9a439d9947be5ed288a666f20fca3a7
refs/heads/master
2023-02-21T05:21:10.842475
2021-01-23T14:58:57
2021-01-23T15:21:46
326,032,434
0
0
null
null
null
null
UTF-8
Python
false
false
5,529
py
#!/usr/bin/env python import rospy from nav_msgs.msg import Odometry # to get the laser cross track and abs yaw from std_msgs.msg import String # to get the block_val message from geometry_msgs.msg import Twist # for cmd_vel from tf.transformations import euler_from_quaternion, quaternion_from_euler import math as m import time rospy.init_node("coarse2fine_act_prop_node") aisle_pub = rospy.Publisher("/aisle_odom", Odometry, queue_size=10) # global variables abs_yaw = 0.0 ct_dist = 0.0 f = 63.06 # 50 # mm alpha = 0.264583 # 0.20977 lat = 4.0 # 7.6 # m (total cross track distance) column_gap = 2.25 # m # Global estimates X_aisle = [] Y_aisle = [] odom_depth = 0.0 depth_thresh = column_gap / 3 last_laser_odom_x = 0.0 vel = 0.0 last_time_stamp = time.time() def laser_cb(msg): global ct_dist y = msg.pose.pose.position.y ct_dist = -y def aisle_ct_cb(msg): global ct_dist y = msg.pose.pose.position.y ct_dist = -y def abs_yaw_cb(msg): global abs_yaw ori = msg.pose.pose.orientation (_, _, yaw) = euler_from_quaternion([ori.x, ori.y, ori.z, ori.w]) abs_yaw = yaw def cmd_vel_cb(msg): global vel vel = msg.linear.x def getDepth(l_r, px): camera_front_offset = 0.1 ct_camera = ct_dist - (camera_front_offset) * m.sin(abs_yaw) if l_r == "l": print("it is l") # find xa and ya xa = px * (alpha/1000) * m.cos(abs_yaw) - \ (f/1000) * m.sin(abs_yaw) + ct_camera ya = px * (alpha/1000) * m.sin(abs_yaw) + (f/1000) * m.cos(abs_yaw) depth = ((-(lat/2) - ct_camera) * ya) / (xa - ct_camera) else: # find xa and ya xa = px * (alpha/1000) * m.cos(abs_yaw) - \ (f/1000) * m.sin(abs_yaw) + ct_camera ya = px * (alpha/1000) * m.sin(abs_yaw) + (f/1000) * m.cos(abs_yaw) print("it is r") depth = (((lat/2) - ct_camera) * ya) / (xa - ct_camera) depth = depth + (camera_front_offset) * m.cos(abs_yaw) print("depth value for abs_yaw = {}, ct_camera = {}, px = {} is: {}".format( abs_yaw, ct_camera, px, depth)) return depth def ignoreErratic(odom_depth, odom_depth_1, odom_depth_0): if abs(odom_depth - odom_depth_1) 
> 2: odom_depth = odom_depth_0 else: odom_depth = odom_depth_1 return odom_depth def block_val_cb(msg): global X_aisle, Y_aisle, odom_depth, last_laser_odom_x, last_time_stamp ##################Find delta based on cmd_vel############################### cur_time_stamp = time.time() delta_time = cur_time_stamp - last_time_stamp last_time_stamp = cur_time_stamp delta_x = vel * delta_time * m.cos(abs_yaw) ############################################################################ # msg is of the type string separated by commas --> block_count, px, l_r data = msg.data.split(",") print("####################################################") # print("data: {}".format(data)) block_count = float(data[0]) px = float(data[1]) # in pixels which is basically the x value l_r = data[2] px0 = float(data[3]) # second last value l_r0 = data[4] # second last value print("original px was: {}".format(px)) px = px - 200 px0 = px0 - 200 depth = getDepth(l_r, px) depth0 = getDepth(l_r0, px0) odom_depth_1 = (block_count * column_gap) - depth + column_gap odom_depth_0 = ((block_count - 1) * column_gap) - depth0 + column_gap odom_depth_final = ignoreErratic(odom_depth, odom_depth_1, odom_depth_0) print("prev_odom_depth : {}, odom_depth_0 : {}, odom_depth_1 : {}".format( odom_depth, odom_depth_0, odom_depth_1)) # manually setting odom_depth to be weight = 0.98 odom_depth = (1 - weight) * odom_depth_final + \ weight * (odom_depth + delta_x) print("depth : {}, odom_depth : {}".format(depth, odom_depth)) # append to the trajectory estimates Y_aisle.append(-ct_dist) X_aisle.append(odom_depth) # publish to aisle_odom q = quaternion_from_euler(0, 0, abs_yaw) odom_msg = Odometry() odom_msg.pose.pose.position.x = odom_depth odom_msg.pose.pose.position.y = -ct_dist # putting raw aisle odometry as the z value odom_msg.pose.pose.position.z = 0.0#odom_depth_1 (we were obviously using this for some analytics, but for lets put the correct value here) odom_msg.pose.pose.orientation.x = q[0] 
odom_msg.pose.pose.orientation.y = q[1] odom_msg.pose.pose.orientation.z = q[2] odom_msg.pose.pose.orientation.w = q[3] odom_msg.header.stamp = rospy.Time.now() odom_msg.header.frame_id="/odom" odom_msg.child_frame_id="/aisle_link" aisle_pub.publish(odom_msg) if __name__ == "__main__": try: abs_yaw_sub = rospy.Subscriber( "/abs_orientation_odom", Odometry, abs_yaw_cb) # using the ceil's yaw # abs_yaw_sub = rospy.Subscriber("/ground_truth/state", Odometry, abs_yaw_cb) # using gt yaw # laser_sub = rospy.Subscriber("/odom_rf2o_corrected_ceil", Odometry, laser_cb)# using the ceil's y(ct_dist) # using the aisle cross track aisle_ct_sub = rospy.Subscriber("/aisle_ct", Odometry, aisle_ct_cb) # laser_sub = rospy.Subscriber("/ground_truth/state", Odometry, laser_cb) # using gt y(ct_dist) block_val_sub = rospy.Subscriber("/block_val", String, block_val_cb) cmd_vel_sub = rospy.Subscriber("/jacky/cmd_vel", Twist, cmd_vel_cb) rospy.spin() except rospy.ROSInterruptException: pass
[ "kartikprakash3775@gmail.com" ]
kartikprakash3775@gmail.com
73f12fed22111fd0e30c62b8b0e51fb76df8a6bd
c1d5aeaa30418507610d3f1364ef24759b41e86d
/tango_with_django_project/settings.py
a73a709671e694d9d025eba696db1505d946ba6b
[]
no_license
kydkang/workspace8
7c368f3549e40dd62bf5aa126747aa83f14f09a1
bc17b6a1cdf0e4b025e844eb5cf33f53a629875f
refs/heads/master
2020-03-12T13:49:50.047181
2018-04-23T06:44:12
2018-04-23T06:44:12
130,651,578
0
0
null
null
null
null
UTF-8
Python
false
false
3,145
py
""" Django settings for tango_with_django_project project. Generated by 'django-admin startproject' using Django 2.0.4. For more information on this file, see https://docs.djangoproject.com/en/2.0/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.0/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'm)7(y*!)ut0)qk)o9(=pr@(b77@!70h^vjogh26us72@6b%x)r' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'tango_with_django_project.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'tango_with_django_project.wsgi.application' # Database # https://docs.djangoproject.com/en/2.0/ref/settings/#databases DATABASES = { 'default': { 
'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/2.0/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.0/howto/static-files/ STATIC_URL = '/static/'
[ "you@example.com" ]
you@example.com
fc8dd96f4980bb48684fbfa0034cb7b99912c982
96dcea595e7c16cec07b3f649afd65f3660a0bad
/tests/components/nextdns/test_init.py
fb9ea74509e9554bd31f080eb5f5aa471a463f55
[ "Apache-2.0" ]
permissive
home-assistant/core
3455eac2e9d925c92d30178643b1aaccf3a6484f
80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743
refs/heads/dev
2023-08-31T15:41:06.299469
2023-08-31T14:50:53
2023-08-31T14:50:53
12,888,993
35,501
20,617
Apache-2.0
2023-09-14T21:50:15
2013-09-17T07:29:48
Python
UTF-8
Python
false
false
1,806
py
"""Test init of NextDNS integration.""" from unittest.mock import patch from nextdns import ApiError from homeassistant.components.nextdns.const import CONF_PROFILE_ID, DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_API_KEY, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from . import init_integration from tests.common import MockConfigEntry async def test_async_setup_entry(hass: HomeAssistant) -> None: """Test a successful setup entry.""" await init_integration(hass) state = hass.states.get("sensor.fake_profile_dns_queries_blocked_ratio") assert state is not None assert state.state != STATE_UNAVAILABLE assert state.state == "20.0" async def test_config_not_ready(hass: HomeAssistant) -> None: """Test for setup failure if the connection to the service fails.""" entry = MockConfigEntry( domain=DOMAIN, title="Fake Profile", unique_id="xyz12", data={CONF_API_KEY: "fake_api_key", CONF_PROFILE_ID: "xyz12"}, ) with patch( "homeassistant.components.nextdns.NextDns.get_profiles", side_effect=ApiError("API Error"), ): entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) assert entry.state is ConfigEntryState.SETUP_RETRY async def test_unload_entry(hass: HomeAssistant) -> None: """Test successful unload of entry.""" entry = await init_integration(hass) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert entry.state is ConfigEntryState.LOADED assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() assert entry.state is ConfigEntryState.NOT_LOADED assert not hass.data.get(DOMAIN)
[ "noreply@github.com" ]
home-assistant.noreply@github.com
c268cd0eb69a97954d3fe683c3c669a4afee24df
02e9e67f6fd0d7dd6c88620f78923afa0ea639d1
/Classes/Examples/Lesson 2/animal_tester.py
adaf99c2bc0d252784ad1bd762666685158ece31
[]
no_license
fbhs-cs/PreAPCS-classcode
55823b0f16333c35650785b4dd92ae03cd6b4768
ef1355c04fcdbb3c3d4e16c4aae20acfbc833c1c
refs/heads/master
2021-03-27T19:08:49.479105
2018-04-06T21:21:23
2018-04-06T21:21:23
103,399,299
0
0
null
null
null
null
UTF-8
Python
false
false
448
py
import animal def main(): print("Creating dogs...") try: dog1 = animal.Dog(name="Drama",gender="female",age=14,breed="Chow/Lab") dog2 = animal.Dog(name="Santa's Little Helper",gender="male",age=3,breed="Greyhound") dog3 = animal.Dog(name="Einstein",gender="male",age=38,breed="Sheepdog") except: print("Something is wrong with your __init__ method in Dog") if __name__ == "__main__": main()
[ "cpurdy@flourbluffschools.net" ]
cpurdy@flourbluffschools.net
0dc63cb0e85c77c53207e0d69ae4bae808320782
ce7c414f098e3ea6674bec05d40345d0047bdbe5
/basic07.py
4d9fdf47767336fb17e20427de88584fafe1803e
[]
no_license
jun-yoshiyoshi/python_plactice100
0545783d199c04ebd5b53b354c5dc50c52a99b6e
466dc39341cad50594c957ae60d5d00c254d06f7
refs/heads/main
2023-08-11T06:29:41.711413
2021-09-14T00:59:38
2021-09-14T00:59:38
406,175,270
0
0
null
null
null
null
UTF-8
Python
false
false
303
py
# if,elif,else fizzbuzz ワンライナー for i in range(1, 31): if i % 15 == 0: print('fizzbuzz') elif i % 3 == 0: print('fizz') elif i % 5 == 0: print('buzz') else: print(i) for i in range(1, 51): print("Fizz"*(i % 3 < 1)+"Buzz"*(i % 5 < 1) or i)
[ "yoshi.jun.yoshi.jun0621@gmail.com" ]
yoshi.jun.yoshi.jun0621@gmail.com
41c70140030ead7bcb3232f412ecfb56956c7900
22ccc673a522b52f2678b6ac96e3ff2a104864ff
/digest/migrations/0015_auto_20150731_0859.py
fbd6d24eea0abcd05e835877f6ddb37710feb92a
[]
no_license
ivlevdenis/pythondigest
07e448da149d92f37b8ce3bd01b645ace1fa0888
f8ccc44808a26960fb69a4c4c3491df3e6d3d24e
refs/heads/master
2021-01-18T02:09:42.121559
2016-05-15T22:44:34
2016-05-15T22:44:34
58,350,368
0
0
null
2016-05-09T05:21:39
2016-05-09T05:21:39
null
UTF-8
Python
false
false
486
py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [('digest', '0014_auto_20150731_0859'), ] operations = [migrations.AlterField( model_name='item', name='tags', field=models.ManyToManyField(to='digest.Tag', verbose_name='\u0422\u044d\u0433\u0438', blank=True), ), ]
[ "sapronov.alexander92@gmail.com" ]
sapronov.alexander92@gmail.com
865946e4d753262386d009e877aa101002220064
48cb50826c7774f320c9b4d51a287dcb4e805d8e
/jsk_pcl_ros_utils/cfg/PolygonArrayAreaLikelihood.cfg
5c6645133f23ae642c1b94f2a27d657fd33ee98b
[]
no_license
huangxliang/jsk_recognition
921d28771f5a1c1dde6dcdaa9289dafdde81b310
8c6b46d0ca9ccb00f033ceef305b35edafb32162
refs/heads/master
2021-01-18T12:56:05.945787
2016-03-11T05:50:08
2016-03-11T05:50:08
null
0
0
null
null
null
null
UTF-8
Python
false
false
505
cfg
#!/usr/bin/env python # set up parameters that we care about PACKAGE = 'jsk_pcl_ros_utils' try: import imp imp.find_module(PACKAGE) from dynamic_reconfigure.parameter_generator_catkin import *; except: import roslib; roslib.load_manifest(PACKAGE) from dynamic_reconfigure.parameter_generator import *; from math import pi gen = ParameterGenerator () gen.add("area", double_t, 0, "", 1.0, 0.0, 10.0) exit (gen.generate (PACKAGE, "jsk_pcl_ros_utils", "PolygonArrayAreaLikelihood"))
[ "garaemon@gmail.com" ]
garaemon@gmail.com
ebf1ee4cdfea3e7fa89adbca475840bc63d22534
dc5d8dbbb4c6f296a95bea7069d13de38db00ac6
/lines.py
2d7e386e0f375c07c2fc1d2ac950771ef6d3ee0d
[]
no_license
the-isf-academy/drawing
906591afa279529cb7a6208183188b01651b2fcb
652fea4754d3a465b55aaeb6673d989402a06350
refs/heads/master
2021-10-14T11:34:57.447129
2021-09-28T02:45:15
2021-09-28T02:45:15
205,507,729
0
0
null
null
null
null
UTF-8
Python
false
false
4,585
py
# lines.py # by Chris Proctor # Helper functions for playing with how the turtle draws # ============================================================================= # ! Advanced ! # ============================================================================= # This module contains some fancy code that we don't expect you to understand # yet. That's ok--as long as we know how to use code, we don't have to # understand everything about it. (Do you understand everything about # MacOS?) Check out the README for documentation on how to use this code. # Of course, if you want to dig into this module, feel free. You can ask a # teacher about it if you're interested. # ============================================================================= from itertools import cycle from turtle import Turtle, pendown, penup, pencolor class Segmenter: """ Breaks a distance (length) into segments, which are yielded one at a time. Whatever's left over at the end gets yielded too. If start_at is given, the pattern is offset by this much. 
For example: >>> from drawing.lines import Segmenter >>> list(Segmenter([1, 5]).segment(20)) [1, 5, 1, 5, 1, 5, 1, 1] """ def __init__(self, pattern): "Should be initialized with a pattern like [(10, penup), (20, pendown)]" self.pattern = pattern self.remainder = 0 self.remainder_state = None self.pattern_cycle = cycle(pattern) def segment(self, length): """ Segments `length` into chunks according to the pattern, yielding each chunk along with a boolean indicating whether there is more coming """ if self.remainder > 0: if length > self.remainder: yield self.remainder, self.remainder_state length -= self.remainder self.remainder = 0 else: yield length, self.remainder_state self.remainder -= length length = 0 if length > 0: for (seg, state) in self.pattern_cycle: if length >= seg: yield seg, state length -= seg else: if length > 0: yield length, state self.remainder = seg - length self.remainder_state = state return def go_segmented(turtle, distance): "This is the fake go function that we're going to inject into the turtle" for seg, state in turtle.segmenter.segment(distance): state() turtle.true_go(seg) def color_setter_factory(color): "Returns a function that sets the pencolor" def set_color(): pencolor(color) return set_color class dashes: """ A context manager which causes a code block to draw with dashes. This is accomplished by briefly hacking the Turtle. Sorry! 
""" def __init__(self, spacing=20): self.spacing = spacing def __enter__(self): Turtle.segmenter = Segmenter([(self.spacing, pendown), (self.spacing, penup)]) Turtle.true_go = Turtle._go Turtle._go = go_segmented def __exit__(self, exc_type, exc_value, traceback): Turtle._go = Turtle.true_go del Turtle.true_go class dots: "A context manager which causes a code block to draw with dots" def __init__(self, spacing=10): self.spacing = spacing def __enter__(self): Turtle.segmenter = Segmenter([(1, pendown), (self.spacing, penup)]) Turtle.true_go = Turtle._go Turtle._go = go_segmented def __exit__(self, exc_type, exc_value, traceback): Turtle._go = Turtle.true_go del Turtle.true_go class rainbow: "A context manager which causes a code block to draw in rainbow colors" default_colors = ['red', 'orange', 'yellow', 'green', 'blue', 'purple'] def __init__(self, spacing=10, colors=None): self.spacing = spacing self.colors = colors or rainbow.default_colors def __enter__(self): Turtle.segmenter = Segmenter([(self.spacing, color_setter_factory(color)) for color in self.colors]) Turtle.true_go = Turtle._go Turtle._go = go_segmented def __exit__(self, exc_type, exc_value, traceback): Turtle._go = Turtle.true_go del Turtle.true_go if __name__ == '__main__': from turtle import * pensize(6) with rainbow(): for i in range(100): forward(i) right(2 * 360/(i+1))
[ "chris.proctor@gmail.com" ]
chris.proctor@gmail.com
03f40f445a153af0590c13f0c05606a782782605
4c9580b2e09e2b000e27a1c9021b12cf2747f56a
/chapter02/app02/urls.py
23e633effc2aff2db92d1f3dca1fc3e48b57a431
[]
no_license
jzplyy/xiaoyue_mall
69072c0657a6878a4cf799b8c8218cc7d88c8d12
4f9353d6857d1bd7dc54151ca8b34dcb4671b8dc
refs/heads/master
2023-06-26T02:48:03.103635
2021-07-22T15:51:07
2021-07-22T15:51:07
388,514,311
1
0
null
null
null
null
UTF-8
Python
false
false
139
py
from django.urls import path, re_path from app02 import views urlpatterns = [ path('blog-list/', views.blog, {'blog_id': 3}), ]
[ "jzplyy@126.com" ]
jzplyy@126.com
c8b0206da89a30aca71601e901c93e63d45782b8
ffdcd340fdef833bfd9af89d779845ba2991a08c
/customer/migrations/0150_auto_20190325_1616.py
c47d4b86a52930c61ecb4fef98dc24e10b8ed1a2
[]
no_license
calvinti12/Goat
64a122f697e06855bb53c37c8b7472a14c1030a3
2993dc48296cc1c6dd41651c05752647f074cb70
refs/heads/master
2020-07-03T19:16:26.942485
2019-08-12T20:17:43
2019-08-12T20:17:43
null
0
0
null
null
null
null
UTF-8
Python
false
false
485
py
# Generated by Django 2.1.4 on 2019-03-25 20:16 import datetime from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('customer', '0149_auto_20190323_1312'), ] operations = [ migrations.AlterField( model_name='orderrequest', name='date', field=models.DateTimeField(default=datetime.datetime(2019, 3, 25, 16, 15, 55, 214289), verbose_name='Order Placed'), ), ]
[ "jermol@mtgfirst.com" ]
jermol@mtgfirst.com
42eec63fcd92eef17a0881c4d0abc24e596ff8f6
26d5c795d8aa83bf5cb3f228675ff51e2f704f57
/scripts/bqexport
bfef9fe73be2b701637137c11c7a7d955f03f311
[]
no_license
binarymachines/mercury
8e13bb10c67a056fe88e02f558d73f1f1b95d028
db3e2425f4e77a44a97c740f7fff90312a1bd33f
refs/heads/master
2023-07-08T11:35:26.867494
2023-06-25T00:46:23
2023-06-25T00:46:23
94,708,610
2
6
null
2023-02-15T21:50:06
2017-06-18T19:31:50
Python
UTF-8
Python
false
false
4,413
#!/usr/bin/env python ''' Usage: bqexport [-p] <project> <dataset> --table <table> --bucket <google_bucket> --format=<fmt> [--delimiter=<delimiter>] [--directory=<directory>] bqexport [-p] <project> <dataset> --table-list-file <tables> --bucket <google_bucket> --format=<fmt> [--delimiter=<delimiter>] [--directory=<directory>] Options: -p,--preview : show (but do not execute) export command ''' import os, sys import json from snap import common import docopt import sh from sh import bq # Google Cloud CLI must already be installed class EXPORT_FORMAT(object): CSV = 'csv' JSON = 'json' def extract_data(source_table_designator, target_designator, export_format, delimiter): try: if export_format == EXPORT_FORMAT.CSV: result = bq.extract('--field_delimiter', delimiter, '--destination_format', 'CSV', source_table_designator, target_designator) print('\n### export of "%s" to "%s" complete.\n' % (source_table_designator, target_designator), file=sys.stderr) else: # export JSON records result = bq.extract('--destination_format', 'NEWLINE_DELIMITED_JSON', source_table_designator, target_designator) print('\n### export of "%s" to "%s" complete.\n' % (source_table_designator, target_designator), file=sys.stderr) except Exception as err: print('!!! error exporting table data.', file=sys.stderr) print(err, file=sys.stderr) def main(args): export_format = args['--format'] if export_format == EXPORT_FORMAT.CSV: if args.get('--delimiter') is None: print('### csv chosen as the export format, but no delimiter specified. Defaulting to comma.', file=sys.stderr) elif export_format != EXPORT_FORMAT.JSON: print('!!! 
supported export formats are "csv" and "json".') return tables = [] if args.get('--table'): tables.append(args['<table>']) elif args.get('--table-list-file'): table_list_file = args['<tables>'] with open(table_list_file) as f: for line in f: tables.append(line.lstrip().rstrip()) project_name = args['<project>'] dataset = args['<dataset>'] bucket = args['<google_bucket>'] delimiter = ',' if args.get('--delimiter') is not None: delimiter = args['--delimiter'] preview_mode = False if args.get('--preview'): preview_mode = True print('\n### running bqex in preview mode.\n', file=sys.stderr) if args.get('--directory') is not None: bucket_directory = args['--directory'] else: bucket_directory = '' for table_name in tables: source_table_designator = '{project}:{dataset}.{table}'.format(project=project_name, dataset=dataset, table=table_name) filename = '%s_*.%s' % (table_name, export_format) path_string = os.path.join(bucket, bucket_directory, filename) target_designator = 'gs://%s' % path_string if preview_mode: if export_format == EXPORT_FORMAT.CSV: print(bq.extract.bake('--field_delimiter', '\'%s\'' % delimiter, '--destination_format', 'CSV', source_table_designator, target_designator)) else: print(bq.extract.bake('--destination_format', 'NEWLINE_DELIMITED_JSON', source_table_designator, target_designator)) else: extract_data(source_table_designator, target_designator, export_format, delimiter) print('\n### exiting.', file=sys.stderr) if __name__ == '__main__': args = docopt.docopt(__doc__) main(args)
[ "binarymachineshop@gmail.com" ]
binarymachineshop@gmail.com
9ee758cd3a968a7234a34b8742d1648503bc6124
1e1f303cf81da16dec2aa2a5e04c0f3e420ffae8
/scripts/pypi/tests/build_pypi_package_test.py
cbda69b39a3e0d89bb656211bced2608121c5b8d
[ "MIT" ]
permissive
brianjo/pyre-check
ba56e727dafb0c626b6dd0ba414c0df94e762475
faae20632480948d943d094895c1a2c025e9d82a
refs/heads/master
2021-08-07T08:43:15.473504
2020-12-08T18:46:48
2020-12-08T18:46:48
250,374,200
0
0
MIT
2020-03-26T21:20:21
2020-03-26T21:20:20
null
UTF-8
Python
false
false
2,854
py
#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import os import tempfile import unittest from pathlib import Path from unittest.mock import Mock, patch from ..build_pypi_package import ( MODULE_NAME, add_init_files, patch_version, sync_pysa_stubs, sync_python_files, validate_version, ) class TestArgumentValidationMethods(unittest.TestCase): def test_validate_version(self) -> None: validate_version("0.0.01") with self.assertRaises(ValueError): validate_version("x0.0.01") class TestCreatingWheel(unittest.TestCase): def setUp(self) -> None: self.pyre_directory: Path = Path(__file__).resolve().parent.parent.parent.parent def test_create_init_files(self) -> None: with tempfile.TemporaryDirectory() as build_root: path = Path(build_root) add_init_files(path) # Assert the expected __init__ files are present init_files = [str(path) for path in path.glob("**/*.py")] self.assertTrue(build_root + "/pyre_check/__init__.py" in init_files) self.assertTrue(build_root + "/pyre_check/client/__init__.py" in init_files) self.assertTrue(build_root + "/pyre_check/tools/__init__.py" in init_files) self.assertTrue( build_root + "/pyre_check/tools/upgrade/__init__.py" in init_files ) def test_sync_files(self) -> None: with tempfile.TemporaryDirectory() as build_root: build_path = Path(build_root) add_init_files(build_path) sync_python_files(self.pyre_directory, build_path) command_directory = build_path / "pyre_check/client/commands" self.assertTrue(command_directory.is_dir()) @patch("subprocess.run") def test_rsync(self, subprocess_run: Mock) -> None: with tempfile.TemporaryDirectory() as build_root: build_path = Path(build_root) add_init_files(build_path) sync_pysa_stubs(self.pyre_directory, build_path) args, _ = subprocess_run.call_args expected_args = [ "rsync", "-avm", "--filter=+ */", build_root, ] self.assertTrue(all(x in args[0] 
for x in expected_args)) subprocess_run.assert_called() def test_patch_version(self) -> None: with tempfile.TemporaryDirectory() as build_root: build_path = Path(build_root) add_init_files(build_path) patch_version("0.0.21", build_path) path = build_path / MODULE_NAME / "client/version.py" self.assertTrue(path.is_file())
[ "facebook-github-bot@users.noreply.github.com" ]
facebook-github-bot@users.noreply.github.com
588945412a87dad60b0a723b7af48d15a27e0609
b3b5902f0cd292defdc1acccd8fa5c7890e8ba5d
/SPORTS/sports_spiders/sports_spiders/pipelines.py
4b51184e5232bc55a5bcf189c34fc18983dce0fa
[]
no_license
headrun/SWIFT
83d3d926244e472813ef79b304ac1639750904a3
68bdd09d83ee2967a2378375d9b2cb6232a3a0cf
refs/heads/master
2023-01-25T00:29:57.810604
2020-12-01T01:22:16
2020-12-01T01:22:16
null
0
0
null
null
null
null
UTF-8
Python
false
false
3,221
py
# Define your item pipelines here # # Don't forget to add your pipeline to the ITEM_PIPELINES setting # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html import MySQLdb import os import copy from scrapy import signals from sports_spiders import configUtils from sports_spiders import game_utils as gu from sports_spiders.vtv_utils import VTV_SERVER_DIR from datetime import datetime DELEM = '_' STEPS = '..' UTILS_CFG = 'game_utils.cfg' CONFIG = os.path.join(VTV_SERVER_DIR, UTILS_CFG) STATS_DIR = os.path.join(STEPS, STEPS, 'SPORTS_STATS_DIR') class SportsGames(object): def __init__(self): self.gids_file = None self.spider_class = None self.conn = None self.cursor = None self.hash_conf = None self.spider_name = None self.items_log = None @classmethod def from_crawler(cls, crawler): pipeline = cls() crawler.signals.connect(pipeline.spider_opened, signals.spider_opened) crawler.signals.connect(pipeline.spider_closed, signals.spider_closed) return pipeline def spider_opened(self, spider): today = datetime.now() today = today.strftime("%Y-%m-%d") if hasattr(spider, 'spider_type'): self.spider_name = spider.name + DELEM \ + spider.spider_type \ + DELEM + today else: self.spider_name = spider.name + DELEM + today self.hash_conf = configUtils.readConfFile(CONFIG) self.conn = MySQLdb.connect(db=self.hash_conf['DB_NAME'], host=self.hash_conf['HOST'], user=self.hash_conf['USER'], charset="utf8", use_unicode=True, passwd="root") self.cursor = self.conn.cursor() self.spider_class = spider.__class__.__name__ self.gids_file = os.path.join( STATS_DIR, self.spider_class + '_gids.pickle') log_name = os.path.join(STATS_DIR, self.spider_name) self.items_log = open(log_name, 'a+') def spider_closed(self, spider): self.conn.close() def write_log(self, item): items_dict = copy.deepcopy(item._values) items_dict['spider_class'] = self.spider_class self.items_log.write(str(datetime.now()) + ': ' + str(items_dict) + '\n\n') def process_item(self, item, spider): self.write_log(item) 
sports_item = gu.SportsdbSetup( item, self.cursor, self.spider_class, self.gids_file, self.hash_conf) if item.get('result_type', '') and \ 'standings' in item.get('result_type', ''): sports_item.populate_standings() sports_item.clean() return if item.get('result_type', '') and \ 'roster' in item.get('result_type', ''): sports_item.populate_rosters() sports_item.clean() return sports_item.process_record() sports_item.clean() return item class CheckDB(object): def __init__(self): pass def process_item(self, item, spider): pass
[ "charan@headrun.com" ]
charan@headrun.com
bf0e511132c09ac1f4357a51bc4e9d9d89c5c22a
df541a802b2dfa89d3aab14af627358dc7c76e6e
/APP自动化/StoneUIFramework/testcase/空间/test3_团队人事任免/TeamAssignJob003.py
0c43aa4c91f7c63a4792fccfbf00acf0681abc82
[]
no_license
gupan2018/PyAutomation
de966aff91f750c7207c9d3f3dfb488698492342
230aebe3eca5799c621673afb647d35a175c74f1
refs/heads/master
2021-09-07T19:44:20.710574
2017-12-22T15:58:23
2017-12-22T15:58:23
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,445
py
__author__ = 'Administrator' # -*- coding: utf-8 -*- import unittest from time import sleep import logging from StoneUIFramework.public.common.Connect import Connect from StoneUIFramework.public.common.publicfunction import Tools from StoneUIFramework.config.globalparam import GlobalParam from StoneUIFramework.public.handle.space.SPACEHANDLE5 import _SPACEHANDLE5 from StoneUIFramework.testcase.空间.test3_团队人事任免.TeamAssignJob import TeamAssignJob from StoneUIFramework.public.common.datainfo import DataInfo #团队人事任免 class team_Assign(unittest.TestCase): @classmethod#装饰器,类方法 def setUpClass(self):#最开始执行 #建立连接信息 cnn = Connect() self.driver = cnn.connect() #创建工具类 self.tools = Tools(self.driver)#tools工具 #创建_SPACEHANDLE5公有定位控件对象 self.handle = _SPACEHANDLE5(self.driver) #创建读取配置信息对象 cf = GlobalParam('config','path_file.conf') #获取截图路径、日志路径、日志名 self.screen_path = cf.getParam('space',"path_003")#通过配置文件获取截图的路径 self.log_path = cf.getParam('space',"log")#通过配置文件获取日志的路径 self.logfile = cf.getParam('space',"logfile")#日志文件名 #创建TeamAssignJob和Closespace对象 self.SpaceTa = TeamAssignJob() sleep(1) #测试数据 d = DataInfo()#创建DataInfo()对象 self.spacename = d.cell("test003",2,1)#测试空间123 def test_teamassign(self): """团队人事任免""" try: # self.tools.coverUpdate(self.log_path,self.screen_path)#覆盖更新日志,覆盖更新截图 self.tools.getLog(self.logfile)#打印日志 #1.空间首页 self.handle.Kjlb_click() self.tools.getScreenShot(self.screen_path,"空间首页") #2.选择空间:测试空间123 self.handle.Kjlb_browseorgspaceByName_click(self.spacename) #3.任免+移除 self.SpaceTa.teamAssignJob(self.driver) logging.info("success@@!!!!!!!")#宣布成功 except Exception as err: self.tools.getScreenShot(self.screen_path,"ExceptionShot") logging.error("Error Information TeamAssignJob Outside : %s"%err) raise err finally: self.driver.quit()
[ "610077670@qq.com" ]
610077670@qq.com
fe582f20576126607ef96485a1246221704d6f63
313b64057838ae28ebca3d8bfff191cb13b80c61
/101/Q110.py
30d49ef376657c2ad1d4fa9dd4f9ec5c2c727348
[]
no_license
veblush/Euler
e78961fb378be8e7c25070131e0adb72a83381f2
de54a77da41b3b58642055169bf2ea4090dbefb8
refs/heads/master
2016-08-05T09:12:16.775008
2013-03-13T08:31:11
2013-03-13T08:31:11
null
0
0
null
null
null
null
UTF-8
Python
false
false
875
py
import math
import bisect

# ready prime array
# Trial-division sieve: primes up to 1000 (plenty of distinct prime factors
# for the search below, which only ever uses the smallest primes in order).
primes = [2]
for i in xrange(3, 1000+1):
    ii = int(math.sqrt(i))
    composite = False
    for p in primes:
        if p > ii:
            break
        if i % p == 0:
            composite = True
            break
    if not composite:
        primes.append(i)

# `a` is a factorization [(prime, exponent), ...]; f(a) = divisor count of
# the factored number: product of (exponent + 1).
f = lambda a: reduce(lambda x, y: x*y, (k+1 for p, k in a))

# Presumably Project Euler 110 (path says Q110): the number of solutions of
# 1/x + 1/y = 1/n is (d(n^2) + 1) / 2, so we need d(n^2) > 2*(M-1) for more
# than M = 4,000,000 solutions — TODO confirm against the problem statement.
M = 4000000
m = (M-1)*2

# Best-first search over squares n^2, represented by factorizations with even
# exponents. q: list of (value, factorization) kept sorted by value (acts as a
# priority queue); qs: set of enqueued values for de-duplication.
q = [ (4, [(2, 2)]) ]
qs = set([4])
while True:
    x, a = q.pop(0)     # smallest candidate square not yet examined
    qs.remove(x)
    if f(a) > m:
        # First (hence smallest) square with enough divisors:
        # print the square, its divisor count and factorization, then
        # the answer n = sqrt of it (halve every exponent).
        print x, f(a), a
        print reduce(lambda x, y: x*y, (p**(k/2) for p, k in a))
        break
    # Expand: multiply the candidate by primes[i]^2 — either bump an existing
    # exponent by 2 (i < len(a)) or append the next unused prime squared
    # (i == len(a)); exponents thus always stay even.
    w = []
    for i in range(len(a)+1):
        t = x * primes[i] * primes[i]
        if i < len(a):
            e = a[:i] + [(a[i][0], a[i][1]+2)] + a[i+1:]
        else:
            e = a + [(primes[i], 2)]
        w.append((t, e))
    w = sorted(w, key=lambda x: x[0])
    for k in w:
        if k[0] not in qs:
            bisect.insort_right(q, k)   # insert keeping q sorted by value
            qs.add(k[0])
[ "veblush+git@gmail.com" ]
veblush+git@gmail.com
775f2b2ed2fdb72a82f704927dee417183e0c016
c77d8dd4042d29150da277184a06834fb551c953
/env/lib/python3.9/site-packages/split_settings/tools.py
307d347a226f30c00a2e6f81dd9b49cd11a66cb6
[]
no_license
josephaw1022/centralizedAPI
9978858ff6e906337062ab73b7e36576b0796a39
5850a7869519250912279c3a78d6b9585c9591de
refs/heads/master
2023-06-12T17:12:20.128798
2021-07-05T21:24:52
2021-07-05T21:24:52
383,256,639
0
0
null
null
null
null
UTF-8
Python
false
false
3,737
py
# -*- coding: utf-8 -*-

"""
Organize Django settings into multiple files and directories.

Easily override and modify settings.
Use wildcards and optional settings files.
"""

import glob
import inspect
import os
import sys
from importlib.util import module_from_spec, spec_from_file_location

__all__ = ('optional', 'include')  # noqa: WPS410

#: Special magic attribute that is sometimes set by `uwsgi` / `gunicord`.
_INCLUDED_FILE = '__included_file__'


def optional(filename: str) -> str:
    """
    This functions is used for compatibility reasons.

    It masks the old `optional` class with the name error.
    Now `invalid-name` is removed from `pylint`.

    Args:
        filename: the filename to be optional.

    Returns:
        New instance of :class:`_Optional`.
    """
    return _Optional(filename)


class _Optional(str):  # noqa: WPS600
    """
    Wrap a file path with this class to mark it as optional.

    Optional paths don't raise an :class:`IOError` if file is not found.
    """


def include(*args: str, **kwargs) -> None:  # noqa: WPS210, WPS231, C901
    """
    Used for including Django project settings from multiple files.

    Usage:

    .. code:: python

        from split_settings.tools import optional, include

        include(
            'components/base.py',
            'components/database.py',
            optional('local_settings.py'),

            scope=globals(),  # optional scope
        )

    Args:
        *args: File paths (``glob`` - compatible wildcards can be used).
        **kwargs: Settings context: ``scope=globals()`` or ``None``.

    Raises:
        IOError: if a required settings file is not found.
    """
    # we are getting globals() from previous frame
    # globals - it is caller's globals()
    scope = kwargs.pop('scope', inspect.stack()[1][0].f_globals)

    # Track already-included files on the scope itself so nested include()
    # calls share the same de-duplication list.
    scope.setdefault('__included_files__', [])
    included_files = scope.get('__included_files__')

    # Resolve paths relative to the file currently being included (nested
    # include) or, at top level, to the caller's __file__.
    # rstrip('c') presumably maps a cached '.pyc' path back to '.py' — note
    # it strips every trailing 'c', so it relies on sane file names.
    including_file = scope.get(
        _INCLUDED_FILE,
        scope['__file__'].rstrip('c'),
    )
    conf_path = os.path.dirname(including_file)

    for conf_file in args:
        # Save/restore _INCLUDED_FILE around each pattern so sibling
        # patterns resolve relative to the same base as this call.
        saved_included_file = scope.get(_INCLUDED_FILE)
        pattern = os.path.join(conf_path, conf_file)

        # find files per pattern, raise an error if not found
        # (unless file is optional)
        files_to_include = glob.glob(pattern)
        if not files_to_include and not isinstance(conf_file, _Optional):
            raise IOError('No such file: {0}'.format(pattern))

        for included_file in files_to_include:
            included_file = os.path.abspath(included_file)  # noqa: WPS440
            if included_file in included_files:
                continue

            included_files.append(included_file)

            # Execute the settings component directly in the caller's scope,
            # so the component can both read and define settings.
            scope[_INCLUDED_FILE] = included_file
            with open(included_file, 'rb') as to_compile:
                compiled_code = compile(  # noqa: WPS421
                    to_compile.read(), included_file, 'exec',
                )
                exec(compiled_code, scope)  # noqa: S102, WPS421

            # Adds dummy modules to sys.modules to make runserver autoreload
            # work with settings components:
            rel_path = os.path.relpath(included_file)
            module_name = '_split_settings.{0}'.format(
                rel_path[:rel_path.rfind('.')].replace('/', '.'),
            )

            spec = spec_from_file_location(
                module_name,
                included_file,
            )
            module = module_from_spec(spec)
            sys.modules[module_name] = module

        if saved_included_file:
            scope[_INCLUDED_FILE] = saved_included_file
        elif _INCLUDED_FILE in scope:
            scope.pop(_INCLUDED_FILE)
[ "josephsims1@gmail.com" ]
josephsims1@gmail.com
bcae5eb42cbe4e4b38d8b5de33efb5ba30dcb142
a836c17c1e8cfcc79f85a3f05e1a5c126e85da75
/login/views.py
587513fa07253c62b1d6c55ed1fa9cb5fad30159
[]
no_license
Rabidza/hackathon
949729a0b8f0c5d1f18e054700a032630613e991
807399892f43fb67a26837080e49fb1773ddee8c
refs/heads/master
2021-01-10T16:10:34.371846
2016-01-06T06:34:25
2016-01-06T06:34:25
45,219,066
0
0
null
null
null
null
UTF-8
Python
false
false
1,186
py
#views.py from login.forms import * from django.contrib.auth.decorators import login_required from django.contrib.auth import logout from django.views.decorators.csrf import csrf_protect from django.shortcuts import render_to_response from django.http import HttpResponseRedirect from django.template import RequestContext @csrf_protect def register(request): if request.method == 'POST': form = RegistrationForm(request.POST) if form.is_valid(): user = User.objects.create_user( username=form.cleaned_data['username'], password=form.cleaned_data['password'], ) return HttpResponseRedirect('/real_page/') else: form = RegistrationForm() variables = RequestContext(request, { 'form': form }) return render_to_response( 'deroubaix/register.html', variables, ) def register_success(request): return render_to_response( 'deroubaix/success.html', ) def logout_page(request): logout(request) return HttpResponseRedirect('/') @login_required def home(request): return render_to_response( 'home.html', { 'user': request.user } )
[ "neillhenning@gmail.com" ]
neillhenning@gmail.com
a6bcd8507b54ca4a44c6428e7a49a7801f35cd74
13d0ad57a2f5deb83593e73843be7cbeeaad8d3d
/medium/knight_probability.py
be06b5a4f1c93083f6686ced4cbb0d22268fbe2f
[]
no_license
mwong33/leet-code-practice
b21f277d73b30df9e681499733baad07979480a1
9c0e6294bf3b3614b185f0760906abad60f8d9b6
refs/heads/main
2023-03-29T20:35:43.841662
2021-03-31T22:05:44
2021-03-31T22:05:44
317,382,193
0
0
null
null
null
null
UTF-8
Python
false
false
1,466
py
class Solution:
    # Top-down memoization - O(N*N*K) time, O(N*N*K) space.
    def knightProbability(self, N: int, K: int, r: int, c: int) -> float:
        """Probability that a knight starting at (r, c) on an N x N board
        remains on the board after K uniformly random knight moves."""
        return self.knightProbabilityMemo(N, K, r, c, 0, {})

    def knightProbabilityMemo(self, grid_size, total_moves, row, col, move_count, cache):
        """Recursive helper: probability of surviving the remaining
        total_moves - move_count moves from (row, col). Results are memoized
        in `cache` keyed by (row, col, move_count)."""
        key = (row, col, move_count)
        if key in cache:
            return cache[key]

        # All moves used up and still on the board: certain survival.
        if move_count == total_moves:
            return 1

        # Each of the 8 knight moves is chosen with probability 1/8; moves
        # that leave the board contribute nothing.
        survive = 0
        for next_row, next_col in self.getValidMovesList(grid_size, row, col):
            survive += 0.125 * self.knightProbabilityMemo(
                grid_size, total_moves, next_row, next_col, move_count + 1, cache)

        cache[key] = survive
        return survive

    def getValidMovesList(self, grid_size, x, y):
        """All knight destinations from (x, y) that stay on the board."""
        offsets = ((1, 2), (-1, 2), (-1, -2), (1, -2),
                   (2, 1), (-2, 1), (2, -1), (-2, -1))
        reachable = []
        for dx, dy in offsets:
            nx, ny = x + dx, y + dy
            if 0 <= nx < grid_size and 0 <= ny < grid_size:
                reachable.append((nx, ny))
        return reachable
[ "noreply@github.com" ]
mwong33.noreply@github.com
b49a0876dd1c49261f74128ad2221410df781ed8
67d76057aee86c43d32e0b74f3ac94d521ee03d8
/tests/pyre.pkg/descriptors/timestamps.py
a53130b8d4f65d34e6904ffdfb98461d080e6fdd
[ "BSD-3-Clause" ]
permissive
jlmaurer/pyre
0f94b1855bf029210f07c528747221751e37687f
6af38a83621d7d6228d147b4bb94f97fbb10f6e2
refs/heads/master
2023-05-25T04:33:19.907452
2020-06-18T14:07:54
2020-06-18T14:07:54
273,362,988
0
0
NOASSERTION
2021-06-10T23:42:14
2020-06-18T23:50:28
null
UTF-8
Python
false
false
1,135
py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # michael a.g. aïvázis # orthologue # (c) 1998-2020 all rights reserved # """ Verify that time conversions work as expected """ def test(): import pyre.descriptors # create a descriptor time = pyre.descriptors.timestamp() # casts are not implemented yet magic = time.coerce('1992-12-21 13:30:00') # check assert magic.hour == 13 assert magic.minute == 30 assert magic.second == 0 # now one with a different input format time = pyre.descriptors.time(format='%Y/%m/%d %H|%M|%S') # try again magic = time.coerce(value='1992/12/21 13|30|00') # check assert magic.hour == 13 assert magic.minute == 30 assert magic.second == 0 # how about one try: # with the wrong format time.coerce(value='13-30-00') assert False # it should fail except time.CastingError: # so no problem pass return # main if __name__ == "__main__": # skip pyre initialization since we don't rely on the executive pyre_noboot = True # do... test() # end of file
[ "michael.aivazis@para-sim.com" ]
michael.aivazis@para-sim.com
b894061a7d8848b8131261f6320a605aa72345cb
59b0ebc4249f20edd0e87dc63784c6e8c138c7fd
/.history/anagrams.1_20180607000217.py
f4fa839c7c5fce48d1434066880e74d167a6f952
[]
no_license
Los4U/first_python_programs
f397da10be3ef525995f3f220e3b60012a6accaa
c3fc33a38c84abd292cb2e86de63e09434fc7fc4
refs/heads/master
2020-03-22T08:09:40.426118
2018-07-04T17:17:58
2018-07-04T17:17:58
139,748,883
0
0
null
null
null
null
UTF-8
Python
false
false
538
py
"""For each word in the input file, print the index of its first occurrence,
then every "i: word - other" pair where `other` (at index i) is an anagram of
`word` — including the word itself, matching the original output exactly.

Usage: python anagrams.py <wordlist-file>   (one word per line)
"""
import sys

# `with` guarantees the file is closed even on error (the original only
# closed it on the success path).
with open(str(sys.argv[1]), "r") as file:
    words = file.read().splitlines()

for word in words:
    # Index of the first occurrence of `word` (duplicates report the first).
    print(words.index(word))
    for i in range(len(words)):
        # Two words are anagrams iff their sorted letter sequences match.
        if sorted(word) == sorted(words[i]):
            print(str(i) + ": " + word + " - " + words[i])
# Removed from the original: unused `p2 = 1`, the no-op bare `words.index`
# statement, the redundant `i = i + 1` inside the for-range loop, and a
# commented-out experimental block — none affected output.
[ "inz.kamil.wos@gmail.com" ]
inz.kamil.wos@gmail.com
5549cfd4d5d6343f8d3430fa4093b7f1d0b8fd9a
05e634a232574f676434dfa8e4183f3d0a1a4bc9
/paddlecv/ppcv/ops/output/tracker.py
0df393bc437d7098472b6cb7eebcfd7a5c2d1fc1
[ "Apache-2.0" ]
permissive
PaddlePaddle/models
67ac00d93c5255ac64a9d80ae5be2e8927e47cee
8042c21b690ffc0162095e749a41b94dd38732da
refs/heads/release/2.4
2023-09-04T15:23:59.543625
2023-07-20T11:54:16
2023-07-20T11:54:16
88,868,842
7,633
3,597
Apache-2.0
2023-09-05T23:23:54
2017-04-20T13:30:15
Python
UTF-8
Python
false
false
3,870
py
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import numpy as np
import math
import glob
import paddle
import cv2
import json
from collections import defaultdict

from .base import OutputBaseOp
from .detection import draw_det
from ppcv.utils.logger import setup_logger
from ppcv.core.workspace import register
from PIL import Image, ImageDraw, ImageFile

logger = setup_logger('TrackerOutput')


def write_mot_results(filename, results, data_type='mot', num_classes=1):
    """Write one frame's tracking results to `filename`.

    Args:
        filename: output text file path (overwritten).
        results: tuple (frame_id, tk_bboxes, tk_scores, tk_ids, tk_cls_ids);
            bboxes are (x1, y1, x2, y2).
        data_type: 'mot'/'mcmot' (CSV-style MOT Challenge line) or 'kitti'
            (space-separated KITTI tracking line).
        num_classes: unused here; kept for API compatibility.

    Raises:
        ValueError: for an unknown data_type.
    """
    # support single and multi classes
    if data_type in ['mot', 'mcmot']:
        save_format = '{frame},{id},{x1},{y1},{w},{h},{score},{cls_id},-1,-1\n'
    elif data_type == 'kitti':
        save_format = '{frame} {id} car 0 0 -10 {x1} {y1} {x2} {y2} -10 -10 -10 -1000 -1000 -1000 -10\n'
    else:
        raise ValueError(data_type)

    frame_id, tk_bboxes, tk_scores, tk_ids, tk_cls_ids = results
    # KITTI lines carry no frame column content; -1 marks it unused.
    frame_id = -1 if data_type == 'kitti' else frame_id
    with open(filename, 'w') as f:
        for bbox, score, tk_id, cls_id in zip(tk_bboxes, tk_scores, tk_ids,
                                              tk_cls_ids):
            # Negative ids mark invalid/unmatched tracks — skip them.
            if tk_id < 0:
                continue
            # Single-class MOT convention: class column is -1.
            if data_type == 'mot':
                cls_id = -1

            x1, y1, x2, y2 = bbox
            w, h = x2 - x1, y2 - y1
            # Extra keys (e.g. x2/y2 for 'mot') are simply ignored by format().
            line = save_format.format(
                frame=frame_id,
                id=tk_id,
                x1=x1,
                y1=y1,
                x2=x2,
                y2=y2,
                w=w,
                h=h,
                score=score,
                cls_id=cls_id)
            f.write(line)


@register
class TrackerOutput(OutputBaseOp):
    """Output op: visualizes and/or saves per-frame multi-object tracking
    results. Flags such as save_img/save_res/return_res/frame_id/output_dir
    are presumably initialized by OutputBaseOp — confirm in base.py."""

    def __init__(self, model_cfg, env_cfg):
        super(TrackerOutput, self).__init__(model_cfg, env_cfg)

    def __call__(self, inputs):
        """Process a list of per-frame result dicts.

        Each dict's first seven values are expected to be:
        (file name, image, bboxes, scores, track ids, class ids, class names).
        Returns the accumulated results (with rendered images under 'output')
        when return_res is set, otherwise None.
        """
        total_res = []
        vis_images = []
        for res in inputs:
            fn, image, tk_bboxes, tk_scores, tk_ids, tk_cls_ids, tk_cls_names = list(
                res.values())[:7]
            # Label each box as "<class name> <track id>".
            tk_names = [
                '{} {}'.format(tk_cls_name, tk_id)
                for tk_id, tk_cls_name in zip(tk_ids, tk_cls_names)
            ]
            image = draw_det(image, tk_bboxes, tk_scores, tk_names, tk_ids)
            # Drop the raw input image from the logged/saved record.
            res.pop('input.image')
            if self.frame_id != -1:
                res.update({'frame_id': self.frame_id})
            logger.info(res)
            if self.save_img:
                vis_images.append(image)
            if self.save_res or self.return_res:
                total_res.append(res)
        if self.save_res:
            # One <output_dir>/<video name>/<frame id>.txt file per frame.
            video_name = fn.split('/')[-1].split('.')[0]
            output_dir = os.path.join(self.output_dir, video_name)
            if not os.path.exists(output_dir):
                os.makedirs(output_dir, exist_ok=True)
            out_path = os.path.join(output_dir, '{}.txt'.format(self.frame_id))
            logger.info('Save output result to {}'.format(out_path))
            write_mot_results(
                out_path,
                [self.frame_id, tk_bboxes, tk_scores, tk_ids, tk_cls_ids])
        if self.return_res:
            if vis_images:
                for i, vis_im in enumerate(vis_images):
                    total_res[i].update({'output': vis_im})
            return total_res
        return
[ "noreply@github.com" ]
PaddlePaddle.noreply@github.com
4c8eb4e0b011ab53434d6d5b85b39b91903957ab
4a76ac7ad1aaeec44729ab6d5b121b1cae0d910c
/Week 2/FindTheMedian.py
f1017afb1254cfc9663bbd2a334cfc369fdfa9d9
[]
no_license
kalmad99/CompetitiveProgramming
2d825e839faa9e13ef43dbb45498bd3eef6723ab
6cbb1f12f7670d0016fa2af8f2dd597d9123070d
refs/heads/main
2023-03-25T20:18:23.389396
2021-03-24T21:36:52
2021-03-24T21:36:52
325,816,614
0
0
null
null
null
null
UTF-8
Python
false
false
907
py
def findMedian(arr):
    """Return the median of arr (values assumed in [-10000, 10000]).

    For odd-length input this is the true middle element; for even length it
    returns the upper of the two middle elements (index len(arr) // 2),
    matching the original contract.

    Fix vs. original: the negative half was negated but never reversed, so
    the merged array was not sorted whenever negatives were present.
    """
    negatives = []   # magnitudes of negative values
    positives = []
    for value in arr:
        if value < 0:
            negatives.append(-value)
        else:
            positives.append(value)

    # Ascending magnitudes -> negate and reverse to get ascending numeric
    # order for the negative half (e.g. [1, 3, 5] -> [-5, -3, -1]).
    sorted_negatives = [-v for v in reversed(sorting(negatives))]
    sorted_positives = sorting(positives)

    merged = sorted_negatives + sorted_positives
    return merged[len(arr) // 2]


def sorting(arr):
    """Counting sort for non-negative integers in [0, 10000]; returns a new
    ascending list.

    Fix vs. original: the prefix-sum loop ran over range(-10000, 10000),
    relying on Python's negative indexing and never accumulating index
    10000, which misplaced the value 10000 and corrupted placement of other
    values whenever 10000 occurred.
    """
    output = [0] * len(arr)
    counter = [0] * 10001          # one slot per possible value 0..10000

    for value in arr:
        counter[value] += 1

    # Standard prefix sums over the whole valid range.
    for i in range(1, len(counter)):
        counter[i] += counter[i - 1]

    # Place each element at its final position (reverse scan keeps it stable).
    for value in reversed(arr):
        output[counter[value] - 1] = value
        counter[value] -= 1

    return output


nums = [0, 1, 2, 4, 6, 5, 3]
print(findMedian(nums))
[ "kalemesfin12@gmail.com" ]
kalemesfin12@gmail.com
b6efb199a5f5c5dfbf9ef558876f865598c298da
56f428833bac273c180dd95bafd0a8da992349c1
/bin/Utils/GetFiles.py
6b9e2aa465828987020f0a802346239f3c3a6ea5
[ "BSD-2-Clause" ]
permissive
LlianeFR/craft
8e6ac764a76812628c1d3dc0be75c7aceea771f4
0d1e5e5c12fa817cf411ab4fc4a9d1815432e461
refs/heads/master
2020-03-25T20:19:23.376298
2018-08-06T09:32:30
2018-08-06T09:32:56
null
0
0
null
null
null
null
UTF-8
Python
false
false
6,960
py
# -*- coding: utf-8 -*-
# Copyright Hannah von Reth <vonreth@kde.org>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.

### fetch functions

from CraftCore import CraftCore
from CraftDebug import deprecated

import utils

import os
import urllib
import subprocess
import sys


@deprecated("Utils.GetFiles.getFile")
def getFiles(urls, destdir, suffix='', filenames=''):
    """download files from 'url' into 'destdir'

    `urls` and `filenames` may each be a list or a whitespace-separated
    string; when `filenames` is empty the basenames of the urls are used.
    Returns False on the first failed download, True otherwise."""
    CraftCore.log.debug("getfiles called. urls: %s, filenames: %s, suffix: %s" % (urls, filenames, suffix))
    # make sure distfiles dir exists
    if (not os.path.exists(destdir)):
        os.makedirs(destdir)

    if type(urls) == list:
        urlList = urls
    else:
        urlList = urls.split()

    if filenames == '':
        filenames = [os.path.basename(x) for x in urlList]

    if type(filenames) == list:
        filenameList = filenames
    else:
        filenameList = filenames.split()

    dlist = list(zip(urlList, filenameList))

    for url, filename in dlist:
        if (not getFile(url + suffix, destdir, filename)):
            return False

    return True


def getFile(url, destdir, filename='') -> bool:
    """download file from 'url' into 'destdir'

    Dispatches to s3/wget/curl/powershell/urllib depending on the scheme and
    which tools are available; returns True on success."""
    CraftCore.log.debug("getFile called. url: %s" % url)
    if url == "":
        CraftCore.log.error("fetch: no url given")
        return False

    pUrl = urllib.parse.urlparse(url)
    if not filename:
        filename = os.path.basename(pUrl.path)

    if pUrl.scheme == "s3":
        return s3File(url, destdir, filename)

    # curl and wget basically only work when we have a cert store on windows
    if not CraftCore.compiler.isWindows or os.path.exists(os.path.join(CraftCore.standardDirs.etcDir(), "cacert.pem")):
        if CraftCore.cache.findApplication("wget"):
            return wgetFile(url, destdir, filename)

        if CraftCore.cache.findApplication("curl"):
            return curlFile(url, destdir, filename)

    # Fallback paths below cannot resume; skip if the file is already there.
    if os.path.exists(os.path.join(destdir, filename)):
        return True

    powershell = CraftCore.cache.findApplication("powershell")
    if powershell:
        filename = os.path.join(destdir, filename)
        # BUGFIX: the second DownloadFile argument must be the destination
        # path (the original contained a corrupted placeholder string).
        return utils.system([powershell, "-NoProfile", "-ExecutionPolicy", "ByPass", "-Command",
                             f"(new-object net.webclient).DownloadFile(\"{url}\", \"{filename}\")"])
    else:
        def dlProgress(count, blockSize, totalSize):
            # Known size: percentage bar; unknown (-1): raw byte counter.
            if totalSize != -1:
                percent = int(count * blockSize * 100 / totalSize)
                utils.printProgress(percent)
            else:
                sys.stdout.write(("\r%s bytes downloaded" % (count * blockSize)))
                sys.stdout.flush()

        try:
            urllib.request.urlretrieve(url, filename=os.path.join(destdir, filename),
                                       reporthook=dlProgress if CraftCore.debug.verbose() >= 0 else None)
        except Exception as e:
            CraftCore.log.warning(e)
            return False

    if CraftCore.debug.verbose() >= 0:
        sys.stdout.write("\n")
        sys.stdout.flush()
    return True


def curlFile(url, destdir, filename=''):
    """download file with curl from 'url' into 'destdir', if filename is given to the file specified"""
    curl = CraftCore.cache.findApplication("curl")
    # -C -: resume; --retry 10: transient-failure retries; -L: follow redirects.
    command = [curl, "-C", "-", "--retry", "10", "-L", "--ftp-ssl", "--fail"]
    cert = os.path.join(CraftCore.standardDirs.etcDir(), "cacert.pem")
    if os.path.exists(cert):
        command += ["--cacert", cert]
    # the default of 20 might not be enough for sourceforge ...
    command += ["--max-redirs", "50"]
    command += ["-o", os.path.join(destdir, filename)]
    command += [url]
    CraftCore.log.debug("curlfile called")
    if not CraftCore.settings.getboolean("ContinuousIntegration", "Enabled", False) and CraftCore.debug.verbose() < 1 and CraftCore.cache.checkCommandOutputFor(curl, "--progress-bar"):
        command += ["--progress-bar"]
        CraftCore.log.info(f"curl {url}")
        return utils.system(command, displayProgress=True, logCommand=False, stderr=subprocess.STDOUT)
    else:
        if CraftCore.debug.verbose() > 0:
            command += ["-v"]
        return utils.system(command)


def wgetFile(url, destdir, filename=''):
    """download file with wget from 'url' into 'destdir', if filename is given to the file specified"""
    wget = CraftCore.cache.findApplication("wget")
    # -c: continue partial downloads; -t 10: retry up to ten times.
    command = [wget, "-c", "-t", "10"]
    cert = os.path.join(CraftCore.standardDirs.etcDir(), "cacert.pem")
    if os.path.exists(cert):
        command += ["--ca-certificate", cert]
    # the default of 20 might not be enough for sourceforge ...
    command += ["--max-redirect", "50"]
    if CraftCore.settings.getboolean("General", "EMERGE_NO_PASSIVE_FTP", False):
        command += ["--no-passive-ftp"]
    if not filename:
        command += ["-P", destdir]
    else:
        command += ["-O", os.path.join(destdir, filename)]
    command += [url]
    CraftCore.log.debug("wgetfile called")
    if not CraftCore.settings.getboolean("ContinuousIntegration", "Enabled", False) and CraftCore.debug.verbose() < 1 and CraftCore.cache.checkCommandOutputFor(wget, "--show-progress"):
        command += ["-q", "--show-progress"]
        CraftCore.log.info(f"wget {url}")
        return utils.system(command, displayProgress=True, logCommand=False, stderr=subprocess.STDOUT)
    else:
        return utils.system(command)


def s3File(url, destdir, filename):
    """download 's3://...' url into 'destdir' using the awscli client"""
    aws = CraftCore.cache.findApplication("aws")
    if not aws:
        CraftCore.log.critical("aws not found, please install awscli. \"pip install awscli\" ")
        return False
    return utils.system([aws, "s3", "cp", url, os.path.join(destdir, filename)])
[ "vonreth@kde.org" ]
vonreth@kde.org
5a67bbde46a7bcb4dcf509d2e277e112851804a4
f185d98c2d56c4c212c023ad71514e2ad398950b
/nv/resources/avatars.py
50dd2d40463da4535700a2f9831c1f3783d5851f
[ "MIT" ]
permissive
new-valley/new-valley
24b9591eba21ed85634d55d6ac36f0eb25d27198
8810739cab52ad4dea2f4005a59b8b7afea1e2db
refs/heads/dev
2022-12-27T12:26:36.144077
2018-12-02T18:51:21
2018-12-02T18:51:21
156,936,213
0
0
MIT
2022-12-08T01:17:15
2018-11-10T01:27:24
Python
UTF-8
Python
false
false
2,314
py
from flask import request
from flask_restful import (
    Resource,
)
from flask_jwt_extended import (
    jwt_required,
    get_jwt_identity,
)
from webargs.flaskparser import parser
from webargs.fields import (
    Str,
    Int,
)
from webargs import validate

from nv.models import (
    Avatar,
)
from nv.schemas import (
    AvatarSchema,
)
from nv.util import (
    mk_errors,
)
from nv.permissions import (
    CreateAvatar,
    EditAvatar,
    DeleteAvatar,
)
from nv.database import db
from nv.resources.common import (
    parse_get_coll_args,
    generic_get_coll,
    generic_get,
    generic_post,
    generic_put,
    generic_delete,
    get_user,
    get_obj,
    check_permissions,
)


class AvatarsRes(Resource):
    """Collection endpoint: list avatars (public GET) and create one
    (JWT-protected POST)."""

    def get(self):
        """Return a (possibly filtered/paginated) list of avatars."""
        # Collection query parameters come from the request's query string.
        args = parse_get_coll_args(request)
        objs = generic_get_coll(
            full_query=Avatar.query,
            schema=AvatarSchema(many=True),
            **args,
        )
        return objs

    @jwt_required
    def post(self):
        """Create a new avatar; caller must hold the CreateAvatar permission."""
        user = get_user(username=get_jwt_identity())
        check_permissions(user, [
            CreateAvatar(),
        ])
        ret = generic_post(
            schema=AvatarSchema(),
            data=request.form,
        )
        return ret


class AvatarRes(Resource):
    """Single-item endpoint: fetch (public GET), delete/update (JWT + permission)."""

    def get(self, avatar_id):
        """Return one avatar by id."""
        ret = generic_get(
            obj=Avatar.query.get(avatar_id),
            schema=AvatarSchema(),
        )
        return ret

    @jwt_required
    def delete(self, avatar_id):
        """Delete one avatar; caller must hold DeleteAvatar for it."""
        user = get_user(username=get_jwt_identity())
        # get_obj raises a not-found error with the given message.
        avatar = get_obj(Avatar.query.filter_by(avatar_id=avatar_id),
            'avatar does not exist')
        check_permissions(user, [
            DeleteAvatar(avatar),
        ])
        ret = generic_delete(
            obj=Avatar.query.get(avatar_id),
        )
        return ret

    @jwt_required
    def put(self, avatar_id):
        """Update one avatar; permission check is scoped to submitted fields."""
        user = get_user(username=get_jwt_identity())
        avatar = get_obj(Avatar.query.filter_by(avatar_id=avatar_id),
            'avatar does not exist')
        # Only the attributes actually present in the form are authorized.
        check_permissions(user, [
            EditAvatar(avatar, attributes=set(request.form)),
        ])
        ret = generic_put(
            obj=Avatar.query.get(avatar_id),
            schema=AvatarSchema(),
            data=request.form
        )
        return ret
[ "erik.perillo@gmail.com" ]
erik.perillo@gmail.com
86b34b358fa22363f9d0c994feca0b1566cf675e
f9be6f15af272fce4565a74d85dfe3298ea22315
/aiogram/types/game.py
f5861d90c825331214bf4649639d797f96a9e959
[ "MIT" ]
permissive
hellboi-atul/aiogram
7654ca127b9b709b8700cb1755674a895abb7d05
3440ab3c96e31384cbcf515e75904dcade6f5fd0
refs/heads/dev-2.x
2023-01-27T12:09:21.177136
2020-12-05T13:28:56
2020-12-05T13:28:56
318,952,603
1
0
MIT
2020-12-06T04:44:05
2020-12-06T04:44:04
null
UTF-8
Python
false
false
725
py
import typing

from . import base
from . import fields
from .animation import Animation
from .message_entity import MessageEntity
from .photo_size import PhotoSize


class Game(base.TelegramObject):
    """
    This object represents a game. Use BotFather to create and edit games,
    their short names will act as unique identifiers.

    https://core.telegram.org/bots/api#game
    """
    # Title of the game.
    title: base.String = fields.Field()
    # Description of the game.
    description: base.String = fields.Field()
    # Photo displayed in the game message in chats.
    photo: typing.List[PhotoSize] = fields.ListField(base=PhotoSize)
    # Optional. Brief description or high scores included in the game message.
    text: base.String = fields.Field()
    # Optional. Special entities that appear in `text` (usernames, URLs, ...).
    text_entities: typing.List[MessageEntity] = fields.ListField(base=MessageEntity)
    # Optional. Animation displayed in the game message in chats.
    animation: Animation = fields.Field(base=Animation)
[ "jroot.junior@gmail.com" ]
jroot.junior@gmail.com
6212f545fdf293959684ff38828655f7139ad795
3d5035d6b6ece6beca77ee625b6f1b4a906c4c3a
/project-addons/res_partner_farm_data/models/__init__.py
2813df0338d6631c828c32d88235013156f216d1
[]
no_license
fadeldamen/CMNT_00033_2015_COOP_IV
98ba3fd4ca5df17651f251c76aec80b92c497603
f1f0027b25dffe1281de956c146340dd825cbe9b
refs/heads/master
2020-04-07T22:00:58.920747
2016-02-02T17:39:22
2016-02-02T17:39:22
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,261
py
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2015 Comunitea All Rights Reserved # $Jesús Ventosinos Mayor <jesus@comunitea.com>$ # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from . import yearly_data from . import employee_count from . import cow_count from . import res_partner from . import res_company from . import cost_imputation from . import output_quota from . import lot from . import account_fiscalyear from . import stock from . import product
[ "jesus@comunitea.com" ]
jesus@comunitea.com
e224ca5de74c0f709f85248fb16c8941f8f53647
191a7f83d964f74a2b3c7faeb4fc47d9c63d521f
/.history/main_20210523135820.py
e8c2172120c7256018689f40de758c86f45f3e5b
[]
no_license
AndreLiu1225/Kinder-Values-Survey
2a317feee8d5b17c27da2b2116742656e35d8ab9
090c27da0c822abb7dfc0ec6e13ae1b3dcb7bbf3
refs/heads/master
2023-05-03T00:26:00.481423
2021-06-04T03:24:19
2021-06-04T03:24:19
371,989,154
0
0
null
null
null
null
UTF-8
Python
false
false
2,322
py
from flask import Flask
from flask_wtf import FlaskForm
from wtforms import StringField, TextField, SubmitField, IntegerField, SelectField, RadioField
from wtforms.validators import DataRequired, Email, EqualTo, Length, ValidationError


class MCQ(FlaskForm):
    """Values-survey form: age/profession plus one Yes/No radio question per
    value dimension.

    Fixes vs. original: the `achievement` choices mixed quote styles around an
    embedded apostrophe (a SyntaxError), and `conformity` had a stray bare ''
    where its second (value, label) tuple belonged.
    """

    age = IntegerField("Age", [DataRequired()])
    profession = StringField("Profession", [DataRequired(), Length(max=30, min=2)])

    power = RadioField(
        "Defining goal: social status and prestige, control or dominance over people and resources.",
        choices=[('Yes', 'I want to be dominant'),
                 ('No', 'Dominance over others is not the main priority')])

    # NOTE(review): "traditon" typo kept from the original label text.
    tradition = RadioField(
        "Defining goal: respect, commitment, and acceptance of the customs and ideas that one’s culture or religion provides.",
        choices=[('Yes', 'I would contribute to the survival and uniqueness of traditon'),
                 ('No', 'I am always open and ready to change')])

    # BUGFIX: both labels now use double quotes so the embedded apostrophe in
    # "doesn't" no longer terminates the string literal.
    achievement = RadioField(
        "Defining goal: personal success through demonstrating competence according to social standards.",
        choices=[('Yes', "I want to demonstrate competence in prevailing cultural standards and obtain social approval."),
                 ('No', "I may want to achieve excellence, but it doesn't need to be socially approved")])

    stimulation = RadioField(
        "Defining goal: excitement, novelty, and challenge in life.",
        choices=[('Yes', 'I want a challenging and exciting life.'),
                 ('No', 'I prefer a life with lower amounts of stress.')])

    self_direction = RadioField(
        "Defining goal: independent thought and action–choosing, creating, exploring.",
        choices=[('Yes', 'I like freedom in thought and expression.'),
                 ('No', 'Nah')])

    hedonism = RadioField(
        "Defining goal: pleasure or sensuous gratification for oneself.",
        choices=[('Yes', 'My pleasure and satisfaction are of utmost priority'),
                 ('No', 'Welfare of others is also important.')])

    # BUGFIX: the original ended the choices list with a bare '' instead of a
    # (value, label) pair; the 'No' label below is reconstructed — TODO confirm
    # the intended wording with the author.
    conformity = RadioField(
        "Defining goal: restraint of actions, inclinations, and impulses likely to upset or harm others and violate social expectations or norms.",
        choices=[('Yes', 'I do care about how others view me and follow the social norms'),
                 ('No', 'I do not mind deviating from social expectations')])
[ "andreliu2004@gmail.com" ]
andreliu2004@gmail.com
09fbe7c3b04b86dd5917185ce79e3bbccb6ef515
8d3fd439c6d5a52eda578847545234b2ebdc4f3b
/机器学习百科/pycode/numpy实战.py
5f899edd24ae4ec602d0f7df5ce3d9c76f5df722
[]
no_license
gm-p/practice_demo
d1530dcdb3de95832f1fa5b6e30c75e7ca6acc05
7eaa825fc634ad21aea48713133c0266a44ac54a
refs/heads/main
2023-03-31T23:01:02.302579
2021-04-04T13:29:50
2021-04-04T13:29:50
354,193,496
0
0
null
null
null
null
UTF-8
Python
false
false
7,759
py
# -*- coding: utf-8 -*- """ Created on Mon Jan 28 22:19:13 2019 https://mp.weixin.qq.com/s?__biz=Mzg5NzAxMDgwNg==&mid=2247484006&idx=1&sn=1d0b49c0200e901915a99d29f0dadc79&chksm=c0791f7ff70e966929ed3a9b1358beb3b31a1106084f4332176476eafebf3a407fff2150d5b3&scene=21#wechat_redirect @author: guangming.pan """ # 1.1列表转为矩阵 import numpy as np array = np.array([ [1, 3, 5], [4, 6, 9] ]) print(array) # 1.2维度 print('number of dim:', array.ndim) # 1.3行数和列数 print('shape:', array.shape) #1.4元素个数 print('size:', array.size) #2 Numpy创建array # 2.1 一维array创建 a = np.array([2, 23, 4], dtype=np.int32) # np.int默认为int32 print(a) print(a.dtype) # 2.2 多维array创建 a = np.array([[2, 3, 4], [3, 4, 5]]) print(a) # 生成2行3列的矩阵 # 2.3创建全零数组 a = np.zeros((3, 4)) print(a) # 生成3行4列的全零矩阵 # 2.4创建全一数组 a = np.ones((3, 4), dtype=np.int) print(a) # 2.5创建全空数组 a = np.empty((3, 4)) # 每个值都是接近于0的值 print(a) # 2.6 创建连续数组 a = np.arange(10, 21, 2) # 10-20的数据,步长为2 print(a) # 2.7 reshape操作 b = a.reshape((2, 3)) print(b) # 2.8 创建连续型数据 a = np.linspace(1, 10, 20) # 开始端1,结束端10,且分割成20个数据,生成线段 print(a) # 2.9 linspace的reshape操作 b = a.reshape((5, 4)) print(b) #3.Numpy的基本运算 # 3.1 一维矩阵运算 a = np.array([10, 20, 30, 40]) b = np.arange(4) print(a, b) c = a - b print(c) print(a*b) #若用a.dot(b),则为各维之和 # 在Numpy中,想要求出矩阵中各个元素的乘方需要依赖双星符号** c = b**2 print(c) c = np.sin(a) print(c) print(b < 2) a = np.array([1, 1, 4, 3]) b = np.arange(4) print(a == b) # 3.2 多维矩阵运算 a = np.array([[1, 1], [0, 1]]) b = np.arange(4).reshape((2, 2)) print(a) print(b) # 多维度矩阵乘法 c = a.dot(b) #第一种乘法 print(c) c = np.dot(a, b) # 第二种乘法 print(c) # 多维矩阵乘法不能直接使用'*'号 a = np.random.random((2, 4)) print(a) print(np.sum(a)) print(np.min(a)) print(np.max(a)) print("sum = ", np.sum(a, axis=1)) print("min = ", np.min(a, axis=0)) print("max = ", np.max(a, axis=1)) ''' 如果你需要对行或列进行查找,就需要对axis赋值 axis = 0,将会以列作为查找单元 axis = 1,将会以行作为查找单元 ''' # 3.3 基本计算 A = np.arange(2, 14).reshape((3, 4)) print(A) # 最小元素索引 print(np.argmin(A)) # 最大元素索引 print(np.argmax(A)) # 求整个矩阵的均值 print(np.mean(A)) 
print(np.average(A)) print(np.median(A)) # 累加 print(np.cumsum(A)) print(A.mean()) # 中位数A)) # 累差运算 B = np.array([[3, 5, 9], [4, 8, 10]]) print(np.diff(B)) C = np.array([[0, 5, 9], [4, 0, 10]]) print(np.nonzero(B)) # 将所有非零元素的行和列坐标分开,重构成两个分别关于行和列的矩阵 print(np.nonzero(C)) # 仿照列表排序 A = np.arange(14, 2, -1).reshape((3, 4)) # -1表示反向递减一个步长 print(A) print(np.sort(A)) # 只对每行进行递增排序 print(np.transpose(A)) # 矩阵转置 print(A.T) # 矩阵转置 print(A) print(np.clip(A, 5, 9)) ''' clip(Array, Array_min, Array_max) 若Array_min < X < Array_max 则保持原数不变, 否则,若X < Array_min, 则 X = Array_min 若X > Array_max, 则 X = Array_max ''' # 4.Numpy索引与切片 A = np.arange(3, 15) print(A) print(A[3]) B = A.reshape(3, 4) print(B) print(B[2]) print(B[0][2]) print(B[0, 2]) # list 切片 print(B[1, 1:3]) for row in B: print(row) # 打印列,则进行转置即可 for column in B.T: print(column) # 多维转一维 A = np.arange(3, 15).reshape((3, 4)) print(A) print(A.flatten()) # flat是一个迭代器,本身是一个Object属性 for item in A.flat: print(item) # Numpy array合并 # 5.1数组合并 A = np.array([1, 1, 1]) B = np.array([2, 2, 2]) print(np.vstack((A, B))) # vertical stack 上下合并 C = np.vstack((A, B)) print(C) print(A.shape, B.shape, C.shape) D = np.hstack((A, B)) # horizontal stack左右合并 print(D) print(A.shape, B.shape, D.shape) # 5.2 数组转置为矩阵 print(A[np.newaxis, :]) print(A[np.newaxis, :].shape) print(A[:, np.newaxis]) # 5.3多个矩阵合并 # concatenate的第一个例子 print("--------------") print(A[:, np.newaxis].shape) A = A[:, np.newaxis] # 数组转为矩阵 B = B[:, np.newaxis] # 数组转为矩阵 C = np.concatenate((A, B, B, A), axis=0) # axis=0纵向合并 print(C) C = np.concatenate((A, B), axis=1) # axis=1横向合并 print(C) # concatenate的第二个例子 a = np.arange(8).reshape(2, 4) b = np.arange(8).reshape(2, 4) print(a) print(b) c = np.concatenate((a, b), axis=0) # axis=0多个矩阵纵向合并 print(c) c = np.concatenate((a, b), axis=1) # axis=1多个矩阵横向合并 print(c) # 6. 
Numpy array分割 #6.1 构造3行4列矩阵 A = np.arange(12).reshape((3, 4)) print(A) #6.2 等量分割 print(np.split(A, 2, axis=1)) #纵向分割同横向合并的axis print(np.split(A, 3, axis=0)) #横向分割同纵向合并axis #6.3 不等量分割 print(np.array_split(A, 3, axis=1)) #6.4 其他分割方式 print(np.vsplit(A, 3)) #横向分割,等价于pirnt(np.split(A, 3, axis=0)) print(np.hsplit(A, 2)) #纵向分割,等价于print(np.split(A, 2, axis=1)) # 7.Numpy copy 与 = # 7.1 =赋值方式会带来关联性 a = np.arange(4) print(a) b = a c = a d = b a[0] = 11 print(a) print(b) print(c) print(d) print(b is a) print(c is a) print(d is a) d[1:3] = [22, 33] print(a) print(b) print(c) # 7.2 copy()赋值方式没有关联性 a = np.arange(4) print(a) b = a.copy() print(b) a[3] = 44 print(a) print(b) # 8.广播机制 a = np.array([[0, 0, 0], [10, 10, 10], [20, 20, 20], [30, 30, 30]]) b = np.array([0, 1, 2]) print(a + b) b = np.tile([0, 1, 2], (4, 1)) # 对[0,1,2]行重复3次,列重复1次 print(a + b) # 9.常用函数 # 9.1 np.bincount() x = np.array([1, 2, 3, 3, 0, 1, 4]) print(np.bincount(x)) #统计索引出现次数 w = np.array([0.3, 0.5, 0.7, 0.6, 0.1, -0.9, 1]) print(np.bincount(x, weights=w)) # minlength当所给的bin数量多于实际从x中得到的bin数量后,后面没有访问到的设置为0即可 print(np.bincount(x, minlength=7)) # 9.2 np.argmax() # 函数原型为:numpy.argmax(a, axis=None, out=None) # 功能:返回沿轴axis最大值的索引 x = [[1, 3, 3], [7, 5, 2]] print(np.argmax(x)) # axis=0表示按列操作,对比当前列,找出最大值的索引 x = [[1, 3, 3], [7, 5, 2]] print(np.argmax(x, axis=0)) # axis=1 表示按行操作,对比当前行,找出最大值的索引 print(np.argmax(x, axis=1)) # 最大元素重复返回第一个 x = np.array([1, 3, 2, 3, 0, 1, 0]) print(x.argmax()) x = np.array([1, 2, 3, 3, 0, 1, 4]) print(np.argmax(np.bincount(x))) # 9.4求取精度 # 取指定位置的精度 print(np.around([-0.6, 1.2798, 2.357, 9.67, 13], decimals=0)) # 负数进位取绝对值大的 print(np.around([1.2798, 2.357, 9.67, 13], decimals=1)) print(np.around([1.2798, 2.357, 9.67, 13], decimals=2)) print(np.around([1, 2, 5, 6, 56], decimals=-1)) # -1表示看一位数进位 print(np.around([1, 2, 5, 50, 56, 190], decimals=-2)) # 计算沿指定轴第N维的离散差值 x = np.arange(1, 16).reshape((3, 5)) print(x) print(np.diff(x, axis=1)) # 默认axis=1 print(np.diff(x, axis=0)) # 取整 print(np.floor([-0.6, 
-1.4, -0.1, -1.8, 0, 1.4, 1.7])) # 负数取整,跟around一样,向左 # 取上限 print(np.ceil([1.2, 1.5, 1.8, 2.1, 2.0, -0.5, -0.6, -0.3])) # 查找 x = np.array([[1, 0], # 利用np.where实现小于0的值用0填充,大于0的数不变 [2, -2], [-2, 1]]) print(x) print(np.where(x>0, x, 0))
[ "abc" ]
abc
80ad83afd181b73ca8f260ec4eb78f2354f20a7f
98efe1aee73bd9fbec640132e6fb2e54ff444904
/loldib/getratings/models/NA/na_azir/na_azir_top.py
914fc01c70d432f9190f4176ab17fef32b18fec6
[ "Apache-2.0" ]
permissive
koliupy/loldib
be4a1702c26546d6ae1b4a14943a416f73171718
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
refs/heads/master
2021-07-04T03:34:43.615423
2017-09-21T15:44:10
2017-09-21T15:44:10
104,359,388
0
0
null
null
null
null
UTF-8
Python
false
false
6,269
py
from getratings.models.ratings import Ratings class NA_Azir_Top_Aatrox(Ratings): pass class NA_Azir_Top_Ahri(Ratings): pass class NA_Azir_Top_Akali(Ratings): pass class NA_Azir_Top_Alistar(Ratings): pass class NA_Azir_Top_Amumu(Ratings): pass class NA_Azir_Top_Anivia(Ratings): pass class NA_Azir_Top_Annie(Ratings): pass class NA_Azir_Top_Ashe(Ratings): pass class NA_Azir_Top_AurelionSol(Ratings): pass class NA_Azir_Top_Azir(Ratings): pass class NA_Azir_Top_Bard(Ratings): pass class NA_Azir_Top_Blitzcrank(Ratings): pass class NA_Azir_Top_Brand(Ratings): pass class NA_Azir_Top_Braum(Ratings): pass class NA_Azir_Top_Caitlyn(Ratings): pass class NA_Azir_Top_Camille(Ratings): pass class NA_Azir_Top_Cassiopeia(Ratings): pass class NA_Azir_Top_Chogath(Ratings): pass class NA_Azir_Top_Corki(Ratings): pass class NA_Azir_Top_Darius(Ratings): pass class NA_Azir_Top_Diana(Ratings): pass class NA_Azir_Top_Draven(Ratings): pass class NA_Azir_Top_DrMundo(Ratings): pass class NA_Azir_Top_Ekko(Ratings): pass class NA_Azir_Top_Elise(Ratings): pass class NA_Azir_Top_Evelynn(Ratings): pass class NA_Azir_Top_Ezreal(Ratings): pass class NA_Azir_Top_Fiddlesticks(Ratings): pass class NA_Azir_Top_Fiora(Ratings): pass class NA_Azir_Top_Fizz(Ratings): pass class NA_Azir_Top_Galio(Ratings): pass class NA_Azir_Top_Gangplank(Ratings): pass class NA_Azir_Top_Garen(Ratings): pass class NA_Azir_Top_Gnar(Ratings): pass class NA_Azir_Top_Gragas(Ratings): pass class NA_Azir_Top_Graves(Ratings): pass class NA_Azir_Top_Hecarim(Ratings): pass class NA_Azir_Top_Heimerdinger(Ratings): pass class NA_Azir_Top_Illaoi(Ratings): pass class NA_Azir_Top_Irelia(Ratings): pass class NA_Azir_Top_Ivern(Ratings): pass class NA_Azir_Top_Janna(Ratings): pass class NA_Azir_Top_JarvanIV(Ratings): pass class NA_Azir_Top_Jax(Ratings): pass class NA_Azir_Top_Jayce(Ratings): pass class NA_Azir_Top_Jhin(Ratings): pass class NA_Azir_Top_Jinx(Ratings): pass class NA_Azir_Top_Kalista(Ratings): pass class 
NA_Azir_Top_Karma(Ratings): pass class NA_Azir_Top_Karthus(Ratings): pass class NA_Azir_Top_Kassadin(Ratings): pass class NA_Azir_Top_Katarina(Ratings): pass class NA_Azir_Top_Kayle(Ratings): pass class NA_Azir_Top_Kayn(Ratings): pass class NA_Azir_Top_Kennen(Ratings): pass class NA_Azir_Top_Khazix(Ratings): pass class NA_Azir_Top_Kindred(Ratings): pass class NA_Azir_Top_Kled(Ratings): pass class NA_Azir_Top_KogMaw(Ratings): pass class NA_Azir_Top_Leblanc(Ratings): pass class NA_Azir_Top_LeeSin(Ratings): pass class NA_Azir_Top_Leona(Ratings): pass class NA_Azir_Top_Lissandra(Ratings): pass class NA_Azir_Top_Lucian(Ratings): pass class NA_Azir_Top_Lulu(Ratings): pass class NA_Azir_Top_Lux(Ratings): pass class NA_Azir_Top_Malphite(Ratings): pass class NA_Azir_Top_Malzahar(Ratings): pass class NA_Azir_Top_Maokai(Ratings): pass class NA_Azir_Top_MasterYi(Ratings): pass class NA_Azir_Top_MissFortune(Ratings): pass class NA_Azir_Top_MonkeyKing(Ratings): pass class NA_Azir_Top_Mordekaiser(Ratings): pass class NA_Azir_Top_Morgana(Ratings): pass class NA_Azir_Top_Nami(Ratings): pass class NA_Azir_Top_Nasus(Ratings): pass class NA_Azir_Top_Nautilus(Ratings): pass class NA_Azir_Top_Nidalee(Ratings): pass class NA_Azir_Top_Nocturne(Ratings): pass class NA_Azir_Top_Nunu(Ratings): pass class NA_Azir_Top_Olaf(Ratings): pass class NA_Azir_Top_Orianna(Ratings): pass class NA_Azir_Top_Ornn(Ratings): pass class NA_Azir_Top_Pantheon(Ratings): pass class NA_Azir_Top_Poppy(Ratings): pass class NA_Azir_Top_Quinn(Ratings): pass class NA_Azir_Top_Rakan(Ratings): pass class NA_Azir_Top_Rammus(Ratings): pass class NA_Azir_Top_RekSai(Ratings): pass class NA_Azir_Top_Renekton(Ratings): pass class NA_Azir_Top_Rengar(Ratings): pass class NA_Azir_Top_Riven(Ratings): pass class NA_Azir_Top_Rumble(Ratings): pass class NA_Azir_Top_Ryze(Ratings): pass class NA_Azir_Top_Sejuani(Ratings): pass class NA_Azir_Top_Shaco(Ratings): pass class NA_Azir_Top_Shen(Ratings): pass class 
NA_Azir_Top_Shyvana(Ratings): pass class NA_Azir_Top_Singed(Ratings): pass class NA_Azir_Top_Sion(Ratings): pass class NA_Azir_Top_Sivir(Ratings): pass class NA_Azir_Top_Skarner(Ratings): pass class NA_Azir_Top_Sona(Ratings): pass class NA_Azir_Top_Soraka(Ratings): pass class NA_Azir_Top_Swain(Ratings): pass class NA_Azir_Top_Syndra(Ratings): pass class NA_Azir_Top_TahmKench(Ratings): pass class NA_Azir_Top_Taliyah(Ratings): pass class NA_Azir_Top_Talon(Ratings): pass class NA_Azir_Top_Taric(Ratings): pass class NA_Azir_Top_Teemo(Ratings): pass class NA_Azir_Top_Thresh(Ratings): pass class NA_Azir_Top_Tristana(Ratings): pass class NA_Azir_Top_Trundle(Ratings): pass class NA_Azir_Top_Tryndamere(Ratings): pass class NA_Azir_Top_TwistedFate(Ratings): pass class NA_Azir_Top_Twitch(Ratings): pass class NA_Azir_Top_Udyr(Ratings): pass class NA_Azir_Top_Urgot(Ratings): pass class NA_Azir_Top_Varus(Ratings): pass class NA_Azir_Top_Vayne(Ratings): pass class NA_Azir_Top_Veigar(Ratings): pass class NA_Azir_Top_Velkoz(Ratings): pass class NA_Azir_Top_Vi(Ratings): pass class NA_Azir_Top_Viktor(Ratings): pass class NA_Azir_Top_Vladimir(Ratings): pass class NA_Azir_Top_Volibear(Ratings): pass class NA_Azir_Top_Warwick(Ratings): pass class NA_Azir_Top_Xayah(Ratings): pass class NA_Azir_Top_Xerath(Ratings): pass class NA_Azir_Top_XinZhao(Ratings): pass class NA_Azir_Top_Yasuo(Ratings): pass class NA_Azir_Top_Yorick(Ratings): pass class NA_Azir_Top_Zac(Ratings): pass class NA_Azir_Top_Zed(Ratings): pass class NA_Azir_Top_Ziggs(Ratings): pass class NA_Azir_Top_Zilean(Ratings): pass class NA_Azir_Top_Zyra(Ratings): pass
[ "noreply@github.com" ]
koliupy.noreply@github.com
300860ec08d763f75ed23d5aaa30370d6baf1713
6c5971b878d245fdca10d68ca653c3e72470d0f3
/pyesmon/daemon.py
b47a20151c9d3d1e44b88bbb63d46c8f7d5fbfe2
[]
no_license
patirot/LustrePerfMon
417328674680e8693707e0dc4b93dd597409fb4c
4a351f6190a713ba4b861a9d22fb9240c3261266
refs/heads/master
2023-08-02T08:05:37.742882
2021-09-30T18:49:12
2021-09-30T18:49:12
null
0
0
null
null
null
null
UTF-8
Python
false
false
482
py
# Copyright (c) 2017 DataDirect Networks, Inc.
# All Rights Reserved.
# Author: lixi@ddn.com
"""
Library for daemon process.

Holds the shared shutdown flags for a daemon and the signal handler that
flips them.  Other modules poll SHUTTING_DOWN to know when to exit and
read EXIT_REASON for logging.
"""
import logging

# True once a termination signal has been received; polled by the daemon loop.
SHUTTING_DOWN = False
# Human-readable reason for shutdown, used in log messages.
# Fix: original default read "unkown reason" (typo).
EXIT_REASON = "unknown reason"


def signal_handler(signum, frame):
    """Signal handler: mark the daemon as shutting down.

    :param signum: number of the signal that was delivered.
    :param frame: current stack frame (unused, required by the
        signal-handler calling convention).
    """
    # pylint: disable=global-statement,unused-argument
    global SHUTTING_DOWN, EXIT_REASON
    SHUTTING_DOWN = True
    EXIT_REASON = "got signal %d" % signum
    logging.error("exiting because %s", EXIT_REASON)
[ "lixi@ddn.com" ]
lixi@ddn.com
0a9eff2aa9d6f96402d906eb1b805de2ca963c30
a2fec2dada04b2c7cd69f5e186efea99e98bd32e
/leetcode/maximum-product-subarray.py
07cc57beb127beb0fd6d99c5f146e9a2c6aa210b
[]
no_license
IcyCC/effective_note
363ed9c2eb986151caef7134815be424e53fc592
d55074032217c48e3d872d4524ba6cea94613b86
refs/heads/master
2022-02-13T21:42:54.871413
2022-01-24T02:10:11
2022-01-24T02:10:11
168,490,684
78
7
null
null
null
null
UTF-8
Python
false
false
500
py
class Solution:
    """LeetCode 152: Maximum Product Subarray."""

    # Fix: the original annotated nums as List[int] without importing List
    # from typing, which raises NameError when the class body is evaluated.
    # The builtin generic list[int] (PEP 585) needs no import.
    def maxProduct(self, nums: list[int]) -> int:
        """Return the largest product over all contiguous subarrays of nums.

        Tracks both the running maximum and running minimum product ending
        at each element, because a negative element turns the smallest
        (most negative) product into the largest one.  Returns 0 for an
        empty input.
        """
        if not nums:
            return 0
        cur_max = cur_min = best = nums[0]
        for x in nums[1:]:
            if x < 0:
                # A negative factor swaps the roles of max and min.
                cur_max, cur_min = cur_min, cur_max
            # Either extend the best product so far or restart at x.
            cur_max = max(x, x * cur_max)
            cur_min = min(x, x * cur_min)
            best = max(best, cur_max)
        return best
[ "1103232282@qq.com" ]
1103232282@qq.com
b2549b3de94c275ed7aad85c9d88802e3d7deead
b43e73898400662b68e522c84286a79e359a3390
/cybox/test/objects/win_registry_key_test.py
ca9afc432a92efa097a04e28519119d64ec90b74
[ "BSD-3-Clause" ]
permissive
emmanvg/python-cybox
fc8f44200a02dc956c81da60b4a051984d03ac5b
e58649356e21720cf79bb09ac9ceaf73283c0f26
refs/heads/master
2021-01-20T01:11:24.264142
2017-10-26T01:19:56
2017-10-26T01:19:56
89,234,015
1
0
null
2017-04-24T11:48:24
2017-04-24T11:48:24
null
UTF-8
Python
false
false
2,087
py
# Copyright (c) 2015, The MITRE Corporation. All rights reserved. # See LICENSE.txt for complete terms. import unittest from mixbox.vendor.six import u from cybox.compat import long from cybox.objects.win_registry_key_object import WinRegistryKey from cybox.test.objects import ObjectTestCase class TestWinRegistryKey(ObjectTestCase, unittest.TestCase): object_type = "WindowsRegistryKeyObjectType" klass = WinRegistryKey _full_dict = { 'key': u("\\SOFTWARE\\Microsoft\\Windows\\Windows Error Reporting"), 'hive': u("HKEY_LOCAL_MACHINE"), 'number_values': 6, 'values': [ { 'name': u("Disabled"), 'data': u("1"), 'datatype': u("REG_DWORD"), 'byte_runs': [{'length': 1, 'byte_run_data': u("A")}], }, { 'name': u("ErrorPort"), 'data': u("\\WindowsErrorReportingServicePort"), 'datatype': u("REG_SZ"), }, ], 'modified_time': u("2013-08-08T15:15:15-04:00"), 'creator_username': u("gback"), 'handle_list': [ { 'name': u("RegHandle"), 'pointer_count': long(1), 'type': u("RegistryKey"), 'xsi:type': u('WindowsHandleObjectType'), }, ], 'number_subkeys': 1, 'subkeys': [ { 'key': u("Consent"), 'number_values': 1, 'values': [ { 'name': u("NewUserDefaultConsent"), 'data': u("1"), 'datatype': u("REG_DWORD"), }, ], 'xsi:type': 'WindowsRegistryKeyObjectType', }, ], 'byte_runs': [ {'length': 4, 'byte_run_data': u("z!%f")}, {'offset': 0x1000, 'length': 8, 'byte_run_data': u("%40V.,2@")}, ], 'xsi:type': object_type, } if __name__ == "__main__": unittest.main()
[ "gback@mitre.org" ]
gback@mitre.org
f0072159501e0c427e68bdfa0438e86855b9b9a1
058f6cf55de8b72a7cdd6e592d40243a91431bde
/tests/parser/static/test_match_assignments/test_match_assignments_2.py
8f5c218a9b77384a83595bb1205e81efc452285e
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
LLNL/FPChecker
85e8ebf1d321b3208acee7ddfda2d8878a238535
e665ef0f050316f6bc4dfc64c1f17355403e771b
refs/heads/master
2023-08-30T23:24:43.749418
2022-04-14T19:57:44
2022-04-14T19:57:44
177,033,795
24
6
Apache-2.0
2022-09-19T00:09:50
2019-03-21T22:34:14
Python
UTF-8
Python
false
false
2,464
py
import os import pathlib import sys import subprocess sys.path.insert(1, str(pathlib.Path(__file__).parent.absolute())+"/../../../../parser") #sys.path.insert(1, '/usr/workspace/wsa/laguna/fpchecker/FPChecker/parser') from tokenizer import Tokenizer from instrument import Instrument RUNTIME='../../../../src/Runtime_parser.h' prog_2 = """ __device__ double *p_new; __device__ double *bvc; __device__ double *e_old; __device__ double fabs(double x); __device__ double p_cut; __device__ double *vnewc; __device__ double eosvmax; __device__ double pmin; __device__ void comp(int i) { p_new[i] = bvc[i] * e_old[i] ; if ( fabs(p_new[i]) < p_cut ) p_new[i] = 0.0 ; if ( vnewc[i] >= eosvmax ) p_new[i] = 0.0 ; if ( p_new[i] < pmin ) p_new[i] = pmin ;; } """ def setup_module(module): THIS_DIR = os.path.dirname(os.path.abspath(__file__)) os.chdir(THIS_DIR) def teardown_module(module): cmd = ["rm -f *.o *.ii *.cu"] cmdOutput = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) def preprocessFile(prog_name: str): cmd = ['nvcc -E '+prog_name+'.cu -o '+prog_name+'.ii'] cmdOutput = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) def createFile(prog: str, prog_name: str): with open(prog_name+'.cu', 'w') as fd: fd.write(prog) fd.write('\n') preprocessFile(prog_name) def instrument(prog_name: str): pass preFileName = prog_name+'.ii' sourceFileName = prog_name+'.cu' inst = Instrument(preFileName, sourceFileName) inst.deprocess() inst.findDeviceDeclarations() inst.findAssigments() inst.produceInstrumentedLines() inst.instrument() def compileProggram(prog_name: str): cmd = ['nvcc -std=c++11 -c -include '+RUNTIME+' '+prog_name+'_inst.cu'] cmdOutput = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) def countInstrumentationCalls(prog_name: str): ret = 0 with open(prog_name+'_inst.cu', 'r') as fd: for l in fd.readlines(): for w in l.split(): if '_FPC_CHECK_' in w: ret += 1 return ret def inst_program(prog: str, prog_name: str, num_inst: int): 
try: createFile(prog, prog_name) instrument(prog_name) compileProggram(prog_name) n = countInstrumentationCalls(prog_name) assert n == num_inst return True except Exception as e: print(e) return False def test_1(): os.environ['FPC_VERBOSE'] = '1' assert inst_program(prog_2, 'prog_2', 1) if __name__ == '__main__': test_1()
[ "ilaguna@llnl.gov" ]
ilaguna@llnl.gov
6e360b41dcec06167dd4382fe2834712cd879592
58ec75465a2a6f8500b220bba92d9268e9f77f01
/blog/views.py
b6552b9681fbe39c099a9ebc15e912b63800a1b8
[]
no_license
Rayhun/Django-blog
797e13524aad95d7677a4675b3d9921ad56c6064
8aadfb6a6e6246f1dd979fc11f5b21436ac2bfa1
refs/heads/master
2023-08-28T07:17:34.479268
2021-11-04T14:07:04
2021-11-04T14:07:04
296,093,670
2
0
null
null
null
null
UTF-8
Python
false
false
5,996
py
from datetime import datetime from ipware import get_client_ip import json, urllib from django.shortcuts import render, redirect, get_object_or_404 from django.views.generic import TemplateView, DetailView from .models import BlogPost, BlogComment, IpStore from .forms import CommentForm, SignUpForm from django.contrib.auth import login, logout class UserCreateView(TemplateView): template_name = 'signup.html' def get(self, request): form = SignUpForm() return render(request, self.template_name, {'form': form}) def post(self, request): form = SignUpForm(request.POST) if form.is_valid(): form.save() login(request, form.instance) return redirect('/') return render(request, self.template_name, {'form': form}) class BlogHomePageView(TemplateView): template_name = 'home.html' model = BlogPost def get_context_data(self, **kwargs): context = super(BlogHomePageView, self).get_context_data(**kwargs) context['all_post'] = self.model.objects.all() context['last_post'] = self.model.objects.last() context['first_four'] = self.model.objects.all().order_by('-id')[1:5] context['popular_post'] = self.model.objects.all().order_by( '-total_view' )[:2] context['hot_blog'] = self.model.objects.filter(is_hot=True).order_by('-id')[:4] context['featured_blog'] = self.model.objects.filter(is_featured=True).order_by('-id')[:4] clint_ip, is_routable = get_client_ip(self.request) if clint_ip is None: clint_ip = "0.0.0.0" else: if is_routable: ip_type = "Public" else: ip_type = "Private" clint_ip = "103.230.106.25" url = "http://ip-api.com/json/" + clint_ip response = urllib.request.urlopen(url) data = json.loads(response.read()) try: my_ip = IpStore.objects.get(ip_name=clint_ip) except Exception as e: try: IpStore.objects.create( ip_name=clint_ip, ip_type=ip_type, city=data['city'], region=data['regionName'], country=data['country'], lat=data['lat'], lon=data['lon'], timezone=data['timezone'], zip_code=data['zip'], isp=data['isp'], org=data['org'], query=data['query'], status=data['status'], 
ass=data['as'], countryCode=data['countryCode'] ) except Exception as e: IpStore.objects.create( ip_name=clint_ip, ip_type=ip_type, city="Unknown", region="Unknown", country="Unknown", lat="Unknown", lon="Unknown", timezone="Unknown", zip_code="Unknown", isp="Unknown", org="Unknown", query="Unknown", status="Unknown", ass="Unknown", countryCode="Unknown" ) my_ip = IpStore.objects.get(ip_name=clint_ip) context['ip_address'] = my_ip return context class BlogCommentLikeView(TemplateView): model = BlogComment def get(self, request, *args, **kwargs): comment_id = self.kwargs['comment_id'] comment = self.model.objects.get(id=comment_id) comment.like += 1 comment.save() return redirect('/blog/post/' + str(comment.blog_post.id)) class BlogDetails(DetailView): template_name = 'details.html' model = BlogPost def get_context_data(self, *args, **kwargs): context = super().get_context_data(*args, **kwargs) context['comment'] = BlogComment.objects.filter( post=self.object, parent__isnull=True ) context['comment_count'] = BlogComment.objects.filter( post=self.object ).count() context['form'] = CommentForm() context['replay_blog'] = BlogComment.objects.filter(parent=20) self.object.total_view += 1 self.object.last_seen = datetime.now() self.object.save() return context def post(self, request, pk): object = self.model.objects.get(pk=pk) try: replay_comment = int(request.POST.get('replay_comment')) replay = BlogComment.objects.get(pk=replay_comment) except Exception as e: replay_comment = None replay = None try: clint_ip, is_routable = get_client_ip(self.request) comment_id = int(request.POST.get('comment_id')) cmt_lik = get_object_or_404(BlogComment, pk=comment_id) # cmt_lik.like.add('20') comment = BlogComment.objects.get(pk=comment_id) comment.total_like += 1 comment.save() return redirect('blog_details', pk=pk) except Exception as e: comment = None form = CommentForm(request.POST, request.FILES) if form.is_valid(): user = form.save(commit=False) user.post = object user.parent = 
replay user.save() return redirect('blog_details', pk=pk) return render(request, self.template_name) class SearchView(TemplateView): template_name = 'search_page.html' def get_context_data(self, *args, **kwargs): context = super().get_context_data(*args, **kwargs) query = self.request.GET.get('search') context['query'] = query context['all_post'] = BlogPost.objects.filter( title__icontains=query ) return context
[ "rayhunkhan27@gmail.com" ]
rayhunkhan27@gmail.com
c13786863d7eb85259610ae6c03d43f058ff954f
5e4ddf4a8dac912a7679b0a6babe1b42d5d018e9
/python2/prac/pracmodules/cs_recognition/src/cs.py
361ad0fbe503585194797fb73ca9be2325110c30
[ "BSD-2-Clause" ]
permissive
danielnyga/prac
b686d9655c56175057db2af0002348c99a7de2ee
7add712590dd52dd8c1692554f49b271447ef03f
refs/heads/master
2020-05-30T07:10:58.260570
2018-02-28T17:30:29
2018-02-28T17:30:29
45,621,476
4
2
null
null
null
null
UTF-8
Python
false
false
4,364
py
# PROBABILISTIC ROBOT ACTION CORES # # (C) 2012-2013 by Daniel Nyga (nyga@cs.tum.edu) # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
import os from dnutils import logs from pracmln.mln.base import parse_mln from pracmln.mln.util import colorize from pracmln.utils.project import MLNProject from pracmln.utils.visualization import get_cond_prob_png from prac.core import locations as pracloc from prac.core.base import PRACModule, PRACPIPE from prac.core.inference import PRACInferenceStep from prac.pracutils.utils import prac_heading logger = logs.getlogger(__name__, logs.DEBUG) class ControlStructureIdentification(PRACModule): ''' PRACModule used to identify control structures in natural-language instructions ''' @PRACPIPE def __call__(self, pracinference, **params): # ====================================================================== # Initialization # ====================================================================== logger.debug('inference on {}'.format(self.name)) if self.prac.verbose > 0: print prac_heading('Recognizing Control Structures') if params.get('project', None) is None: # load default project projectpath = os.path.join(pracloc.pracmodules, self.name, self.defproject) ac_project = MLNProject.open(projectpath) else: logger.info(colorize('Loading Project from params', (None, 'cyan', True), True)) projectpath = os.path.join(params.get('projectpath', None) or os.path.join(pracloc.pracmodules, self.name), params.get('project').name) ac_project = params.get('project') dbs = pracinference.inference_steps[-1].output_dbs mlntext = ac_project.mlns.get(ac_project.queryconf['mln'], None) mln = parse_mln(mlntext, searchpaths=[self.module_path], projectpath=projectpath, logic=ac_project.queryconf.get('logic', 'FirstOrderLogic'), grammar=ac_project.queryconf.get('grammar', 'PRACGrammar')) inf_step = PRACInferenceStep(pracinference, self) pngs = {} for i, db in enumerate(dbs): db_ = db.copy() # ====================================================================== # Inference # ====================================================================== infer = 
self.mlnquery(config=ac_project.queryconf, db=db, mln=mln) result_db = infer.resultdb if self.prac.verbose == 2: print print prac_heading('INFERENCE RESULTS') infer.write() # ========================================================== # Postprocessing # ========================================================== for q in result_db.query('event(?w,?ac)'): db_ << 'event({},{})'.format(q['?w'],q['?ac']) for q in result_db.query('condition(?w)'): db_ << 'condition({})'.format(q['?w']) inf_step.output_dbs.append(db_) pngs['CS' + str(i)] = get_cond_prob_png(ac_project.queryconf.get('queries', ''), dbs, filename=self.name) inf_step.png = pngs inf_step.applied_settings = ac_project.queryconf.config return inf_step
[ "mareikep@cs.uni-bremen.de" ]
mareikep@cs.uni-bremen.de
2a6b7dcf34814b4cf52d13c7b049621f96bb6f85
cb324b8e92765c535765bbb88aa69878ce2e4fe3
/regtests/list/if_empty.py
fad3c9e5d5f0cd8f09b8e56e51887962654316cd
[ "BSD-3-Clause" ]
permissive
pombredanne/Rusthon
f47756c6ae465c60012e63e02ea1e912c3b391fb
343c0b2b097b18fa910f616ec2f6c09048fe92d0
refs/heads/master
2021-01-17T21:24:29.744692
2016-09-10T10:53:59
2016-09-10T10:53:59
40,818,721
1
0
null
2016-09-10T10:54:00
2015-08-16T13:20:47
Python
UTF-8
Python
false
false
581
py
from runtime import *
"""if empty list then false"""

# NOTE(review): this is a Rusthon regression test — the exact code forms
# (len(x) truth tests instead of bare truthiness, `is not None` instead of
# bare object truthiness) are the behavior under test and must not be
# "cleaned up".


class A: pass

def main():
    d = []
    #if d:  ## this is not allowed, and will raise an error at runtime
    # Empty list: len() is 0, so the branch must not be taken.
    if len(d):
        err1 = 1
    else:
        err1 = 0

    # Same check with an inline empty-list literal.
    if len([]):
        err2 = 1
    else:
        err2 = 0

    # Non-empty list: the branch must now be taken.
    d.append('xxx')
    if len(d):
        err3 = 0
    else:
        err3 = 1

    assert( err1 == 0 )
    assert( err2 == 0 )
    assert( err3 == 0 )

    a = A()
    ok = False
    #if a:  ## this is not allowed, and will raise an error at runtime
    # Object truthiness must be expressed as an explicit None check.
    if a is not None:
        ok = True
    assert ok

    # An empty list attribute must also test false via len().
    a.x = []
    if len(a.x):
        err4 = 1
    else:
        err4 = 0
    assert( err4 == 0 )

main()
[ "goatman.py@gmail.com" ]
goatman.py@gmail.com
3a9191bee9d44079e2dbeb83c998458f78dd8a94
73a2917525e56ac548563b39ead045a19d4861a1
/tutorial_11.py
b650f21148e9fdbe67e2fb3fbf193671ac46ce23
[]
no_license
VsevolodM95/Python_training
468e80e5ed6dbec627b08970eb6f2d6febfcd0b5
120b2142a7f627a1f98db46a8b531d6e35013090
refs/heads/master
2021-01-15T14:28:36.144487
2014-07-11T23:14:39
2014-07-11T23:14:39
null
0
0
null
null
null
null
UTF-8
Python
false
false
239
py
#tutorial #12 #the same as 11, but more rational name = raw_input("What's my name?") height = raw_input("How tall am I?") age = raw_input("How old am I?"); print "My name is %r, I'm %r tall and I'm %r years old." % (name, height, age)
[ "myemail" ]
myemail
140ccc842c6d7721c31339b91f2e8e6c3a027af8
3417c37060ec8f7089754baace34d8051572570a
/Python-dev/Task3/app/models/score.py
228822793642df91258d2585422b3e11fe1a7c6e
[]
no_license
rrkas/SID2021
8a2baa41b8e4868dce0d4ac75533109bc5e717a5
09d2a771ca1914928fbb78a0dac58c1bb4d0dd7b
refs/heads/master
2023-06-26T13:58:35.246291
2021-07-25T07:34:57
2021-07-25T07:34:57
381,122,559
0
0
null
null
null
null
UTF-8
Python
false
false
289
py
class Score:
    """A student's score record: name, registration number, score and row id."""

    def __init__(self, name=None, regd_num=None, score=0, id=-1):
        # An id of -1 marks a record not yet persisted.
        self.id = id
        self.score = score
        self.regd_num = regd_num
        self.name = name

    def __repr__(self):
        return "Score({0}, {1}, {2}, {3})".format(
            self.id, self.name, self.regd_num, self.score
        )
[ "rrka79wal@gmail.com" ]
rrka79wal@gmail.com
f6346a64d9a515475908d829accc6318b2aab55b
5c1643e0e03969b91dba6f0d6428719b75b2d7e1
/basic/string2.py
31df9c32eb94b536d06ee0696cb753b5ab5edbe0
[ "Apache-2.0" ]
permissive
KobiBeef/google-python-exercises
2ff0ba89f0618087301b32e8318d751690975c76
e5d67d2a198bcf26611df21b0306cd8c42630fdc
refs/heads/master
2021-01-10T16:49:25.235238
2015-10-02T05:33:48
2015-10-02T05:33:48
43,537,376
0
0
null
null
null
null
UTF-8
Python
false
false
3,295
py
#!/usr/bin/python2.4 -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0

# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/

# Additional basic string exercises

# D. verbing
# Given a string, if its length is at least 3,
# add 'ing' to its end.
# Unless it already ends in 'ing', in which case
# add 'ly' instead.
# If the string length is less than 3, leave it unchanged.
# Return the resulting string.
def verbing(s):
  """Return s + 'ing' (or + 'ly' if s already ends in 'ing').

  Strings shorter than 3 characters are returned unchanged.
  """
  if len(s) < 3:
    return s
  # Fix: the original tested len(s) > 3 in both branches, so a string of
  # exactly 3 characters fell through and the function returned None.
  if s.endswith('ing'):
    return s + 'ly'
  return s + 'ing'


# E. not_bad
# Given a string, find the first appearance of the
# substring 'not' and 'bad'. If the 'bad' follows
# the 'not', replace the whole 'not'...'bad' substring
# with 'good'.
# Return the resulting string.
# So 'This dinner is not that bad!' yields:
# This dinner is good!
def not_bad(s):
  """Replace the first 'not'...'bad' span with 'good' when 'bad' follows 'not'."""
  n = s.find('not')
  b = s.find('bad')
  # Fix: the original mishandled strings with 'bad' but no 'not' —
  # find() returns -1 for the missing 'not', the old -1 > b / b == 0
  # checks both failed, and a bogus slice was returned.  Replace only
  # when both substrings exist and 'bad' comes after 'not'.
  if n != -1 and b > n:
    return s[:n] + 'good' + s[b + 3:]
  return s


# F. front_back
# Consider dividing a string into two halves.
# If the length is even, the front and back halves are the same length.
# If the length is odd, we'll say that the extra char goes in the front half.
# e.g. 'abcde', the front half is 'abc', the back half 'de'.
# Given 2 strings, a and b, return a string of the form # a-front + b-front + a-back + b-back def front_back(a, b): # +++your code here+++ test = len(a) / 2 test1 = len(b) / 2 r = 0 if len(a) % 2 == 1 and len(b) % 2 == 1: r += 1 return a[0:test+r] + b[0:test1+r] + a[test+r:] + b[test1+r:] if len(a) % 2 !=1 and len(b) % 2 == 1: r += 1 return a[0:test] + b[0:test1+r] + a[test:] + b[test1+r:] else: return a[0:test] + b[0:test1] + a[test:] + b[test1:] return # ('abcd', 'xy'), 'abxcdy') # test(front_back('abcde', 'xyz'), 'abcxydez') # Simple provided test() function used in main() to print # what each function returns vs. what it's supposed to return. def test(got, expected): if got == expected: prefix = ' OK ' else: prefix = ' X ' print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected)) # main() calls the above functions with interesting inputs, # using the above test() to check if the result is correct or not. def main(): print 'verbing' test(verbing('hail'), 'hailing') test(verbing('swiming'), 'swimingly') test(verbing('do'), 'do') print print 'not_bad' test(not_bad('This movie is not so bad'), 'This movie is good') test(not_bad('This dinner is not that bad!'), 'This dinner is good!') test(not_bad('This tea is not hot'), 'This tea is not hot') test(not_bad("It's bad yet not"), "It's bad yet not") print print 'front_back' test(front_back('abcd', 'xy'), 'abxcdy') test(front_back('abcde', 'xyz'), 'abcxydez') test(front_back('Kitten', 'Donut'), 'KitDontenut') if __name__ == '__main__': main()
[ "ezekielbacungan@gmail.com" ]
ezekielbacungan@gmail.com
6376fc530b0e76bd4f87940ec6ad2bb947e107f8
a7596165a29e5186bc6c4718e3b6e835939b105d
/apps/impala/src/impala/conf.py
ebefe16ed4f96697220a65c9051ecf7c7974f66a
[ "Apache-2.0" ]
permissive
lockhart39/HueQualityAndIngestionApp
f0c778665f0fbe699ec30e0df5e9f3ed8a9c3384
c75e55a43a8bdeb7aa0f5bf2101ec72b01dcac1c
refs/heads/master
2021-08-20T00:31:29.481333
2017-11-27T19:22:16
2017-11-27T19:22:16
112,237,923
1
0
null
null
null
null
UTF-8
Python
false
false
6,295
py
#!/usr/bin/env python # Licensed to Cloudera, Inc. under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. Cloudera, Inc. licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import sys import socket from django.utils.translation import ugettext_lazy as _t, ugettext as _ from desktop.conf import default_ssl_cacerts, default_ssl_validate, AUTH_USERNAME as DEFAULT_AUTH_USERNAME,\ AUTH_PASSWORD as DEFAULT_AUTH_PASSWORD from desktop.lib.conf import ConfigSection, Config, coerce_bool, coerce_csv, coerce_password_from_script from desktop.lib.exceptions import StructuredThriftTransportException from impala.settings import NICE_NAME LOG = logging.getLogger(__name__) SERVER_HOST = Config( key="server_host", help=_t("Host of the Impala Server."), default="localhost") SERVER_PORT = Config( key="server_port", help=_t("Port of the Impala Server."), default=21050, type=int) IMPALA_PRINCIPAL=Config( key='impala_principal', help=_t("Kerberos principal name for Impala. 
Typically 'impala/hostname.foo.com'."), type=str, default="impala/%s" % socket.getfqdn()) IMPERSONATION_ENABLED=Config( key='impersonation_enabled', help=_t("Turn on/off impersonation mechanism when talking to Impala."), type=coerce_bool, default=False) QUERYCACHE_ROWS=Config( key='querycache_rows', help=_t("Number of initial rows of a resultset to ask Impala to cache in order to" " support re-fetching them for downloading them." " Set to 0 for disabling the option and backward compatibility."), type=int, default=50000) SERVER_CONN_TIMEOUT = Config( key='server_conn_timeout', default=120, type=int, help=_t('Timeout in seconds for Thrift calls.')) CLOSE_QUERIES = Config( key="close_queries", help=_t("Hue will try to close the Impala query when the user leaves the editor page. " "This will free all the query resources in Impala, but also make its results inaccessible."), type=coerce_bool, default=True ) QUERY_TIMEOUT_S = Config( key="query_timeout_s", help=_t("If QUERY_TIMEOUT_S > 0, the query will be timed out (i.e. cancelled) if Impala does not do any work" " (compute or send back results) for that query within QUERY_TIMEOUT_S seconds."), type=int, default=600 ) SESSION_TIMEOUT_S = Config( key="session_timeout_s", help=_t("If SESSION_TIMEOUT_S > 0, the session will be timed out (i.e. 
cancelled) if Impala does not do any work" " (compute or send back results) for that session within QUERY_TIMEOUT_S seconds."), type=int, default=12 * 60 * 60 ) CONFIG_WHITELIST = Config( key='config_whitelist', default='debug_action,explain_level,mem_limit,optimize_partition_key_scans,query_timeout_s,request_pool', type=coerce_csv, help=_t('A comma-separated list of white-listed Impala configuration properties that users are authorized to set.') ) SSL = ConfigSection( key='ssl', help=_t('SSL configuration for the server.'), members=dict( ENABLED = Config( key="enabled", help=_t("SSL communication enabled for this server."), type=coerce_bool, default=False ), CACERTS = Config( key="cacerts", help=_t("Path to Certificate Authority certificates."), type=str, dynamic_default=default_ssl_cacerts, ), KEY = Config( key="key", help=_t("Path to the private key file, e.g. /etc/hue/key.pem"), type=str, default=None ), CERT = Config( key="cert", help=_t("Path to the public certificate file, e.g. /etc/hue/cert.pem"), type=str, default=None ), VALIDATE = Config( key="validate", help=_t("Choose whether Hue should validate certificates received from the server."), type=coerce_bool, dynamic_default=default_ssl_validate, ) ) ) def get_auth_username(): """Get from top level default from desktop""" return DEFAULT_AUTH_USERNAME.get() AUTH_USERNAME = Config( key="auth_username", help=_t("Auth username of the hue user used for authentications."), private=True, dynamic_default=get_auth_username) def get_auth_password(): """Get from script or backward compatibility""" password = AUTH_PASSWORD_SCRIPT.get() if password: return password return DEFAULT_AUTH_PASSWORD.get() AUTH_PASSWORD = Config( key="auth_password", help=_t("LDAP/PAM/.. password of the hue user used for authentications."), private=True, dynamic_default=get_auth_password) AUTH_PASSWORD_SCRIPT = Config( key="auth_password_script", help=_t("Execute this script to produce the auth password. 
This will be used when `auth_password` is not set."), private=True, type=coerce_password_from_script, default=None) def config_validator(user): # dbms is dependent on beeswax.conf (this file) # import in method to avoid circular dependency from beeswax.server import dbms from beeswax.server.dbms import get_query_server_config res = [] try: try: if not 'test' in sys.argv: # Avoid tests hanging query_server = get_query_server_config(name='impala') server = dbms.get(user, query_server) server.execute_statement("SELECT 'Hello World!';") except StructuredThriftTransportException, ex: if 'TSocket read 0 bytes' in str(ex): # this message appears when authentication fails msg = "Failed to authenticate to Impalad, check authentication configurations." LOG.exception(msg) res.append((NICE_NAME, _(msg))) else: raise ex except Exception, ex: msg = "No available Impalad to send queries to." LOG.exception(msg) res.append((NICE_NAME, _(msg))) return res
[ "cloudera@quickstart.cloudera" ]
cloudera@quickstart.cloudera
e0ae885a4a52601d7b84938cb69ac26206faec5d
d9af28bb1f9eb67300f1dde7942f9b536cbcb1b8
/232.py
9b04d35b284fdf7b16016d547f5b8bccb6c278ae
[]
no_license
khanjason/leetcode
c78d415fd5794ffb7d15f68b8e2dc89b367317ab
d2c0b41bdd181bee999922be820d6ce16312b7ae
refs/heads/master
2021-12-26T17:28:32.706678
2021-09-09T13:56:09
2021-09-09T13:56:09
235,096,416
0
0
null
null
null
null
UTF-8
Python
false
false
941
py
class MyQueue: def __init__(self): """ Initialize your data structure here. """ self.q=[] def push(self, x: int) -> None: """ Push element x to the back of queue. """ el=[x] self.q=el+self.q def pop(self) -> int: """ Removes the element from in front of queue and returns that element. """ return self.q.pop() def peek(self) -> int: """ Get the front element. """ return self.q[len(self.q)-1] def empty(self) -> bool: """ Returns whether the queue is empty. """ if self.q==[]: return True return False # Your MyQueue object will be instantiated and called as such: # obj = MyQueue() # obj.push(x) # param_2 = obj.pop() # param_3 = obj.peek() # param_4 = obj.empty()
[ "noreply@github.com" ]
khanjason.noreply@github.com
3f3975822de1286c6ce6e3a90f622ab86cdb0abb
c9952dcac5658940508ddc139344a7243a591c87
/tests/lab07/test_ch07_t03_getting_there.py
78b92c4a065dbc622993d8e2f3a40b16d74c4cdc
[]
no_license
wongcyrus/ite3101_introduction_to_programming
5da1c15212528423b3df91997327fe148abef4de
7cd76d0861d5355db5a6e2e171735bee2e78f829
refs/heads/master
2023-08-31T17:27:06.193049
2023-08-21T08:30:26
2023-08-21T08:30:26
136,574,036
3
2
null
2023-08-21T08:30:28
2018-06-08T06:06:49
Python
UTF-8
Python
false
false
925
py
import types import unittest from tests.unit_test_helper.console_test_helper import * from tests.unit_test_helper import is_answer class TestOutput(unittest.TestCase): def test(self): if is_answer: from lab.lab07.ch07_t03_getting_there_ans import plane_ride_cost else: from lab.lab07.ch07_t03_getting_there import plane_ride_cost temp_globals, temp_locals, content, output = execfile("lab07/ch07_t03_getting_there.py") print(temp_locals) self.assertIsInstance(temp_locals['plane_ride_cost'], types.FunctionType) self.assertEqual(183, plane_ride_cost("Charlotte")) self.assertEqual(220, plane_ride_cost("Tampa")) self.assertEqual(222, plane_ride_cost("Pittsburgh")) self.assertEqual(475, plane_ride_cost("Los Angeles")) self.assertEqual(None, plane_ride_cost("")) if __name__ == '__main__': unittest.main()
[ "cywong@vtc.edu.hk" ]
cywong@vtc.edu.hk
b994d486bed5ada6105bd8330aec492f5aa07a05
32e01bc8b5883896a58bdb3c2443028cf61484cf
/RaptAutomation/Test_UI/K8s_SingleUser_SingleGpu/test_17_s3_image_classification_auto.py
44fad6b0452a6b74f4b82cc09e1338151c053a9c
[]
no_license
Idur69/RaptAutmationWin
df8a55a6c60467ea7d048b7a6856263d5624643c
1f4faffa75a9cc5bf72c20baec3893d611b9d565
refs/heads/master
2022-12-03T21:01:38.841645
2020-08-30T08:39:43
2020-08-30T08:39:43
291,431,583
0
0
null
null
null
null
UTF-8
Python
false
false
5,534
py
import datetime import unittest from telnetlib import EC from time import sleep from selenium import webdriver from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import By from selenium.webdriver.support.select import Select from selenium.webdriver.support.wait import WebDriverWait #from Src.EnvSetup.EnvironmentSetUp import EnvironmentSetup from Src.EnvSetup.cnfgurl import LoginUsers, Paths, EnvironmentSetup, Memory_and_Core_Percentages from Src.PageObject.Pages.Admin_Login import AdminLogin from Src.PageObject.Pages.MyUrl import Myurl from Test_UI.TestUtility.ScreenShots import SS class Kubernetes_Ui_S3_Image_Auto(EnvironmentSetup): def test_s3_image_auto(self): # Second browser driver driver1 = self.driver1 myurl = Myurl(self.driver1) myurl.access_url() driver1.implicitly_wait(10) print("This is Title name :", driver1.title) # ScreenShot Relative Path ss_path = '/K8s_UI/' # Creating object of screenshot utility ss = SS(driver1) # ------- Login Details ------------ user = LoginUsers() Admin = user.user1_name Pwd = user.user1_password expadmin = user.user1_expadmin exppass = user.user1_exppass # ------ S3 bucket ------------ paths = Paths() bkt_name = paths.Bucket_name bkt_keys = paths.Bucket_keys # -------flower path ----------- flower_path = paths.S3_Image_clf_path admin_login = AdminLogin(driver1) admin_login.set_login_uname(Admin) admin_login.set_login_upass(Pwd) sleep(3) # sub = self.driver1.find_element_by_tag_name("button") # sub.click() # setcookie = pickle.dump(self.driver1.get_cookies(), open("cookies.pkl", "wb")) # print("setcookievalue :", setcookie) admin_login.submit_login(Admin, Pwd) sleep(5) if Admin == expadmin and Pwd == exppass: print("Login successful") else: assert print("Invalid credentials") print("************ Image Auto *****************") # --------Frame work-------------- # f = self.driver1.find_element_by_class_name("f-image mxnet text-center") f = 
self.driver1.find_element_by_xpath("//img[@src='/images/tenserflow.png']") f.click() print("Selected Tensorflow") sleep(2) # --------if you have compound class name you should write like this----------- inception = self.driver1.find_element_by_xpath("//*[@class='card-body text-center font-weight-normal btnNext']") inception.click() sleep(1) print("Selected Inception") # -----------S3 bucket --------------- s3 = self.driver1.find_element(By.ID, 'r1') s3.click() sleep(1) bucketname = self.driver1.find_element(By.ID, 'bkt_name') bucketname.send_keys(bkt_name) sleep(2) bucketkeys = self.driver1.find_element(By.ID, 'bkt_keys') bucketkeys.send_keys(bkt_keys) sleep(2) # ----------GPU Auto -------- gpu = self.driver1.find_element(By.ID, 'r4') gpu.click() sleep(2) auto = self.driver1.find_element_by_id("r101") auto.click() sleep(2) # ------Screenshot-1----------- ss.ScreenShot(ss_path + "test_17_s3_image_auto_setupscreen.png") # -------------------- setup btn ----------------- setupbtn = self.driver1.find_element(By.ID, 'setupbtn') setupbtn.click() sleep(24) # -------Datsets & Training ---------------- traindir = self.driver1.find_element(By.ID, 'traindirectory') trdirectory = Select(traindir) trdirectory.select_by_visible_text("flower_classification") sleep(2) trinfile = self.driver1.find_element(By.ID, 'file_name') trfile = Select(trinfile) trfile.select_by_visible_text("retrain-new.py") sleep(2) # --------- Train -------------------- train = self.driver1.find_element_by_xpath("//a[@href='#train']") train.click() textpath = self.driver1.find_element_by_id("textVal") textpath.clear() textpath.send_keys(flower_path) sleep(2) Train = self.driver1.find_element(By.ID, 'train_id') Train.click() sleep(100) gpuTime = driver1.find_elements_by_id("gputime") for GpuUsage in gpuTime: assert isinstance(GpuUsage.text, object) print("Gpu Usage : ", str(GpuUsage.text)) sleep(240) # --------Elapsed Time ------------------- myElem = self.driver1.find_element_by_id("elapsedTime") 
myElem.click() sleep(1) # --------Screen shot-2 ----------- ss.ScreenShot(ss_path + "test_17_s3_image_auto_auto_elapsedtime.png") sleep(2) assert isinstance(myElem.text, object) print("Image classi fication Auto -", str(myElem.text)) for logs in driver1.get_log('browser'): print(logs) # ---------Logout ---------------- self.driver1.find_element_by_id("navbarDropdownMenuLink").click() logout = self.driver1.find_element_by_class_name("dropdown-item") logout.click() sleep(5) for Logedout in self.driver1.find_elements_by_xpath("//*[@class='alert alert-success']"): assert isinstance(Logedout.text, object) print(str(Logedout.text))
[ "shaikidurbasha469@gmail.com" ]
shaikidurbasha469@gmail.com
a3c3c4e911bc0abc6a69bd4872fab46bdb146b9d
da4d071cb7d2b3f46a8e393859d6d745f8e4fa96
/single_sign_on/single_sign_on/settings.py
4785912b185a5a2f57f84ee3d7fa2a3a8501efc7
[]
no_license
pnija/single_sign_on
59140395ac8bd451b719e4ad7627e33dc29a689a
145f007abbede74906faf4d5b37fd73254c31039
refs/heads/master
2022-12-17T01:46:26.482289
2018-10-01T14:31:21
2018-10-01T14:31:21
151,037,834
0
0
null
2022-05-25T00:50:13
2018-10-01T04:48:55
Python
UTF-8
Python
false
false
3,272
py
""" Django settings for single_sign_on project. Generated by 'django-admin startproject' using Django 2.1.1. For more information on this file, see https://docs.djangoproject.com/en/2.1/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.1/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '&4#s39l5uok=!h37vbhd$r*ap&erj$%x&@gor!7uasoj$d%z$h' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = ['*'] # SITE_ID = 1 # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', # 'django.contrib.sites', 'accounts' ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'single_sign_on.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': ['templates'], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'single_sign_on.wsgi.application' # Database # 
https://docs.djangoproject.com/en/2.1/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/2.1/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.1/howto/static-files/ STATIC_URL = '/static/' JWT_SECRET = '123456789' LOGIN_REDIRECT = '/' LOGOUT_REDIRECT = '/accounts/success/'
[ "nijap@techversantinfotech.com" ]
nijap@techversantinfotech.com
226ebe40297fd07a670ccb4b161a84312e59cd5f
be471cdee10e2273ce41631c4a58f16227f18b5b
/virtual/walt/virtual/setup/__init__.py
897f6407a59976eac4f37043ca18e8730ef3640e
[ "BSD-3-Clause" ]
permissive
dia38/walt-python-packages
d91d477c90dbc4bd134fdcc31d7cb404ef9885b8
e6fa1f166f45e73173195d57840d22bef87b88f5
refs/heads/master
2020-04-29T17:41:19.936575
2019-11-26T10:11:58
2019-11-26T10:11:58
176,303,546
0
0
BSD-3-Clause
2019-03-18T14:27:56
2019-03-18T14:27:56
null
UTF-8
Python
false
false
1,056
py
#!/usr/bin/env python import os, sys from walt.common.tools import failsafe_symlink from pkg_resources import resource_string from pathlib import Path SYSTEMD_SERVICE_FILES = [ "walt-vpn-server.service", "walt-vpn-server.socket" ] SYSTEMD_SERVICES_DIR = Path("/etc/systemd/system") def run(): if os.geteuid() != 0: sys.exit("This script must be run as root. Exiting.") for filename in SYSTEMD_SERVICE_FILES: service_file_content = resource_string(__name__, filename) service_file_path = SYSTEMD_SERVICES_DIR / filename if service_file_path.exists(): sys.exit('Virtual tools are already setup. Exiting.') service_file_path.write_bytes(service_file_content) if filename.endswith('.socket'): # the following is the same as running 'systemctl enable <unit>.socket' # on a system that is really running failsafe_symlink(str(service_file_path), str(SYSTEMD_SERVICES_DIR / "sockets.target.wants" / filename)) print('Done.')
[ "etienne.duble@imag.fr" ]
etienne.duble@imag.fr
1b4bf67859e3d5788fe48c327bd779ca472b00f8
fb49051e2cb4b9865fdde761a5892b46f5815a5c
/tutorial1/app/permissions.py
d3f8296625624a6a9e3fd99185f9ba5bb3b23814
[]
no_license
ashish1sasmal/Django-REST-Framework
0bb810e17c716750faac676218bdd44847891b52
ca8caa9b38d86086e78a28cfad2ca9cf0216cce1
refs/heads/master
2023-02-06T09:09:56.582366
2020-12-27T17:37:44
2020-12-27T17:37:44
321,983,890
0
0
null
null
null
null
UTF-8
Python
false
false
387
py
from rest_framework import permissions class IsOwnerOrReadOnly(permissions.BasePermission): def has_object_permission(self, request, view, obj): # Read permissions are allowed to any request, # so we'll always allow GET, HEAD or OPTIONS requests. if request.method in permissions.SAFE_METHODS: return True return obj.user == request.user
[ "ashishsasmal1@gmail.com" ]
ashishsasmal1@gmail.com
d0db361dd3d49dc29a842821913c2f0e8337c048
c20a7a651e63c1e7b1c5e6b5c65c8150898bbaf2
/OJ/LeetCode/63. Unique Paths II.py
face5e92689a3a4bc7c6ff313028f2f885493362
[]
no_license
Nobody0321/MyCodes
08dbc878ae1badf82afaf0c9fc608b70dfce5cea
b60e2b7a8f2ad604c7d28b21498991da60066dc3
refs/heads/master
2023-08-19T14:34:23.169792
2023-08-15T15:50:24
2023-08-15T15:50:24
175,770,050
0
0
null
null
null
null
UTF-8
Python
false
false
982
py
# 思路,类似unique path 1 但是遇到路障要将路径数置零 class Solution: def uniquePathsWithObstacles(self, obstacleGrid): if obstacleGrid == []: return None height = len(obstacleGrid) width = len(obstacleGrid[0]) dp = [[0] * width for _ in range(height)] for i in range(height): if obstacleGrid[i][0]: break else: dp[i][0] = 1 for i in range(width): if obstacleGrid[0][i]: break else: dp[0][i] = 1 for i in range(1,height): for j in range(1,width): if obstacleGrid[i][j]: dp[i][j] = 0 else: dp[i][j] = dp[i-1][j] + dp[i][j-1] return dp[height-1][width-1] if __name__ == "__main__": grid = [[0,0,0], [0,1,0], [0,0,0]] print(Solution().uniquePathsWithObstacles(grid))
[ "cyk19951@gmail.com" ]
cyk19951@gmail.com
8feff23b081e85387612f08d06a1a23ad765ae10
53fab060fa262e5d5026e0807d93c75fb81e67b9
/gaussiana/ch3_2019_03_01_18_32_46_384807.py
ae243968d9d574a99e555abaf06c22567dccc2a8
[]
no_license
gabriellaec/desoft-analise-exercicios
b77c6999424c5ce7e44086a12589a0ad43d6adca
01940ab0897aa6005764fc220b900e4d6161d36b
refs/heads/main
2023-01-31T17:19:42.050628
2020-12-16T05:21:31
2020-12-16T05:21:31
306,735,108
0
0
null
null
null
null
UTF-8
Python
false
false
173
py
import math def calcula_gaussiana(x, mi, sigma): x = 1/(sigma*(2*math.pi)**(1/2)) y = math.exp(-0.5*((x-mi)/sigma)**2) f_x_mi_sigma = x*y return f_x_mi_sigma
[ "you@example.com" ]
you@example.com
bfd36c25bbcaa8e49bf606f5919191e197984ef1
243f64a13a1f496a08d05508ccf73c61e03a69de
/max_even_seq/subs/2017B/3.py
7de5e2179d22b93d5d7e963889ba0802ea33e9ee
[]
no_license
RazLandau/pybryt
5f3e946a99338fb159d6044098380bce2aacdc6f
8973b15fc48d1f278e7b8a3990d2f73a3bffb128
refs/heads/main
2023-08-19T12:30:46.622088
2021-10-14T18:35:21
2021-10-14T18:35:21
350,324,993
0
0
null
null
null
null
UTF-8
Python
false
false
207
py
def max_even_seq(n): cnt=0 seq=0 for i in str(n): if int(i)%2==0: cnt=cnt+1 if seq<cnt: seq=cnt else: cnt=0 return(seq)
[ "landau.raz@gmail.com" ]
landau.raz@gmail.com
aa7d69aee1bc7288dcc9116c62b689a7f9e24991
ec39ccb701d30c64e9541802e3f45aff61a16f09
/toontown/uberdog/ClientServicesManager.py
864a38a6b01ad165a11c91ef604473c9ad5679d6
[]
no_license
ronanwow1001/toontown-src-py3.0
f3089ea2b0a987e236df1ae0fad8f94c45e852e0
b7bf9673353a1a8231652d009ef00b11da4b0290
refs/heads/master
2020-12-07T05:03:48.795733
2020-01-11T19:07:49
2020-01-11T19:07:49
232,640,248
0
0
null
2020-01-08T19:18:50
2020-01-08T19:18:50
null
UTF-8
Python
false
false
3,418
py
from direct.directnotify.DirectNotifyGlobal import directNotify from direct.distributed.DistributedObjectGlobal import DistributedObjectGlobal import hmac from panda3d.core import * from otp.distributed.PotentialAvatar import PotentialAvatar from otp.otpbase import OTPGlobals from toontown.chat.ChatGlobals import WTSystem from toontown.chat.WhisperPopup import WhisperPopup class ClientServicesManager(DistributedObjectGlobal): notify = directNotify.newCategory('ClientServicesManager') # --- LOGIN LOGIC --- def performLogin(self, doneEvent): self.doneEvent = doneEvent self.systemMessageSfx = None token = self.cr.playToken or 'dev' token = token.encode('utf-8') # PY3 key = 'bG9sLndlLmNoYW5nZS50aGlzLnRvby5tdWNo' key = key.encode('utf-8') # PY3 digest_maker = hmac.new(key) digest_maker.update(token) clientKey = digest_maker.hexdigest() self.sendUpdate('login', [token, clientKey]) def acceptLogin(self, timestamp): messenger.send(self.doneEvent, [{'mode': 'success', 'timestamp': timestamp}]) # --- AVATARS LIST --- def requestAvatars(self): self.sendUpdate('requestAvatars') def setAvatars(self, avatars): avList = [] for avNum, avName, avDNA, avPosition, nameState in avatars: nameOpen = int(nameState == 1) names = [avName, '', '', ''] if nameState == 2: # PENDING names[1] = avName elif nameState == 3: # APPROVED names[2] = avName elif nameState == 4: # REJECTED names[3] = avName avList.append(PotentialAvatar(avNum, names, avDNA, avPosition, nameOpen)) self.cr.handleAvatarsList(avList) # --- AVATAR CREATION/DELETION --- def sendCreateAvatar(self, avDNA, _, index): self.sendUpdate('createAvatar', [avDNA.makeNetString(), index]) def createAvatarResp(self, avId): messenger.send('nameShopCreateAvatarDone', [avId]) def sendDeleteAvatar(self, avId): self.sendUpdate('deleteAvatar', [avId]) # No deleteAvatarResp; it just sends a setAvatars when the deed is done. 
# --- AVATAR NAMING --- def sendSetNameTyped(self, avId, name, callback): self._callback = callback self.sendUpdate('setNameTyped', [avId, name]) def setNameTypedResp(self, avId, status): self._callback(avId, status) def sendSetNamePattern(self, avId, p1, f1, p2, f2, p3, f3, p4, f4, callback): self._callback = callback self.sendUpdate('setNamePattern', [avId, p1, f1, p2, f2, p3, f3, p4, f4]) def setNamePatternResp(self, avId, status): self._callback(avId, status) def sendAcknowledgeAvatarName(self, avId, callback): self._callback = callback self.sendUpdate('acknowledgeAvatarName', [avId]) def acknowledgeAvatarNameResp(self): self._callback() # --- AVATAR CHOICE --- def sendChooseAvatar(self, avId): self.sendUpdate('chooseAvatar', [avId]) def systemMessage(self, message): whisper = WhisperPopup(message, OTPGlobals.getInterfaceFont(), WTSystem) whisper.manage(base.marginManager) if self.systemMessageSfx is None: self.systemMessageSfx = base.loader.loadSfx('phase_3/audio/sfx/clock03.ogg') base.playSfx(self.systemMessageSfx)
[ "nathanielfuhr@gmail.com" ]
nathanielfuhr@gmail.com
451cd7b8185e807bd043961430589cdeb1c46411
78f3fe4a148c86ce9b80411a3433a49ccfdc02dd
/2016/09/florida-millennials-20160915/graphic_config.py
abeb5e9f7aff3c2d8157284f6417f5027493d479
[]
no_license
nprapps/graphics-archive
54cfc4d4d670aca4d71839d70f23a8bf645c692f
fe92cd061730496cb95c9df8fa624505c3b291f8
refs/heads/master
2023-03-04T11:35:36.413216
2023-02-26T23:26:48
2023-02-26T23:26:48
22,472,848
16
7
null
null
null
null
UTF-8
Python
false
false
305
py
#!/usr/bin/env python import base_filters COPY_GOOGLE_DOC_KEY = '1ffq-ubcw70cvHmmrQFXNDvn79iRvVQLRnLZaA_pdltU' USE_ASSETS = False # Use these variables to override the default cache timeouts for this graphic # DEFAULT_MAX_AGE = 20 # ASSETS_MAX_AGE = 300 JINJA_FILTER_FUNCTIONS = base_filters.FILTERS
[ "ahurt@npr.org" ]
ahurt@npr.org
f01e56e28adaf1a1870f5b9b221f031d2d9ca2ff
d21dbab3f374eb42a10f9ec7c434c1ca6fb2bff7
/Data Structures/Heap/1-qheap1_with_heapq.py
14cb88fd7ab7ac36671aa051029f95f47b91679f
[]
no_license
almamuncsit/HackerRank
5360ad1d54aa01075dba5527f6ae695e4c6d9c7a
6599cde4c7541ebf27bacff8af02dc0c3eaaa678
refs/heads/master
2021-07-06T00:33:25.912754
2021-01-13T09:09:29
2021-01-13T09:09:29
222,364,072
4
0
null
null
null
null
UTF-8
Python
false
false
2,016
py
class MinHeap: def __init__(self, items=[]): self.heap = [0] for item in items: self.push(item) def push(self, item): self.heap.append(item) self.__float_up(self.size() - 1) def peak(self): if self.heap[1]: return self.heap[1] else: return False def pop(self): if self.size() < 2: return False if self.size() == 2: return self.heap.pop() self.__swap(1, self.size() - 1) item = self.heap.pop() self.__max_heapify(1) return item def __swap(self, index1, index2): self.heap[index1], self.heap[index2] = self.heap[index2], self.heap[index1] def __float_up(self, index): parent = index // 2 if parent < 1: return elif self.heap[index] < self.heap[parent]: self.__swap(index, parent) self.__float_up(parent) def __max_heapify(self, index): smallest_idx = index left = index * 2 right = left + 1 if self.size() > left and self.heap[left] < self.heap[smallest_idx]: smallest_idx = left if self.size() > right and self.heap[right] < self.heap[smallest_idx]: smallest_idx = right if smallest_idx != index: self.__swap(index, smallest_idx) self.__max_heapify(smallest_idx) def size(self): return len(self.heap) def delete(self, item): for i in range(1, self.size()): if self.heap[i] == item: self.__swap(i, self.size() - 1) self.heap.pop() self.__max_heapify(i) break if __name__ == '__main__': heap = MinHeap([]) n = int(input()) for _ in range(n): val = list(map(int, input().split())) if val[0] == 1: heap.push(val[1]) elif val[0] == 2: heap.delete(val[1]) else: print(heap.peak())
[ "msarkar.cse@gmail.com" ]
msarkar.cse@gmail.com
f91bae1f75347a2dca6cbfcaefc9d5b228815bb5
b4a39921429d9045163d7a4d030951fecbfa352d
/setup.py
f1ecdc2b187860a10daf5c4d5210c312eac3adbd
[ "MIT" ]
permissive
MayuriKalokhe/Data_Science_Covid-19
814c7767e9d0b3cce27c2fa87b0407b7ea27166e
e4bd99ddb2d6b2467991867bfa8a658804689d9f
refs/heads/master
2022-12-17T17:39:36.342071
2020-09-15T21:22:50
2020-09-15T21:22:50
295,846,644
0
0
null
null
null
null
UTF-8
Python
false
false
232
py
from setuptools import find_packages, setup setup( name='src', packages=find_packages(), version='0.1.0', description='Applied Data Science for Covid-19 Tracking ', author='Mayuri Kalokhe', license='MIT', )
[ "you@example.com" ]
you@example.com
10cdf6ae6137753f16ca394c963e3e452646a544
d66818f4b951943553826a5f64413e90120e1fae
/hackerrank/Algorithms/Kangaroo/solution.py
c3eb95f215b9b283337e9308ae9ff6c5d375055a
[ "MIT" ]
permissive
HBinhCT/Q-project
0f80cd15c9945c43e2e17072416ddb6e4745e7fa
19923cbaa3c83c670527899ece5c3ad31bcebe65
refs/heads/master
2023-08-30T08:59:16.006567
2023-08-29T15:30:21
2023-08-29T15:30:21
247,630,603
8
1
MIT
2020-07-22T01:20:23
2020-03-16T06:48:02
Python
UTF-8
Python
false
false
487
py
#!/bin/python3 import os # Complete the kangaroo function below. def kangaroo(x1, v1, x2, v2): # x1 < x2 return 'YES' if v1 > v2 and not ((x2 - x1) % (v1 - v2)) else 'NO' if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') x1V1X2V2 = input().split() x1 = int(x1V1X2V2[0]) v1 = int(x1V1X2V2[1]) x2 = int(x1V1X2V2[2]) v2 = int(x1V1X2V2[3]) result = kangaroo(x1, v1, x2, v2) fptr.write(result + '\n') fptr.close()
[ "hbinhct@gmail.com" ]
hbinhct@gmail.com
584a7b924adeb54de34928fb9018d078d9f7b946
64cd483b18dd9027c40c35c9f3b5153d6735db64
/api_ec2/chatbot_api/resources/food.py
011d68778c0f43d9b472fb3bddc3e7122b0d301b
[]
no_license
taepd/sba-chatbot
a55673e170891d7e536a2e4017989e7874c28354
d58f4663c431870b5aca7412cf491f962a04f095
refs/heads/master
2023-04-23T20:49:26.947955
2021-05-19T14:58:16
2021-05-19T14:58:16
301,092,681
2
0
null
2021-05-19T14:58:17
2020-10-04T09:48:34
Python
UTF-8
Python
false
false
4,511
py
# from sqlalchemy import Column, Integer, Float, String, ForeignKey, create_engine # from sqlalchemy.dialects.mysql import DECIMAL, VARCHAR, LONGTEXT from typing import List from flask import request from flask_restful import Resource, reqparse from flask import jsonify import json import os import numpy as np import pandas as pd import joblib import konlpy # from eunjeon import Mecab from chatbot_api.ext.db import db, openSession from chatbot_api.util.file_handler import FileReader from chatbot_api.resources.order_review import OrderReviewDto class FoodDto(db.Model): __tablename__ = "food" __table_args__ = {'mysql_collate': 'utf8_general_ci'} # 한글 인코딩 food_id: int = db.Column(db.Integer, primary_key=True, index=True) food_name: str = db.Column(db.String(200)) price: int = db.Column(db.Integer) food_img: str = db.Column(db.String(1000)) # 최대 길이가 634정도였음 food_rev_avg: float = db.Column(db.Float) food_rev_cnt: float = db.Column(db.Integer) shop_id: int = db.Column(db.Integer, db.ForeignKey('shop.shop_id', ondelete="CASCADE")) # order_reviews = db.relationship('OrderReviewDto', backref='food', lazy='dynamic', cascade="all, delete, delete-orphan") order_reviews = db.relationship('OrderReviewDto', back_populates='foods', lazy='dynamic', cascade="all, delete, delete-orphan") def __init__(self, food_id, food_name, price, food_img, food_rev_avg, food_rev_cnt, shop_id): self.food_id = food_id self.food_name = food_name self.price = price self.food_img = food_img self.food_rev_avg = food_rev_avg self.food_rev_cnt = food_rev_cnt self.shop_id = shop_id def __repr__(self): return f'Food(food_id={self.food_id}, ' \ f'food_name={self.food_name}, ' \ f'price={self.price}, ' \ f'food_img={self.food_img}, ' \ f'food_rev_avg={self.food_rev_avg}, ' \ f'food_rev_cnt={self.food_rev_cnt}, ' \ f'shop_id="{self.shop_id}"' @property def json(self): return { 'food_id': self.food_id, 'food_name': self.food_name, 'price': self.price, 'food_img': self.food_img, 'food_rev_avg': 
self.food_rev_avg, 'food_rev_cnt': self.food_rev_cnt, 'shop_id': self.shop_id } class FoodVo: food_id: int = 0 food_name: str = '' price: int = 0 food_img: str = '' food_rev_avg: float = 0.0 food_rev_cnt: float = 0.0 shop_id: int = 0 class FoodDao(FoodDto): @classmethod def food_find_by_shopid(cls,shop_id): # sql = cls.query.filter_by(shop_id = shopid) sql = cls.query.filter_by(shop_id = shop_id) df = pd.read_sql(sql.statement, sql.session.bind) return json.loads(df.to_json(orient='records')) # return cls.query.filter_by(shop_id = shopid).all() @classmethod def food_find_by_foodid(cls,food_id): # sql = cls.query.filter_by(food_id = food_id) # df = pd.read_sql(sql.statement, sql.session.bind) return cls.query.filter_by(food_id = food_id).first() @classmethod def chat_food_find(cls, key): sql = cls.query.filter(FoodDto.food_name.like('%'+key+'%')).\ order_by(FoodDao.food_rev_cnt.desc()) df = pd.read_sql(sql.statement,sql.session.bind) df = df.head(1) return json.loads(df.to_json(orient='records')) class Food(Resource): @staticmethod def get(food_id : int): food = FoodDao.food_find_by_foodid(food_id) print(food) return food.json, 200 # ------------ 실행 영역 -------------- # if __name__ == '__main__': # chat = ChatbotService() # model = chat.load_model_from_file() # # import pdb # # # 데이터 일괄 입력 # df = pd.read_csv('./data/db/food.csv', sep=',', encoding='utf-8-sig') # df = df.replace(np.nan, '', regex=True) # shop_seoul = df.loc[df['shop_addr'].str.contains('서울', na=False)] # print(shop_seoul['shop_addr']) # shop_seoul.to_csv('./data/csv/important/shop(seoul).csv', sep=',', encoding='utf-8-sig', index=False) # pdb.set_trace() # 외래키 있으면 로딩 에러난다.. # Session = openSession() # session = Session() # session.bulk_insert_mappings(FoodDto, df.to_dict(orient="records")) # session.commit() # session.close()
[ "taepd1@gmail.com" ]
taepd1@gmail.com
19ed803340dab89ff9a3c6ced25d27987779f1f1
89f5a6b6ecb4aed9d5007bf2f500b7bfcb66c542
/saltcontainermap/modinstall.py
fa9ab9bce241d084bd7b8bcdbc84a37b87628ef0
[ "MIT" ]
permissive
merll/salt-container-map
a752a5bcff326ba7b3402a4397b73d9f8adcca66
0792e7bd1e80dfb370d783a45ca23db51ef28c22
refs/heads/master
2020-05-22T04:12:57.010357
2018-01-16T16:51:46
2018-01-16T16:51:46
38,863,690
5
2
null
null
null
null
UTF-8
Python
false
false
5,934
py
# -*- coding: utf-8 -*-
import argparse
from collections import namedtuple
import logging
import os

import salt.config

log = logging.getLogger()

# Resolved installation paths: where this package's module files live
# (lib_path), the master config used, and the two salt directories that
# receive symlinks.
InstallEnvironment = namedtuple('InstallEnvironment',
                                ['lib_path', 'salt_config', 'salt_extmods', 'salt_root'])


def _expand_path(root, path):
    """Return *path* unchanged if absolute, otherwise joined onto *root*."""
    if os.path.isabs(path):
        return path
    return os.path.join(root, path)


def _read_extmods(master_opts):
    """Derive the extension-module directory from the master configuration.

    Prefers ``extension_modules``; falls back to the first ``module_dirs``
    entry. Returns ``None`` when neither is configured.
    """
    root_dir = master_opts.get('root_dir')
    ext_mods = master_opts.get('extension_modules')
    if ext_mods:
        log.debug("Using extension_modules from master config: %s", ext_mods)
        return _expand_path(root_dir, ext_mods)
    module_dirs = master_opts.get('module_dirs')
    if module_dirs:
        log.debug("Using first module_dirs from master config: %s", module_dirs[0])
        return _expand_path(root_dir, module_dirs[0])
    return None


def _read_root(master_opts):
    """Return the first ``base`` file root from the master config, or None."""
    fs_base = master_opts.get('file_roots', {}).get('base')
    if fs_base:
        log.debug("Using salt filesystem base root from master config: %s", fs_base[0])
        return fs_base[0]
    return None


def link_module(salt_path, lib_dir, sub_dir, file_name):
    """Symlink ``lib_dir/file_name`` into ``salt_path/sub_dir``.

    Creates the target directory (with an empty ``__init__.py``) when
    needed. Returns True when a link was created, False when a conflicting
    file or a pre-existing link was found.
    """
    module_path = os.path.join(lib_dir, file_name)
    link_dir = os.path.join(salt_path, sub_dir)
    log.debug("setting up module in %s", link_dir)
    link_path = os.path.join(link_dir, file_name)
    init_path = os.path.join(link_dir, '__init__.py')
    if not os.path.isdir(link_dir):
        # Fresh directory: create it, make it a package, link, done.
        log.info("creating symlink %s -> %s", link_path, module_path)
        os.mkdir(link_dir)
        open(init_path, 'a').close()
        os.symlink(module_path, link_path)
        return True
    if os.path.lexists(link_path):
        # Something already occupies the link path; report what it is but
        # never overwrite it.
        if os.path.islink(link_path):
            link_target = os.readlink(link_path)
            if link_target != module_path:
                log.warning("File %s exists, but is not a symlink pointing to %s.",
                            link_path, module_path)
            else:
                log.info("Found existing symlink.")
        else:
            log.warning("File %s exists, but is not a symbolic link.", link_path)
        return False
    log.info("creating symlink %s -> %s", link_path, module_path)
    if not os.path.exists(init_path):
        open(init_path, 'a').close()
    os.symlink(module_path, link_path)
    return True


def unlink_module(salt_path, sub_dir, file_name):
    """Remove the symlink ``salt_path/sub_dir/file_name`` if present.

    Returns True when a link was removed; regular files are left alone.
    """
    link_dir = os.path.join(salt_path, sub_dir)
    log.info("removing module from %s", link_dir)
    link_path = os.path.join(link_dir, file_name)
    if os.path.islink(link_path):
        os.unlink(link_path)
        return True
    return False


def get_env():
    """Parse CLI arguments and resolve all installation paths.

    Exits with status 1 when a required directory cannot be determined or
    does not exist.
    """
    parser = argparse.ArgumentParser(description="Installs symlinks to the modules in the Salt module directories.")
    parser.add_argument('-c', '--salt-config', default='/etc/salt/master',
                        help="Path to the salt master configuration file.")
    parser.add_argument('--salt-extmods',
                        help="Path for extension modules. If not set, the setting from the master "
                             "config is used.")
    parser.add_argument('--salt-root',
                        help="Path to the master file root, e.g. /srv/salt. If not set, looks up the "
                             "base environment in the master configuration file.")
    output_group = parser.add_mutually_exclusive_group()
    output_group.add_argument('-v', '--verbose', action='store_true', help="Show debug messages.")
    output_group.add_argument('-q', '--quiet', action='store_true', help="Do not show any messages.")
    args = parser.parse_args()
    if not args.quiet:
        ch = logging.StreamHandler()
        formatter = logging.Formatter('%(levelname)s - %(message)s')
        ch.setFormatter(formatter)
        log.addHandler(ch)
        if args.verbose:
            log.setLevel(logging.DEBUG)
        else:
            log.setLevel(logging.INFO)
    salt_config = args.salt_config
    master_opts = salt.config.master_config(salt_config)
    if args.salt_extmods:
        salt_extmods = args.salt_extmods
    else:
        salt_extmods = _read_extmods(master_opts)
    if args.salt_root:
        salt_root = args.salt_root
    else:
        salt_root = _read_root(master_opts)
    # BUG FIX: _read_extmods/_read_root can return None when the master config
    # provides no value; os.path.isdir(None) raised TypeError instead of a
    # clean error message.
    if not salt_extmods:
        log.error("Could not determine the extension module directory; use --salt-extmods.")
        parser.exit(status=1)
    if not salt_root:
        log.error("Could not determine the master file root directory; use --salt-root.")
        parser.exit(status=1)
    if not os.path.isdir(salt_extmods):
        log.error("Extension module directory %s does not exist.", salt_extmods)
        parser.exit(status=1)
    if not os.path.isdir(salt_root):
        log.error("Master file root directory %s does not exist.", salt_root)
        parser.exit(status=1)
    return InstallEnvironment(os.path.dirname(__file__), salt_config, salt_extmods, salt_root)


def install_modules():
    """Entry point: link the renderer, execution and state modules."""
    env = get_env()
    res_extmod = link_module(env.salt_extmods, os.path.join(env.lib_path, 'extmods'),
                             'renderers', 'lazy_yaml.py')
    res_mod = link_module(env.salt_root, os.path.join(env.lib_path, 'modules'),
                          '_modules', 'container_map.py')
    res_state = link_module(env.salt_root, os.path.join(env.lib_path, 'states'),
                            '_states', 'container_map.py')
    if res_extmod:
        log.info("Installed master extension module. Please restart the salt master process for using it.")
    if res_mod and res_state:
        log.info("Installed minion modules. Distribute with 'saltutil.sync_all' or 'state.highstate'.")


def remove_modules():
    """Entry point: remove the symlinks created by :func:`install_modules`."""
    env = get_env()
    res_extmod = unlink_module(env.salt_extmods, 'renderers', 'lazy_yaml.py')
    res_mod = unlink_module(env.salt_root, '_modules', 'container_map.py')
    res_state = unlink_module(env.salt_root, '_states', 'container_map.py')
    if res_extmod:
        log.info("Removed master extension module. It will not be available after the master process is restarted.")
    if res_mod and res_state:
        log.info("Removed minion modules. 'saltutil.clear_cache' can be used for distributing the removal, but "
                 "'saltutil.sync_all' should be run immediately afterwards if you have any other custom modules.")
[ "matthias@erll.de" ]
matthias@erll.de
e0b748369a429ce203da0543699f8fa7b8f011aa
5a711a4a49f94ae1497a723e29c981facf1d5f37
/tests/practice_centers/test_services.py
4d8d050dcd9f9685d0cc981749b8179fa27e3e89
[ "MIT" ]
permissive
fabien-roy/glo-2005-sportsapp
2509583fb47bce9cff1e211cb1ed7adebaf3fdd0
3b5b5f9cdcfe53d1e6e702609587068c4bd3310d
refs/heads/master
2023-04-09T07:30:31.512069
2020-05-10T23:21:42
2020-05-10T23:21:42
null
0
0
null
null
null
null
UTF-8
Python
false
false
570
py
from instance.practice_centers.services import PracticeCenterPopulationService
from tests.practice_centers.mocks import practice_center_repository
from tests.interfaces.test_basic import BasicTests


class PracticeCenterPopulationServiceTests(BasicTests):
    """Verifies that populating the DB goes through the repository mock."""

    def setUp(self):
        service = PracticeCenterPopulationService(practice_center_repository)
        self.practice_center_population_service = service

    def test_db_populate_adds_fakes(self):
        service = self.practice_center_population_service
        service.db_populate()
        # The mocked repository records every call; db_populate must have
        # added at least one fake practice center through it.
        self.assertTrue(practice_center_repository.add.called)
[ "fabienroy28@gmail.com" ]
fabienroy28@gmail.com
6cfeb76982ef4150fc264f81b7bd13f46fcbf673
941b25a0d0ccd25e4e64293defc2b50a61fccb01
/testBoard.py
115ca435b072bd140be722e11fea581ae4d714cc
[]
no_license
fanzhangg/sliding-tiles
c5a396818ec2d7449309f773df37a46ec7b41c8e
334bb7df76436aa9429ff6132db8a9ea1afce35f
refs/heads/master
2020-04-08T20:06:37.554387
2018-11-29T15:20:52
2018-11-29T15:20:52
null
0
0
null
null
null
null
UTF-8
Python
false
false
757
py
import unittest

from Board import Board


class testBoard(unittest.TestCase):
    """Unit tests for Board.initialize_board tile-count validation."""

    board = Board(3, 3)

    def test_0_initialize_board(self):
        print("Start initialize board test")

        # A 3x3 board takes 8 tiles (one slot stays blank); this must succeed.
        tiles_1 = (1, 2, 3, 4, 5, 6, 7, 8)
        self.assertEqual(self.board.initialize_board(tiles_1), None)

        # BUG FIX: the original wrapped everything inside IndexError(...), e.g.
        #   self.assertRaises(IndexError("...", self.board.initialize_board, tiles_2))
        # assertRaises then received a single exception *instance* and returned
        # a context manager that was discarded — nothing was ever asserted.
        # NOTE(review): assumes Board.initialize_board raises IndexError on a
        # wrong-sized tuple, as the original messages imply — confirm against
        # Board's implementation.
        tiles_2 = (1, 2, 3, 4, 5, 6, 7, 8, 9)
        with self.assertRaises(IndexError):
            self.board.initialize_board(tiles_2)

        tiles_3 = (1, 2, 3, 4)
        with self.assertRaises(IndexError):
            self.board.initialize_board(tiles_3)


if __name__ == '__main__':
    unittest.main(exit=False)
[ "vanadiumzhang@gmail.com" ]
vanadiumzhang@gmail.com
1483a7fc4dd3bd86a8bf52283e9408cab2f23767
5145773cf66978c2ea449fe5b3b125d1b363361b
/4-HeaderExchange/producer.py
7942e1f6678ad779b55ca7de4223a741449cecc7
[]
no_license
MoeinGhbh/RabbitMQ-Type
62a0e297e914e88de56cdee2c893b36659e069c9
011e2368b2250393295ea9f0b42f397656b626a3
refs/heads/master
2022-07-18T07:03:09.385269
2020-05-20T17:36:11
2020-05-20T17:36:11
265,569,624
0
0
null
null
null
null
UTF-8
Python
false
false
1,330
py
#!/home/ubd/anaconda3/envs/test_env/bin/python
import json
import pika
import sys

# Usage: SCRIPT producer-name message-text header-value [header-value ...]
if len(sys.argv) < 4:
    print("Call syntax: python SCRIPT_NAME PRODUCER_NAME MESSAGE_STRING HEADERS_VALUES")
    exit()

producerName = sys.argv[1]
messageText = sys.argv[2]
headersValues = sys.argv[3:]

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.exchange_declare(exchange='exchange_headers', exchange_type='headers')

# Build headers K1..Kn from the positional values (same result as the
# original counter loop, written as a comprehension).
headers = {"K" + str(position): headerValue
           for position, headerValue in enumerate(headersValues, start=1)}

data = {
    "producerName": producerName,
    "headersValues": headersValues,
    "headers": headers,
    "messageText": messageText
}
message = json.dumps(data)

# Headers exchanges ignore the routing key; matching happens on `headers`.
channel.basic_publish(
    exchange='exchange_headers',
    routing_key='',
    body=message,
    properties=pika.BasicProperties(
        delivery_mode=2,
        headers=headers
    ))

print("Producer name: %s, headers: %s, message text:%s"
      % (producerName, headers, message))

connection.close()

# Example run:
# python producer.py P1 'Message 1 from producer P1' A
# Producer name: P1, headers: {'K1': 'A'}, message text:{"producerName": "P1", "headersValues": ["A"], "headers": {"K1": "A"}, "messageText": "Message 1 from producer P1"}
[ "=" ]
=
83cc96a40f65ef2ec4447aa30734347ee89efba7
2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02
/PyTorch/built-in/cv/classification/Gluon_ResNet50_v1b_for_PyTorch/timm/utils/clip_grad.py
08d87f99e8c2c67f5a29b445f1e53bfd8c5a212c
[ "Apache-2.0", "MIT", "CC-BY-NC-4.0", "GPL-1.0-or-later", "LicenseRef-scancode-proprietary-license", "LGPL-2.0-or-later", "BSD-3-Clause", "BSD-2-Clause", "LicenseRef-scancode-generic-cla", "LicenseRef-scancode-unknown-license-reference" ]
permissive
Ascend/ModelZoo-PyTorch
4c89414b9e2582cef9926d4670108a090c839d2d
92acc188d3a0f634de58463b6676e70df83ef808
refs/heads/master
2023-07-19T12:40:00.512853
2023-07-17T02:48:18
2023-07-17T02:48:18
483,502,469
23
6
Apache-2.0
2022-10-15T09:29:12
2022-04-20T04:11:18
Python
UTF-8
Python
false
false
1,415
py
# Copyright 2019 Ross Wightman
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch


def dispatch_clip_grad(parameters, value: float, mode: str = 'norm', norm_type: float = 2.0):
    """ Dispatch to gradient clipping method

    Args:
        parameters (Iterable): model parameters to clip
        value (float): clipping value/factor/norm, mode dependant
        mode (str): clipping mode, one of 'norm', 'value', 'agc'
        norm_type (float): p-norm, default 2.0

    Raises:
        AssertionError: if *mode* is not one of the supported modes.
    """
    if mode == 'norm':
        torch.nn.utils.clip_grad_norm_(parameters, value, norm_type=norm_type)
    elif mode == 'value':
        torch.nn.utils.clip_grad_value_(parameters, value)
    elif mode == 'agc':
        # Imported lazily so the common 'norm'/'value' paths do not require
        # the project-local agc module at import time.
        from timm.utils.agc import adaptive_clip_grad
        adaptive_clip_grad(parameters, value, norm_type=norm_type)
    else:
        assert False, f"Unknown clip mode ({mode})."
[ "wangjiangben@huawei.com" ]
wangjiangben@huawei.com
8c845b5cd02687c170cbbbc4d79dc39d69c912cb
d210853ba6d1f3b5383a09e1b553c19083d78014
/geo/backend/delivery/migrations/0008_deliverypickpoint.py
904ed647eda3b40aff2a045c20036106af2d58a4
[]
no_license
Hagen013/presidentwatches
f252c7995e39f6cffb6608e43f555abc32f6a9fc
b9ca72aef1db01262675274c83a5c5dff4d6e2da
refs/heads/master
2022-12-17T08:45:15.541869
2019-12-29T17:48:56
2019-12-29T17:48:56
162,160,435
0
0
null
2022-12-08T01:49:45
2018-12-17T16:36:05
HTML
UTF-8
Python
false
false
2,139
py
# -*- coding: utf-8 -*- # Generated by Django 1.11.4 on 2017-12-25 16:53 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('kladr', '0001_initial'), ('delivery', '0007_deliverysdekpoint'), ] operations = [ migrations.CreateModel( name='DeliveryPickPoint', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=512, verbose_name='Имя')), ('code', models.CharField(max_length=512, unique=True)), ('latitude', models.CharField(max_length=512)), ('longitude', models.CharField(max_length=512)), ('address', models.TextField()), ('description', models.TextField()), ('is_payment_by_card', models.BooleanField(default=True)), ('is_payment_by_cash', models.BooleanField(default=True)), ('pvz_type', models.PositiveIntegerField(choices=[(1, 'АПТ'), (2, 'ПВЗ')], default=1)), ('max_box_size', models.CharField(blank=True, max_length=512)), ('zone', models.IntegerField(choices=[(-1, 'НетЗоны'), (0, '0'), (1, '1'), (2, '2'), (3, '3'), (4, '4'), (5, '5'), (6, '6'), (7, '7')])), ('coefficient', models.FloatField(default=1.0)), ('tariff_type', models.PositiveIntegerField(choices=[(1, 'STANDART'), (2, 'PRIORITY')])), ('time_min', models.PositiveSmallIntegerField(default=None, null=True)), ('time_max', models.PositiveSmallIntegerField(default=None, null=True)), ('city', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='delivery.PickPointCityList')), ('kladr', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kladr.Kladr')), ], options={ 'abstract': False, }, ), ]
[ "=" ]
=
18c616bb07a09bb2bc5c408dd2006960ffaad576
63dc2b63afa8848842470c52f10b9066445f239c
/manage.py
c281068744a6d18bb1595aa256bc0bb90e717125
[]
no_license
wanderchan/hospital
724a5e03ef8d9d2fea8ef3da80646aefe9bdbd73
887a668f7d93f8271908facb37b4c6fef67e9700
refs/heads/master
2020-03-18T23:39:11.071002
2018-05-30T09:09:24
2018-05-30T09:09:24
129,866,541
0
0
null
null
null
null
UTF-8
Python
false
false
252
py
#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "biyesheji.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
[ "you@example.com" ]
you@example.com
4456273aa82c8258872064b3a763b75cc4e52ad6
163bbb4e0920dedd5941e3edfb2d8706ba75627d
/Code/CodeRecords/2494/60617/259151.py
a17472a016cee01b922df70d039e6105a2e794df
[]
no_license
AdamZhouSE/pythonHomework
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
ffc5606817a666aa6241cfab27364326f5c066ff
refs/heads/master
2022-11-24T08:05:22.122011
2020-07-28T16:21:24
2020-07-28T16:21:24
259,576,640
2
1
null
null
null
null
UTF-8
Python
false
false
910
py
import ast


def reversePair():
    """Read a list literal from stdin and print its reverse-pair count.

    A pair (i, j) with i < j counts when arr[i] > 2 * arr[j]
    (LeetCode 493, "Reverse Pairs").
    """
    # SECURITY FIX: the original used eval(input()), which executes arbitrary
    # expressions from stdin. ast.literal_eval accepts the same list literals
    # but nothing else.
    arr = ast.literal_eval(input())
    print(merge(arr, 0, len(arr) - 1))


def merge(arr, l, r):
    """Count reverse pairs in arr[l..r] (inclusive), leaving the slice sorted.

    Classic merge-sort augmentation: count pairs across the two halves while
    both are sorted, then merge them via sort().
    """
    count = 0
    if l >= r:
        return count
    mid = (l + r) // 2
    count += merge(arr, l, mid)
    count += merge(arr, mid + 1, r)
    # Both halves are sorted here, so j only ever advances — the cross-half
    # counting pass is linear.
    j = mid + 1
    for i in range(l, mid + 1):
        while j <= r and arr[i] > arr[j] * 2:
            j += 1
        count += j - (mid + 1)
    sort(arr, l, r)
    return count


def sort(arr, l, r):
    """Merge the sorted halves arr[l..mid] and arr[mid+1..r] in place."""
    start = l
    temp = []
    if l >= r:
        return
    mid = (l + r) // 2
    j = mid + 1
    while start <= mid and j <= r:
        if arr[start] <= arr[j]:
            temp.append(arr[start])
            start += 1
        else:
            temp.append(arr[j])
            j += 1
    while start <= mid:
        temp.append(arr[start])
        start += 1
    while j <= r:
        temp.append(arr[j])
        j += 1
    for i in range(l, r + 1):
        arr[i] = temp[i - l]
    return


if __name__ == '__main__':
    reversePair()
[ "1069583789@qq.com" ]
1069583789@qq.com
9d45f8122dec0c08974e10ec061fc337c8e6c17d
901b7d6e062d1cef5b48743c54953edae39a83ff
/backend/UserPost/migrations/0009_userpost_privacy.py
b60633a0256dfe938307b03eb8f0eea0aefb0fed
[]
no_license
KaziMotiour/U-Profile-Social-media
e1104e266ab22cac70c60ecd1f0fb2d33eb26961
81dedeb4e16e4045ac84bf261bd512e429e6186f
refs/heads/master
2023-06-09T00:28:24.390944
2021-06-29T17:00:13
2021-06-29T17:00:13
324,720,540
1
0
null
null
null
null
UTF-8
Python
false
false
505
py
# Generated by Django 3.1.5 on 2021-03-08 14:59 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('UserPost', '0008_auto_20210119_0307'), ] operations = [ migrations.AddField( model_name='userpost', name='privacy', field=models.CharField(blank=True, choices=[('public', 'public'), ('freind', 'freind'), ('onlyme', 'onlyme')], default='freind', max_length=20, null=True), ), ]
[ "kmatiour30@gmail.com" ]
kmatiour30@gmail.com
1c1cbb6fb019eda07db42749db4ea05edbf16e23
af3ec207381de315f4cb6dddba727d16d42d6c57
/dialogue-engine/test/programytest/storage/stores/sql/dao/test_map.py
d6ab20b3339a3459a181367867d45e52a42215eb
[ "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
mcf-yuichi/cotoba-agent-oss
02a5554fe81ce21517f33229101013b6487f5404
ce60833915f484c4cbdc54b4b8222d64be4b6c0d
refs/heads/master
2023-01-12T20:07:34.364188
2020-11-11T00:55:16
2020-11-11T00:55:16
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,597
py
""" Copyright (c) 2020 COTOBA DESIGN, Inc. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import unittest from programy.storage.stores.sql.dao.map import Map class MapTests(unittest.TestCase): def test_init(self): lookup1 = Map(name='map', key="key", value="value") self.assertIsNotNone(lookup1) self.assertEqual("<Map(id='n/a', name='map', key='key', value='value')>", str(lookup1)) lookup2 = Map(id=1, name='map', key="key", value="value") self.assertIsNotNone(lookup2) self.assertEqual("<Map(id='1', name='map', key='key', value='value')>", str(lookup2))
[ "cliff@cotobadesign.com" ]
cliff@cotobadesign.com
0cad79d129e122280773519919929db546c04b35
d6a43cbb975c0d5dd2465d6f09c43767d35c121a
/tests/data/full.atom.py
9296e0f4f1015c1a3f5fe7650a6966818e2ae740
[ "BSD-3-Clause" ]
permissive
lemon24/reader
a76f5fd3f8dbf9d86e3627bbf9a60732414721cb
5e1682c9bfa36d341c03ab804adfb95cfc53f26e
refs/heads/master
2023-08-17T00:38:03.405077
2023-08-16T21:11:25
2023-08-16T21:11:25
115,272,183
349
24
BSD-3-Clause
2022-06-20T19:37:32
2017-12-24T15:36:43
Python
UTF-8
Python
false
false
2,327
py
# Expected parse results for the tests/data/full.atom fixture.
# NOTE(review): `url_base` is a free variable in this module — it appears to
# be exec'd by the test harness with `url_base` pre-bound, so the file is not
# importable on its own; confirm against the data-loading helper.
import datetime

from reader import Content
from reader import Enclosure
from reader._types import EntryData
from reader._types import FeedData

# Feed-level metadata the parser must extract from full.atom.
feed = FeedData(
    url=f'{url_base}full.atom',
    updated=datetime.datetime(2003, 12, 13, 18, 30, 2),
    title='Example Feed',
    link='http://example.org/',
    author='John Doe',
    subtitle='all your examples are belong to us',
    version='atom10',
)

entries = [
    EntryData(
        feed_url=feed.url,
        id='urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a',
        updated=datetime.datetime(2003, 12, 13, 18, 30, 2),
        title='Atom-Powered Robots Run Amok',
        link='http://example.org/2003/12/13/atom03',
        author='John Doe',
        published=datetime.datetime(2003, 12, 13, 17, 17, 51),
        summary='Some text.',
        content=(
            # the text/plain type comes from feedparser
            Content(value='content', type='text/plain'),
            Content(value='content with type', type='text/whatever'),
            Content(value='content with lang', type='text/plain', language='en'),
        ),
        enclosures=(
            # the text/html type comes from feedparser
            Enclosure(href='http://example.org/enclosure', type='text/html'),
            Enclosure(
                href='http://example.org/enclosure-with-type', type='text/whatever'
            ),
            Enclosure(
                href='http://example.org/enclosure-with-length',
                type='text/html',
                length=1000,
            ),
            Enclosure(
                href='http://example.org/enclosure-with-bad-length', type='text/html'
            ),
        )
        + (
            # feedparser resolves empty href to the base,
            # but only for Atom, and only if the base has a scheme(?);
            # document this (somewhat pointless) behavior
            (Enclosure(href=feed.url, type='text/html'),)
            if feed.url.startswith('http')
            else ()
        ),
    ),
    EntryData(
        feed_url=feed.url,
        id='urn:uuid:00000000-cfb8-4ebb-aaaa-00000000000',
        updated=datetime.datetime(2003, 12, 13, 0, 0, 0),
        title='Atom-Powered Robots Run Amok Again',
        # link comes from feedparser
        link='urn:uuid:00000000-cfb8-4ebb-aaaa-00000000000',
    ),
]
[ "damian.adrian24@gmail.com" ]
damian.adrian24@gmail.com